Explainable-Federated-Learn.../activity_only_basic_FI.ipynb

344 KiB
Raw Blame History

In [45]:
#IMPORTS

import numpy as np
import random
import tensorflow as tf
import tensorflow.keras as kr
import tensorflow.keras.backend as K
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense
from tensorflow.keras.datasets import mnist
import os
import csv

from scipy.spatial.distance import euclidean
from sklearn.metrics import confusion_matrix

from time import sleep
from tqdm import tqdm

import copy
import numpy
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
import pandas as pd
import matplotlib.pyplot as plt
import math
import seaborn as sns
from numpy.random import RandomState
import scipy as scp
from sklearn.model_selection import train_test_split
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder, LabelEncoder
from keras.models import Sequential
from keras.layers import Dense
from keras import optimizers
from keras.callbacks import EarlyStopping,ModelCheckpoint
from keras.utils import to_categorical
from keras import backend as K
from itertools import product
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from sklearn.metrics import f1_score
from sklearn.metrics import roc_auc_score
from sklearn.metrics import confusion_matrix

from sklearn import mixture

from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
%matplotlib inline
In [46]:
feature_attacked = [3,5,8]
rs = RandomState(92) #To reproduce the same results each time we run this notebook
In [47]:
#Load dataset into a pandas DataFrame
activity = pd.read_csv("D:/explaineblity/activity_3_original.csv", sep=',')
In [48]:
to_drop = ['subject', 'timestamp', 'heart_rate','activityID']
activity.drop(axis=1, columns=to_drop, inplace=True)
In [49]:
display(activity.head())
motion temp_hand acceleration_16_x_hand acceleration_16_y_hand acceleration_16_z_hand acceleration_6_x_hand acceleration_6_y_hand acceleration_6_z_hand gyroscope_x_hand gyroscope_y_hand ... acceleration_16_z_ankle acceleration_6_x_ankle acceleration_6_y_ankle acceleration_6_z_ankle gyroscope_x_ankle gyroscope_y_ankle gyroscope_z_ankle magnetometer_x_ankle magnetometer_y_ankle magnetometer_z_ankle
0 n 30.375 2.21530 8.27915 5.58753 2.24689 8.55387 5.77143 -0.004750 0.037579 ... 0.095156 9.63162 -1.76757 0.265761 0.002908 -0.027714 0.001752 -61.1081 -36.8636 -58.3696
1 n 30.375 2.29196 7.67288 5.74467 2.27373 8.14592 5.78739 -0.171710 0.025479 ... -0.020804 9.58649 -1.75247 0.250816 0.020882 0.000945 0.006007 -60.8916 -36.3197 -58.3656
2 n 30.375 2.29090 7.14240 5.82342 2.26966 7.66268 5.78846 -0.238241 0.011214 ... -0.059173 9.60196 -1.73721 0.356632 -0.035392 -0.052422 -0.004882 -60.3407 -35.7842 -58.6119
3 n 30.375 2.21800 7.14365 5.89930 2.22177 7.25535 5.88000 -0.192912 0.019053 ... 0.094385 9.58674 -1.78264 0.311453 -0.032514 -0.018844 0.026950 -60.7646 -37.1028 -57.8799
4 n 30.375 2.30106 7.25857 6.09259 2.20720 7.24042 5.95555 -0.069961 -0.018328 ... 0.095775 9.64677 -1.75240 0.295902 0.001351 -0.048878 -0.006328 -60.2040 -37.1225 -57.8847

5 rows × 40 columns

In [50]:
activity = pd.concat([activity,pd.get_dummies(activity['motion'], prefix='motion')],axis=1)
activity.drop('motion', axis=1, inplace=True)
In [51]:
display(activity.head())
temp_hand acceleration_16_x_hand acceleration_16_y_hand acceleration_16_z_hand acceleration_6_x_hand acceleration_6_y_hand acceleration_6_z_hand gyroscope_x_hand gyroscope_y_hand gyroscope_z_hand ... acceleration_6_y_ankle acceleration_6_z_ankle gyroscope_x_ankle gyroscope_y_ankle gyroscope_z_ankle magnetometer_x_ankle magnetometer_y_ankle magnetometer_z_ankle motion_n motion_y
0 30.375 2.21530 8.27915 5.58753 2.24689 8.55387 5.77143 -0.004750 0.037579 -0.011145 ... -1.76757 0.265761 0.002908 -0.027714 0.001752 -61.1081 -36.8636 -58.3696 1 0
1 30.375 2.29196 7.67288 5.74467 2.27373 8.14592 5.78739 -0.171710 0.025479 -0.009538 ... -1.75247 0.250816 0.020882 0.000945 0.006007 -60.8916 -36.3197 -58.3656 1 0
2 30.375 2.29090 7.14240 5.82342 2.26966 7.66268 5.78846 -0.238241 0.011214 0.000831 ... -1.73721 0.356632 -0.035392 -0.052422 -0.004882 -60.3407 -35.7842 -58.6119 1 0
3 30.375 2.21800 7.14365 5.89930 2.22177 7.25535 5.88000 -0.192912 0.019053 0.013374 ... -1.78264 0.311453 -0.032514 -0.018844 0.026950 -60.7646 -37.1028 -57.8799 1 0
4 30.375 2.30106 7.25857 6.09259 2.20720 7.24042 5.95555 -0.069961 -0.018328 0.004582 ... -1.75240 0.295902 0.001351 -0.048878 -0.006328 -60.2040 -37.1225 -57.8847 1 0

5 rows × 41 columns

In [52]:
class_label = [ 'motion_n', 'motion_y']
predictors = [a for a in activity.columns.values if a not in class_label]

for p in predictors:
    activity[p].fillna(activity[p].mean(), inplace=True)

display(predictors)
for p in predictors:
    activity[p] = (activity[p]-activity[p].min()) / (activity[p].max() - activity[p].min())
    activity[p].astype('float32')
    
['temp_hand',
 'acceleration_16_x_hand',
 'acceleration_16_y_hand',
 'acceleration_16_z_hand',
 'acceleration_6_x_hand',
 'acceleration_6_y_hand',
 'acceleration_6_z_hand',
 'gyroscope_x_hand',
 'gyroscope_y_hand',
 'gyroscope_z_hand',
 'magnetometer_x_hand',
 'magnetometer_y_hand',
 'magnetometer_z_hand',
 'temp_chest',
 'acceleration_16_x_chest',
 'acceleration_16_y_chest',
 'acceleration_16_z_chest',
 'acceleration_6_x_chest',
 'acceleration_6_y_chest',
 'acceleration_6_z_chest',
 'gyroscope_x_chest',
 'gyroscope_y_chest',
 'gyroscope_z_chest',
 'magnetometer_x_chest',
 'magnetometer_y_chest',
 'magnetometer_z_chest',
 'temp_ankle',
 'acceleration_16_x_ankle',
 'acceleration_16_y_ankle',
 'acceleration_16_z_ankle',
 'acceleration_6_x_ankle',
 'acceleration_6_y_ankle',
 'acceleration_6_z_ankle',
 'gyroscope_x_ankle',
 'gyroscope_y_ankle',
 'gyroscope_z_ankle',
 'magnetometer_x_ankle',
 'magnetometer_y_ankle',
 'magnetometer_z_ankle']
In [53]:
activity = activity.to_numpy()
In [54]:
activity.shape
Out[54]:
(1942872, 41)
In [55]:
X_train, X_test, y_train, y_test = train_test_split(activity[:,:-2],activity[:,-2:], test_size=0.07, random_state=rs)
In [56]:
#begin federated

earlystopping = EarlyStopping(monitor = 'val_loss',
                              min_delta = 0.01,
                              patience = 50,
                              verbose = 1,
                              baseline = 2,
                              restore_best_weights = True)

checkpoint = ModelCheckpoint('test.h8',
                             monitor='val_loss',
                             mode='min',
                             save_best_only=True,
                             verbose=1)
    
model = Sequential()
model.add(Dense(70, input_dim=39, activation='relu'))
model.add(Dense(50, activation='relu'))
model.add(Dense(50, activation='relu'))
model.add(Dense(2, activation='softmax'))
#sgd = optimizers.SGD(learning_rate=0.0001, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])
# def train_shard(i):
history = model.fit(X_train, y_train,
epochs=2,
validation_data=(X_test, y_test),
callbacks = [checkpoint, earlystopping],
shuffle=True)
# return history
# for i in range(len(shard1_traintest)):
#     train_shard(i)
#get_3rd_layer_output = K.function([model.layers[0].input],
#                                 [model.layers[2].output])
#layer_output = get_3rd_layer_output(shard_traintest[i]["X_train"])[0]
Train on 1806870 samples, validate on 136002 samples
Epoch 1/2
 468416/1806870 [======>.......................] - ETA: 1:54:54 - loss: 0.6740 - accuracy: 0.625 - ETA: 2:24 - loss: 0.6243 - accuracy: 0.6502  - ETA: 1:29 - loss: 0.5579 - accuracy: 0.71 - ETA: 1:09 - loss: 0.4978 - accuracy: 0.75 - ETA: 1:00 - loss: 0.4544 - accuracy: 0.78 - ETA: 55s - loss: 0.4229 - accuracy: 0.8029 - ETA: 52s - loss: 0.3976 - accuracy: 0.816 - ETA: 49s - loss: 0.3798 - accuracy: 0.826 - ETA: 47s - loss: 0.3648 - accuracy: 0.834 - ETA: 46s - loss: 0.3515 - accuracy: 0.841 - ETA: 45s - loss: 0.3383 - accuracy: 0.847 - ETA: 44s - loss: 0.3275 - accuracy: 0.854 - ETA: 43s - loss: 0.3230 - accuracy: 0.856 - ETA: 42s - loss: 0.3164 - accuracy: 0.859 - ETA: 41s - loss: 0.3093 - accuracy: 0.863 - ETA: 41s - loss: 0.3022 - accuracy: 0.867 - ETA: 40s - loss: 0.2951 - accuracy: 0.870 - ETA: 40s - loss: 0.2895 - accuracy: 0.873 - ETA: 39s - loss: 0.2853 - accuracy: 0.875 - ETA: 39s - loss: 0.2801 - accuracy: 0.878 - ETA: 39s - loss: 0.2769 - accuracy: 0.879 - ETA: 38s - loss: 0.2723 - accuracy: 0.882 - ETA: 38s - loss: 0.2683 - accuracy: 0.884 - ETA: 38s - loss: 0.2645 - accuracy: 0.886 - ETA: 38s - loss: 0.2614 - accuracy: 0.887 - ETA: 38s - loss: 0.2584 - accuracy: 0.889 - ETA: 37s - loss: 0.2558 - accuracy: 0.890 - ETA: 37s - loss: 0.2524 - accuracy: 0.892 - ETA: 37s - loss: 0.2500 - accuracy: 0.893 - ETA: 37s - loss: 0.2470 - accuracy: 0.895 - ETA: 37s - loss: 0.2446 - accuracy: 0.896 - ETA: 37s - loss: 0.2424 - accuracy: 0.897 - ETA: 37s - loss: 0.2401 - accuracy: 0.898 - ETA: 36s - loss: 0.2375 - accuracy: 0.900 - ETA: 36s - loss: 0.2349 - accuracy: 0.901 - ETA: 36s - loss: 0.2333 - accuracy: 0.902 - ETA: 36s - loss: 0.2313 - accuracy: 0.903 - ETA: 36s - loss: 0.2291 - accuracy: 0.904 - ETA: 36s - loss: 0.2273 - accuracy: 0.905 - ETA: 36s - loss: 0.2252 - accuracy: 0.906 - ETA: 36s - loss: 0.2240 - accuracy: 0.907 - ETA: 35s - loss: 0.2229 - accuracy: 0.907 - ETA: 35s - loss: 0.2211 - accuracy: 0.908 - ETA: 35s - loss: 0.2195 - accuracy: 0.909 - ETA: 35s - loss: 0.2176 - accuracy: 0.910 - ETA: 35s - loss: 0.2159 - accuracy: 0.911 - ETA: 35s - loss: 0.2140 - accuracy: 0.912 - ETA: 35s - loss: 0.2125 - accuracy: 0.913 - ETA: 35s - loss: 0.2107 - accuracy: 0.913 - ETA: 35s - loss: 0.2090 - accuracy: 0.914 - ETA: 34s - loss: 0.2075 - accuracy: 0.915 - ETA: 34s - loss: 0.2062 - accuracy: 0.916 - ETA: 34s - loss: 0.2049 - accuracy: 0.917 - ETA: 34s - loss: 0.2040 - accuracy: 0.917 - ETA: 34s - loss: 0.2029 - accuracy: 0.917 - ETA: 34s - loss: 0.2018 - accuracy: 0.918 - ETA: 34s - loss: 0.2007 - accuracy: 0.919 - ETA: 34s - loss: 0.1996 - accuracy: 0.919 - ETA: 34s - loss: 0.1985 - accuracy: 0.919 - ETA: 34s - loss: 0.1973 - accuracy: 0.920 - ETA: 34s - loss: 0.1963 - accuracy: 0.921 - ETA: 33s - loss: 0.1952 - accuracy: 0.921 - ETA: 33s - loss: 0.1944 - accuracy: 0.922 - ETA: 33s - loss: 0.1932 - accuracy: 0.922 - ETA: 33s - loss: 0.1924 - accuracy: 0.922 - ETA: 33s - loss: 0.1916 - accuracy: 0.923 - ETA: 33s - loss: 0.1904 - accuracy: 0.923 - ETA: 33s - loss: 0.1892 - accuracy: 0.924 - ETA: 33s - loss: 0.1881 - accuracy: 0.925 - ETA: 33s - loss: 0.1869 - accuracy: 0.925 - ETA: 33s - loss: 0.1860 - accuracy: 0.926 - ETA: 33s - loss: 0.1852 - accuracy: 0.926 - ETA: 33s - loss: 0.1847 - accuracy: 0.926 - ETA: 33s - loss: 0.1839 - accuracy: 0.927 - ETA: 32s - loss: 0.1831 - accuracy: 0.927 - ETA: 32s - loss: 0.1824 - accuracy: 0.927 - ETA: 32s - loss: 0.1817 - accuracy: 0.928 - ETA: 32s - loss: 0.1811 - accuracy: 0.928 - ETA: 32s - loss: 0.1803 - accuracy: 0.928 - ETA: 32s - loss: 0.1796 - accuracy: 0.929 - ETA: 32s - loss: 0.1790 - accuracy: 0.929 - ETA: 32s - loss: 0.1782 - accuracy: 0.929 - ETA: 32s - loss: 0.1774 - accuracy: 0.930 - ETA: 32s - loss: 0.1766 - accuracy: 0.930 - ETA: 32s - loss: 0.1761 - accuracy: 0.930 - ETA: 32s - loss: 0.1754 - accuracy: 0.931 - ETA: 32s - loss: 0.1747 - accuracy: 0.931 - ETA: 32s - loss: 0.1740 - accuracy: 0.931 - ETA: 31s - loss: 0.1732 - accuracy: 0.932 - ETA: 31s - loss: 0.1728 - accuracy: 0.932 - ETA: 31s - loss: 0.1722 - accuracy: 0.932 - ETA: 31s - loss: 0.1715 - accuracy: 0.932 - ETA: 31s - loss: 0.1712 - accuracy: 0.933 - ETA: 31s - loss: 0.1707 - accuracy: 0.933 - ETA: 31s - loss: 0.1702 - accuracy: 0.933 - ETA: 31s - loss: 0.1695 - accuracy: 0.933 - ETA: 31s - loss: 0.1688 - accuracy: 0.934 - ETA: 31s - loss: 0.1683 - accuracy: 0.934 - ETA: 31s - loss: 0.1676 - accuracy: 0.934 - ETA: 31s - loss: 0.1670 - accuracy: 0.935 - ETA: 31s - loss: 0.1664 - accuracy: 0.935 - ETA: 31s - loss: 0.1663 - accuracy: 0.935 - ETA: 31s - loss: 0.1660 - accuracy: 0.935 - ETA: 31s - loss: 0.1655 - accuracy: 0.935 - ETA: 30s - loss: 0.1649 - accuracy: 0.936 - ETA: 30s - loss: 0.1646 - accuracy: 0.936 - ETA: 30s - loss: 0.1640 - accuracy: 0.936 - ETA: 30s - loss: 0.1635 - accuracy: 0.936 - ETA: 30s - loss: 0.1629 - accuracy: 0.936 - ETA: 30s - loss: 0.1623 - accuracy: 0.937 - ETA: 30s - loss: 0.1620 - accuracy: 0.937 - ETA: 30s - loss: 0.1613 - accuracy: 0.937 - ETA: 30s - loss: 0.1609 - accuracy: 0.937 - ETA: 30s - loss: 0.1604 - accuracy: 0.938 - ETA: 30s - loss: 0.1599 - accuracy: 0.938 - ETA: 30s - loss: 0.1598 - accuracy: 0.938 - ETA: 30s - loss: 0.1591 - accuracy: 0.938 - ETA: 30s - loss: 0.1586 - accuracy: 0.938 - ETA: 30s - loss: 0.1583 - accuracy: 0.939 - ETA: 30s - loss: 0.1578 - accuracy: 0.939 - ETA: 30s - loss: 0.1577 - accuracy: 0.939 - ETA: 29s - loss: 0.1574 - accuracy: 0.939 - ETA: 29s - loss: 0.1571 - accuracy: 0.939 - ETA: 29s - loss: 0.1567 - accuracy: 0.939 - ETA: 29s - loss: 0.1562 - accuracy: 0.940 - ETA: 29s - loss: 0.1556 - accuracy: 0.940 - ETA: 29s - loss: 0.1552 - accuracy: 0.940 - ETA: 29s - loss: 0.1549 - accuracy: 0.940 - ETA: 29s - loss: 0.1548 - accuracy: 0.940 - ETA: 29s - loss: 0.1545 - accuracy: 0.940 - ETA: 29s - loss: 0.1540 - accuracy: 0.940 - ETA: 29s - loss: 0.1536 - accuracy: 0.941 - ETA: 29s - loss: 0.1532 - accuracy: 0.941 - ETA: 29s - loss: 0.1530 - accuracy: 0.941 - ETA: 29s - loss: 0.1526 - accuracy: 0.941 - ETA: 29s - loss: 0.1522 - accuracy: 0.941 - ETA: 29s - loss: 0.1520 - accuracy: 0.941 - ETA: 29s - loss: 0.1517 - accuracy: 0.942 - ETA: 29s - loss: 0.1513 - accuracy: 0.942 - ETA: 29s - loss: 0.1509 - accuracy: 0.942 - ETA: 28s - loss: 0.1505 - accuracy: 0.942 - ETA: 28s - loss: 0.1501 - accuracy: 0.942 - ETA: 28s - loss: 0.1497 - accuracy: 0.942 - ETA: 28s - loss: 0.1494 - accuracy: 0.943 - ETA: 28s - loss: 0.1490 - accuracy: 0.943 - ETA: 28s - loss: 0.1487 - accuracy: 0.943 - ETA: 28s - loss: 0.1483 - accuracy: 0.943 - ETA: 28s - loss: 0.1481 - accuracy: 0.943 - ETA: 28s - loss: 0.1478 - accuracy: 0.943 - ETA: 28s - loss: 0.1475 - accuracy: 0.943 - ETA: 28s - loss: 0.1473 - accuracy: 0.943 - ETA: 28s - loss: 0.1470 - accuracy: 0.944 - ETA: 28s - loss: 0.1467 - accuracy: 0.944 - ETA: 28s - loss: 0.1465 - accuracy: 0.944 - ETA: 28s - loss: 0.1461 - accuracy: 0.944 - ETA: 28s - loss: 0.1457 - accuracy: 0.944 - ETA: 28s - loss: 0.1452 - accuracy: 0.944 - ETA: 27s - loss: 0.1449 - accuracy: 0.945 - ETA: 27s - loss: 0.1445 - accuracy: 0.945 - ETA: 27s - loss: 0.1440 - accuracy: 0.945 - ETA: 27s - loss: 0.1438 - accuracy: 0.945 - ETA: 27s - loss: 0.1436 - accuracy: 0.945 - ETA: 27s - loss: 0.1433 - accuracy: 0.945 - ETA: 27s - loss: 0.1430 - accuracy: 0.945 - ETA: 27s - loss: 0.1429 - accuracy: 0.945 - ETA: 27s - loss: 0.1426 - accuracy: 0.946 - ETA: 27s - loss: 0.1425 - accuracy: 0.946 - ETA: 27s - loss: 0.1421 - accuracy: 0.946 - ETA: 27s - loss: 0.1419 - accuracy: 0.946 - ETA: 27s - loss: 0.1417 - accuracy: 0.946 - ETA: 27s - loss: 0.1414 - accuracy: 0.946 - ETA: 27s - loss: 0.1411 - accuracy: 0.946 - ETA: 27s - loss: 0.1408 - accuracy: 0.946 - ETA: 27s - loss: 0.1405 - accuracy: 0.947 - ETA: 27s - loss: 0.1402 - accuracy: 0.947 - ETA: 26s - loss: 0.1404 - accuracy: 0.947 - ETA: 26s - loss: 0.1401 - accuracy: 0.947 - ETA: 26s - loss: 0.1398 - accuracy: 0.947 - ETA: 26s - loss: 0.1396 - accuracy: 0.947 - ETA: 26s - loss: 0.1393 - accuracy: 0.947 - ETA: 26s - loss: 0.1390 - accuracy: 0.947 - ETA: 26s - loss: 0.1387 - accuracy: 0.947 - ETA: 26s - loss: 0.1386 - accuracy: 0.947 - ETA: 26s - loss: 0.1383 - accuracy: 0.9479
 948576/1806870 [==============>...............] - ETA: 26s - loss: 0.1380 - accuracy: 0.948 - ETA: 26s - loss: 0.1379 - accuracy: 0.948 - ETA: 26s - loss: 0.1377 - accuracy: 0.948 - ETA: 26s - loss: 0.1374 - accuracy: 0.948 - ETA: 26s - loss: 0.1372 - accuracy: 0.948 - ETA: 26s - loss: 0.1369 - accuracy: 0.948 - ETA: 26s - loss: 0.1366 - accuracy: 0.948 - ETA: 26s - loss: 0.1363 - accuracy: 0.948 - ETA: 26s - loss: 0.1360 - accuracy: 0.948 - ETA: 25s - loss: 0.1357 - accuracy: 0.949 - ETA: 25s - loss: 0.1354 - accuracy: 0.949 - ETA: 25s - loss: 0.1352 - accuracy: 0.949 - ETA: 25s - loss: 0.1350 - accuracy: 0.949 - ETA: 25s - loss: 0.1347 - accuracy: 0.949 - ETA: 25s - loss: 0.1344 - accuracy: 0.949 - ETA: 25s - loss: 0.1341 - accuracy: 0.949 - ETA: 25s - loss: 0.1340 - accuracy: 0.949 - ETA: 25s - loss: 0.1337 - accuracy: 0.950 - ETA: 25s - loss: 0.1335 - accuracy: 0.950 - ETA: 25s - loss: 0.1333 - accuracy: 0.950 - ETA: 25s - loss: 0.1331 - accuracy: 0.950 - ETA: 25s - loss: 0.1328 - accuracy: 0.950 - ETA: 25s - loss: 0.1326 - accuracy: 0.950 - ETA: 25s - loss: 0.1323 - accuracy: 0.950 - ETA: 25s - loss: 0.1321 - accuracy: 0.950 - ETA: 25s - loss: 0.1320 - accuracy: 0.950 - ETA: 25s - loss: 0.1318 - accuracy: 0.950 - ETA: 25s - loss: 0.1315 - accuracy: 0.950 - ETA: 24s - loss: 0.1313 - accuracy: 0.951 - ETA: 24s - loss: 0.1311 - accuracy: 0.951 - ETA: 24s - loss: 0.1310 - accuracy: 0.951 - ETA: 24s - loss: 0.1307 - accuracy: 0.951 - ETA: 24s - loss: 0.1304 - accuracy: 0.951 - ETA: 24s - loss: 0.1302 - accuracy: 0.951 - ETA: 24s - loss: 0.1301 - accuracy: 0.951 - ETA: 24s - loss: 0.1299 - accuracy: 0.951 - ETA: 24s - loss: 0.1298 - accuracy: 0.951 - ETA: 24s - loss: 0.1296 - accuracy: 0.951 - ETA: 24s - loss: 0.1293 - accuracy: 0.951 - ETA: 24s - loss: 0.1291 - accuracy: 0.951 - ETA: 24s - loss: 0.1289 - accuracy: 0.952 - ETA: 24s - loss: 0.1286 - accuracy: 0.952 - ETA: 24s - loss: 0.1284 - accuracy: 0.952 - ETA: 24s - loss: 0.1282 - accuracy: 0.952 - ETA: 24s - loss: 0.1279 - accuracy: 0.952 - ETA: 24s - loss: 0.1277 - accuracy: 0.952 - ETA: 23s - loss: 0.1275 - accuracy: 0.952 - ETA: 23s - loss: 0.1273 - accuracy: 0.952 - ETA: 23s - loss: 0.1272 - accuracy: 0.952 - ETA: 23s - loss: 0.1270 - accuracy: 0.952 - ETA: 23s - loss: 0.1269 - accuracy: 0.952 - ETA: 23s - loss: 0.1267 - accuracy: 0.953 - ETA: 23s - loss: 0.1265 - accuracy: 0.953 - ETA: 23s - loss: 0.1262 - accuracy: 0.953 - ETA: 23s - loss: 0.1260 - accuracy: 0.953 - ETA: 23s - loss: 0.1258 - accuracy: 0.953 - ETA: 23s - loss: 0.1256 - accuracy: 0.953 - ETA: 23s - loss: 0.1253 - accuracy: 0.953 - ETA: 23s - loss: 0.1251 - accuracy: 0.953 - ETA: 23s - loss: 0.1249 - accuracy: 0.953 - ETA: 23s - loss: 0.1248 - accuracy: 0.953 - ETA: 23s - loss: 0.1245 - accuracy: 0.953 - ETA: 23s - loss: 0.1243 - accuracy: 0.954 - ETA: 23s - loss: 0.1241 - accuracy: 0.954 - ETA: 23s - loss: 0.1240 - accuracy: 0.954 - ETA: 22s - loss: 0.1238 - accuracy: 0.954 - ETA: 22s - loss: 0.1236 - accuracy: 0.954 - ETA: 22s - loss: 0.1234 - accuracy: 0.954 - ETA: 22s - loss: 0.1232 - accuracy: 0.954 - ETA: 22s - loss: 0.1230 - accuracy: 0.954 - ETA: 22s - loss: 0.1229 - accuracy: 0.954 - ETA: 22s - loss: 0.1227 - accuracy: 0.954 - ETA: 22s - loss: 0.1225 - accuracy: 0.954 - ETA: 22s - loss: 0.1223 - accuracy: 0.954 - ETA: 22s - loss: 0.1221 - accuracy: 0.954 - ETA: 22s - loss: 0.1219 - accuracy: 0.954 - ETA: 22s - loss: 0.1218 - accuracy: 0.955 - ETA: 22s - loss: 0.1217 - accuracy: 0.955 - ETA: 22s - loss: 0.1215 - accuracy: 0.955 - ETA: 22s - loss: 0.1214 - accuracy: 0.955 - ETA: 22s - loss: 0.1211 - accuracy: 0.955 - ETA: 22s - loss: 0.1210 - accuracy: 0.955 - ETA: 22s - loss: 0.1207 - accuracy: 0.955 - ETA: 22s - loss: 0.1206 - accuracy: 0.955 - ETA: 21s - loss: 0.1204 - accuracy: 0.955 - ETA: 21s - loss: 0.1203 - accuracy: 0.955 - ETA: 21s - loss: 0.1200 - accuracy: 0.955 - ETA: 21s - loss: 0.1199 - accuracy: 0.955 - ETA: 21s - loss: 0.1197 - accuracy: 0.955 - ETA: 21s - loss: 0.1196 - accuracy: 0.955 - ETA: 21s - loss: 0.1195 - accuracy: 0.956 - ETA: 21s - loss: 0.1193 - accuracy: 0.956 - ETA: 21s - loss: 0.1191 - accuracy: 0.956 - ETA: 21s - loss: 0.1189 - accuracy: 0.956 - ETA: 21s - loss: 0.1188 - accuracy: 0.956 - ETA: 21s - loss: 0.1187 - accuracy: 0.956 - ETA: 21s - loss: 0.1185 - accuracy: 0.956 - ETA: 21s - loss: 0.1184 - accuracy: 0.956 - ETA: 21s - loss: 0.1183 - accuracy: 0.956 - ETA: 21s - loss: 0.1181 - accuracy: 0.956 - ETA: 21s - loss: 0.1179 - accuracy: 0.956 - ETA: 21s - loss: 0.1177 - accuracy: 0.956 - ETA: 21s - loss: 0.1176 - accuracy: 0.956 - ETA: 21s - loss: 0.1173 - accuracy: 0.956 - ETA: 20s - loss: 0.1172 - accuracy: 0.956 - ETA: 20s - loss: 0.1171 - accuracy: 0.957 - ETA: 20s - loss: 0.1169 - accuracy: 0.957 - ETA: 20s - loss: 0.1167 - accuracy: 0.957 - ETA: 20s - loss: 0.1166 - accuracy: 0.957 - ETA: 20s - loss: 0.1165 - accuracy: 0.957 - ETA: 20s - loss: 0.1163 - accuracy: 0.957 - ETA: 20s - loss: 0.1162 - accuracy: 0.957 - ETA: 20s - loss: 0.1160 - accuracy: 0.957 - ETA: 20s - loss: 0.1158 - accuracy: 0.957 - ETA: 20s - loss: 0.1156 - accuracy: 0.957 - ETA: 20s - loss: 0.1154 - accuracy: 0.957 - ETA: 20s - loss: 0.1152 - accuracy: 0.957 - ETA: 20s - loss: 0.1150 - accuracy: 0.957 - ETA: 20s - loss: 0.1149 - accuracy: 0.957 - ETA: 20s - loss: 0.1147 - accuracy: 0.957 - ETA: 20s - loss: 0.1146 - accuracy: 0.958 - ETA: 20s - loss: 0.1145 - accuracy: 0.958 - ETA: 20s - loss: 0.1143 - accuracy: 0.958 - ETA: 19s - loss: 0.1142 - accuracy: 0.958 - ETA: 19s - loss: 0.1140 - accuracy: 0.958 - ETA: 19s - loss: 0.1139 - accuracy: 0.958 - ETA: 19s - loss: 0.1137 - accuracy: 0.958 - ETA: 19s - loss: 0.1135 - accuracy: 0.958 - ETA: 19s - loss: 0.1133 - accuracy: 0.958 - ETA: 19s - loss: 0.1132 - accuracy: 0.958 - ETA: 19s - loss: 0.1130 - accuracy: 0.958 - ETA: 19s - loss: 0.1129 - accuracy: 0.958 - ETA: 19s - loss: 0.1128 - accuracy: 0.958 - ETA: 19s - loss: 0.1127 - accuracy: 0.958 - ETA: 19s - loss: 0.1126 - accuracy: 0.958 - ETA: 19s - loss: 0.1124 - accuracy: 0.958 - ETA: 19s - loss: 0.1123 - accuracy: 0.958 - ETA: 19s - loss: 0.1121 - accuracy: 0.959 - ETA: 19s - loss: 0.1120 - accuracy: 0.959 - ETA: 19s - loss: 0.1118 - accuracy: 0.959 - ETA: 19s - loss: 0.1117 - accuracy: 0.959 - ETA: 19s - loss: 0.1116 - accuracy: 0.959 - ETA: 19s - loss: 0.1115 - accuracy: 0.959 - ETA: 18s - loss: 0.1113 - accuracy: 0.959 - ETA: 18s - loss: 0.1112 - accuracy: 0.959 - ETA: 18s - loss: 0.1110 - accuracy: 0.959 - ETA: 18s - loss: 0.1108 - accuracy: 0.959 - ETA: 18s - loss: 0.1107 - accuracy: 0.959 - ETA: 18s - loss: 0.1105 - accuracy: 0.959 - ETA: 18s - loss: 0.1104 - accuracy: 0.959 - ETA: 18s - loss: 0.1103 - accuracy: 0.959 - ETA: 18s - loss: 0.1101 - accuracy: 0.959 - ETA: 18s - loss: 0.1099 - accuracy: 0.959 - ETA: 18s - loss: 0.1098 - accuracy: 0.959 - ETA: 18s - loss: 0.1097 - accuracy: 0.960 - ETA: 18s - loss: 0.1096 - accuracy: 0.960 - ETA: 18s - loss: 0.1095 - accuracy: 0.960 - ETA: 18s - loss: 0.1094 - accuracy: 0.960 - ETA: 18s - loss: 0.1093 - accuracy: 0.960 - ETA: 18s - loss: 0.1092 - accuracy: 0.960 - ETA: 18s - loss: 0.1090 - accuracy: 0.960 - ETA: 18s - loss: 0.1089 - accuracy: 0.960 - ETA: 17s - loss: 0.1088 - accuracy: 0.960 - ETA: 17s - loss: 0.1086 - accuracy: 0.960 - ETA: 17s - loss: 0.1085 - accuracy: 0.960 - ETA: 17s - loss: 0.1085 - accuracy: 0.960 - ETA: 17s - loss: 0.1084 - accuracy: 0.960 - ETA: 17s - loss: 0.1082 - accuracy: 0.960 - ETA: 17s - loss: 0.1080 - accuracy: 0.960 - ETA: 17s - loss: 0.1079 - accuracy: 0.960 - ETA: 17s - loss: 0.1078 - accuracy: 0.960 - ETA: 17s - loss: 0.1077 - accuracy: 0.960 - ETA: 17s - loss: 0.1075 - accuracy: 0.960 - ETA: 17s - loss: 0.1073 - accuracy: 0.961 - ETA: 17s - loss: 0.1071 - accuracy: 0.961 - ETA: 17s - loss: 0.1070 - accuracy: 0.961 - ETA: 17s - loss: 0.1069 - accuracy: 0.961 - ETA: 17s - loss: 0.1068 - accuracy: 0.961 - ETA: 17s - loss: 0.1066 - accuracy: 0.961 - ETA: 17s - loss: 0.1066 - accuracy: 0.961 - ETA: 17s - loss: 0.1065 - accuracy: 0.961 - ETA: 17s - loss: 0.1064 - accuracy: 0.961 - ETA: 16s - loss: 0.1062 - accuracy: 0.961 - ETA: 16s - loss: 0.1061 - accuracy: 0.961 - ETA: 16s - loss: 0.1060 - accuracy: 0.961 - ETA: 16s - loss: 0.1058 - accuracy: 0.9616
1435872/1806870 [======================>.......] - ETA: 16s - loss: 0.1057 - accuracy: 0.961 - ETA: 16s - loss: 0.1055 - accuracy: 0.961 - ETA: 16s - loss: 0.1055 - accuracy: 0.961 - ETA: 16s - loss: 0.1054 - accuracy: 0.961 - ETA: 16s - loss: 0.1052 - accuracy: 0.961 - ETA: 16s - loss: 0.1051 - accuracy: 0.961 - ETA: 16s - loss: 0.1050 - accuracy: 0.961 - ETA: 16s - loss: 0.1048 - accuracy: 0.961 - ETA: 16s - loss: 0.1047 - accuracy: 0.962 - ETA: 16s - loss: 0.1046 - accuracy: 0.962 - ETA: 16s - loss: 0.1045 - accuracy: 0.962 - ETA: 16s - loss: 0.1043 - accuracy: 0.962 - ETA: 16s - loss: 0.1043 - accuracy: 0.962 - ETA: 16s - loss: 0.1041 - accuracy: 0.962 - ETA: 16s - loss: 0.1041 - accuracy: 0.962 - ETA: 15s - loss: 0.1039 - accuracy: 0.962 - ETA: 15s - loss: 0.1038 - accuracy: 0.962 - ETA: 15s - loss: 0.1037 - accuracy: 0.962 - ETA: 15s - loss: 0.1035 - accuracy: 0.962 - ETA: 15s - loss: 0.1034 - accuracy: 0.962 - ETA: 15s - loss: 0.1033 - accuracy: 0.962 - ETA: 15s - loss: 0.1031 - accuracy: 0.962 - ETA: 15s - loss: 0.1030 - accuracy: 0.962 - ETA: 15s - loss: 0.1028 - accuracy: 0.962 - ETA: 15s - loss: 0.1027 - accuracy: 0.962 - ETA: 15s - loss: 0.1026 - accuracy: 0.962 - ETA: 15s - loss: 0.1025 - accuracy: 0.962 - ETA: 15s - loss: 0.1024 - accuracy: 0.962 - ETA: 15s - loss: 0.1023 - accuracy: 0.962 - ETA: 15s - loss: 0.1022 - accuracy: 0.963 - ETA: 15s - loss: 0.1021 - accuracy: 0.963 - ETA: 15s - loss: 0.1020 - accuracy: 0.963 - ETA: 15s - loss: 0.1019 - accuracy: 0.963 - ETA: 15s - loss: 0.1018 - accuracy: 0.963 - ETA: 15s - loss: 0.1017 - accuracy: 0.963 - ETA: 14s - loss: 0.1015 - accuracy: 0.963 - ETA: 14s - loss: 0.1014 - accuracy: 0.963 - ETA: 14s - loss: 0.1014 - accuracy: 0.963 - ETA: 14s - loss: 0.1013 - accuracy: 0.963 - ETA: 14s - loss: 0.1011 - accuracy: 0.963 - ETA: 14s - loss: 0.1010 - accuracy: 0.963 - ETA: 14s - loss: 0.1009 - accuracy: 0.963 - ETA: 14s - loss: 0.1008 - accuracy: 0.963 - ETA: 14s - loss: 0.1007 - accuracy: 0.963 - ETA: 14s - loss: 0.1006 - accuracy: 0.963 - ETA: 14s - loss: 0.1005 - accuracy: 0.963 - ETA: 14s - loss: 0.1004 - accuracy: 0.963 - ETA: 14s - loss: 0.1003 - accuracy: 0.963 - ETA: 14s - loss: 0.1002 - accuracy: 0.963 - ETA: 14s - loss: 0.1002 - accuracy: 0.963 - ETA: 14s - loss: 0.1000 - accuracy: 0.963 - ETA: 14s - loss: 0.0999 - accuracy: 0.963 - ETA: 14s - loss: 0.0998 - accuracy: 0.963 - ETA: 14s - loss: 0.0997 - accuracy: 0.963 - ETA: 13s - loss: 0.0996 - accuracy: 0.964 - ETA: 13s - loss: 0.0995 - accuracy: 0.964 - ETA: 13s - loss: 0.0994 - accuracy: 0.964 - ETA: 13s - loss: 0.0993 - accuracy: 0.964 - ETA: 13s - loss: 0.0993 - accuracy: 0.964 - ETA: 13s - loss: 0.0992 - accuracy: 0.964 - ETA: 13s - loss: 0.0990 - accuracy: 0.964 - ETA: 13s - loss: 0.0989 - accuracy: 0.964 - ETA: 13s - loss: 0.0988 - accuracy: 0.964 - ETA: 13s - loss: 0.0987 - accuracy: 0.964 - ETA: 13s - loss: 0.0986 - accuracy: 0.964 - ETA: 13s - loss: 0.0985 - accuracy: 0.964 - ETA: 13s - loss: 0.0984 - accuracy: 0.964 - ETA: 13s - loss: 0.0983 - accuracy: 0.964 - ETA: 13s - loss: 0.0982 - accuracy: 0.964 - ETA: 13s - loss: 0.0981 - accuracy: 0.964 - ETA: 13s - loss: 0.0980 - accuracy: 0.964 - ETA: 13s - loss: 0.0979 - accuracy: 0.964 - ETA: 13s - loss: 0.0978 - accuracy: 0.964 - ETA: 13s - loss: 0.0977 - accuracy: 0.964 - ETA: 12s - loss: 0.0977 - accuracy: 0.964 - ETA: 12s - loss: 0.0976 - accuracy: 0.964 - ETA: 12s - loss: 0.0974 - accuracy: 0.964 - ETA: 12s - loss: 0.0973 - accuracy: 0.964 - ETA: 12s - loss: 0.0972 - accuracy: 0.964 - ETA: 12s - loss: 0.0971 - accuracy: 0.965 - ETA: 12s - loss: 0.0970 - accuracy: 0.965 - ETA: 12s - loss: 0.0969 - accuracy: 0.965 - ETA: 12s - loss: 0.0968 - accuracy: 0.965 - ETA: 12s - loss: 0.0968 - accuracy: 0.965 - ETA: 12s - loss: 0.0967 - accuracy: 0.965 - ETA: 12s - loss: 0.0966 - accuracy: 0.965 - ETA: 12s - loss: 0.0965 - accuracy: 0.965 - ETA: 12s - loss: 0.0964 - accuracy: 0.965 - ETA: 12s - loss: 0.0963 - accuracy: 0.965 - ETA: 12s - loss: 0.0962 - accuracy: 0.965 - ETA: 12s - loss: 0.0961 - accuracy: 0.965 - ETA: 12s - loss: 0.0960 - accuracy: 0.965 - ETA: 12s - loss: 0.0959 - accuracy: 0.965 - ETA: 11s - loss: 0.0958 - accuracy: 0.965 - ETA: 11s - loss: 0.0957 - accuracy: 0.965 - ETA: 11s - loss: 0.0956 - accuracy: 0.965 - ETA: 11s - loss: 0.0955 - accuracy: 0.965 - ETA: 11s - loss: 0.0954 - accuracy: 0.965 - ETA: 11s - loss: 0.0953 - accuracy: 0.965 - ETA: 11s - loss: 0.0952 - accuracy: 0.965 - ETA: 11s - loss: 0.0952 - accuracy: 0.965 - ETA: 11s - loss: 0.0951 - accuracy: 0.965 - ETA: 11s - loss: 0.0950 - accuracy: 0.965 - ETA: 11s - loss: 0.0949 - accuracy: 0.965 - ETA: 11s - loss: 0.0948 - accuracy: 0.965 - ETA: 11s - loss: 0.0947 - accuracy: 0.965 - ETA: 11s - loss: 0.0946 - accuracy: 0.965 - ETA: 11s - loss: 0.0945 - accuracy: 0.966 - ETA: 11s - loss: 0.0944 - accuracy: 0.966 - ETA: 11s - loss: 0.0943 - accuracy: 0.966 - ETA: 11s - loss: 0.0942 - accuracy: 0.966 - ETA: 11s - loss: 0.0941 - accuracy: 0.966 - ETA: 11s - loss: 0.0940 - accuracy: 0.966 - ETA: 10s - loss: 0.0939 - accuracy: 0.966 - ETA: 10s - loss: 0.0938 - accuracy: 0.966 - ETA: 10s - loss: 0.0937 - accuracy: 0.966 - ETA: 10s - loss: 0.0936 - accuracy: 0.966 - ETA: 10s - loss: 0.0935 - accuracy: 0.966 - ETA: 10s - loss: 0.0934 - accuracy: 0.966 - ETA: 10s - loss: 0.0933 - accuracy: 0.966 - ETA: 10s - loss: 0.0932 - accuracy: 0.966 - ETA: 10s - loss: 0.0931 - accuracy: 0.966 - ETA: 10s - loss: 0.0930 - accuracy: 0.966 - ETA: 10s - loss: 0.0929 - accuracy: 0.966 - ETA: 10s - loss: 0.0928 - accuracy: 0.966 - ETA: 10s - loss: 0.0927 - accuracy: 0.966 - ETA: 10s - loss: 0.0926 - accuracy: 0.966 - ETA: 10s - loss: 0.0925 - accuracy: 0.966 - ETA: 10s - loss: 0.0924 - accuracy: 0.966 - ETA: 10s - loss: 0.0923 - accuracy: 0.966 - ETA: 10s - loss: 0.0923 - accuracy: 0.966 - ETA: 10s - loss: 0.0922 - accuracy: 0.966 - ETA: 10s - loss: 0.0921 - accuracy: 0.966 - ETA: 9s - loss: 0.0920 - accuracy: 0.966 - ETA: 9s - loss: 0.0920 - accuracy: 0.96 - ETA: 9s - loss: 0.0919 - accuracy: 0.96 - ETA: 9s - loss: 0.0918 - accuracy: 0.96 - ETA: 9s - loss: 0.0917 - accuracy: 0.96 - ETA: 9s - loss: 0.0917 - accuracy: 0.96 - ETA: 9s - loss: 0.0916 - accuracy: 0.96 - ETA: 9s - loss: 0.0915 - accuracy: 0.96 - ETA: 9s - loss: 0.0914 - accuracy: 0.96 - ETA: 9s - loss: 0.0913 - accuracy: 0.96 - ETA: 9s - loss: 0.0912 - accuracy: 0.96 - ETA: 9s - loss: 0.0911 - accuracy: 0.96 - ETA: 9s - loss: 0.0910 - accuracy: 0.96 - ETA: 9s - loss: 0.0909 - accuracy: 0.96 - ETA: 9s - loss: 0.0908 - accuracy: 0.96 - ETA: 9s - loss: 0.0907 - accuracy: 0.96 - ETA: 9s - loss: 0.0906 - accuracy: 0.96 - ETA: 9s - loss: 0.0905 - accuracy: 0.96 - ETA: 9s - loss: 0.0905 - accuracy: 0.96 - ETA: 9s - loss: 0.0904 - accuracy: 0.96 - ETA: 8s - loss: 0.0904 - accuracy: 0.96 - ETA: 8s - loss: 0.0903 - accuracy: 0.96 - ETA: 8s - loss: 0.0902 - accuracy: 0.96 - ETA: 8s - loss: 0.0901 - accuracy: 0.96 - ETA: 8s - loss: 0.0900 - accuracy: 0.96 - ETA: 8s - loss: 0.0899 - accuracy: 0.96 - ETA: 8s - loss: 0.0898 - accuracy: 0.96 - ETA: 8s - loss: 0.0897 - accuracy: 0.96 - ETA: 8s - loss: 0.0896 - accuracy: 0.96 - ETA: 8s - loss: 0.0896 - accuracy: 0.96 - ETA: 8s - loss: 0.0895 - accuracy: 0.96 - ETA: 8s - loss: 0.0894 - accuracy: 0.96 - ETA: 8s - loss: 0.0893 - accuracy: 0.96 - ETA: 8s - loss: 0.0892 - accuracy: 0.96 - ETA: 8s - loss: 0.0891 - accuracy: 0.96 - ETA: 8s - loss: 0.0890 - accuracy: 0.96 - ETA: 8s - loss: 0.0889 - accuracy: 0.96 - ETA: 8s - loss: 0.0889 - accuracy: 0.96 - ETA: 8s - loss: 0.0888 - accuracy: 0.96 - ETA: 7s - loss: 0.0887 - accuracy: 0.96 - ETA: 7s - loss: 0.0886 - accuracy: 0.96 - ETA: 7s - loss: 0.0885 - accuracy: 0.96 - ETA: 7s - loss: 0.0885 - accuracy: 0.96 - ETA: 7s - loss: 0.0884 - accuracy: 0.96 - ETA: 7s - loss: 0.0883 - accuracy: 0.96 - ETA: 7s - loss: 0.0882 - accuracy: 0.96 - ETA: 7s - loss: 0.0881 - accuracy: 0.96 - ETA: 7s - loss: 0.0880 - accuracy: 0.96 - ETA: 7s - loss: 0.0879 - accuracy: 0.96 - ETA: 7s - loss: 0.0878 - accuracy: 0.96 - ETA: 7s - loss: 0.0878 - accuracy: 0.96 - ETA: 7s - loss: 0.0877 - accuracy: 0.96 - ETA: 7s - loss: 0.0876 - accuracy: 0.96 - ETA: 7s - loss: 0.0876 - accuracy: 0.96 - ETA: 7s - loss: 0.0875 - accuracy: 0.9687
1806870/1806870 [==============================] - ETA: 7s - loss: 0.0874 - accuracy: 0.96 - ETA: 7s - loss: 0.0873 - accuracy: 0.96 - ETA: 7s - loss: 0.0873 - accuracy: 0.96 - ETA: 7s - loss: 0.0872 - accuracy: 0.96 - ETA: 6s - loss: 0.0871 - accuracy: 0.96 - ETA: 6s - loss: 0.0870 - accuracy: 0.96 - ETA: 6s - loss: 0.0870 - accuracy: 0.96 - ETA: 6s - loss: 0.0869 - accuracy: 0.96 - ETA: 6s - loss: 0.0868 - accuracy: 0.96 - ETA: 6s - loss: 0.0867 - accuracy: 0.96 - ETA: 6s - loss: 0.0867 - accuracy: 0.96 - ETA: 6s - loss: 0.0866 - accuracy: 0.96 - ETA: 6s - loss: 0.0866 - accuracy: 0.96 - ETA: 6s - loss: 0.0865 - accuracy: 0.96 - ETA: 6s - loss: 0.0864 - accuracy: 0.96 - ETA: 6s - loss: 0.0863 - accuracy: 0.96 - ETA: 6s - loss: 0.0863 - accuracy: 0.96 - ETA: 6s - loss: 0.0862 - accuracy: 0.96 - ETA: 6s - loss: 0.0861 - accuracy: 0.96 - ETA: 6s - loss: 0.0860 - accuracy: 0.96 - ETA: 6s - loss: 0.0860 - accuracy: 0.96 - ETA: 6s - loss: 0.0859 - accuracy: 0.96 - ETA: 6s - loss: 0.0858 - accuracy: 0.96 - ETA: 6s - loss: 0.0857 - accuracy: 0.96 - ETA: 6s - loss: 0.0857 - accuracy: 0.96 - ETA: 5s - loss: 0.0856 - accuracy: 0.96 - ETA: 5s - loss: 0.0856 - accuracy: 0.96 - ETA: 5s - loss: 0.0855 - accuracy: 0.96 - ETA: 5s - loss: 0.0854 - accuracy: 0.96 - ETA: 5s - loss: 0.0853 - accuracy: 0.96 - ETA: 5s - loss: 0.0852 - accuracy: 0.96 - ETA: 5s - loss: 0.0852 - accuracy: 0.96 - ETA: 5s - loss: 0.0851 - accuracy: 0.96 - ETA: 5s - loss: 0.0850 - accuracy: 0.96 - ETA: 5s - loss: 0.0849 - accuracy: 0.96 - ETA: 5s - loss: 0.0849 - accuracy: 0.96 - ETA: 5s - loss: 0.0848 - accuracy: 0.96 - ETA: 5s - loss: 0.0847 - accuracy: 0.96 - ETA: 5s - loss: 0.0846 - accuracy: 0.96 - ETA: 5s - loss: 0.0846 - accuracy: 0.96 - ETA: 5s - loss: 0.0845 - accuracy: 0.96 - ETA: 5s - loss: 0.0844 - accuracy: 0.96 - ETA: 5s - loss: 0.0844 - accuracy: 0.96 - ETA: 5s - loss: 0.0843 - accuracy: 0.96 - ETA: 5s - loss: 0.0842 - accuracy: 0.96 - ETA: 4s - loss: 0.0841 - accuracy: 0.96 - ETA: 4s - loss: 0.0840 - accuracy: 0.97 - ETA: 4s - loss: 0.0839 - accuracy: 0.97 - ETA: 4s - loss: 0.0838 - accuracy: 0.97 - ETA: 4s - loss: 0.0838 - accuracy: 0.97 - ETA: 4s - loss: 0.0837 - accuracy: 0.97 - ETA: 4s - loss: 0.0837 - accuracy: 0.97 - ETA: 4s - loss: 0.0836 - accuracy: 0.97 - ETA: 4s - loss: 0.0835 - accuracy: 0.97 - ETA: 4s - loss: 0.0834 - accuracy: 0.97 - ETA: 4s - loss: 0.0833 - accuracy: 0.97 - ETA: 4s - loss: 0.0832 - accuracy: 0.97 - ETA: 4s - loss: 0.0832 - accuracy: 0.97 - ETA: 4s - loss: 0.0831 - accuracy: 0.97 - ETA: 4s - loss: 0.0830 - accuracy: 0.97 - ETA: 4s - loss: 0.0830 - accuracy: 0.97 - ETA: 4s - loss: 0.0829 - accuracy: 0.97 - ETA: 4s - loss: 0.0828 - accuracy: 0.97 - ETA: 4s - loss: 0.0828 - accuracy: 0.97 - ETA: 4s - loss: 0.0827 - accuracy: 0.97 - ETA: 3s - loss: 0.0826 - accuracy: 0.97 - ETA: 3s - loss: 0.0826 - accuracy: 0.97 - ETA: 3s - loss: 0.0825 - accuracy: 0.97 - ETA: 3s - loss: 0.0825 - accuracy: 0.97 - ETA: 3s - loss: 0.0824 - accuracy: 0.97 - ETA: 3s - loss: 0.0823 - accuracy: 0.97 - ETA: 3s - loss: 0.0822 - accuracy: 0.97 - ETA: 3s - loss: 0.0822 - accuracy: 0.97 - ETA: 3s - loss: 0.0821 - accuracy: 0.97 - ETA: 3s - loss: 0.0821 - accuracy: 0.97 - ETA: 3s - loss: 0.0820 - accuracy: 0.97 - ETA: 3s - loss: 0.0819 - accuracy: 0.97 - ETA: 3s - loss: 0.0819 - accuracy: 0.97 - ETA: 3s - loss: 0.0818 - accuracy: 0.97 - ETA: 3s - loss: 0.0818 - accuracy: 0.97 - ETA: 3s - loss: 0.0817 - accuracy: 0.97 - ETA: 3s - loss: 0.0816 - accuracy: 0.97 - ETA: 3s - loss: 0.0815 - accuracy: 0.97 - ETA: 3s - loss: 0.0814 - accuracy: 0.97 - ETA: 3s - loss: 0.0814 - accuracy: 0.97 - ETA: 3s - loss: 0.0813 - accuracy: 0.97 - ETA: 2s - loss: 0.0812 - accuracy: 0.97 - ETA: 2s - loss: 0.0811 - accuracy: 0.97 - ETA: 2s - loss: 0.0810 - accuracy: 0.97 - ETA: 2s - loss: 0.0809 - accuracy: 0.97 - ETA: 2s - loss: 0.0809 - accuracy: 0.97 - ETA: 2s - loss: 0.0808 - accuracy: 0.97 - ETA: 2s - loss: 0.0808 - accuracy: 0.97 - ETA: 2s - loss: 0.0807 - accuracy: 0.97 - ETA: 2s - loss: 0.0806 - accuracy: 0.97 - ETA: 2s - loss: 0.0805 - accuracy: 0.97 - ETA: 2s - loss: 0.0805 - accuracy: 0.97 - ETA: 2s - loss: 0.0804 - accuracy: 0.97 - ETA: 2s - loss: 0.0803 - accuracy: 0.97 - ETA: 2s - loss: 0.0803 - accuracy: 0.97 - ETA: 2s - loss: 0.0802 - accuracy: 0.97 - ETA: 2s - loss: 0.0802 - accuracy: 0.97 - ETA: 2s - loss: 0.0801 - accuracy: 0.97 - ETA: 2s - loss: 0.0800 - accuracy: 0.97 - ETA: 2s - loss: 0.0800 - accuracy: 0.97 - ETA: 1s - loss: 0.0799 - accuracy: 0.97 - ETA: 1s - loss: 0.0798 - accuracy: 0.97 - ETA: 1s - loss: 0.0797 - accuracy: 0.97 - ETA: 1s - loss: 0.0797 - accuracy: 0.97 - ETA: 1s - loss: 0.0796 - accuracy: 0.97 - ETA: 1s - loss: 0.0795 - accuracy: 0.97 - ETA: 1s - loss: 0.0795 - accuracy: 0.97 - ETA: 1s - loss: 0.0794 - accuracy: 0.97 - ETA: 1s - loss: 0.0793 - accuracy: 0.97 - ETA: 1s - loss: 0.0793 - accuracy: 0.97 - ETA: 1s - loss: 0.0792 - accuracy: 0.97 - ETA: 1s - loss: 0.0791 - accuracy: 0.97 - ETA: 1s - loss: 0.0790 - accuracy: 0.97 - ETA: 1s - loss: 0.0790 - accuracy: 0.97 - ETA: 1s - loss: 0.0789 - accuracy: 0.97 - ETA: 1s - loss: 0.0788 - accuracy: 0.97 - ETA: 1s - loss: 0.0788 - accuracy: 0.97 - ETA: 1s - loss: 0.0787 - accuracy: 0.97 - ETA: 1s - loss: 0.0787 - accuracy: 0.97 - ETA: 1s - loss: 0.0786 - accuracy: 0.97 - ETA: 0s - loss: 0.0786 - accuracy: 0.97 - ETA: 0s - loss: 0.0785 - accuracy: 0.97 - ETA: 0s - loss: 0.0784 - accuracy: 0.97 - ETA: 0s - loss: 0.0783 - accuracy: 0.97 - ETA: 0s - loss: 0.0783 - accuracy: 0.97 - ETA: 0s - loss: 0.0782 - accuracy: 0.97 - ETA: 0s - loss: 0.0782 - accuracy: 0.97 - ETA: 0s - loss: 0.0781 - accuracy: 0.97 - ETA: 0s - loss: 0.0781 - accuracy: 0.97 - ETA: 0s - loss: 0.0780 - accuracy: 0.97 - ETA: 0s - loss: 0.0779 - accuracy: 0.97 - ETA: 0s - loss: 0.0779 - accuracy: 0.97 - ETA: 0s - loss: 0.0778 - accuracy: 0.97 - ETA: 0s - loss: 0.0777 - accuracy: 0.97 - ETA: 0s - loss: 0.0776 - accuracy: 0.97 - ETA: 0s - loss: 0.0776 - accuracy: 0.97 - ETA: 0s - loss: 0.0775 - accuracy: 0.97 - ETA: 0s - loss: 0.0774 - accuracy: 0.97 - ETA: 0s - loss: 0.0774 - accuracy: 0.97 - ETA: 0s - loss: 0.0773 - accuracy: 0.97 - 37s 20us/step - loss: 0.0773 - accuracy: 0.9725 - val_loss: 0.0357 - val_accuracy: 0.9882

Epoch 00001: val_loss improved from inf to 0.03567, saving model to test.h8
Epoch 2/2
 474496/1806870 [======>.......................] - ETA: 51:48 - loss: 0.0022 - accuracy: 1.000 - ETA: 1:13 - loss: 0.0302 - accuracy: 0.990 - ETA: 53s - loss: 0.0260 - accuracy: 0.9923 - ETA: 47s - loss: 0.0219 - accuracy: 0.993 - ETA: 43s - loss: 0.0236 - accuracy: 0.993 - ETA: 41s - loss: 0.0304 - accuracy: 0.991 - ETA: 40s - loss: 0.0327 - accuracy: 0.990 - ETA: 39s - loss: 0.0327 - accuracy: 0.990 - ETA: 38s - loss: 0.0317 - accuracy: 0.990 - ETA: 38s - loss: 0.0308 - accuracy: 0.990 - ETA: 37s - loss: 0.0303 - accuracy: 0.990 - ETA: 37s - loss: 0.0307 - accuracy: 0.990 - ETA: 37s - loss: 0.0311 - accuracy: 0.990 - ETA: 36s - loss: 0.0312 - accuracy: 0.990 - ETA: 36s - loss: 0.0306 - accuracy: 0.990 - ETA: 36s - loss: 0.0319 - accuracy: 0.989 - ETA: 36s - loss: 0.0315 - accuracy: 0.989 - ETA: 35s - loss: 0.0318 - accuracy: 0.989 - ETA: 35s - loss: 0.0317 - accuracy: 0.989 - ETA: 35s - loss: 0.0317 - accuracy: 0.989 - ETA: 35s - loss: 0.0323 - accuracy: 0.989 - ETA: 35s - loss: 0.0319 - accuracy: 0.989 - ETA: 35s - loss: 0.0317 - accuracy: 0.989 - ETA: 34s - loss: 0.0316 - accuracy: 0.989 - ETA: 34s - loss: 0.0318 - accuracy: 0.989 - ETA: 34s - loss: 0.0327 - accuracy: 0.989 - ETA: 34s - loss: 0.0325 - accuracy: 0.989 - ETA: 34s - loss: 0.0323 - accuracy: 0.989 - ETA: 34s - loss: 0.0324 - accuracy: 0.989 - ETA: 34s - loss: 0.0321 - accuracy: 0.989 - ETA: 34s - loss: 0.0323 - accuracy: 0.989 - ETA: 34s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0318 - accuracy: 0.989 - ETA: 33s - loss: 0.0318 - accuracy: 0.989 - ETA: 33s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0322 - accuracy: 0.989 - ETA: 33s - loss: 0.0321 - accuracy: 0.989 - ETA: 33s - loss: 0.0322 - accuracy: 0.989 - ETA: 33s - loss: 0.0320 - accuracy: 0.989 - ETA: 33s - loss: 0.0322 - accuracy: 0.989 - ETA: 33s - loss: 0.0324 - accuracy: 0.989 - ETA: 33s - loss: 0.0323 - accuracy: 0.989 - ETA: 32s - loss: 0.0327 - accuracy: 0.988 - ETA: 32s - loss: 0.0325 - accuracy: 0.988 - ETA: 32s - loss: 0.0324 - accuracy: 0.989 - ETA: 32s - loss: 0.0325 - accuracy: 0.988 - ETA: 32s - loss: 0.0324 - accuracy: 0.989 - ETA: 32s - loss: 0.0326 - accuracy: 0.988 - ETA: 32s - loss: 0.0330 - accuracy: 0.988 - ETA: 32s - loss: 0.0330 - accuracy: 0.988 - ETA: 32s - loss: 0.0331 - accuracy: 0.988 - ETA: 32s - loss: 0.0334 - accuracy: 0.988 - ETA: 32s - loss: 0.0333 - accuracy: 0.988 - ETA: 32s - loss: 0.0332 - accuracy: 0.988 - ETA: 32s - loss: 0.0331 - accuracy: 0.988 - ETA: 32s - loss: 0.0329 - accuracy: 0.988 - ETA: 32s - loss: 0.0329 - accuracy: 0.988 - ETA: 32s - loss: 0.0327 - accuracy: 0.988 - ETA: 32s - loss: 0.0326 - accuracy: 0.988 - ETA: 32s - loss: 0.0328 - accuracy: 0.988 - ETA: 32s - loss: 0.0328 - accuracy: 0.988 - ETA: 31s - loss: 0.0328 - accuracy: 0.988 - ETA: 31s - loss: 0.0326 - accuracy: 0.989 - ETA: 31s - loss: 0.0327 - accuracy: 0.989 - ETA: 31s - loss: 0.0327 - accuracy: 0.989 - ETA: 31s - loss: 0.0327 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0322 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0326 - accuracy: 0.988 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0322 - accuracy: 0.989 - ETA: 31s - loss: 0.0322 - accuracy: 0.989 - ETA: 31s - loss: 0.0321 - accuracy: 0.989 - ETA: 31s - loss: 0.0320 - accuracy: 0.989 - ETA: 31s - loss: 0.0319 - accuracy: 0.989 - ETA: 31s - loss: 0.0318 - accuracy: 0.989 - ETA: 31s - loss: 0.0317 - accuracy: 0.989 - ETA: 31s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0317 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0317 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0313 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0313 - accuracy: 0.989 - ETA: 30s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0311 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0309 - accuracy: 0.989 - ETA: 29s - loss: 0.0309 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0306 - accuracy: 0.989 - ETA: 28s - loss: 0.0306 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0306 - accuracy: 0.989 - ETA: 27s - loss: 0.0306 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0303 - accuracy: 0.989 - ETA: 27s - loss: 0.0302 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.9898
 949216/1806870 [==============>...............] - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0301 - accuracy: 0.989 - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0298 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0298 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0298 - accuracy: 0.989 - ETA: 25s - loss: 0.0297 - accuracy: 0.989 - ETA: 25s - loss: 0.0296 - accuracy: 0.989 - ETA: 25s - loss: 0.0296 - accuracy: 0.989 - ETA: 24s - loss: 0.0297 - accuracy: 0.989 - ETA: 24s - loss: 0.0296 - accuracy: 0.989 - ETA: 24s - loss: 0.0296 - accuracy: 0.990 - ETA: 24s - loss: 0.0296 - accuracy: 0.990 - ETA: 24s - loss: 0.0296 - accuracy: 0.990 - ETA: 24s - loss: 0.0295 - accuracy: 0.990 - ETA: 24s - loss: 0.0295 - accuracy: 0.990 - ETA: 24s - loss: 0.0294 - accuracy: 0.990 - ETA: 24s - loss: 0.0294 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0288 - accuracy: 0.990 - ETA: 23s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0285 - accuracy: 0.990 - ETA: 22s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0284 - accuracy: 0.990 - ETA: 21s - loss: 0.0284 - accuracy: 0.990 - ETA: 21s - loss: 0.0284 - accuracy: 0.990 - ETA: 21s - loss: 0.0283 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0283 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0280 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 20s - loss: 0.0282 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 19s - loss: 0.0278 - accuracy: 0.990 - ETA: 19s - loss: 0.0278 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0275 - accuracy: 0.990 - ETA: 18s - loss: 0.0275 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0273 - accuracy: 0.990 - ETA: 18s - loss: 0.0273 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0270 - accuracy: 0.990 - ETA: 18s - loss: 0.0270 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0272 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0272 - accuracy: 0.990 - ETA: 17s - loss: 0.0272 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0269 - accuracy: 0.990 - ETA: 16s - loss: 0.0269 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0267 - accuracy: 0.9909
1434176/1806870 [======================>.......] - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0267 - accuracy: 0.991 - ETA: 16s - loss: 0.0267 - accuracy: 0.991 - ETA: 16s - loss: 0.0267 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0265 - accuracy: 0.991 - ETA: 16s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0262 - accuracy: 0.991 - ETA: 15s - loss: 0.0262 - accuracy: 0.991 - ETA: 15s - loss: 0.0261 - accuracy: 0.991 - ETA: 15s - loss: 0.0262 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0258 - accuracy: 0.991 - ETA: 14s - loss: 0.0258 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0252 - accuracy: 0.991 - ETA: 11s - loss: 0.0252 - accuracy: 0.991 - ETA: 11s - loss: 0.0252 - accuracy: 0.991 - ETA: 10s - loss: 0.0252 - accuracy: 0.991 - ETA: 10s - loss: 0.0252 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 9s - loss: 0.0250 - accuracy: 0.991 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0245 - accuracy: 0.99 - ETA: 8s - loss: 0.0245 - accuracy: 0.99 - ETA: 8s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.9918
1806870/1806870 [==============================] - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 6s - loss: 0.0243 - accuracy: 0.99 - ETA: 6s - loss: 0.0243 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0240 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0238 - accuracy: 0.99 - ETA: 5s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0232 - accuracy: 0.99 - ETA: 2s - loss: 0.0232 - accuracy: 0.99 - ETA: 2s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - 37s 20us/step - loss: 0.0229 - accuracy: 0.9923 - val_loss: 0.0129 - val_accuracy: 0.9955

Epoch 00002: val_loss improved from 0.03567 to 0.01290, saving model to test.h8
In [57]:
model.summary()
Model: "sequential_4"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_13 (Dense)             (None, 70)                2800      
_________________________________________________________________
dense_14 (Dense)             (None, 50)                3550      
_________________________________________________________________
dense_15 (Dense)             (None, 50)                2550      
_________________________________________________________________
dense_16 (Dense)             (None, 2)                 102       
=================================================================
Total params: 9,002
Trainable params: 9,002
Non-trainable params: 0
_________________________________________________________________
In [58]:
#AUXILIARY METHODS FOR FEDERATED LEARNING

# RETURN INDICES TO LAYERS WITH WEIGHTS AND BIASES
def trainable_layers(model):
    return [i for i, layer in enumerate(model.layers) if len(layer.get_weights()) > 0]

# RETURN WEIGHTS AND BIASES OF A MODEL
def get_parameters(model):
    weights = []
    biases = []
    index = trainable_layers(model)
    for i in index:
        weights.append(copy.deepcopy(model.layers[i].get_weights()[0]))
        biases.append(copy.deepcopy(model.layers[i].get_weights()[1]))           
    
    return weights, biases
        
# SET WEIGHTS AND BIASES OF A MODEL
def set_parameters(model, weights, biases):
    index = trainable_layers(model)
    for i, j in enumerate(index):
        model.layers[j].set_weights([weights[i], biases[i]])
    
# DEPRECATED: RETURN THE GRADIENTS OF THE MODEL AFTER AN UPDATE 
def get_gradients(model, inputs, outputs):
    """ Gets gradient of model for given inputs and outputs for all weights"""
    grads = model.optimizer.get_gradients(model.total_loss, model.trainable_weights)
    symb_inputs = (model._feed_inputs + model._feed_targets + model._feed_sample_weights)
    f = K.function(symb_inputs, grads)
    x, y, sample_weight = model._standardize_user_data(inputs, outputs)
    output_grad = f(x + y + sample_weight)
    
    w_grad = [w for i,w in enumerate(output_grad) if i%2==0]
    b_grad = [w for i,w in enumerate(output_grad) if i%2==1]
    
    return w_grad, b_grad

# RETURN THE DIFFERENCE OF MODELS' WEIGHTS AND BIASES AFTER AN UPDATE 
# NOTE: LEARNING RATE IS APPLIED, SO THE UPDATE IS DIFFERENT FROM THE
# GRADIENTS. IN CASE VANILLA SGD IS USED, THE GRADIENTS ARE OBTAINED
# AS (UPDATES / LEARNING_RATE)
def get_updates(model, inputs, outputs, batch_size, epochs):
    w, b = get_parameters(model)
    #model.train_on_batch(inputs, outputs)
    model.fit(inputs, outputs, batch_size=batch_size, epochs=epochs, verbose=0)
    w_new, b_new = get_parameters(model)
    
    weight_updates = [old - new for old,new in zip(w, w_new)]
    bias_updates = [old - new for old,new in zip(b, b_new)]
    
    return weight_updates, bias_updates

# UPDATE THE MODEL'S WEIGHTS AND PARAMETERS WITH AN UPDATE
def apply_updates(model, eta, w_new, b_new):
    w, b = get_parameters(model)
    new_weights = [theta - eta*delta for theta,delta in zip(w, w_new)]
    new_biases = [theta - eta*delta for theta,delta in zip(b, b_new)]
    set_parameters(model, new_weights, new_biases)
    
# FEDERATED AGGREGATION FUNCTION
def aggregate(n_layers, n_peers, f, w_updates, b_updates):
    agg_w = [f([w_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]
    agg_b = [f([b_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]
    return agg_w, agg_b

# SOLVE NANS
def nans_to_zero(W, B):
    W0 = [np.nan_to_num(w, nan=0.0, posinf=0.0, neginf=0.0) for w in W]
    B0 = [np.nan_to_num(b, nan=0.0, posinf=0.0, neginf=0.0) for b in B]
    return W0, B0

def build_forest(X,y):
    clf=RandomForestClassifier(n_estimators=1000, max_depth=7, random_state=0, verbose = 1)
    clf.fit(X,y)
    return clf
    
In [59]:
def scan_wrong(forest_predictions, FL_predict1, forest , y_test_local, X_test_local):
    sum_feature_improtance= 0
    overal_wrong_feature_importance = 0
    counter = 0
    second_counter = 0
    never_seen = 0
    avr_wrong_importance = 0
    counter1 = 0
    for i in range (len(FL_predict1)):
        if(FL_predict1[i][0] < 0.5):
            FL_predict1[i][0] = 0
            FL_predict1[i][1] = 1
        if(FL_predict1[i][0] >= 0.5):
            FL_predict1[i][0] = 1
            FL_predict1[i][1] = 0
    for i in range (len(FL_predict1)):
        i_tree = 0
    #     print(i)
        if (FL_predict1[i][0] != y_test_local[i][0]):
            counter1+=1
#             print(i)
        #         print("the test sample number ",i ," have been niss classified by the blackbox" )
            for tree_in_forest in forest.estimators_:
                temp = forest.estimators_[i_tree].predict([X_test_local[i]])
                i_tree = i_tree + 1
                inttemp = temp[0].astype(int)
                if(FL_predict1[i][0] == inttemp[0]):
                    sum_feature_improtance = sum_feature_improtance + tree_in_forest.feature_importances_
                    counter = counter + 1
        if(counter>0):
            ave_feature_importence = sum_feature_improtance/counter
            overal_wrong_feature_importance = ave_feature_importence + overal_wrong_feature_importance
            second_counter = second_counter + 1
#             print(ave_feature_importence)
#             print("numbers of the trees predect the wrong predection as the blackbox is ", counter)
            counter = 0
            sum_feature_improtance = 0
        #         print("------------------------------------------------------------------------------------")
        else:
            if(FL_predict1[i][0] != y_test_local[i][0]):
        #       print("the test sample number ", i," never have been miss classified by the forest.")
                never_seen = never_seen +1
    if(second_counter>0):
#         print(second_counter)
    #         print("the number of sampels that was miss classifed by the blackbox and classified correctly by the all forest is", never_seen)
        #     print(overal_wrong_feature_importance)
        avr_wrong_importance = overal_wrong_feature_importance / second_counter
    #         print("the average wrong dessition cosed by the feature", avr_wrong_importance)
    #         print("=====================================================================================")
    print("the number of miss classified sampels is ", counter1)
    return forest.feature_importances_
In [60]:
def attack_data(inputs, feature_attacked):
    z=0
    C=0
    z=inputs.max(axis = 0)
    C=inputs.min(axis = 0)
    for i in range(len(inputs)):
        for j in range(len(inputs[0])):
            inputs[i][j]= random.uniform(z[j], C[j])
#         inputs[i][feature_attacked[1]]= random.uniform(z[feature_attacked[1]], C[feature_attacked[1]])        
#         inputs[i][feature_attacked[1]]= random.uniform(z[feature_attacked[1]], C[feature_attacked[1]])
#         inputs[i][feature_attacked] = random.randrange(z[feature_attacked]+1)
#         print(X_test_attacked[i][att])
#         if(X_test_attacked[i][att] == X_test[i][att]):
#             feat_same = feat_same + 1
    return inputs
In [61]:
trainable_layers(model)
Out[61]:
[0, 1, 2, 3]
In [62]:
get_updates(model, X_train, y_train, 32, 2)
Out[62]:
([array([[-0.19031775,  0.06523454, -0.14931396, ...,  0.        ,
           0.        , -0.29390967],
         [ 0.033116  , -0.08946543,  0.01096467, ...,  0.        ,
           0.        , -0.015468  ],
         [ 0.01199894,  0.02766883,  0.01122332, ...,  0.        ,
           0.        , -0.0217886 ],
         ...,
         [-0.00909673, -0.04540771, -0.20296723, ...,  0.        ,
           0.        , -0.13601643],
         [-0.2878099 ,  0.23572141,  0.23246315, ...,  0.        ,
           0.        ,  0.10910301],
         [ 0.20305228,  0.28390008,  0.5178579 , ...,  0.        ,
           0.        , -0.20772421]], dtype=float32),
  array([[ 0.03976321,  0.00334442, -0.1938284 , ...,  0.12357424,
           0.        , -0.4720744 ],
         [-0.05385096, -0.19503492, -0.11490272, ..., -0.06129656,
           0.        , -0.07149667],
         [ 0.09047867, -0.00144058, -0.30648926, ..., -0.06290518,
           0.        , -0.20281315],
         ...,
         [ 0.        ,  0.        ,  0.        , ...,  0.        ,
           0.        ,  0.        ],
         [ 0.        ,  0.        ,  0.        , ...,  0.        ,
           0.        ,  0.        ],
         [ 0.03847116,  0.3318082 , -0.12002158, ...,  0.0102334 ,
           0.        ,  0.19092064]], dtype=float32),
  array([[-2.42347077e-01, -8.76481831e-03, -3.90259176e-02, ...,
          -6.67591989e-02,  5.05356491e-03,  1.02399185e-01],
         [-8.08794439e-01, -3.27832878e-01, -5.26163459e-01, ...,
          -4.11394715e-01,  0.00000000e+00,  2.40460098e-01],
         [ 2.49932185e-01, -3.12010467e-01, -4.72681373e-01, ...,
           6.11434951e-02,  0.00000000e+00, -8.85864496e-02],
         ...,
         [-1.02717876e-02,  4.91040945e-02,  2.08511353e-01, ...,
           2.92169333e-01,  3.78781557e-03, -7.43160397e-02],
         [ 0.00000000e+00,  0.00000000e+00,  0.00000000e+00, ...,
           0.00000000e+00,  0.00000000e+00,  0.00000000e+00],
         [-2.40534097e-01, -1.12464696e-01, -6.61374629e-02, ...,
          -2.46955007e-01,  2.02164054e-04, -1.38975129e-01]], dtype=float32),
  array([[-3.04001868e-01,  3.04008663e-01],
         [-3.18321645e-01,  3.18326294e-01],
         [-6.49204254e-02,  6.49255514e-02],
         [ 2.38253117e-01, -2.38266587e-01],
         [-2.33515322e-01,  2.33533710e-01],
         [ 2.04552054e-01, -2.04541385e-01],
         [-2.16077894e-01,  2.16096789e-01],
         [-9.29667503e-02,  9.29654241e-02],
         [ 8.22023153e-02, -8.21979046e-02],
         [-1.02218628e-01,  1.02235526e-01],
         [-2.78488606e-01,  2.78504372e-01],
         [-2.50967979e-01,  2.50985503e-01],
         [ 6.83438182e-02, -6.83349371e-02],
         [ 1.24650508e-01, -1.24644592e-01],
         [ 6.33001328e-05, -6.16163015e-05],
         [ 1.41896963e-01, -1.41895413e-01],
         [-1.02908731e-01,  1.02926940e-01],
         [ 5.35694361e-02, -5.35876751e-02],
         [ 8.15955400e-02, -8.15925598e-02],
         [-1.03019953e-01,  1.03019953e-01],
         [-1.25430942e-01,  1.25459164e-01],
         [-3.38193893e-01,  3.38200092e-01],
         [-1.09561086e-02,  1.09702498e-02],
         [-2.82736778e-01,  2.82758176e-01],
         [-4.44638729e-01,  4.44639683e-01],
         [-6.57172203e-02,  6.57169819e-02],
         [-1.72389388e-01,  1.72392488e-01],
         [-5.41939139e-02,  5.42033315e-02],
         [-5.90079725e-02,  5.90344965e-02],
         [ 3.66447330e-01, -3.66433740e-01],
         [-2.08910614e-01,  2.08919704e-01],
         [-2.05386773e-01,  2.05394983e-01],
         [ 2.22557023e-01, -2.22551197e-01],
         [ 1.47694349e-01, -1.47691488e-01],
         [-1.98568344e-01,  1.98586702e-01],
         [ 3.86301279e-02, -3.86058390e-02],
         [-3.03680480e-01,  3.03690165e-01],
         [ 1.28941819e-01, -1.28929913e-01],
         [-1.06625021e-01,  1.06641471e-01],
         [ 2.11793751e-01, -2.11783320e-01],
         [-1.20612228e+00,  1.20613194e+00],
         [ 2.32780397e-01, -2.32766151e-01],
         [ 1.07535511e-01, -1.07531980e-01],
         [-1.68534845e-01,  1.68567598e-01],
         [ 1.57701567e-01, -1.57669455e-01],
         [ 2.91942716e-01, -2.91933715e-01],
         [ 2.90191770e-02, -2.90192217e-02],
         [ 4.56188083e-01, -4.56171900e-01],
         [ 2.81581283e-03, -2.81581283e-03],
         [-1.18135691e-01,  1.18147850e-01]], dtype=float32)],
 [array([ 3.1866699e-03, -7.6422125e-02,  3.3879340e-02,  1.4779243e-01,
          0.0000000e+00,  0.0000000e+00,  1.3526529e-05,  1.0225922e-06,
         -2.2294750e-03, -2.1711849e-03, -9.2923865e-02, -3.4378596e-02,
          0.0000000e+00,  0.0000000e+00,  0.0000000e+00,  7.9754189e-02,
          9.6828096e-02,  0.0000000e+00,  6.9650002e-02, -5.8602124e-02,
         -8.6331740e-05,  0.0000000e+00,  1.2667640e-01,  0.0000000e+00,
          0.0000000e+00, -2.6648588e-02,  0.0000000e+00, -3.3291716e-02,
          0.0000000e+00,  0.0000000e+00,  2.2499807e-02,  6.8744346e-02,
         -5.0262503e-02,  0.0000000e+00, -8.9680202e-02,  0.0000000e+00,
         -4.2678282e-02,  0.0000000e+00, -1.7710961e-02,  0.0000000e+00,
          0.0000000e+00,  0.0000000e+00,  0.0000000e+00, -6.1750807e-02,
          0.0000000e+00,  0.0000000e+00,  0.0000000e+00,  0.0000000e+00,
          0.0000000e+00,  0.0000000e+00,  0.0000000e+00,  0.0000000e+00,
          0.0000000e+00,  0.0000000e+00,  5.8094569e-02,  1.2169727e-02,
          1.9261871e-10,  0.0000000e+00, -7.9979539e-02,  0.0000000e+00,
          0.0000000e+00,  7.6093122e-02,  0.0000000e+00,  0.0000000e+00,
          1.6243661e-02,  0.0000000e+00, -9.8505989e-03,  0.0000000e+00,
          0.0000000e+00, -4.3590821e-02], dtype=float32),
  array([-0.10964979,  0.1273394 ,  0.05826145,  0.11250992, -0.00930649,
          0.08082695, -0.10440034,  0.02672271,  0.14781642,  0.27572772,
         -0.0397696 ,  0.18053436,  0.        , -0.04786159,  0.        ,
          0.15616408,  0.0424803 ,  0.        , -0.0375067 , -0.00756753,
          0.01335342, -0.08301416, -0.07382136,  0.02766102, -0.21604276,
          0.04904766, -0.00283363,  0.01198358,  0.17403054, -0.08457427,
          0.06056517, -0.00864101,  0.02029612,  0.12778968,  0.14824837,
          0.17251332,  0.03519725,  0.05309688,  0.1472145 , -0.18282993,
          0.10138815,  0.01851342,  0.03132945,  0.        , -0.19095713,
          0.07761121,  0.17995   ,  0.16866425,  0.        , -0.02218707],
        dtype=float32),
  array([ 0.34423378,  0.44879648, -0.04138201,  0.48468772,  0.27873045,
         -0.30794245,  0.15183273,  0.08334076,  0.18456669, -0.178191  ,
         -0.14348036,  0.57680756,  0.60798985,  0.41697726, -0.00238903,
         -0.26822644, -0.10311142,  0.11896864,  0.29975793,  0.23435119,
          0.23297757,  0.00584415,  0.31422138,  0.4213791 , -0.1971436 ,
         -0.19466306, -0.08916584,  0.11673178,  0.06989807, -0.05853122,
         -0.16468427,  0.20225608,  0.4647019 , -0.20615612, -0.06997643,
         -0.16313455,  0.3702965 ,  0.03510106, -0.06537277, -0.03514916,
          0.20234883,  0.42463556, -0.00177155,  0.18248317, -0.00233091,
          0.16106895,  0.03673995, -0.28332692,  0.03024025, -0.32204401],
        dtype=float32),
  array([-0.14419317,  0.14419758], dtype=float32)])
In [63]:
W = get_parameters(model)[0]
B = get_parameters(model)[1]
In [64]:
#AUXILIARY METHODS FOR FL INSPECTION

# TRANSFORM ALL WEIGHT TENSORS TO 1D ARRAY
def flatten_weights(w_in):
    h = w_in[0].reshape(-1)
    for w in w_in[1:]:
        h = np.append(h, w.reshape(-1))
    return h

# TRANSFORM ALL BIAS TENSORS TO 1D ARRAY
def flatten_biases(b_in):
    h = b_in[0].reshape(-1)
    for b in b_in[1:]:
        h = np.append(h, b.reshape(-1))
    return h

# TRANSFORM WEIGHT AND BIAS TENSORS TO 1D ARRAY
def flatten_parameters(w_in, b_in):
    w = flatten_weights(w_in)
    b = flatten_biases(b_in)
    return w, b

# COMPUTE EUCLIDEAN DISTANCE OF WEIGHTS
def dist_weights(w_a, w_b):
    wf_a = flatten_weights(w_a)
    wf_b = flatten_weights(w_b)
    return euclidean(wf_a, wf_b)

# COMPUTE EUCLIDEAN DISTANCE OF BIASES
def dist_biases(b_a, b_b):
    bf_a = flatten_biases(b_a)
    bf_b = flatten_biases(b_b)
    return euclidean(bf_a, bf_b)

# COMPUTE EUCLIDEAN DISTANCE OF WEIGHTS AND BIASES
def dist_parameters(w_a, b_a, w_b, b_b):
    wf_a, bf_a = flatten_parameters(w_a, b_a)
    wf_b, bf_b = flatten_parameters(w_b, b_b)
    return euclidean(np.append(wf_a, bf_a), np.append(wf_b, bf_b))
In [65]:
len(W[0])
Out[65]:
39
In [66]:
# BASELINE SCENARIO
#buid the model as base line for the shards (sequential)
# Number of peers
#accordin to what we need
n_peers = 100
ss = int(len(X_train)/n_peers)
inputs_in = X_train[0*ss:0*ss+ss]
outputs_in = y_train[0*ss:0*ss+ss]
def build_model(X_t, y_t):
    model = Sequential()
    model.add(Dense(70, input_dim=39, activation='relu'))
    model.add(Dense(64, activation='relu'))
    model.add(Dense(50, activation='relu'))
    model.add(Dense(2, activation='softmax'))
    #sgd = optimizers.SGD(learning_rate=0.0001, momentum=0.9, nesterov=True)
    model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])
    model.fit(X_t,
              y_t,        
        #inputs_in, 
              #outputs_in, 
#               X_train,
#               y_train,
              batch_size=32, 
              epochs=100, 
              verbose=1,
              validation_data=((X_test, y_test)))
    return model

# model = build_model(inputs_in, outputs_in)
In [67]:
display(model.summary())
Model: "sequential_4"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_13 (Dense)             (None, 70)                2800      
_________________________________________________________________
dense_14 (Dense)             (None, 50)                3550      
_________________________________________________________________
dense_15 (Dense)             (None, 50)                2550      
_________________________________________________________________
dense_16 (Dense)             (None, 2)                 102       
=================================================================
Total params: 9,002
Trainable params: 9,002
Non-trainable params: 0
_________________________________________________________________
None
In [68]:
# predict probabilities for test set
yhat_probs = model.predict(X_test, verbose=0)
# predict crisp classes for test set
yhat_classes = model.predict_classes(X_test, verbose=0)
In [69]:
# accuracy: (tp + tn) / (p + n)
accuracy = accuracy_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))
print('Accuracy: %f' % accuracy)
# precision tp / (tp + fp)
precision = precision_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))
print('Precision: %f' % precision)
# recall: tp / (tp + fn)
recall = recall_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))
print('Recall: %f' % recall)
# f1: 2 tp / (2 tp + fp + fn)
f1 = f1_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))
print('F1 score: %f' % f1)
Accuracy: 0.998316
Precision: 0.998423
Recall: 0.998711
F1 score: 0.998567
In [70]:
# ROC AUC
# auc = roc_auc_score(shard1_traintest[i]["y_test"], yhat_probs)
# print('ROC AUC: %f' % auc)
# confusion matrix
mat = confusion_matrix(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))

display(mat)
plt.matshow(mat);
plt.colorbar()
plt.show()
array([[55991,   126],
       [  103, 79782]], dtype=int64)
No description has been provided for this image
In [71]:
def savecsv(lists, filename):
    #print lists
    if os.path.isfile(filename):
        os.remove(filename)
    with open(filename, 'a') as csvfile:
        w = csv.DictWriter(csvfile, lists.keys())
        w.writeheader()
        w.writerow(lists)
#         fwriter = csv.writer(csvfile, delimiter=',',lineterminator='\n')
#         fwriter.writerows(lists)
        csvfile.close()
        
        
#         import csv

# my_dict = {"test": 1, "testing": 2}

# with open('mycsvfile.csv', 'wb') as f:  # Just use 'w' mode in 3.x
#     w = csv.DictWriter(f, my_dict.keys())
#     w.writeheader()
#     w.writerow(my_dict)
In [72]:
FI_dic1= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}#,10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
FI_dic2= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}#,10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
# FI_dic3= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
# FI_dic4= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
# FI_dic5= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
# FI_dic6= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
# FI_dic7= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
# FI_dic8= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
# FI_dic9= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
# FI_dic10= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}
# FI_dic11= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],
#          21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[],30:[]}
dic = {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[]}
In [73]:
# FL Begain
# TESTBED

#####################
# SYSTEM PARAMETERS #
#####################
for c in range(1):
    number_attackers = c
    # threshold = 0.011
    threshold =0.00034
    counter = 0 
    peers_selected=random.sample(range(n_peers), number_attackers+1)
    scaner = peers_selected[0]
    mal = peers_selected[1 :]
    # Percentage and number of peers participating at each global training epoch
    percentage_participants = 1.0
    n_participants = int(n_peers * percentage_participants)

    # Number of global training epochs
    n_rounds = 10
    start_attack_round = 4
    end_attack_round = 7
    # Number of local training epochs per global training epoch
    n_local_rounds = 5

    # Local batch size
    local_batch_size = 32

    # Local learning rate
    local_lr = 0.001

    # Global learning rate or 'gain'
    model_substitution_rate = 1.0

    # Attack detection / prevention mechanism = {None, 'distance', 'median', 'accuracy', 'krum'}
    discard_outliers = None

    # Used in 'dist' attack detection, defines how far the outliers are (1.5 is a typical value)
    tau = 1.5

    # Used in 'accuracy' attack detection, defines the error margin for the accuracy improvement
    sensitivity = 0.05

    # Used in 'krum' attack detection, defines how many byzantine attackers we want to defend against
    tolerance=4

    # Prevent suspicious peers from participating again, only valid for 'dist' and 'accuracy'
    ban_malicious = False

    # Clear nans and infinites in model updates
    clear_nans = True

    number_for_threshold1 = numpy.empty(20, dtype=float)
    number_for_threshold2 = numpy.empty(20, dtype=float)
    for r in range(len(number_for_threshold1)):
        number_for_threshold1[r] = 0
        number_for_threshold2[r] = 0

    ########################
    # ATTACK CONFIGURATION #
    ########################

    # Percentage of malicious peers
    r_malicious_peers = 0.0

    # Number of malicious peers (absolute or relative to total number of peers)
    n_malicious_peers = int(n_peers * r_malicious_peers)
    #n_malicious_peers = 1

    # Malicious peers
    malicious_peer = range(n_malicious_peers)

    # Target for coalitions
    common_attack_target = [4,7]

    # Target class of the attack, per each malicious peer
    malicious_targets = dict([(p, t) for p,t in zip(malicious_peer, [common_attack_target]*n_malicious_peers)])

    # Boosting parameter per each malicious peer
    common_malicious_boost = 12
    malicious_boost = dict([(p, b) for p,b in zip(malicious_peer, [common_malicious_boost]*n_malicious_peers)])

    ###########
    # METRICS #
    ###########
    metrics = {'accuracy': [],
              'atk_effectivity': [],
              'update_distances': [],
              'outliers_detected': [],

              'acc_no_target': []}

    ####################################
    # MODEL AND NETWORK INITIALIZATION #
    ####################################
    inputs = X_train[0*ss:0*ss+ss]
    outputs = y_train[0*ss:0*ss+ss]
    global_model = build_model(inputs,outputs)
    n_layers = len(trainable_layers(global_model))

    print('Initializing network.')
    sleep(1)
    network = []
    for i in tqdm(range(n_peers)):
        ss = int(len(X_train)/n_peers)
        inputs = X_train[i*ss:i*ss+ss]
        outputs = y_train[i*ss:i*ss+ss]
    #     network.append(build_model(inputs, outputs))
        network.append(global_model)


    banned_peers = set()

    ##################
    # BEGIN TRAINING #
    ##################
    for t in range(n_rounds):
        print(f'Round {t+1}.')
        sleep(1)

        ## SERVER SIDE #################################################################
        # Fetch global model parameters
        global_weights, global_biases = get_parameters(global_model)

        if clear_nans:
            global_weights, global_biases = nans_to_zero(global_weights, global_biases)

        # Initialize peer update lists
        network_weight_updates = []
        network_bias_updates = []

        # Selection of participant peers in this global training epoch
        if ban_malicious:
            good_peers = list([p for i,p in enumerate(network) if i not in banned_peers])
            n_participants = n_participants if n_participants <= len(good_peers) else int(len(good_peers) * percentage_participants)
            participants = random.sample(list(enumerate(good_peers)), n_participants)
        else:
            participants = random.sample(list(enumerate(network)),n_participants)
        ################################################################################


        ## CLIENT SIDE #################################################################
        for i, local_model in tqdm(participants):

            # Update local model with global parameters 
            set_parameters(local_model, global_weights, global_biases)

            # Initialization of user data
            ss = int(len(X_train)/n_peers)
            inputs = X_train[i*ss:i*ss+ss]
            outputs = y_train[i*ss:i*ss+ss]

    #         print("worker number ", i," from ", n_peers)
    #         print(" number of data in worker ", i ," is ", len(inputs))


    #         do the forest here




    #         counter = counter+1

            if(i == scaner):
                X_train_local, X_test_local, y_train_local, y_test_local = train_test_split(inputs,outputs, test_size=0.7, random_state=rs)
                inputs = X_train_local
                outputs = y_train_local
                if(t == 0):
                    forest = build_forest(X_train_local,y_train_local)
                forest_predictions = forest.predict(X_test_local)
                acc_forest = np.mean([t==p for t,p in zip(y_test_local, forest_predictions)])
    #             imp = forest.feature_importances_
    #             FI_dic1[t] = imp
                FL_predict1 = global_model.predict(X_test_local)
                imp = scan_wrong(forest_predictions, FL_predict1, forest , y_test_local, X_test_local)
                FI_dic1[t] = imp
#                 if(t > 0):
#                     different_rouneds = FI_dic1[t-1] - FI_dic1[t]
#                     different_rouneds = abs(different_rouneds)
#                     number_for_threshold=0
#                     print("lenght of different ",len(different_rouneds))
#                     for H in range(len(different_rouneds)):
#                         number_for_threshold1[t] = number_for_threshold1[t] + different_rouneds[H]
#                     number_for_threshold = number_for_threshold1[t] - number_for_threshold1[t-1]
#                     if(t > 1):
#                         print(number_for_threshold)
#                         dic[c].append(abs(number_for_threshold))
#                         if(abs(number_for_threshold)>threshold):
#                             print("---------------------------------------------------------")
#                             print("attack happened , in the round before which is ", t+1)
#                             print("from peer ", i)
#                             print(different_rouneds)
#                             print(number_for_threshold)
#                             print("---------------------------------------------------------")

    # number_for_threshold1 = numpy.empty(19, dtype=float)
    # for i in range(len(number_for_threshold1)):
    #     number_for_threshold1[i] = 0
    # for j in range(len(FI_dic1)-1):
    #     number_for_threshold1 = numpy.empty(19, dtype=float)
    #     different_rouneds = FI_dic2[j] - FI_dic2[j]
    #     different_rouneds = abs(different_rouneds)
    #     # number_for_threshold=0
    #     for i in range(len(different_rouneds)):
    #         number_for_threshold1[j] = number_for_threshold1[j] + different_rouneds[i]





    #         if(i == 12):
    #             X_train_local, X_test_local, y_train_local, y_test_local = train_test_split(inputs,outputs, test_size=0.7, random_state=rs)
    #             inputs = X_train_local
    #             outputs = y_train_local
    #             forest = build_forest(X_train_local,y_train_local)
    #             forest_predictions = forest.predict(X_test_local)
    #             acc_forest = np.mean([t==p for t,p in zip(y_test_local, forest_predictions)])
    #             imp = forest.feature_importances_
    # #             FI_dic2[t] = imp
    # #             FL_predict2 = global_model.predict(X_test_local)
    #             FL_predict2 = global_model.predict(X_test_local)
    #             imp = scan_wrong(forest_predictions, FL_predict2, forest , y_test_local, X_test_local)
    #             FI_dic2[t] = imp
    #             if(t > 0):
    #                 different_rouneds = FI_dic2[t-1] - FI_dic2[t]
    #                 different_rouneds = abs(different_rouneds)
    #                 number_for_threshold=0
    #                 for H in range(len(different_rouneds)):
    #                     number_for_threshold2[t] = number_for_threshold2[t] + different_rouneds[H]
    #                 number_for_threshold = number_for_threshold1[t] - number_for_threshold1[t-1]
    #                 if(t>1):
    #                     if(abs(number_for_threshold)>threshold):
    #                         print("---------------------------------------------------------")
    #                         print("attack happened, in the round before which is ", t)
    #                         print("from peer ", i)
    #                         print(different_rouneds)
    #                         print(number_for_threshold)
    #                         print("---------------------------------------------------------")














            # Local model update
            if i in malicious_peer:
                # Malicious peer
                # Modify target class labels
                outputs = poison(outputs, malicious_targets[i][0], malicious_targets[i][1])

                # Train local model with benign and malicious data
                local_weight_updates, local_bias_updates = get_updates(local_model, 
                                                                       inputs, outputs, 
                                                                       local_batch_size, n_local_rounds)

                # Boost update
                local_weight_updates, local_bias_updates = boosting(local_weight_updates, local_bias_updates, malicious_boost[i])

                if clear_nans:
                    local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)



                    #here atttack done by Rami



            elif(t+1>=start_attack_round and t+1<=end_attack_round):    
                if (i in mal):
    #                 print("peer ",i, "round", t)
                    print("I am peer ",i,"I started the attack, at round", t+1)
                    #attack
                    inputs = attack_data(inputs, feature_attacked)

                    local_weight_updates, local_bias_updates = get_updates(local_model, 
                                                                           inputs, outputs, 
                                                                           local_batch_size, n_local_rounds)
                    if clear_nans:
                        local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)



            else:
                # Benign peer
                # Train local model 
                local_weight_updates, local_bias_updates = get_updates(local_model, 
                                                                       inputs, outputs, 
                                                                       local_batch_size, n_local_rounds)
                if clear_nans:
                    local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)

            # Send updates to the server
            network_weight_updates.append(local_weight_updates)
            network_bias_updates.append(local_bias_updates)

        ## END OF CLIENT SIDE ##########################################################

        ######################################
        # SERVER SIDE AGGREGATION MECHANISMS #
        ######################################

        # No detection of outliers
        if discard_outliers == None:
            # Aggregate client updates
            aggregated_weights, aggregated_biases = aggregate(n_layers, 
                                                              n_participants, 
                                                              np.mean, 
                                                              network_weight_updates, 
                                                              network_bias_updates)

            if clear_nans:
                aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)

            # Apply updates to global model
            apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)

        # Detection of outliers via distance metric
        elif discard_outliers == 'distance':
            # Compute the provisional aggregate
            prov_agg_w, prov_agg_b = aggregate(n_layers, 
                                               n_participants, 
                                               np.mean, 
                                               network_weight_updates, 
                                               network_bias_updates)

            # Compute distances and IQR of individual updates to the provisional aggregate
            distances = [dist_weights(prov_agg_w, w_i) for w_i in network_weight_updates]
            q1 = np.percentile(distances, 25)
            q3 = np.percentile(distances, 75)
            iqr = q3 - q1
            low = q1 - tau * iqr
            high = q3 + tau * iqr

            # Discard outliers
            good_updates = [i for i,v in enumerate(distances) if low <= v <= high]
            agg_participants = len(good_updates)
            network_weight_updates = [w for i,w in enumerate(network_weight_updates) if i in good_updates]
            network_bias_updates = [b for i,b in enumerate(network_bias_updates) if i in good_updates]

            bad_participants = [i for i in range(n_participants) if i not in good_updates]
            bad_participants = [participants[i][0] for i in bad_participants]

            # Flag offenders
            banned_peers.update(bad_participants)

            metrics['outliers_detected'].append(bad_participants)

            # Compute definitive update
            aggregated_weights, aggregated_biases = aggregate(n_layers, 
                                                              agg_participants, 
                                                              np.mean, 
                                                              network_weight_updates, 
                                                              network_bias_updates)
            if clear_nans:
                aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)

            # Apply update
            apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)

        #Detection via GMM
        #elif discard_otliers == 'gmm':
            #flatten_parameters = [flatten_parameters(w, b)[
                #for w,b in zip(network_weight_updates, network_bias_updates)
                #]]

        # Detection of outliers via accuracy metrics
        elif discard_outliers == 'accuracy':
            if t == 0:
                # In the first epoch, all contributions are accepted
                aggregated_weights, aggregated_biases = aggregate(n_layers, 
                                                              n_participants, 
                                                              np.mean, 
                                                              network_weight_updates, 
                                                              network_bias_updates)
                if clear_nans:
                    aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)

                apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)
            else: 
                # Check the change in accuracy for every contribution
                test_accuracies = []
                previous_epoch_global_accuracy = metrics['accuracy'][-1]
                for k in range(len(local_weight_updates)):
                    test_model = build_model(local_lr)
                    set_parameters(test_model, global_weights, global_biases)
                    apply_updates(test_model, model_substitution_rate, network_weight_updates[k], network_bias_updates[k])
                    _, test_accuracy = test_model.evaluate(x_test, y_test, verbose=0)
                    test_accuracies.append(test_accuracy - previous_epoch_global_accuracy)


                # An update is good if it improves (with some margin) the accuracy of the
                # global model
                good_updates = [i for i,v in enumerate(test_accuracies) if v + sensitivity >= 0.0]
                agg_participants = len(good_updates)
                network_weight_updates = [w for i,w in enumerate(network_weight_updates) if i in good_updates]
                network_bias_updates = [b for i,b in enumerate(network_bias_updates) if i in good_updates]

                bad_participants = [i for i in range(n_participants) if i not in good_updates]
                bad_participants = [participants[i][0] for i in bad_participants]

                # Flag offenders
                banned_peers.update(bad_participants)

                metrics['outliers_detected'].append(bad_participants)

                # Compute definitive update
                aggregated_weights, aggregated_biases = aggregate(n_layers, 
                                                              agg_participants, 
                                                              np.mean, 
                                                              network_weight_updates, 
                                                              network_bias_updates)
                if clear_nans:
                    aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)

                # Apply update
                apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)

        # Robust aggregation via median
        elif discard_outliers == 'median':
            # Compute the aggregate as the component-wise median of local updates
            aggregated_weights, aggregated_biases = aggregate(n_layers, 
                                                              n_participants, 
                                                              np.median,
                                                              network_weight_updates, 
                                                              network_bias_updates)

            if clear_nans:
                aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)

            # Apply update    
            apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)

        # KRUM
        elif discard_outliers == 'krum':
            # First, we build a distance matrix for parameters
            P = list(zip(network_weight_updates, network_bias_updates))
            dist_matrix = [[dist_parameters(wi,bi,wj,bj) for wj,bj in P] for wi,bi in P]
            scores = []
            for index in range(len(P)):
                distances_to_index = np.array(dist_matrix[index])
                closest_to_index = np.argpartition(distances_to_index, n_participants-tolerance-1)[:n_participants-tolerance-1]
                scores.append(np.sum(distances_to_index[closest_to_index]))
            best = np.argmin(scores)
            aggregated_weights = network_weight_updates[best]
            aggregated_biases = network_bias_updates[best]

            if clear_nans:
                aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)

            # Apply update
            apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)

        # Fallback case: no detection of outliers
        else:
            # Proceed as in first case
            aggregated_weights, aggregated_biases = aggregate(n_layers, 
                                                              n_participants, 
                                                              np.mean, 
                                                              network_weight_updates, 
                                                              network_bias_updates)
            if clear_nans:
                aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)

            apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)

        ###################
        # COMPUTE METRICS #
        ###################

        # Global model accuracy
        score = global_model.evaluate(X_test, y_test, verbose=0)
        print(f'Global model loss: {score[0]}; global model accuracy: {score[1]}')
        metrics['accuracy'].append(score[1])


        # Accuracy without the target
        score = global_model.evaluate(X_test, y_test, verbose=0)
        metrics['acc_no_target'].append(score[1])

        # Proportion of instances of the target class misclassified (a.k.a success of the attack)
        #mat = confusion_matrix(np.argmax(shard1_traintest[i]["y_test"], axis=1), np.argmax(global_model.predict(shard1_traintest[i]["X_test"]), axis=1))
        #trans_4_7 = (mat[4,7] - mat[4,4]) / (2 * (mat[4,4]+mat[4,7])) + 0.5
        #metrics['atk_effectivity'].append(trans_4_7)

        # Distance of individual updates to the final aggregation
        metrics['update_distances'].append([dist_weights(aggregated_weights, w_i) for w_i in network_weight_updates])
savecsv(dic,"random_activity_100.csv")
    
Train on 18068 samples, validate on 136002 samples
Epoch 1/100
18068/18068 [==============================] - ETA: 39s - loss: 0.6788 - accuracy: 0.562 - ETA: 0s - loss: 0.6006 - accuracy: 0.691 - ETA: 0s - loss: 0.5092 - accuracy: 0.76 - ETA: 0s - loss: 0.4488 - accuracy: 0.79 - ETA: 0s - loss: 0.4155 - accuracy: 0.81 - ETA: 0s - loss: 0.3891 - accuracy: 0.82 - ETA: 0s - loss: 0.3593 - accuracy: 0.84 - ETA: 0s - loss: 0.3467 - accuracy: 0.85 - 2s 105us/step - loss: 0.3436 - accuracy: 0.8538 - val_loss: 0.2597 - val_accuracy: 0.8811
Epoch 2/100
18068/18068 [==============================] - ETA: 0s - loss: 0.2291 - accuracy: 0.90 - ETA: 0s - loss: 0.2141 - accuracy: 0.91 - ETA: 0s - loss: 0.2275 - accuracy: 0.90 - ETA: 0s - loss: 0.2265 - accuracy: 0.90 - ETA: 0s - loss: 0.2190 - accuracy: 0.91 - ETA: 0s - loss: 0.2150 - accuracy: 0.91 - ETA: 0s - loss: 0.2104 - accuracy: 0.91 - ETA: 0s - loss: 0.2042 - accuracy: 0.91 - 2s 101us/step - loss: 0.2036 - accuracy: 0.9200 - val_loss: 0.1762 - val_accuracy: 0.9202
Epoch 3/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1665 - accuracy: 0.93 - ETA: 0s - loss: 0.1756 - accuracy: 0.93 - ETA: 0s - loss: 0.1940 - accuracy: 0.92 - ETA: 0s - loss: 0.1856 - accuracy: 0.92 - ETA: 0s - loss: 0.1829 - accuracy: 0.92 - ETA: 0s - loss: 0.1831 - accuracy: 0.92 - ETA: 0s - loss: 0.1887 - accuracy: 0.92 - ETA: 0s - loss: 0.1854 - accuracy: 0.92 - 2s 102us/step - loss: 0.1833 - accuracy: 0.9264 - val_loss: 0.1747 - val_accuracy: 0.9309
Epoch 4/100
18068/18068 [==============================] - ETA: 0s - loss: 0.3563 - accuracy: 0.78 - ETA: 0s - loss: 0.1465 - accuracy: 0.94 - ETA: 0s - loss: 0.1444 - accuracy: 0.94 - ETA: 0s - loss: 0.1452 - accuracy: 0.94 - ETA: 0s - loss: 0.1484 - accuracy: 0.94 - ETA: 0s - loss: 0.1492 - accuracy: 0.94 - ETA: 0s - loss: 0.1500 - accuracy: 0.94 - ETA: 0s - loss: 0.1510 - accuracy: 0.94 - 2s 96us/step - loss: 0.1506 - accuracy: 0.9438 - val_loss: 0.1533 - val_accuracy: 0.9412
Epoch 5/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1071 - accuracy: 0.93 - ETA: 0s - loss: 0.1414 - accuracy: 0.95 - ETA: 0s - loss: 0.1402 - accuracy: 0.95 - ETA: 0s - loss: 0.1363 - accuracy: 0.95 - ETA: 0s - loss: 0.1369 - accuracy: 0.95 - ETA: 0s - loss: 0.1350 - accuracy: 0.95 - ETA: 0s - loss: 0.1391 - accuracy: 0.94 - 2s 99us/step - loss: 0.1414 - accuracy: 0.9487 - val_loss: 0.1322 - val_accuracy: 0.9524
Epoch 6/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1931 - accuracy: 0.90 - ETA: 0s - loss: 0.1383 - accuracy: 0.94 - ETA: 0s - loss: 0.1314 - accuracy: 0.95 - ETA: 0s - loss: 0.1356 - accuracy: 0.95 - ETA: 0s - loss: 0.1363 - accuracy: 0.95 - ETA: 0s - loss: 0.1372 - accuracy: 0.95 - ETA: 0s - loss: 0.1372 - accuracy: 0.95 - 2s 100us/step - loss: 0.1405 - accuracy: 0.9487 - val_loss: 0.1310 - val_accuracy: 0.9576
Epoch 7/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0796 - accuracy: 1.00 - ETA: 0s - loss: 0.1566 - accuracy: 0.94 - ETA: 0s - loss: 0.1336 - accuracy: 0.95 - ETA: 0s - loss: 0.1296 - accuracy: 0.95 - ETA: 0s - loss: 0.1308 - accuracy: 0.95 - ETA: 0s - loss: 0.1323 - accuracy: 0.95 - ETA: 0s - loss: 0.1299 - accuracy: 0.95 - ETA: 0s - loss: 0.1291 - accuracy: 0.95 - 2s 100us/step - loss: 0.1291 - accuracy: 0.9533 - val_loss: 0.1146 - val_accuracy: 0.9584
Epoch 8/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0266 - accuracy: 1.00 - ETA: 0s - loss: 0.1207 - accuracy: 0.96 - ETA: 0s - loss: 0.1266 - accuracy: 0.95 - ETA: 0s - loss: 0.1208 - accuracy: 0.96 - ETA: 0s - loss: 0.1163 - accuracy: 0.96 - ETA: 0s - loss: 0.1196 - accuracy: 0.95 - ETA: 0s - loss: 0.1160 - accuracy: 0.96 - ETA: 0s - loss: 0.1160 - accuracy: 0.96 - 2s 97us/step - loss: 0.1161 - accuracy: 0.9610 - val_loss: 0.1006 - val_accuracy: 0.9674
Epoch 9/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0423 - accuracy: 1.00 - ETA: 0s - loss: 0.1142 - accuracy: 0.96 - ETA: 0s - loss: 0.1189 - accuracy: 0.95 - ETA: 0s - loss: 0.1210 - accuracy: 0.95 - ETA: 0s - loss: 0.1186 - accuracy: 0.95 - ETA: 0s - loss: 0.1148 - accuracy: 0.95 - ETA: 0s - loss: 0.1155 - accuracy: 0.95 - 2s 99us/step - loss: 0.1134 - accuracy: 0.9593 - val_loss: 0.1345 - val_accuracy: 0.9470
Epoch 10/100
18068/18068 [==============================] - ETA: 1s - loss: 0.1500 - accuracy: 0.93 - ETA: 0s - loss: 0.1266 - accuracy: 0.94 - ETA: 0s - loss: 0.1323 - accuracy: 0.94 - ETA: 0s - loss: 0.1268 - accuracy: 0.95 - ETA: 0s - loss: 0.1307 - accuracy: 0.95 - ETA: 0s - loss: 0.1280 - accuracy: 0.95 - ETA: 0s - loss: 0.1250 - accuracy: 0.95 - ETA: 0s - loss: 0.1211 - accuracy: 0.95 - 2s 100us/step - loss: 0.1208 - accuracy: 0.9557 - val_loss: 0.0943 - val_accuracy: 0.9679
Epoch 11/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0425 - accuracy: 1.00 - ETA: 0s - loss: 0.1098 - accuracy: 0.95 - ETA: 0s - loss: 0.0987 - accuracy: 0.96 - ETA: 0s - loss: 0.1052 - accuracy: 0.96 - ETA: 0s - loss: 0.1071 - accuracy: 0.96 - ETA: 0s - loss: 0.1099 - accuracy: 0.95 - ETA: 0s - loss: 0.1124 - accuracy: 0.95 - 2s 100us/step - loss: 0.1155 - accuracy: 0.9574 - val_loss: 0.1408 - val_accuracy: 0.9464
Epoch 12/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0991 - accuracy: 0.96 - ETA: 0s - loss: 0.1338 - accuracy: 0.95 - ETA: 0s - loss: 0.1271 - accuracy: 0.95 - ETA: 0s - loss: 0.1174 - accuracy: 0.95 - ETA: 0s - loss: 0.1149 - accuracy: 0.95 - ETA: 0s - loss: 0.1109 - accuracy: 0.96 - ETA: 0s - loss: 0.1107 - accuracy: 0.96 - ETA: 0s - loss: 0.1114 - accuracy: 0.96 - 2s 104us/step - loss: 0.1107 - accuracy: 0.9619 - val_loss: 0.0843 - val_accuracy: 0.9707
Epoch 13/100
18068/18068 [==============================] - ETA: 1s - loss: 0.1949 - accuracy: 0.90 - ETA: 0s - loss: 0.0911 - accuracy: 0.96 - ETA: 0s - loss: 0.0872 - accuracy: 0.96 - ETA: 0s - loss: 0.0943 - accuracy: 0.96 - ETA: 0s - loss: 0.1002 - accuracy: 0.96 - ETA: 0s - loss: 0.1068 - accuracy: 0.96 - ETA: 0s - loss: 0.1112 - accuracy: 0.95 - 2s 102us/step - loss: 0.1098 - accuracy: 0.9592 - val_loss: 0.0916 - val_accuracy: 0.9700
Epoch 14/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0711 - accuracy: 0.96 - ETA: 0s - loss: 0.0996 - accuracy: 0.96 - ETA: 0s - loss: 0.1074 - accuracy: 0.96 - ETA: 0s - loss: 0.1204 - accuracy: 0.95 - ETA: 0s - loss: 0.1184 - accuracy: 0.95 - ETA: 0s - loss: 0.1118 - accuracy: 0.95 - ETA: 0s - loss: 0.1081 - accuracy: 0.96 - 2s 97us/step - loss: 0.1091 - accuracy: 0.9604 - val_loss: 0.1071 - val_accuracy: 0.9602
Epoch 15/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0172 - accuracy: 1.00 - ETA: 0s - loss: 0.1060 - accuracy: 0.96 - ETA: 0s - loss: 0.1036 - accuracy: 0.95 - ETA: 0s - loss: 0.1007 - accuracy: 0.96 - ETA: 0s - loss: 0.0968 - accuracy: 0.96 - ETA: 0s - loss: 0.0999 - accuracy: 0.96 - ETA: 0s - loss: 0.1008 - accuracy: 0.96 - 2s 98us/step - loss: 0.0995 - accuracy: 0.9649 - val_loss: 0.1024 - val_accuracy: 0.9661
Epoch 16/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0327 - accuracy: 1.00 - ETA: 0s - loss: 0.0968 - accuracy: 0.96 - ETA: 0s - loss: 0.1199 - accuracy: 0.95 - ETA: 0s - loss: 0.1218 - accuracy: 0.95 - ETA: 0s - loss: 0.1172 - accuracy: 0.95 - ETA: 0s - loss: 0.1132 - accuracy: 0.95 - ETA: 0s - loss: 0.1132 - accuracy: 0.96 - 2s 99us/step - loss: 0.1109 - accuracy: 0.9613 - val_loss: 0.0845 - val_accuracy: 0.9723
Epoch 17/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0223 - accuracy: 1.00 - ETA: 0s - loss: 0.0942 - accuracy: 0.96 - ETA: 0s - loss: 0.0892 - accuracy: 0.96 - ETA: 0s - loss: 0.0889 - accuracy: 0.96 - ETA: 0s - loss: 0.0900 - accuracy: 0.96 - ETA: 0s - loss: 0.0925 - accuracy: 0.96 - ETA: 0s - loss: 0.0982 - accuracy: 0.96 - ETA: 0s - loss: 0.0954 - accuracy: 0.96 - 2s 99us/step - loss: 0.0955 - accuracy: 0.9666 - val_loss: 0.0854 - val_accuracy: 0.9719
Epoch 18/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0278 - accuracy: 1.00 - ETA: 0s - loss: 0.0835 - accuracy: 0.97 - ETA: 0s - loss: 0.0847 - accuracy: 0.97 - ETA: 0s - loss: 0.0903 - accuracy: 0.96 - ETA: 0s - loss: 0.0930 - accuracy: 0.96 - ETA: 0s - loss: 0.0942 - accuracy: 0.96 - ETA: 0s - loss: 0.0909 - accuracy: 0.96 - 2s 100us/step - loss: 0.0904 - accuracy: 0.9677 - val_loss: 0.1059 - val_accuracy: 0.9670
Epoch 19/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1972 - accuracy: 0.93 - ETA: 0s - loss: 0.0773 - accuracy: 0.97 - ETA: 0s - loss: 0.0849 - accuracy: 0.97 - ETA: 0s - loss: 0.0869 - accuracy: 0.97 - ETA: 0s - loss: 0.0817 - accuracy: 0.97 - ETA: 0s - loss: 0.0803 - accuracy: 0.97 - ETA: 0s - loss: 0.0845 - accuracy: 0.97 - 2s 97us/step - loss: 0.0852 - accuracy: 0.9716 - val_loss: 0.1073 - val_accuracy: 0.9576
Epoch 20/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1471 - accuracy: 0.93 - ETA: 0s - loss: 0.0899 - accuracy: 0.96 - ETA: 0s - loss: 0.0802 - accuracy: 0.97 - ETA: 0s - loss: 0.0826 - accuracy: 0.97 - ETA: 0s - loss: 0.0898 - accuracy: 0.96 - ETA: 0s - loss: 0.0912 - accuracy: 0.96 - ETA: 0s - loss: 0.0898 - accuracy: 0.96 - 2s 99us/step - loss: 0.0899 - accuracy: 0.9690 - val_loss: 0.0760 - val_accuracy: 0.9753
Epoch 21/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1002 - accuracy: 0.96 - ETA: 0s - loss: 0.0704 - accuracy: 0.97 - ETA: 0s - loss: 0.0741 - accuracy: 0.97 - ETA: 0s - loss: 0.0735 - accuracy: 0.97 - ETA: 0s - loss: 0.0729 - accuracy: 0.97 - ETA: 0s - loss: 0.0734 - accuracy: 0.97 - ETA: 0s - loss: 0.0755 - accuracy: 0.97 - 2s 102us/step - loss: 0.0755 - accuracy: 0.9735 - val_loss: 0.0788 - val_accuracy: 0.9719
Epoch 22/100
18068/18068 [==============================] - ETA: 0s - loss: 0.2126 - accuracy: 0.90 - ETA: 0s - loss: 0.0985 - accuracy: 0.96 - ETA: 0s - loss: 0.0866 - accuracy: 0.96 - ETA: 0s - loss: 0.0771 - accuracy: 0.97 - ETA: 0s - loss: 0.0784 - accuracy: 0.97 - ETA: 0s - loss: 0.0791 - accuracy: 0.97 - ETA: 0s - loss: 0.0772 - accuracy: 0.97 - 2s 97us/step - loss: 0.0767 - accuracy: 0.9727 - val_loss: 0.0837 - val_accuracy: 0.9757
Epoch 23/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0565 - accuracy: 0.96 - ETA: 0s - loss: 0.0798 - accuracy: 0.97 - ETA: 0s - loss: 0.0660 - accuracy: 0.97 - ETA: 0s - loss: 0.0723 - accuracy: 0.97 - ETA: 0s - loss: 0.0793 - accuracy: 0.97 - ETA: 0s - loss: 0.0781 - accuracy: 0.97 - ETA: 0s - loss: 0.0759 - accuracy: 0.97 - 2s 98us/step - loss: 0.0767 - accuracy: 0.9733 - val_loss: 0.0879 - val_accuracy: 0.9695
Epoch 24/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0170 - accuracy: 1.00 - ETA: 0s - loss: 0.0718 - accuracy: 0.97 - ETA: 0s - loss: 0.0800 - accuracy: 0.97 - ETA: 0s - loss: 0.0783 - accuracy: 0.97 - ETA: 0s - loss: 0.0785 - accuracy: 0.97 - ETA: 0s - loss: 0.0784 - accuracy: 0.97 - ETA: 0s - loss: 0.0799 - accuracy: 0.97 - 2s 98us/step - loss: 0.0807 - accuracy: 0.9716 - val_loss: 0.0583 - val_accuracy: 0.9794
Epoch 25/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0030 - accuracy: 1.00 - ETA: 0s - loss: 0.0636 - accuracy: 0.97 - ETA: 0s - loss: 0.0630 - accuracy: 0.97 - ETA: 0s - loss: 0.0804 - accuracy: 0.97 - ETA: 0s - loss: 0.0770 - accuracy: 0.97 - ETA: 0s - loss: 0.0748 - accuracy: 0.97 - ETA: 0s - loss: 0.0725 - accuracy: 0.97 - ETA: 0s - loss: 0.0742 - accuracy: 0.97 - 2s 97us/step - loss: 0.0745 - accuracy: 0.9740 - val_loss: 0.0661 - val_accuracy: 0.9816
Epoch 26/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0948 - accuracy: 0.93 - ETA: 0s - loss: 0.0584 - accuracy: 0.97 - ETA: 0s - loss: 0.0578 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0804 - accuracy: 0.96 - ETA: 0s - loss: 0.0886 - accuracy: 0.96 - ETA: 0s - loss: 0.0872 - accuracy: 0.96 - 2s 98us/step - loss: 0.0849 - accuracy: 0.9677 - val_loss: 0.0790 - val_accuracy: 0.9766
Epoch 27/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0358 - accuracy: 1.00 - ETA: 0s - loss: 0.0517 - accuracy: 0.98 - ETA: 0s - loss: 0.0588 - accuracy: 0.98 - ETA: 0s - loss: 0.0719 - accuracy: 0.97 - ETA: 0s - loss: 0.0696 - accuracy: 0.97 - ETA: 0s - loss: 0.0659 - accuracy: 0.97 - ETA: 0s - loss: 0.0649 - accuracy: 0.97 - 2s 99us/step - loss: 0.0643 - accuracy: 0.9778 - val_loss: 0.0584 - val_accuracy: 0.9804
Epoch 28/100
18068/18068 [==============================] - ETA: 1s - loss: 0.1423 - accuracy: 0.93 - ETA: 0s - loss: 0.0831 - accuracy: 0.96 - ETA: 0s - loss: 0.0703 - accuracy: 0.97 - ETA: 0s - loss: 0.0680 - accuracy: 0.97 - ETA: 0s - loss: 0.0644 - accuracy: 0.97 - ETA: 0s - loss: 0.0650 - accuracy: 0.97 - ETA: 0s - loss: 0.0664 - accuracy: 0.97 - 2s 100us/step - loss: 0.0649 - accuracy: 0.9760 - val_loss: 0.0610 - val_accuracy: 0.9821
Epoch 29/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0349 - accuracy: 0.96 - ETA: 0s - loss: 0.0645 - accuracy: 0.97 - ETA: 0s - loss: 0.0565 - accuracy: 0.98 - ETA: 0s - loss: 0.0675 - accuracy: 0.97 - ETA: 0s - loss: 0.0673 - accuracy: 0.97 - ETA: 0s - loss: 0.0667 - accuracy: 0.97 - ETA: 0s - loss: 0.0663 - accuracy: 0.97 - 2s 97us/step - loss: 0.0650 - accuracy: 0.9775 - val_loss: 0.0835 - val_accuracy: 0.9671
Epoch 30/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0869 - accuracy: 0.96 - ETA: 0s - loss: 0.0555 - accuracy: 0.98 - ETA: 0s - loss: 0.0522 - accuracy: 0.98 - ETA: 0s - loss: 0.0543 - accuracy: 0.98 - ETA: 0s - loss: 0.0567 - accuracy: 0.98 - ETA: 0s - loss: 0.0611 - accuracy: 0.97 - ETA: 0s - loss: 0.0642 - accuracy: 0.97 - 2s 100us/step - loss: 0.0638 - accuracy: 0.9774 - val_loss: 0.0530 - val_accuracy: 0.9830
Epoch 31/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0032 - accuracy: 1.00 - ETA: 0s - loss: 0.0556 - accuracy: 0.98 - ETA: 0s - loss: 0.0615 - accuracy: 0.97 - ETA: 0s - loss: 0.0574 - accuracy: 0.98 - ETA: 0s - loss: 0.0574 - accuracy: 0.98 - ETA: 0s - loss: 0.0583 - accuracy: 0.98 - ETA: 0s - loss: 0.0581 - accuracy: 0.98 - 2s 95us/step - loss: 0.0563 - accuracy: 0.9813 - val_loss: 0.0531 - val_accuracy: 0.9849
Epoch 32/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0303 - accuracy: 1.00 - ETA: 0s - loss: 0.0350 - accuracy: 0.98 - ETA: 0s - loss: 0.0485 - accuracy: 0.98 - ETA: 0s - loss: 0.0524 - accuracy: 0.98 - ETA: 0s - loss: 0.0515 - accuracy: 0.98 - ETA: 0s - loss: 0.0588 - accuracy: 0.97 - ETA: 0s - loss: 0.0606 - accuracy: 0.97 - 2s 99us/step - loss: 0.0622 - accuracy: 0.9787 - val_loss: 0.0634 - val_accuracy: 0.9815
Epoch 33/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0507 - accuracy: 1.00 - ETA: 0s - loss: 0.0457 - accuracy: 0.98 - ETA: 0s - loss: 0.0464 - accuracy: 0.98 - ETA: 0s - loss: 0.0504 - accuracy: 0.98 - ETA: 0s - loss: 0.0511 - accuracy: 0.98 - ETA: 0s - loss: 0.0512 - accuracy: 0.98 - ETA: 0s - loss: 0.0510 - accuracy: 0.98 - 2s 101us/step - loss: 0.0508 - accuracy: 0.9833 - val_loss: 0.0499 - val_accuracy: 0.9864
Epoch 34/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0097 - accuracy: 1.00 - ETA: 0s - loss: 0.0488 - accuracy: 0.98 - ETA: 0s - loss: 0.0614 - accuracy: 0.98 - ETA: 0s - loss: 0.0627 - accuracy: 0.98 - ETA: 0s - loss: 0.0614 - accuracy: 0.98 - ETA: 0s - loss: 0.0615 - accuracy: 0.98 - ETA: 0s - loss: 0.0592 - accuracy: 0.98 - 2s 93us/step - loss: 0.0572 - accuracy: 0.9815 - val_loss: 0.0420 - val_accuracy: 0.9871
Epoch 35/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0795 - accuracy: 0.96 - ETA: 0s - loss: 0.0474 - accuracy: 0.98 - ETA: 0s - loss: 0.0459 - accuracy: 0.98 - ETA: 0s - loss: 0.0480 - accuracy: 0.98 - ETA: 0s - loss: 0.0458 - accuracy: 0.98 - ETA: 0s - loss: 0.0477 - accuracy: 0.98 - ETA: 0s - loss: 0.0560 - accuracy: 0.98 - 2s 95us/step - loss: 0.0579 - accuracy: 0.9807 - val_loss: 0.0606 - val_accuracy: 0.9777
Epoch 36/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0130 - accuracy: 1.00 - ETA: 0s - loss: 0.0689 - accuracy: 0.97 - ETA: 0s - loss: 0.0682 - accuracy: 0.97 - ETA: 0s - loss: 0.0658 - accuracy: 0.97 - ETA: 0s - loss: 0.0601 - accuracy: 0.97 - ETA: 0s - loss: 0.0572 - accuracy: 0.98 - ETA: 0s - loss: 0.0540 - accuracy: 0.98 - 2s 93us/step - loss: 0.0523 - accuracy: 0.9823 - val_loss: 0.0444 - val_accuracy: 0.9877
Epoch 37/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0141 - accuracy: 1.00 - ETA: 0s - loss: 0.0639 - accuracy: 0.97 - ETA: 0s - loss: 0.0546 - accuracy: 0.97 - ETA: 0s - loss: 0.0501 - accuracy: 0.98 - ETA: 0s - loss: 0.0509 - accuracy: 0.98 - ETA: 0s - loss: 0.0528 - accuracy: 0.98 - ETA: 0s - loss: 0.0533 - accuracy: 0.98 - 2s 95us/step - loss: 0.0527 - accuracy: 0.9806 - val_loss: 0.0399 - val_accuracy: 0.9892
Epoch 38/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0120 - accuracy: 1.00 - ETA: 0s - loss: 0.0439 - accuracy: 0.98 - ETA: 0s - loss: 0.0460 - accuracy: 0.98 - ETA: 0s - loss: 0.0474 - accuracy: 0.98 - ETA: 0s - loss: 0.0488 - accuracy: 0.98 - ETA: 0s - loss: 0.0470 - accuracy: 0.98 - ETA: 0s - loss: 0.0468 - accuracy: 0.98 - 2s 97us/step - loss: 0.0490 - accuracy: 0.9841 - val_loss: 0.0560 - val_accuracy: 0.9819
Epoch 39/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0037 - accuracy: 1.00 - ETA: 0s - loss: 0.0482 - accuracy: 0.98 - ETA: 0s - loss: 0.0398 - accuracy: 0.98 - ETA: 0s - loss: 0.0396 - accuracy: 0.98 - ETA: 0s - loss: 0.0402 - accuracy: 0.98 - ETA: 0s - loss: 0.0419 - accuracy: 0.98 - ETA: 0s - loss: 0.0441 - accuracy: 0.98 - 2s 98us/step - loss: 0.0457 - accuracy: 0.9843 - val_loss: 0.0492 - val_accuracy: 0.9844
Epoch 40/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0412 - accuracy: 0.96 - ETA: 0s - loss: 0.0411 - accuracy: 0.98 - ETA: 0s - loss: 0.0500 - accuracy: 0.98 - ETA: 0s - loss: 0.0472 - accuracy: 0.98 - ETA: 0s - loss: 0.0477 - accuracy: 0.98 - ETA: 0s - loss: 0.0470 - accuracy: 0.98 - ETA: 0s - loss: 0.0469 - accuracy: 0.98 - 2s 97us/step - loss: 0.0465 - accuracy: 0.9842 - val_loss: 0.0679 - val_accuracy: 0.9757
Epoch 41/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0218 - accuracy: 1.00 - ETA: 0s - loss: 0.0453 - accuracy: 0.98 - ETA: 0s - loss: 0.0404 - accuracy: 0.98 - ETA: 0s - loss: 0.0391 - accuracy: 0.98 - ETA: 0s - loss: 0.0435 - accuracy: 0.98 - ETA: 0s - loss: 0.0439 - accuracy: 0.98 - ETA: 0s - loss: 0.0423 - accuracy: 0.98 - 2s 95us/step - loss: 0.0461 - accuracy: 0.9846 - val_loss: 0.0624 - val_accuracy: 0.9818
Epoch 42/100
18068/18068 [==============================] - ETA: 1s - loss: 0.1395 - accuracy: 0.96 - ETA: 0s - loss: 0.0579 - accuracy: 0.97 - ETA: 0s - loss: 0.0520 - accuracy: 0.98 - ETA: 0s - loss: 0.0567 - accuracy: 0.97 - ETA: 0s - loss: 0.0531 - accuracy: 0.98 - ETA: 0s - loss: 0.0481 - accuracy: 0.98 - ETA: 0s - loss: 0.0461 - accuracy: 0.98 - 2s 97us/step - loss: 0.0461 - accuracy: 0.9842 - val_loss: 0.0568 - val_accuracy: 0.9820
Epoch 43/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1796 - accuracy: 0.93 - ETA: 0s - loss: 0.0395 - accuracy: 0.98 - ETA: 0s - loss: 0.0388 - accuracy: 0.98 - ETA: 0s - loss: 0.0365 - accuracy: 0.98 - ETA: 0s - loss: 0.0386 - accuracy: 0.98 - ETA: 0s - loss: 0.0428 - accuracy: 0.98 - ETA: 0s - loss: 0.0423 - accuracy: 0.98 - ETA: 0s - loss: 0.0420 - accuracy: 0.98 - 2s 95us/step - loss: 0.0415 - accuracy: 0.9852 - val_loss: 0.0502 - val_accuracy: 0.9832
Epoch 44/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0091 - accuracy: 1.00 - ETA: 0s - loss: 0.0421 - accuracy: 0.98 - ETA: 0s - loss: 0.0521 - accuracy: 0.98 - ETA: 0s - loss: 0.0472 - accuracy: 0.98 - ETA: 0s - loss: 0.0458 - accuracy: 0.98 - ETA: 0s - loss: 0.0460 - accuracy: 0.98 - ETA: 0s - loss: 0.0442 - accuracy: 0.98 - 2s 97us/step - loss: 0.0442 - accuracy: 0.9846 - val_loss: 0.0770 - val_accuracy: 0.9756
Epoch 45/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1832 - accuracy: 0.90 - ETA: 0s - loss: 0.0467 - accuracy: 0.98 - ETA: 0s - loss: 0.0432 - accuracy: 0.98 - ETA: 0s - loss: 0.0455 - accuracy: 0.98 - ETA: 0s - loss: 0.0492 - accuracy: 0.98 - ETA: 0s - loss: 0.0501 - accuracy: 0.98 - ETA: 0s - loss: 0.0489 - accuracy: 0.98 - 2s 95us/step - loss: 0.0478 - accuracy: 0.9835 - val_loss: 0.0404 - val_accuracy: 0.9876
Epoch 46/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1283 - accuracy: 0.93 - ETA: 0s - loss: 0.0580 - accuracy: 0.98 - ETA: 0s - loss: 0.0460 - accuracy: 0.98 - ETA: 0s - loss: 0.0409 - accuracy: 0.98 - ETA: 0s - loss: 0.0432 - accuracy: 0.98 - ETA: 0s - loss: 0.0453 - accuracy: 0.98 - ETA: 0s - loss: 0.0464 - accuracy: 0.98 - 2s 97us/step - loss: 0.0454 - accuracy: 0.9852 - val_loss: 0.0507 - val_accuracy: 0.9834
Epoch 47/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0083 - accuracy: 1.00 - ETA: 0s - loss: 0.0326 - accuracy: 0.99 - ETA: 0s - loss: 0.0341 - accuracy: 0.98 - ETA: 0s - loss: 0.0374 - accuracy: 0.98 - ETA: 0s - loss: 0.0397 - accuracy: 0.98 - ETA: 0s - loss: 0.0426 - accuracy: 0.98 - ETA: 0s - loss: 0.0426 - accuracy: 0.98 - 2s 95us/step - loss: 0.0431 - accuracy: 0.9860 - val_loss: 0.0376 - val_accuracy: 0.9880
Epoch 48/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0127 - accuracy: 1.00 - ETA: 0s - loss: 0.0370 - accuracy: 0.98 - ETA: 0s - loss: 0.0380 - accuracy: 0.98 - ETA: 0s - loss: 0.0364 - accuracy: 0.98 - ETA: 0s - loss: 0.0375 - accuracy: 0.98 - ETA: 0s - loss: 0.0429 - accuracy: 0.98 - ETA: 0s - loss: 0.0431 - accuracy: 0.98 - 2s 93us/step - loss: 0.0416 - accuracy: 0.9862 - val_loss: 0.0383 - val_accuracy: 0.9888
Epoch 49/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0018 - accuracy: 1.00 - ETA: 0s - loss: 0.0304 - accuracy: 0.98 - ETA: 0s - loss: 0.0320 - accuracy: 0.98 - ETA: 0s - loss: 0.0301 - accuracy: 0.98 - ETA: 0s - loss: 0.0392 - accuracy: 0.98 - ETA: 0s - loss: 0.0393 - accuracy: 0.98 - ETA: 0s - loss: 0.0391 - accuracy: 0.98 - 2s 97us/step - loss: 0.0398 - accuracy: 0.9867 - val_loss: 0.0483 - val_accuracy: 0.9870
Epoch 50/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0272 - accuracy: 1.00 - ETA: 0s - loss: 0.0242 - accuracy: 0.99 - ETA: 0s - loss: 0.0285 - accuracy: 0.99 - ETA: 0s - loss: 0.0330 - accuracy: 0.98 - ETA: 0s - loss: 0.0439 - accuracy: 0.98 - ETA: 0s - loss: 0.0465 - accuracy: 0.98 - ETA: 0s - loss: 0.0441 - accuracy: 0.98 - 2s 96us/step - loss: 0.0455 - accuracy: 0.9846 - val_loss: 0.1553 - val_accuracy: 0.9496
Epoch 51/100
18068/18068 [==============================] - ETA: 0s - loss: 0.2229 - accuracy: 0.96 - ETA: 0s - loss: 0.0702 - accuracy: 0.97 - ETA: 0s - loss: 0.0525 - accuracy: 0.98 - ETA: 0s - loss: 0.0484 - accuracy: 0.98 - ETA: 0s - loss: 0.0464 - accuracy: 0.98 - ETA: 0s - loss: 0.0452 - accuracy: 0.98 - ETA: 0s - loss: 0.0438 - accuracy: 0.98 - 2s 98us/step - loss: 0.0427 - accuracy: 0.9851 - val_loss: 0.0405 - val_accuracy: 0.9872
Epoch 52/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0044 - accuracy: 1.00 - ETA: 0s - loss: 0.0345 - accuracy: 0.98 - ETA: 0s - loss: 0.0323 - accuracy: 0.98 - ETA: 0s - loss: 0.0305 - accuracy: 0.98 - ETA: 0s - loss: 0.0303 - accuracy: 0.98 - ETA: 0s - loss: 0.0313 - accuracy: 0.98 - ETA: 0s - loss: 0.0353 - accuracy: 0.98 - 2s 101us/step - loss: 0.0372 - accuracy: 0.9867 - val_loss: 0.0507 - val_accuracy: 0.9849
Epoch 53/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0992 - accuracy: 0.96 - ETA: 0s - loss: 0.0372 - accuracy: 0.98 - ETA: 0s - loss: 0.0325 - accuracy: 0.98 - ETA: 0s - loss: 0.0338 - accuracy: 0.98 - ETA: 0s - loss: 0.0350 - accuracy: 0.98 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - ETA: 0s - loss: 0.0330 - accuracy: 0.98 - 2s 96us/step - loss: 0.0341 - accuracy: 0.9890 - val_loss: 0.0706 - val_accuracy: 0.9775
Epoch 54/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0255 - accuracy: 1.00 - ETA: 0s - loss: 0.0471 - accuracy: 0.98 - ETA: 0s - loss: 0.0442 - accuracy: 0.98 - ETA: 0s - loss: 0.0541 - accuracy: 0.98 - ETA: 0s - loss: 0.0506 - accuracy: 0.98 - ETA: 0s - loss: 0.0458 - accuracy: 0.98 - ETA: 0s - loss: 0.0431 - accuracy: 0.98 - 2s 98us/step - loss: 0.0431 - accuracy: 0.9851 - val_loss: 0.0332 - val_accuracy: 0.9903
Epoch 55/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0017 - accuracy: 1.00 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0270 - accuracy: 0.99 - ETA: 0s - loss: 0.0321 - accuracy: 0.98 - ETA: 0s - loss: 0.0424 - accuracy: 0.98 - ETA: 0s - loss: 0.0421 - accuracy: 0.98 - ETA: 0s - loss: 0.0391 - accuracy: 0.98 - 2s 98us/step - loss: 0.0380 - accuracy: 0.9869 - val_loss: 0.0261 - val_accuracy: 0.9926
Epoch 56/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0202 - accuracy: 1.00 - ETA: 0s - loss: 0.0348 - accuracy: 0.98 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - ETA: 0s - loss: 0.0280 - accuracy: 0.99 - ETA: 0s - loss: 0.0316 - accuracy: 0.99 - ETA: 0s - loss: 0.0324 - accuracy: 0.98 - ETA: 0s - loss: 0.0369 - accuracy: 0.98 - 2s 100us/step - loss: 0.0365 - accuracy: 0.9878 - val_loss: 0.0421 - val_accuracy: 0.9857
Epoch 57/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0309 - accuracy: 1.00 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0268 - accuracy: 0.99 - ETA: 0s - loss: 0.0281 - accuracy: 0.99 - ETA: 0s - loss: 0.0261 - accuracy: 0.99 - ETA: 0s - loss: 0.0272 - accuracy: 0.99 - ETA: 0s - loss: 0.0348 - accuracy: 0.98 - 2s 100us/step - loss: 0.0378 - accuracy: 0.9872 - val_loss: 0.0563 - val_accuracy: 0.9826
Epoch 58/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0666 - accuracy: 0.96 - ETA: 0s - loss: 0.0312 - accuracy: 0.99 - ETA: 0s - loss: 0.0239 - accuracy: 0.99 - ETA: 0s - loss: 0.0267 - accuracy: 0.99 - ETA: 0s - loss: 0.0328 - accuracy: 0.98 - ETA: 0s - loss: 0.0355 - accuracy: 0.98 - ETA: 0s - loss: 0.0363 - accuracy: 0.98 - 2s 100us/step - loss: 0.0372 - accuracy: 0.9870 - val_loss: 0.1505 - val_accuracy: 0.9517
Epoch 59/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1813 - accuracy: 0.96 - ETA: 0s - loss: 0.0483 - accuracy: 0.98 - ETA: 0s - loss: 0.0370 - accuracy: 0.98 - ETA: 0s - loss: 0.0355 - accuracy: 0.98 - ETA: 0s - loss: 0.0349 - accuracy: 0.98 - ETA: 0s - loss: 0.0339 - accuracy: 0.98 - ETA: 0s - loss: 0.0337 - accuracy: 0.98 - 2s 102us/step - loss: 0.0329 - accuracy: 0.9886 - val_loss: 0.0502 - val_accuracy: 0.9844
Epoch 60/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0191 - accuracy: 1.00 - ETA: 0s - loss: 0.0256 - accuracy: 0.99 - ETA: 0s - loss: 0.0333 - accuracy: 0.98 - ETA: 0s - loss: 0.0374 - accuracy: 0.98 - ETA: 0s - loss: 0.0342 - accuracy: 0.98 - ETA: 0s - loss: 0.0349 - accuracy: 0.98 - ETA: 0s - loss: 0.0343 - accuracy: 0.98 - 2s 97us/step - loss: 0.0381 - accuracy: 0.9862 - val_loss: 0.0817 - val_accuracy: 0.9748
Epoch 61/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1110 - accuracy: 0.96 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - ETA: 0s - loss: 0.0375 - accuracy: 0.98 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - ETA: 0s - loss: 0.0310 - accuracy: 0.98 - ETA: 0s - loss: 0.0304 - accuracy: 0.98 - ETA: 0s - loss: 0.0320 - accuracy: 0.98 - 2s 94us/step - loss: 0.0326 - accuracy: 0.9883 - val_loss: 0.0324 - val_accuracy: 0.9908
Epoch 62/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0520 - accuracy: 0.96 - ETA: 0s - loss: 0.0428 - accuracy: 0.98 - ETA: 0s - loss: 0.0428 - accuracy: 0.98 - ETA: 0s - loss: 0.0391 - accuracy: 0.98 - ETA: 0s - loss: 0.0362 - accuracy: 0.98 - ETA: 0s - loss: 0.0350 - accuracy: 0.98 - ETA: 0s - loss: 0.0353 - accuracy: 0.98 - 2s 94us/step - loss: 0.0344 - accuracy: 0.9882 - val_loss: 0.0375 - val_accuracy: 0.9880
Epoch 63/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0866 - accuracy: 0.96 - ETA: 0s - loss: 0.0334 - accuracy: 0.98 - ETA: 0s - loss: 0.0292 - accuracy: 0.98 - ETA: 0s - loss: 0.0268 - accuracy: 0.99 - ETA: 0s - loss: 0.0276 - accuracy: 0.99 - ETA: 0s - loss: 0.0418 - accuracy: 0.98 - ETA: 0s - loss: 0.0397 - accuracy: 0.98 - 2s 97us/step - loss: 0.0384 - accuracy: 0.9880 - val_loss: 0.0308 - val_accuracy: 0.9910
Epoch 64/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0635 - accuracy: 0.96 - ETA: 0s - loss: 0.0280 - accuracy: 0.99 - ETA: 0s - loss: 0.0262 - accuracy: 0.99 - ETA: 0s - loss: 0.0331 - accuracy: 0.98 - ETA: 0s - loss: 0.0329 - accuracy: 0.98 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - ETA: 0s - loss: 0.0309 - accuracy: 0.99 - 2s 98us/step - loss: 0.0302 - accuracy: 0.9904 - val_loss: 0.0318 - val_accuracy: 0.9913
Epoch 65/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0202 - accuracy: 1.00 - ETA: 0s - loss: 0.0188 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - ETA: 0s - loss: 0.0267 - accuracy: 0.99 - ETA: 0s - loss: 0.0278 - accuracy: 0.99 - ETA: 0s - loss: 0.0303 - accuracy: 0.99 - 2s 98us/step - loss: 0.0297 - accuracy: 0.9904 - val_loss: 0.0412 - val_accuracy: 0.9882
Epoch 66/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0062 - accuracy: 1.00 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - ETA: 0s - loss: 0.0294 - accuracy: 0.98 - ETA: 0s - loss: 0.0310 - accuracy: 0.98 - ETA: 0s - loss: 0.0311 - accuracy: 0.99 - ETA: 0s - loss: 0.0308 - accuracy: 0.98 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - 2s 99us/step - loss: 0.0329 - accuracy: 0.9893 - val_loss: 0.0662 - val_accuracy: 0.9754
Epoch 67/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0316 - accuracy: 0.96 - ETA: 0s - loss: 0.0315 - accuracy: 0.98 - ETA: 0s - loss: 0.0266 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0263 - accuracy: 0.99 - ETA: 0s - loss: 0.0264 - accuracy: 0.99 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - 2s 95us/step - loss: 0.0269 - accuracy: 0.9915 - val_loss: 0.0258 - val_accuracy: 0.9924
Epoch 68/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0184 - accuracy: 1.00 - ETA: 0s - loss: 0.0239 - accuracy: 0.99 - ETA: 0s - loss: 0.0245 - accuracy: 0.99 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - ETA: 0s - loss: 0.0286 - accuracy: 0.99 - ETA: 0s - loss: 0.0297 - accuracy: 0.98 - ETA: 0s - loss: 0.0333 - accuracy: 0.98 - 2s 97us/step - loss: 0.0330 - accuracy: 0.9892 - val_loss: 0.0395 - val_accuracy: 0.9862
Epoch 69/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0023 - accuracy: 1.00 - ETA: 0s - loss: 0.0443 - accuracy: 0.98 - ETA: 0s - loss: 0.0333 - accuracy: 0.98 - ETA: 0s - loss: 0.0322 - accuracy: 0.98 - ETA: 0s - loss: 0.0344 - accuracy: 0.98 - ETA: 0s - loss: 0.0348 - accuracy: 0.98 - ETA: 0s - loss: 0.0343 - accuracy: 0.98 - 2s 101us/step - loss: 0.0318 - accuracy: 0.9893 - val_loss: 0.0282 - val_accuracy: 0.9916
Epoch 70/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0045 - accuracy: 1.00 - ETA: 0s - loss: 0.0306 - accuracy: 0.98 - ETA: 0s - loss: 0.0301 - accuracy: 0.98 - ETA: 0s - loss: 0.0355 - accuracy: 0.98 - ETA: 0s - loss: 0.0358 - accuracy: 0.98 - ETA: 0s - loss: 0.0386 - accuracy: 0.98 - ETA: 0s - loss: 0.0364 - accuracy: 0.98 - 2s 101us/step - loss: 0.0350 - accuracy: 0.9886 - val_loss: 0.0411 - val_accuracy: 0.9877
Epoch 71/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1661 - accuracy: 0.96 - ETA: 0s - loss: 0.0207 - accuracy: 0.99 - ETA: 0s - loss: 0.0219 - accuracy: 0.99 - ETA: 0s - loss: 0.0219 - accuracy: 0.99 - ETA: 0s - loss: 0.0232 - accuracy: 0.99 - ETA: 0s - loss: 0.0214 - accuracy: 0.99 - ETA: 0s - loss: 0.0215 - accuracy: 0.99 - 2s 97us/step - loss: 0.0235 - accuracy: 0.9923 - val_loss: 0.0373 - val_accuracy: 0.9881
Epoch 72/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0066 - accuracy: 1.00 - ETA: 0s - loss: 0.0264 - accuracy: 0.99 - ETA: 0s - loss: 0.0317 - accuracy: 0.98 - ETA: 0s - loss: 0.0312 - accuracy: 0.98 - ETA: 0s - loss: 0.0347 - accuracy: 0.98 - ETA: 0s - loss: 0.0337 - accuracy: 0.98 - ETA: 0s - loss: 0.0301 - accuracy: 0.98 - 2s 95us/step - loss: 0.0316 - accuracy: 0.9888 - val_loss: 0.0365 - val_accuracy: 0.9889
Epoch 73/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0025 - accuracy: 1.00 - ETA: 0s - loss: 0.0276 - accuracy: 0.98 - ETA: 0s - loss: 0.0266 - accuracy: 0.99 - ETA: 0s - loss: 0.0272 - accuracy: 0.99 - ETA: 0s - loss: 0.0278 - accuracy: 0.99 - ETA: 0s - loss: 0.0266 - accuracy: 0.99 - ETA: 0s - loss: 0.0274 - accuracy: 0.99 - 2s 97us/step - loss: 0.0304 - accuracy: 0.9897 - val_loss: 0.1574 - val_accuracy: 0.9417
Epoch 74/100
18068/18068 [==============================] - ETA: 1s - loss: 0.1581 - accuracy: 0.93 - ETA: 0s - loss: 0.0238 - accuracy: 0.99 - ETA: 0s - loss: 0.0226 - accuracy: 0.99 - ETA: 0s - loss: 0.0264 - accuracy: 0.99 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - ETA: 0s - loss: 0.0273 - accuracy: 0.99 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - 2s 97us/step - loss: 0.0281 - accuracy: 0.9904 - val_loss: 0.0361 - val_accuracy: 0.9866
Epoch 75/100
18068/18068 [==============================] - ETA: 1s - loss: 0.1132 - accuracy: 0.96 - ETA: 0s - loss: 0.0242 - accuracy: 0.99 - ETA: 0s - loss: 0.0260 - accuracy: 0.99 - ETA: 0s - loss: 0.0269 - accuracy: 0.99 - ETA: 0s - loss: 0.0292 - accuracy: 0.99 - ETA: 0s - loss: 0.0281 - accuracy: 0.99 - ETA: 0s - loss: 0.0274 - accuracy: 0.99 - 2s 98us/step - loss: 0.0262 - accuracy: 0.9918 - val_loss: 0.0262 - val_accuracy: 0.9920
Epoch 76/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0058 - accuracy: 1.00 - ETA: 0s - loss: 0.0158 - accuracy: 0.99 - ETA: 0s - loss: 0.0210 - accuracy: 0.99 - ETA: 0s - loss: 0.0223 - accuracy: 0.99 - ETA: 0s - loss: 0.0250 - accuracy: 0.99 - ETA: 0s - loss: 0.0251 - accuracy: 0.99 - ETA: 0s - loss: 0.0277 - accuracy: 0.99 - 2s 94us/step - loss: 0.0280 - accuracy: 0.9905 - val_loss: 0.0269 - val_accuracy: 0.9918
Epoch 77/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0045 - accuracy: 1.00 - ETA: 0s - loss: 0.0264 - accuracy: 0.99 - ETA: 0s - loss: 0.0226 - accuracy: 0.99 - ETA: 0s - loss: 0.0178 - accuracy: 0.99 - ETA: 0s - loss: 0.0208 - accuracy: 0.99 - ETA: 0s - loss: 0.0248 - accuracy: 0.99 - ETA: 0s - loss: 0.0244 - accuracy: 0.99 - 2s 98us/step - loss: 0.0261 - accuracy: 0.9912 - val_loss: 0.0540 - val_accuracy: 0.9809
Epoch 78/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0312 - accuracy: 1.00 - ETA: 0s - loss: 0.0224 - accuracy: 0.99 - ETA: 0s - loss: 0.0276 - accuracy: 0.99 - ETA: 0s - loss: 0.0277 - accuracy: 0.99 - ETA: 0s - loss: 0.0288 - accuracy: 0.99 - ETA: 0s - loss: 0.0275 - accuracy: 0.99 - ETA: 0s - loss: 0.0261 - accuracy: 0.99 - 2s 97us/step - loss: 0.0268 - accuracy: 0.9911 - val_loss: 0.0334 - val_accuracy: 0.9908
Epoch 79/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0066 - accuracy: 1.00 - ETA: 0s - loss: 0.0261 - accuracy: 0.98 - ETA: 0s - loss: 0.0284 - accuracy: 0.98 - ETA: 0s - loss: 0.0250 - accuracy: 0.99 - ETA: 0s - loss: 0.0323 - accuracy: 0.98 - ETA: 0s - loss: 0.0301 - accuracy: 0.99 - ETA: 0s - loss: 0.0281 - accuracy: 0.99 - 2s 94us/step - loss: 0.0277 - accuracy: 0.9910 - val_loss: 0.0764 - val_accuracy: 0.9735
Epoch 80/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0497 - accuracy: 0.96 - ETA: 0s - loss: 0.0316 - accuracy: 0.98 - ETA: 0s - loss: 0.0396 - accuracy: 0.98 - ETA: 0s - loss: 0.0420 - accuracy: 0.98 - ETA: 0s - loss: 0.0374 - accuracy: 0.98 - ETA: 0s - loss: 0.0370 - accuracy: 0.98 - ETA: 0s - loss: 0.0343 - accuracy: 0.98 - 2s 93us/step - loss: 0.0327 - accuracy: 0.9884 - val_loss: 0.0308 - val_accuracy: 0.9921
Epoch 81/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0094 - accuracy: 1.00 - ETA: 0s - loss: 0.0156 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - ETA: 0s - loss: 0.0249 - accuracy: 0.99 - ETA: 0s - loss: 0.0268 - accuracy: 0.99 - ETA: 0s - loss: 0.0242 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - 2s 98us/step - loss: 0.0220 - accuracy: 0.9931 - val_loss: 0.0310 - val_accuracy: 0.9896
Epoch 82/100
18068/18068 [==============================] - ETA: 0s - loss: 5.9038e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0228 - accuracy: 0.9914   - ETA: 0s - loss: 0.0214 - accuracy: 0.99 - ETA: 0s - loss: 0.0204 - accuracy: 0.99 - ETA: 0s - loss: 0.0228 - accuracy: 0.99 - ETA: 0s - loss: 0.0257 - accuracy: 0.99 - ETA: 0s - loss: 0.0246 - accuracy: 0.99 - 2s 94us/step - loss: 0.0246 - accuracy: 0.9911 - val_loss: 0.0409 - val_accuracy: 0.9863
Epoch 83/100
18068/18068 [==============================] - ETA: 1s - loss: 6.2001e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0330 - accuracy: 0.9878   - ETA: 0s - loss: 0.0410 - accuracy: 0.98 - ETA: 0s - loss: 0.0346 - accuracy: 0.98 - ETA: 0s - loss: 0.0317 - accuracy: 0.98 - ETA: 0s - loss: 0.0321 - accuracy: 0.98 - ETA: 0s - loss: 0.0305 - accuracy: 0.98 - 2s 97us/step - loss: 0.0286 - accuracy: 0.9901 - val_loss: 0.0233 - val_accuracy: 0.9931
Epoch 84/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0143 - accuracy: 1.00 - ETA: 0s - loss: 0.0310 - accuracy: 0.98 - ETA: 0s - loss: 0.0233 - accuracy: 0.99 - ETA: 0s - loss: 0.0224 - accuracy: 0.99 - ETA: 0s - loss: 0.0250 - accuracy: 0.99 - ETA: 0s - loss: 0.0323 - accuracy: 0.98 - ETA: 0s - loss: 0.0354 - accuracy: 0.98 - 2s 100us/step - loss: 0.0329 - accuracy: 0.9888 - val_loss: 0.0236 - val_accuracy: 0.9920
Epoch 85/100
18068/18068 [==============================] - ETA: 0s - loss: 0.1124 - accuracy: 0.93 - ETA: 0s - loss: 0.0144 - accuracy: 0.99 - ETA: 0s - loss: 0.0157 - accuracy: 0.99 - ETA: 0s - loss: 0.0179 - accuracy: 0.99 - ETA: 0s - loss: 0.0184 - accuracy: 0.99 - ETA: 0s - loss: 0.0179 - accuracy: 0.99 - ETA: 0s - loss: 0.0191 - accuracy: 0.99 - 2s 100us/step - loss: 0.0184 - accuracy: 0.9936 - val_loss: 0.0257 - val_accuracy: 0.9923
Epoch 86/100
18068/18068 [==============================] - ETA: 1s - loss: 0.1240 - accuracy: 0.93 - ETA: 0s - loss: 0.0351 - accuracy: 0.98 - ETA: 0s - loss: 0.0290 - accuracy: 0.98 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - ETA: 0s - loss: 0.0213 - accuracy: 0.99 - ETA: 0s - loss: 0.0206 - accuracy: 0.99 - ETA: 0s - loss: 0.0288 - accuracy: 0.99 - 2s 93us/step - loss: 0.0303 - accuracy: 0.9896 - val_loss: 0.0298 - val_accuracy: 0.9899
Epoch 87/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0170 - accuracy: 1.00 - ETA: 0s - loss: 0.0520 - accuracy: 0.98 - ETA: 0s - loss: 0.0377 - accuracy: 0.98 - ETA: 0s - loss: 0.0303 - accuracy: 0.98 - ETA: 0s - loss: 0.0280 - accuracy: 0.99 - ETA: 0s - loss: 0.0256 - accuracy: 0.99 - ETA: 0s - loss: 0.0247 - accuracy: 0.99 - 2s 94us/step - loss: 0.0242 - accuracy: 0.9916 - val_loss: 0.0368 - val_accuracy: 0.9900
Epoch 88/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0029 - accuracy: 1.00 - ETA: 0s - loss: 0.0234 - accuracy: 0.99 - ETA: 0s - loss: 0.0309 - accuracy: 0.99 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - ETA: 0s - loss: 0.0293 - accuracy: 0.99 - ETA: 0s - loss: 0.0262 - accuracy: 0.99 - ETA: 0s - loss: 0.0245 - accuracy: 0.99 - 2s 98us/step - loss: 0.0235 - accuracy: 0.9919 - val_loss: 0.0353 - val_accuracy: 0.9898
Epoch 89/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0204 - accuracy: 1.00 - ETA: 0s - loss: 0.0228 - accuracy: 0.99 - ETA: 0s - loss: 0.0310 - accuracy: 0.98 - ETA: 0s - loss: 0.0281 - accuracy: 0.99 - ETA: 0s - loss: 0.0277 - accuracy: 0.99 - ETA: 0s - loss: 0.0273 - accuracy: 0.99 - ETA: 0s - loss: 0.0280 - accuracy: 0.98 - 2s 96us/step - loss: 0.0293 - accuracy: 0.9896 - val_loss: 0.0315 - val_accuracy: 0.9903
Epoch 90/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0029 - accuracy: 1.00 - ETA: 0s - loss: 0.0310 - accuracy: 0.99 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - ETA: 0s - loss: 0.0239 - accuracy: 0.99 - ETA: 0s - loss: 0.0220 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - ETA: 0s - loss: 0.0259 - accuracy: 0.99 - 2s 92us/step - loss: 0.0248 - accuracy: 0.9925 - val_loss: 0.0330 - val_accuracy: 0.9895
Epoch 91/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0244 - accuracy: 1.00 - ETA: 0s - loss: 0.0134 - accuracy: 0.99 - ETA: 0s - loss: 0.0164 - accuracy: 0.99 - ETA: 0s - loss: 0.0225 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0249 - accuracy: 0.99 - ETA: 0s - loss: 0.0236 - accuracy: 0.99 - 2s 98us/step - loss: 0.0228 - accuracy: 0.9924 - val_loss: 0.0276 - val_accuracy: 0.9914
Epoch 92/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0057 - accuracy: 1.00 - ETA: 0s - loss: 0.0239 - accuracy: 0.99 - ETA: 0s - loss: 0.0238 - accuracy: 0.99 - ETA: 0s - loss: 0.0214 - accuracy: 0.99 - ETA: 0s - loss: 0.0207 - accuracy: 0.99 - ETA: 0s - loss: 0.0205 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - 2s 99us/step - loss: 0.0213 - accuracy: 0.9927 - val_loss: 0.0323 - val_accuracy: 0.9899
Epoch 93/100
18068/18068 [==============================] - ETA: 1s - loss: 2.3066e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0247 - accuracy: 0.9918   - ETA: 0s - loss: 0.0236 - accuracy: 0.99 - ETA: 0s - loss: 0.0215 - accuracy: 0.99 - ETA: 0s - loss: 0.0220 - accuracy: 0.99 - ETA: 0s - loss: 0.0200 - accuracy: 0.99 - ETA: 0s - loss: 0.0198 - accuracy: 0.99 - 2s 95us/step - loss: 0.0250 - accuracy: 0.9923 - val_loss: 0.1360 - val_accuracy: 0.9603
Epoch 94/100
18068/18068 [==============================] - ETA: 0s - loss: 0.0579 - accuracy: 0.96 - ETA: 0s - loss: 0.0235 - accuracy: 0.99 - ETA: 0s - loss: 0.0210 - accuracy: 0.99 - ETA: 0s - loss: 0.0253 - accuracy: 0.99 - ETA: 0s - loss: 0.0286 - accuracy: 0.99 - ETA: 0s - loss: 0.0270 - accuracy: 0.99 - ETA: 0s - loss: 0.0251 - accuracy: 0.99 - 2s 97us/step - loss: 0.0238 - accuracy: 0.9920 - val_loss: 0.0203 - val_accuracy: 0.9939
Epoch 95/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0031 - accuracy: 1.00 - ETA: 0s - loss: 0.0116 - accuracy: 0.99 - ETA: 0s - loss: 0.0159 - accuracy: 0.99 - ETA: 0s - loss: 0.0166 - accuracy: 0.99 - ETA: 0s - loss: 0.0207 - accuracy: 0.99 - ETA: 0s - loss: 0.0207 - accuracy: 0.99 - ETA: 0s - loss: 0.0192 - accuracy: 0.99 - 2s 96us/step - loss: 0.0189 - accuracy: 0.9934 - val_loss: 0.0234 - val_accuracy: 0.9933
Epoch 96/100
18068/18068 [==============================] - ETA: 1s - loss: 1.9295e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0450 - accuracy: 0.9853   - ETA: 0s - loss: 0.0335 - accuracy: 0.98 - ETA: 0s - loss: 0.0266 - accuracy: 0.99 - ETA: 0s - loss: 0.0320 - accuracy: 0.98 - ETA: 0s - loss: 0.0289 - accuracy: 0.99 - ETA: 0s - loss: 0.0261 - accuracy: 0.99 - 2s 95us/step - loss: 0.0259 - accuracy: 0.9911 - val_loss: 0.0221 - val_accuracy: 0.9936
Epoch 97/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0011 - accuracy: 1.00 - ETA: 0s - loss: 0.0193 - accuracy: 0.99 - ETA: 0s - loss: 0.0295 - accuracy: 0.98 - ETA: 0s - loss: 0.0255 - accuracy: 0.99 - ETA: 0s - loss: 0.0218 - accuracy: 0.99 - ETA: 0s - loss: 0.0220 - accuracy: 0.99 - ETA: 0s - loss: 0.0205 - accuracy: 0.99 - 2s 96us/step - loss: 0.0215 - accuracy: 0.9926 - val_loss: 0.0838 - val_accuracy: 0.9744
Epoch 98/100
18068/18068 [==============================] - ETA: 1s - loss: 6.6428e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0324 - accuracy: 0.9899   - ETA: 0s - loss: 0.0235 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - ETA: 0s - loss: 0.0256 - accuracy: 0.99 - 2s 94us/step - loss: 0.0280 - accuracy: 0.9908 - val_loss: 0.0490 - val_accuracy: 0.9835
Epoch 99/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0147 - accuracy: 1.00 - ETA: 0s - loss: 0.0185 - accuracy: 0.99 - ETA: 0s - loss: 0.0153 - accuracy: 0.99 - ETA: 0s - loss: 0.0169 - accuracy: 0.99 - ETA: 0s - loss: 0.0163 - accuracy: 0.99 - ETA: 0s - loss: 0.0156 - accuracy: 0.99 - ETA: 0s - loss: 0.0170 - accuracy: 0.99 - 2s 97us/step - loss: 0.0165 - accuracy: 0.9946 - val_loss: 0.0241 - val_accuracy: 0.9921
Epoch 100/100
18068/18068 [==============================] - ETA: 1s - loss: 0.0017 - accuracy: 1.00 - ETA: 0s - loss: 0.0244 - accuracy: 0.99 - ETA: 0s - loss: 0.0232 - accuracy: 0.99 - ETA: 0s - loss: 0.0245 - accuracy: 0.99 - ETA: 0s - loss: 0.0256 - accuracy: 0.99 - ETA: 0s - loss: 0.0261 - accuracy: 0.99 - ETA: 0s - loss: 0.0238 - accuracy: 0.99 - ETA: 0s - loss: 0.0228 - accuracy: 0.99 - 2s 101us/step - loss: 0.0228 - accuracy: 0.9919 - val_loss: 0.0591 - val_accuracy: 0.9803
Initializing network.
100%|████████████████████████████████████████████████████████████████████████████████████████| 100/100 [00:00<?, ?it/s]
Round 1.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
  1%|▊                                                                                 | 1/100 [00:01<02:52,  1.74s/it]
  2%|█▋                                                                                | 2/100 [00:03<02:51,  1.75s/it]
  3%|██▍                                                                               | 3/100 [00:05<02:49,  1.75s/it]
  4%|███▎                                                                              | 4/100 [00:07<02:48,  1.75s/it]
  5%|████                                                                              | 5/100 [00:08<02:46,  1.75s/it]
  6%|████▉                                                                             | 6/100 [00:10<02:43,  1.74s/it]
  7%|█████▋                                                                            | 7/100 [00:12<02:42,  1.74s/it]
  8%|██████▌                                                                           | 8/100 [00:13<02:39,  1.74s/it]
  9%|███████▍                                                                          | 9/100 [00:15<02:36,  1.72s/it]
 10%|████████                                                                         | 10/100 [00:17<02:33,  1.70s/it]
 11%|████████▉                                                                        | 11/100 [00:18<02:30,  1.69s/it]
 12%|█████████▋                                                                       | 12/100 [00:20<02:28,  1.69s/it]
 13%|██████████▌                                                                      | 13/100 [00:22<02:25,  1.68s/it]
 14%|███████████▎                                                                     | 14/100 [00:23<02:24,  1.68s/it]
 15%|████████████▏                                                                    | 15/100 [00:25<02:22,  1.68s/it]
 16%|████████████▉                                                                    | 16/100 [00:27<02:20,  1.68s/it]
 17%|█████████████▊                                                                   | 17/100 [00:29<02:21,  1.70s/it]
 18%|██████████████▌                                                                  | 18/100 [00:30<02:18,  1.69s/it]
 19%|███████████████▍                                                                 | 19/100 [00:32<02:16,  1.68s/it]
 20%|████████████████▏                                                                | 20/100 [00:34<02:14,  1.68s/it]
 21%|█████████████████                                                                | 21/100 [00:35<02:12,  1.68s/it]
 22%|█████████████████▊                                                               | 22/100 [00:37<02:12,  1.69s/it]
 23%|██████████████████▋                                                              | 23/100 [00:39<02:09,  1.69s/it]
 24%|███████████████████▍                                                             | 24/100 [00:40<02:07,  1.68s/it]
 25%|████████████████████▎                                                            | 25/100 [00:42<02:05,  1.68s/it]
 26%|█████████████████████                                                            | 26/100 [00:44<02:04,  1.68s/it]
 27%|█████████████████████▊                                                           | 27/100 [00:45<02:02,  1.68s/it]
 28%|██████████████████████▋                                                          | 28/100 [00:47<02:00,  1.68s/it]
 29%|███████████████████████▍                                                         | 29/100 [00:49<01:59,  1.68s/it]
 30%|████████████████████████▎                                                        | 30/100 [00:50<01:57,  1.68s/it]
 31%|█████████████████████████                                                        | 31/100 [00:52<01:55,  1.68s/it]
 32%|█████████████████████████▉                                                       | 32/100 [00:54<01:53,  1.67s/it]
 33%|██████████████████████████▋                                                      | 33/100 [00:55<01:51,  1.67s/it]
 34%|███████████████████████████▌                                                     | 34/100 [00:57<01:50,  1.67s/it]
 35%|████████████████████████████▎                                                    | 35/100 [00:59<01:48,  1.67s/it]
 36%|█████████████████████████████▏                                                   | 36/100 [01:00<01:46,  1.67s/it]
 37%|█████████████████████████████▉                                                   | 37/100 [01:02<01:45,  1.67s/it]
 38%|██████████████████████████████▊                                                  | 38/100 [01:04<01:44,  1.68s/it]
 39%|███████████████████████████████▌                                                 | 39/100 [01:05<01:42,  1.68s/it]
 40%|████████████████████████████████▍                                                | 40/100 [01:07<01:40,  1.67s/it]
 41%|█████████████████████████████████▏                                               | 41/100 [01:09<01:38,  1.67s/it]
 42%|██████████████████████████████████                                               | 42/100 [01:10<01:36,  1.67s/it]
 43%|██████████████████████████████████▊                                              | 43/100 [01:12<01:35,  1.68s/it]
 44%|███████████████████████████████████▋                                             | 44/100 [01:14<01:33,  1.67s/it]
 45%|████████████████████████████████████▍                                            | 45/100 [01:15<01:32,  1.68s/it]
 46%|█████████████████████████████████████▎                                           | 46/100 [01:17<01:30,  1.67s/it]
 47%|██████████████████████████████████████                                           | 47/100 [01:19<01:28,  1.68s/it]
 48%|██████████████████████████████████████▉                                          | 48/100 [01:20<01:27,  1.67s/it]
 49%|███████████████████████████████████████▋                                         | 49/100 [01:22<01:25,  1.67s/it]
 50%|████████████████████████████████████████▌                                        | 50/100 [01:24<01:23,  1.67s/it]
 51%|█████████████████████████████████████████▎                                       | 51/100 [01:26<01:21,  1.67s/it]
 52%|██████████████████████████████████████████                                       | 52/100 [01:27<01:20,  1.67s/it]
 53%|██████████████████████████████████████████▉                                      | 53/100 [01:29<01:18,  1.68s/it]
 54%|███████████████████████████████████████████▋                                     | 54/100 [01:31<01:17,  1.68s/it]
 55%|████████████████████████████████████████████▌                                    | 55/100 [01:32<01:15,  1.67s/it]
 56%|█████████████████████████████████████████████▎                                   | 56/100 [01:34<01:13,  1.67s/it]
 57%|██████████████████████████████████████████████▏                                  | 57/100 [01:36<01:11,  1.67s/it]
 58%|██████████████████████████████████████████████▉                                  | 58/100 [01:37<01:10,  1.67s/it]
 59%|███████████████████████████████████████████████▊                                 | 59/100 [01:39<01:08,  1.68s/it]
 60%|████████████████████████████████████████████████▌                                | 60/100 [01:41<01:07,  1.68s/it]
 61%|█████████████████████████████████████████████████▍                               | 61/100 [01:42<01:05,  1.68s/it]
 62%|██████████████████████████████████████████████████▏                              | 62/100 [01:44<01:03,  1.67s/it]
 63%|███████████████████████████████████████████████████                              | 63/100 [01:46<01:02,  1.68s/it]
 64%|███████████████████████████████████████████████████▊                             | 64/100 [01:47<01:00,  1.67s/it]
 65%|████████████████████████████████████████████████████▋                            | 65/100 [01:49<00:58,  1.67s/it]
 66%|█████████████████████████████████████████████████████▍                           | 66/100 [01:51<00:56,  1.67s/it]
 67%|██████████████████████████████████████████████████████▎                          | 67/100 [01:52<00:55,  1.69s/it]
 68%|███████████████████████████████████████████████████████                          | 68/100 [01:54<00:54,  1.69s/it]
 69%|███████████████████████████████████████████████████████▉                         | 69/100 [01:56<00:53,  1.72s/it]
 70%|████████████████████████████████████████████████████████▋                        | 70/100 [01:58<00:51,  1.72s/it]
 71%|█████████████████████████████████████████████████████████▌                       | 71/100 [01:59<00:49,  1.72s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:   12.0s finished
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.2s finished
the number of miss classified sampels is  236
 72%|██████████████████████████████████████████████████████████▎                      | 72/100 [02:26<04:21,  9.34s/it]
 73%|███████████████████████████████████████████████████████████▏                     | 73/100 [02:28<03:10,  7.04s/it]
 74%|███████████████████████████████████████████████████████████▉                     | 74/100 [02:30<02:21,  5.43s/it]
 75%|████████████████████████████████████████████████████████████▊                    | 75/100 [02:31<01:47,  4.31s/it]
 76%|█████████████████████████████████████████████████████████████▌                   | 76/100 [02:33<01:24,  3.51s/it]
 77%|██████████████████████████████████████████████████████████████▎                  | 77/100 [02:35<01:08,  2.96s/it]
 78%|███████████████████████████████████████████████████████████████▏                 | 78/100 [02:36<00:56,  2.58s/it]
 79%|███████████████████████████████████████████████████████████████▉                 | 79/100 [02:38<00:48,  2.30s/it]
 80%|████████████████████████████████████████████████████████████████▊                | 80/100 [02:40<00:42,  2.11s/it]
 81%|█████████████████████████████████████████████████████████████████▌               | 81/100 [02:41<00:37,  1.98s/it]
 82%|██████████████████████████████████████████████████████████████████▍              | 82/100 [02:43<00:33,  1.89s/it]
 83%|███████████████████████████████████████████████████████████████████▏             | 83/100 [02:45<00:30,  1.82s/it]
 84%|████████████████████████████████████████████████████████████████████             | 84/100 [02:46<00:28,  1.77s/it]
 85%|████████████████████████████████████████████████████████████████████▊            | 85/100 [02:48<00:26,  1.74s/it]
 86%|█████████████████████████████████████████████████████████████████████▋           | 86/100 [02:50<00:24,  1.72s/it]
 87%|██████████████████████████████████████████████████████████████████████▍          | 87/100 [02:51<00:22,  1.70s/it]
 88%|███████████████████████████████████████████████████████████████████████▎         | 88/100 [02:53<00:20,  1.70s/it]
 89%|████████████████████████████████████████████████████████████████████████         | 89/100 [02:55<00:18,  1.69s/it]
 90%|████████████████████████████████████████████████████████████████████████▉        | 90/100 [02:56<00:16,  1.69s/it]
 91%|█████████████████████████████████████████████████████████████████████████▋       | 91/100 [02:58<00:15,  1.68s/it]
 92%|██████████████████████████████████████████████████████████████████████████▌      | 92/100 [03:00<00:13,  1.68s/it]
 93%|███████████████████████████████████████████████████████████████████████████▎     | 93/100 [03:01<00:11,  1.68s/it]
 94%|████████████████████████████████████████████████████████████████████████████▏    | 94/100 [03:03<00:10,  1.67s/it]
 95%|████████████████████████████████████████████████████████████████████████████▉    | 95/100 [03:05<00:08,  1.67s/it]
 96%|█████████████████████████████████████████████████████████████████████████████▊   | 96/100 [03:06<00:06,  1.67s/it]
 97%|██████████████████████████████████████████████████████████████████████████████▌  | 97/100 [03:08<00:04,  1.67s/it]
 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [03:10<00:03,  1.66s/it]
 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [03:11<00:01,  1.67s/it]
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [03:13<00:00,  1.94s/it]
Global model loss: 0.19349391767446147; global model accuracy: 0.9243025779724121
Round 2.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
  1%|▊                                                                                 | 1/100 [00:01<02:43,  1.66s/it]
  2%|█▋                                                                                | 2/100 [00:03<02:42,  1.66s/it]
  3%|██▍                                                                               | 3/100 [00:04<02:40,  1.66s/it]
  4%|███▎                                                                              | 4/100 [00:06<02:39,  1.66s/it]
  5%|████                                                                              | 5/100 [00:08<02:37,  1.66s/it]
  6%|████▉                                                                             | 6/100 [00:09<02:36,  1.66s/it]
  7%|█████▋                                                                            | 7/100 [00:11<02:34,  1.66s/it]
  8%|██████▌                                                                           | 8/100 [00:13<02:32,  1.66s/it]
  9%|███████▍                                                                          | 9/100 [00:14<02:30,  1.65s/it]
 10%|████████                                                                         | 10/100 [00:16<02:28,  1.65s/it]
 11%|████████▉                                                                        | 11/100 [00:18<02:27,  1.66s/it]
 12%|█████████▋                                                                       | 12/100 [00:19<02:26,  1.66s/it]
 13%|██████████▌                                                                      | 13/100 [00:21<02:24,  1.66s/it]
 14%|███████████▎                                                                     | 14/100 [00:23<02:22,  1.66s/it]
 15%|████████████▏                                                                    | 15/100 [00:24<02:21,  1.66s/it]
 16%|████████████▉                                                                    | 16/100 [00:26<02:19,  1.66s/it]
 17%|█████████████▊                                                                   | 17/100 [00:28<02:18,  1.67s/it]
 18%|██████████████▌                                                                  | 18/100 [00:29<02:16,  1.67s/it]
 19%|███████████████▍                                                                 | 19/100 [00:31<02:14,  1.66s/it]
 20%|████████████████▏                                                                | 20/100 [00:33<02:13,  1.66s/it]
 21%|█████████████████                                                                | 21/100 [00:34<02:11,  1.66s/it]
 22%|█████████████████▊                                                               | 22/100 [00:36<02:09,  1.66s/it]
 23%|██████████████████▋                                                              | 23/100 [00:38<02:07,  1.66s/it]
 24%|███████████████████▍                                                             | 24/100 [00:39<02:05,  1.66s/it]
 25%|████████████████████▎                                                            | 25/100 [00:41<02:04,  1.66s/it]
 26%|█████████████████████                                                            | 26/100 [00:43<02:03,  1.66s/it]
 27%|█████████████████████▊                                                           | 27/100 [00:44<02:01,  1.66s/it]
 28%|██████████████████████▋                                                          | 28/100 [00:46<01:59,  1.66s/it]
 29%|███████████████████████▍                                                         | 29/100 [00:48<01:57,  1.66s/it]
 30%|████████████████████████▎                                                        | 30/100 [00:49<01:55,  1.66s/it]
 31%|█████████████████████████                                                        | 31/100 [00:51<01:54,  1.66s/it]
 32%|█████████████████████████▉                                                       | 32/100 [00:53<01:52,  1.65s/it]
 33%|██████████████████████████▋                                                      | 33/100 [00:54<01:50,  1.65s/it]
 34%|███████████████████████████▌                                                     | 34/100 [00:56<01:49,  1.65s/it]
 35%|████████████████████████████▎                                                    | 35/100 [00:58<01:47,  1.65s/it]
 36%|█████████████████████████████▏                                                   | 36/100 [00:59<01:46,  1.66s/it]
 37%|█████████████████████████████▉                                                   | 37/100 [01:01<01:44,  1.67s/it]
 38%|██████████████████████████████▊                                                  | 38/100 [01:03<01:43,  1.67s/it]
 39%|███████████████████████████████▌                                                 | 39/100 [01:04<01:42,  1.67s/it]
 40%|████████████████████████████████▍                                                | 40/100 [01:06<01:40,  1.67s/it]
 41%|█████████████████████████████████▏                                               | 41/100 [01:08<01:38,  1.66s/it]
 42%|██████████████████████████████████                                               | 42/100 [01:09<01:36,  1.66s/it]
 43%|██████████████████████████████████▊                                              | 43/100 [01:11<01:34,  1.65s/it]
 44%|███████████████████████████████████▋                                             | 44/100 [01:13<01:32,  1.65s/it]
 45%|████████████████████████████████████▍                                            | 45/100 [01:14<01:30,  1.65s/it]
 46%|█████████████████████████████████████▎                                           | 46/100 [01:16<01:29,  1.65s/it]
 47%|██████████████████████████████████████                                           | 47/100 [01:17<01:27,  1.65s/it]
 48%|██████████████████████████████████████▉                                          | 48/100 [01:19<01:26,  1.66s/it]
 49%|███████████████████████████████████████▋                                         | 49/100 [01:21<01:24,  1.66s/it]
 50%|████████████████████████████████████████▌                                        | 50/100 [01:22<01:23,  1.66s/it]
 51%|█████████████████████████████████████████▎                                       | 51/100 [01:24<01:21,  1.66s/it]
 52%|██████████████████████████████████████████                                       | 52/100 [01:26<01:19,  1.66s/it]
 53%|██████████████████████████████████████████▉                                      | 53/100 [01:27<01:18,  1.67s/it]
 54%|███████████████████████████████████████████▋                                     | 54/100 [01:29<01:16,  1.66s/it]
 55%|████████████████████████████████████████████▌                                    | 55/100 [01:31<01:14,  1.66s/it]
 56%|█████████████████████████████████████████████▎                                   | 56/100 [01:32<01:13,  1.66s/it]
 57%|██████████████████████████████████████████████▏                                  | 57/100 [01:34<01:11,  1.66s/it]
 58%|██████████████████████████████████████████████▉                                  | 58/100 [01:36<01:09,  1.66s/it]
 59%|███████████████████████████████████████████████▊                                 | 59/100 [01:37<01:07,  1.66s/it]
 60%|████████████████████████████████████████████████▌                                | 60/100 [01:39<01:06,  1.66s/it]
 61%|█████████████████████████████████████████████████▍                               | 61/100 [01:41<01:04,  1.66s/it]
 62%|██████████████████████████████████████████████████▏                              | 62/100 [01:42<01:03,  1.66s/it]
 63%|███████████████████████████████████████████████████                              | 63/100 [01:44<01:01,  1.66s/it]
 64%|███████████████████████████████████████████████████▊                             | 64/100 [01:46<00:59,  1.66s/it]
 65%|████████████████████████████████████████████████████▋                            | 65/100 [01:47<00:58,  1.66s/it]
 66%|█████████████████████████████████████████████████████▍                           | 66/100 [01:49<00:56,  1.66s/it]
 67%|██████████████████████████████████████████████████████▎                          | 67/100 [01:51<00:54,  1.66s/it]
 68%|███████████████████████████████████████████████████████                          | 68/100 [01:52<00:52,  1.66s/it]
 69%|███████████████████████████████████████████████████████▉                         | 69/100 [01:54<00:51,  1.67s/it]
 70%|████████████████████████████████████████████████████████▋                        | 70/100 [01:56<00:49,  1.67s/it]
 71%|█████████████████████████████████████████████████████████▌                       | 71/100 [01:57<00:48,  1.67s/it]
 72%|██████████████████████████████████████████████████████████▎                      | 72/100 [01:59<00:46,  1.67s/it]
 73%|███████████████████████████████████████████████████████████▏                     | 73/100 [02:01<00:44,  1.66s/it]
 74%|███████████████████████████████████████████████████████████▉                     | 74/100 [02:02<00:43,  1.66s/it]
 75%|████████████████████████████████████████████████████████████▊                    | 75/100 [02:04<00:41,  1.66s/it]
 76%|█████████████████████████████████████████████████████████████▌                   | 76/100 [02:06<00:39,  1.66s/it]
 77%|██████████████████████████████████████████████████████████████▎                  | 77/100 [02:07<00:38,  1.66s/it]
 78%|███████████████████████████████████████████████████████████████▏                 | 78/100 [02:09<00:36,  1.66s/it]
 79%|███████████████████████████████████████████████████████████████▉                 | 79/100 [02:11<00:34,  1.66s/it]
 80%|████████████████████████████████████████████████████████████████▊                | 80/100 [02:12<00:33,  1.66s/it]
 81%|█████████████████████████████████████████████████████████████████▌               | 81/100 [02:14<00:31,  1.66s/it]
 82%|██████████████████████████████████████████████████████████████████▍              | 82/100 [02:16<00:29,  1.66s/it]
 83%|███████████████████████████████████████████████████████████████████▏             | 83/100 [02:17<00:28,  1.66s/it]
 84%|████████████████████████████████████████████████████████████████████             | 84/100 [02:19<00:26,  1.66s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.2s finished
the number of miss classified sampels is  1014
 85%|████████████████████████████████████████████████████████████████████▊            | 85/100 [03:13<04:22, 17.51s/it]
 86%|█████████████████████████████████████████████████████████████████████▋           | 86/100 [03:15<02:58, 12.75s/it]
 87%|██████████████████████████████████████████████████████████████████████▍          | 87/100 [03:17<02:02,  9.43s/it]
 88%|███████████████████████████████████████████████████████████████████████▎         | 88/100 [03:18<01:25,  7.10s/it]
 89%|████████████████████████████████████████████████████████████████████████         | 89/100 [03:20<01:00,  5.47s/it]
 90%|████████████████████████████████████████████████████████████████████████▉        | 90/100 [03:22<00:43,  4.32s/it]
 91%|█████████████████████████████████████████████████████████████████████████▋       | 91/100 [03:23<00:31,  3.52s/it]
 92%|██████████████████████████████████████████████████████████████████████████▌      | 92/100 [03:25<00:23,  2.96s/it]
 93%|███████████████████████████████████████████████████████████████████████████▎     | 93/100 [03:27<00:17,  2.57s/it]
 94%|████████████████████████████████████████████████████████████████████████████▏    | 94/100 [03:28<00:13,  2.29s/it]
 95%|████████████████████████████████████████████████████████████████████████████▉    | 95/100 [03:30<00:10,  2.10s/it]
 96%|█████████████████████████████████████████████████████████████████████████████▊   | 96/100 [03:32<00:07,  1.98s/it]
 97%|██████████████████████████████████████████████████████████████████████████████▌  | 97/100 [03:33<00:05,  1.88s/it]
 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [03:35<00:03,  1.81s/it]
 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [03:37<00:01,  1.76s/it]
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [03:38<00:00,  2.19s/it]
Global model loss: 0.056399353472181495; global model accuracy: 0.9811399579048157
Round 3.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
  1%|▊                                                                                 | 1/100 [00:01<02:45,  1.67s/it]
  2%|█▋                                                                                | 2/100 [00:03<02:43,  1.67s/it]
  3%|██▍                                                                               | 3/100 [00:04<02:41,  1.66s/it]
  4%|███▎                                                                              | 4/100 [00:06<02:39,  1.66s/it]
  5%|████                                                                              | 5/100 [00:08<02:37,  1.66s/it]
  6%|████▉                                                                             | 6/100 [00:09<02:35,  1.66s/it]
  7%|█████▋                                                                            | 7/100 [00:11<02:34,  1.66s/it]
  8%|██████▌                                                                           | 8/100 [00:13<02:32,  1.65s/it]
  9%|███████▍                                                                          | 9/100 [00:14<02:30,  1.65s/it]
 10%|████████                                                                         | 10/100 [00:16<02:28,  1.65s/it]
 11%|████████▉                                                                        | 11/100 [00:18<02:26,  1.65s/it]
 12%|█████████▋                                                                       | 12/100 [00:19<02:25,  1.66s/it]
 13%|██████████▌                                                                      | 13/100 [00:21<02:24,  1.66s/it]
 14%|███████████▎                                                                     | 14/100 [00:23<02:22,  1.66s/it]
 15%|████████████▏                                                                    | 15/100 [00:24<02:21,  1.66s/it]
 16%|████████████▉                                                                    | 16/100 [00:26<02:19,  1.66s/it]
 17%|█████████████▊                                                                   | 17/100 [00:28<02:17,  1.66s/it]
 18%|██████████████▌                                                                  | 18/100 [00:29<02:15,  1.66s/it]
 19%|███████████████▍                                                                 | 19/100 [00:31<02:14,  1.66s/it]
 20%|████████████████▏                                                                | 20/100 [00:33<02:13,  1.67s/it]
 21%|█████████████████                                                                | 21/100 [00:34<02:11,  1.66s/it]
 22%|█████████████████▊                                                               | 22/100 [00:36<02:09,  1.66s/it]
 23%|██████████████████▋                                                              | 23/100 [00:38<02:07,  1.66s/it]
 24%|███████████████████▍                                                             | 24/100 [00:39<02:06,  1.66s/it]
 25%|████████████████████▎                                                            | 25/100 [00:41<02:04,  1.66s/it]
 26%|█████████████████████                                                            | 26/100 [00:43<02:03,  1.66s/it]
 27%|█████████████████████▊                                                           | 27/100 [00:44<02:01,  1.66s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.2s finished
the number of miss classified sampels is  230
 28%|██████████████████████▋                                                          | 28/100 [00:59<06:31,  5.44s/it]
 29%|███████████████████████▍                                                         | 29/100 [01:00<05:05,  4.30s/it]
 30%|████████████████████████▎                                                        | 30/100 [01:02<04:06,  3.51s/it]
 31%|█████████████████████████                                                        | 31/100 [01:04<03:23,  2.96s/it]
 32%|█████████████████████████▉                                                       | 32/100 [01:05<02:54,  2.57s/it]
 33%|██████████████████████████▋                                                      | 33/100 [01:07<02:33,  2.30s/it]
 34%|███████████████████████████▌                                                     | 34/100 [01:08<02:18,  2.10s/it]
 35%|████████████████████████████▎                                                    | 35/100 [01:10<02:07,  1.97s/it]
 36%|█████████████████████████████▏                                                   | 36/100 [01:12<02:00,  1.89s/it]
 37%|█████████████████████████████▉                                                   | 37/100 [01:14<01:54,  1.82s/it]
 38%|██████████████████████████████▊                                                  | 38/100 [01:15<01:49,  1.77s/it]
 39%|███████████████████████████████▌                                                 | 39/100 [01:17<01:45,  1.73s/it]
 40%|████████████████████████████████▍                                                | 40/100 [01:18<01:41,  1.70s/it]
 41%|█████████████████████████████████▏                                               | 41/100 [01:20<01:39,  1.69s/it]
 42%|██████████████████████████████████                                               | 42/100 [01:22<01:36,  1.67s/it]
 43%|██████████████████████████████████▊                                              | 43/100 [01:23<01:34,  1.66s/it]
 44%|███████████████████████████████████▋                                             | 44/100 [01:25<01:32,  1.66s/it]
 45%|████████████████████████████████████▍                                            | 45/100 [01:27<01:32,  1.68s/it]
 46%|█████████████████████████████████████▎                                           | 46/100 [01:28<01:30,  1.67s/it]
 47%|██████████████████████████████████████                                           | 47/100 [01:30<01:28,  1.67s/it]
 48%|██████████████████████████████████████▉                                          | 48/100 [01:32<01:26,  1.66s/it]
 49%|███████████████████████████████████████▋                                         | 49/100 [01:33<01:24,  1.65s/it]
 50%|████████████████████████████████████████▌                                        | 50/100 [01:35<01:22,  1.65s/it]
 51%|█████████████████████████████████████████▎                                       | 51/100 [01:37<01:21,  1.66s/it]
 52%|██████████████████████████████████████████                                       | 52/100 [01:38<01:19,  1.65s/it]
 53%|██████████████████████████████████████████▉                                      | 53/100 [01:40<01:17,  1.65s/it]
 54%|███████████████████████████████████████████▋                                     | 54/100 [01:42<01:15,  1.65s/it]
 55%|████████████████████████████████████████████▌                                    | 55/100 [01:43<01:14,  1.65s/it]
 56%|█████████████████████████████████████████████▎                                   | 56/100 [01:45<01:12,  1.65s/it]
 57%|██████████████████████████████████████████████▏                                  | 57/100 [01:46<01:10,  1.64s/it]
 58%|██████████████████████████████████████████████▉                                  | 58/100 [01:48<01:08,  1.64s/it]
 59%|███████████████████████████████████████████████▊                                 | 59/100 [01:50<01:07,  1.64s/it]
 60%|████████████████████████████████████████████████▌                                | 60/100 [01:51<01:05,  1.64s/it]
 61%|█████████████████████████████████████████████████▍                               | 61/100 [01:53<01:04,  1.65s/it]
 62%|██████████████████████████████████████████████████▏                              | 62/100 [01:55<01:02,  1.65s/it]
 63%|███████████████████████████████████████████████████                              | 63/100 [01:56<01:00,  1.64s/it]
 64%|███████████████████████████████████████████████████▊                             | 64/100 [01:58<00:59,  1.64s/it]
 65%|████████████████████████████████████████████████████▋                            | 65/100 [02:00<00:57,  1.64s/it]
 66%|█████████████████████████████████████████████████████▍                           | 66/100 [02:01<00:55,  1.64s/it]
 67%|██████████████████████████████████████████████████████▎                          | 67/100 [02:03<00:54,  1.64s/it]
 68%|███████████████████████████████████████████████████████                          | 68/100 [02:05<00:52,  1.64s/it]
 69%|███████████████████████████████████████████████████████▉                         | 69/100 [02:06<00:50,  1.64s/it]
 70%|████████████████████████████████████████████████████████▋                        | 70/100 [02:08<00:49,  1.65s/it]
 71%|█████████████████████████████████████████████████████████▌                       | 71/100 [02:10<00:47,  1.65s/it]
 72%|██████████████████████████████████████████████████████████▎                      | 72/100 [02:11<00:46,  1.65s/it]
 73%|███████████████████████████████████████████████████████████▏                     | 73/100 [02:13<00:44,  1.64s/it]
 74%|███████████████████████████████████████████████████████████▉                     | 74/100 [02:14<00:42,  1.64s/it]
 75%|████████████████████████████████████████████████████████████▊                    | 75/100 [02:16<00:41,  1.65s/it]
 76%|█████████████████████████████████████████████████████████████▌                   | 76/100 [02:18<00:39,  1.65s/it]
 77%|██████████████████████████████████████████████████████████████▎                  | 77/100 [02:19<00:37,  1.64s/it]
 78%|███████████████████████████████████████████████████████████████▏                 | 78/100 [02:21<00:36,  1.65s/it]
 79%|███████████████████████████████████████████████████████████████▉                 | 79/100 [02:23<00:34,  1.65s/it]
 80%|████████████████████████████████████████████████████████████████▊                | 80/100 [02:24<00:32,  1.65s/it]
 81%|█████████████████████████████████████████████████████████████████▌               | 81/100 [02:26<00:31,  1.65s/it]
 82%|██████████████████████████████████████████████████████████████████▍              | 82/100 [02:28<00:29,  1.65s/it]
 83%|███████████████████████████████████████████████████████████████████▏             | 83/100 [02:29<00:27,  1.65s/it]
 84%|████████████████████████████████████████████████████████████████████             | 84/100 [02:31<00:26,  1.64s/it]
 85%|████████████████████████████████████████████████████████████████████▊            | 85/100 [02:33<00:24,  1.65s/it]
 86%|█████████████████████████████████████████████████████████████████████▋           | 86/100 [02:34<00:23,  1.65s/it]
 87%|██████████████████████████████████████████████████████████████████████▍          | 87/100 [02:36<00:21,  1.65s/it]
 88%|███████████████████████████████████████████████████████████████████████▎         | 88/100 [02:38<00:19,  1.65s/it]
 89%|████████████████████████████████████████████████████████████████████████         | 89/100 [02:39<00:18,  1.65s/it]
 90%|████████████████████████████████████████████████████████████████████████▉        | 90/100 [02:41<00:16,  1.65s/it]
 91%|█████████████████████████████████████████████████████████████████████████▋       | 91/100 [02:42<00:14,  1.65s/it]
 92%|██████████████████████████████████████████████████████████████████████████▌      | 92/100 [02:44<00:13,  1.65s/it]
 93%|███████████████████████████████████████████████████████████████████████████▎     | 93/100 [02:46<00:11,  1.65s/it]
 94%|████████████████████████████████████████████████████████████████████████████▏    | 94/100 [02:47<00:09,  1.65s/it]
 95%|████████████████████████████████████████████████████████████████████████████▉    | 95/100 [02:49<00:08,  1.65s/it]
 96%|█████████████████████████████████████████████████████████████████████████████▊   | 96/100 [02:51<00:06,  1.65s/it]
 97%|██████████████████████████████████████████████████████████████████████████████▌  | 97/100 [02:52<00:04,  1.65s/it]
 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [02:54<00:03,  1.65s/it]
 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [02:56<00:01,  1.65s/it]
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [02:57<00:00,  1.78s/it]
Global model loss: 0.08008340218102815; global model accuracy: 0.9698019027709961
Round 4.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.2s finished
the number of miss classified sampels is  391
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [00:22<00:00,  4.48it/s]
Global model loss: 0.805769821976991; global model accuracy: 0.7652534246444702
Round 5.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.2s finished
the number of miss classified sampels is  2932
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [02:28<00:00,  1.49s/it]
Global model loss: 2.228477299086053; global model accuracy: 0.646946370601654
Round 6.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.2s finished
the number of miss classified sampels is  4490
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [03:46<00:00,  2.26s/it]
Global model loss: 4.57180075867313; global model accuracy: 0.5873737335205078
Round 7.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.2s finished
the number of miss classified sampels is  5231
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [04:32<00:00,  2.72s/it]
Global model loss: 7.691132786578953; global model accuracy: 0.5873737335205078
Round 8.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
  1%|▊                                                                                 | 1/100 [00:01<02:57,  1.79s/it]
  2%|█▋                                                                                | 2/100 [00:03<02:55,  1.79s/it]
  3%|██▍                                                                               | 3/100 [00:05<02:53,  1.78s/it]
  4%|███▎                                                                              | 4/100 [00:07<02:52,  1.80s/it]
  5%|████                                                                              | 5/100 [00:08<02:50,  1.79s/it]
  6%|████▉                                                                             | 6/100 [00:10<02:47,  1.78s/it]
  7%|█████▋                                                                            | 7/100 [00:12<02:44,  1.77s/it]
  8%|██████▌                                                                           | 8/100 [00:14<02:42,  1.76s/it]
  9%|███████▍                                                                          | 9/100 [00:15<02:40,  1.77s/it]
 10%|████████                                                                         | 10/100 [00:17<02:38,  1.77s/it]
 11%|████████▉                                                                        | 11/100 [00:19<02:37,  1.77s/it]
 12%|█████████▋                                                                       | 12/100 [00:21<02:36,  1.77s/it]
 13%|██████████▌                                                                      | 13/100 [00:23<02:34,  1.77s/it]
 14%|███████████▎                                                                     | 14/100 [00:24<02:32,  1.78s/it]
 15%|████████████▏                                                                    | 15/100 [00:26<02:30,  1.77s/it]
 16%|████████████▉                                                                    | 16/100 [00:28<02:29,  1.77s/it]
 17%|█████████████▊                                                                   | 17/100 [00:30<02:27,  1.77s/it]
 18%|██████████████▌                                                                  | 18/100 [00:31<02:25,  1.77s/it]
 19%|███████████████▍                                                                 | 19/100 [00:33<02:22,  1.76s/it]
 20%|████████████████▏                                                                | 20/100 [00:35<02:20,  1.76s/it]
 21%|█████████████████                                                                | 21/100 [00:37<02:20,  1.77s/it]
 22%|█████████████████▊                                                               | 22/100 [00:39<02:19,  1.79s/it]
 23%|██████████████████▋                                                              | 23/100 [00:40<02:17,  1.78s/it]
 24%|███████████████████▍                                                             | 24/100 [00:42<02:15,  1.78s/it]
 25%|████████████████████▎                                                            | 25/100 [00:44<02:13,  1.77s/it]
 26%|█████████████████████                                                            | 26/100 [00:46<02:10,  1.77s/it]
 27%|█████████████████████▊                                                           | 27/100 [00:47<02:08,  1.76s/it]
 28%|██████████████████████▋                                                          | 28/100 [00:49<02:06,  1.76s/it]
 29%|███████████████████████▍                                                         | 29/100 [00:51<02:04,  1.76s/it]
 30%|████████████████████████▎                                                        | 30/100 [00:53<02:03,  1.76s/it]
 31%|█████████████████████████                                                        | 31/100 [00:54<02:02,  1.77s/it]
 32%|█████████████████████████▉                                                       | 32/100 [00:56<02:00,  1.77s/it]
 33%|██████████████████████████▋                                                      | 33/100 [00:58<01:57,  1.76s/it]
 34%|███████████████████████████▌                                                     | 34/100 [01:00<01:56,  1.77s/it]
 35%|████████████████████████████▎                                                    | 35/100 [01:01<01:54,  1.76s/it]
 36%|█████████████████████████████▏                                                   | 36/100 [01:03<01:52,  1.76s/it]
 37%|█████████████████████████████▉                                                   | 37/100 [01:05<01:50,  1.76s/it]
 38%|██████████████████████████████▊                                                  | 38/100 [01:07<01:48,  1.75s/it]
 39%|███████████████████████████████▌                                                 | 39/100 [01:08<01:46,  1.75s/it]
 40%|████████████████████████████████▍                                                | 40/100 [01:10<01:45,  1.75s/it]
 41%|█████████████████████████████████▏                                               | 41/100 [01:12<01:43,  1.75s/it]
 42%|██████████████████████████████████                                               | 42/100 [01:14<01:42,  1.78s/it]
 43%|██████████████████████████████████▊                                              | 43/100 [01:16<01:41,  1.77s/it]
 44%|███████████████████████████████████▋                                             | 44/100 [01:17<01:39,  1.77s/it]
 45%|████████████████████████████████████▍                                            | 45/100 [01:19<01:36,  1.76s/it]
 46%|█████████████████████████████████████▎                                           | 46/100 [01:21<01:35,  1.76s/it]
 47%|██████████████████████████████████████                                           | 47/100 [01:23<01:33,  1.76s/it]
 48%|██████████████████████████████████████▉                                          | 48/100 [01:24<01:31,  1.76s/it]
 49%|███████████████████████████████████████▋                                         | 49/100 [01:26<01:29,  1.76s/it]
 50%|████████████████████████████████████████▌                                        | 50/100 [01:28<01:27,  1.76s/it]
 51%|█████████████████████████████████████████▎                                       | 51/100 [01:30<01:26,  1.76s/it]
 52%|██████████████████████████████████████████                                       | 52/100 [01:31<01:24,  1.76s/it]
 53%|██████████████████████████████████████████▉                                      | 53/100 [01:33<01:22,  1.76s/it]
 54%|███████████████████████████████████████████▋                                     | 54/100 [01:35<01:21,  1.77s/it]
 55%|████████████████████████████████████████████▌                                    | 55/100 [01:37<01:19,  1.77s/it]
 56%|█████████████████████████████████████████████▎                                   | 56/100 [01:38<01:17,  1.77s/it]
 57%|██████████████████████████████████████████████▏                                  | 57/100 [01:40<01:15,  1.76s/it]
 58%|██████████████████████████████████████████████▉                                  | 58/100 [01:42<01:13,  1.76s/it]
 59%|███████████████████████████████████████████████▊                                 | 59/100 [01:44<01:12,  1.76s/it]
 60%|████████████████████████████████████████████████▌                                | 60/100 [01:46<01:10,  1.76s/it]
 61%|█████████████████████████████████████████████████▍                               | 61/100 [01:47<01:08,  1.77s/it]
 62%|██████████████████████████████████████████████████▏                              | 62/100 [01:49<01:07,  1.77s/it]
 63%|███████████████████████████████████████████████████                              | 63/100 [01:51<01:05,  1.77s/it]
 64%|███████████████████████████████████████████████████▊                             | 64/100 [01:53<01:03,  1.77s/it]
 65%|████████████████████████████████████████████████████▋                            | 65/100 [01:54<01:02,  1.78s/it]
 66%|█████████████████████████████████████████████████████▍                           | 66/100 [01:56<01:00,  1.77s/it]
 67%|██████████████████████████████████████████████████████▎                          | 67/100 [01:58<00:57,  1.76s/it]
 68%|███████████████████████████████████████████████████████                          | 68/100 [02:00<00:55,  1.75s/it]
 69%|███████████████████████████████████████████████████████▉                         | 69/100 [02:01<00:54,  1.75s/it]
 70%|████████████████████████████████████████████████████████▋                        | 70/100 [02:03<00:52,  1.75s/it]
 71%|█████████████████████████████████████████████████████████▌                       | 71/100 [02:05<00:50,  1.75s/it]
 72%|██████████████████████████████████████████████████████████▎                      | 72/100 [02:07<00:49,  1.75s/it]
 73%|███████████████████████████████████████████████████████████▏                     | 73/100 [02:08<00:47,  1.75s/it]
 74%|███████████████████████████████████████████████████████████▉                     | 74/100 [02:10<00:45,  1.74s/it]
 75%|████████████████████████████████████████████████████████████▊                    | 75/100 [02:12<00:43,  1.75s/it]
 76%|█████████████████████████████████████████████████████████████▌                   | 76/100 [02:14<00:41,  1.75s/it]
 77%|██████████████████████████████████████████████████████████████▎                  | 77/100 [02:15<00:39,  1.71s/it]
 78%|███████████████████████████████████████████████████████████████▏                 | 78/100 [02:17<00:37,  1.69s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.2s finished
the number of miss classified sampels is  5199
 79%|███████████████████████████████████████████████████████████████▉                 | 79/100 [06:44<28:29, 81.39s/it]
 80%|████████████████████████████████████████████████████████████████▊                | 80/100 [06:46<19:09, 57.49s/it]
 81%|█████████████████████████████████████████████████████████████████▌               | 81/100 [06:48<12:54, 40.74s/it]
 82%|██████████████████████████████████████████████████████████████████▍              | 82/100 [06:49<08:42, 29.02s/it]
 83%|███████████████████████████████████████████████████████████████████▏             | 83/100 [06:51<05:53, 20.82s/it]
 84%|████████████████████████████████████████████████████████████████████             | 84/100 [06:53<04:01, 15.08s/it]
 85%|████████████████████████████████████████████████████████████████████▊            | 85/100 [06:54<02:45, 11.05s/it]
 86%|█████████████████████████████████████████████████████████████████████▋           | 86/100 [06:56<01:55,  8.24s/it]
 87%|██████████████████████████████████████████████████████████████████████▍          | 87/100 [06:58<01:21,  6.27s/it]
 88%|███████████████████████████████████████████████████████████████████████▎         | 88/100 [06:59<00:58,  4.89s/it]
 89%|████████████████████████████████████████████████████████████████████████         | 89/100 [07:01<00:43,  3.93s/it]
 90%|████████████████████████████████████████████████████████████████████████▉        | 90/100 [07:03<00:32,  3.24s/it]
 91%|█████████████████████████████████████████████████████████████████████████▋       | 91/100 [07:04<00:24,  2.77s/it]
 92%|██████████████████████████████████████████████████████████████████████████▌      | 92/100 [07:06<00:19,  2.44s/it]
 93%|███████████████████████████████████████████████████████████████████████████▎     | 93/100 [07:08<00:15,  2.21s/it]
 94%|████████████████████████████████████████████████████████████████████████████▏    | 94/100 [07:09<00:12,  2.05s/it]
 95%|████████████████████████████████████████████████████████████████████████████▉    | 95/100 [07:11<00:09,  1.94s/it]
 96%|█████████████████████████████████████████████████████████████████████████████▊   | 96/100 [07:13<00:07,  1.86s/it]
 97%|██████████████████████████████████████████████████████████████████████████████▌  | 97/100 [07:14<00:05,  1.81s/it]
 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [07:16<00:03,  1.77s/it]
 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [07:18<00:01,  1.74s/it]
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [07:19<00:00,  4.40s/it]
Global model loss: 1.8344421140176637; global model accuracy: 0.7665107846260071
Round 9.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
  1%|▊                                                                                 | 1/100 [00:01<02:56,  1.78s/it]
  2%|█▋                                                                                | 2/100 [00:03<02:55,  1.79s/it]
  3%|██▍                                                                               | 3/100 [00:05<02:54,  1.80s/it]
  4%|███▎                                                                              | 4/100 [00:07<02:52,  1.79s/it]
  5%|████                                                                              | 5/100 [00:08<02:50,  1.79s/it]
  6%|████▉                                                                             | 6/100 [00:10<02:48,  1.79s/it]
  7%|█████▋                                                                            | 7/100 [00:12<02:45,  1.78s/it]
  8%|██████▌                                                                           | 8/100 [00:14<02:40,  1.75s/it]
  9%|███████▍                                                                          | 9/100 [00:15<02:37,  1.73s/it]
 10%|████████                                                                         | 10/100 [00:17<02:34,  1.72s/it]
 11%|████████▉                                                                        | 11/100 [00:19<02:32,  1.71s/it]
 12%|█████████▋                                                                       | 12/100 [00:21<02:31,  1.72s/it]
 13%|██████████▌                                                                      | 13/100 [00:22<02:28,  1.71s/it]
 14%|███████████▎                                                                     | 14/100 [00:24<02:27,  1.71s/it]
 15%|████████████▏                                                                    | 15/100 [00:26<02:25,  1.71s/it]
 16%|████████████▉                                                                    | 16/100 [00:27<02:21,  1.69s/it]
 17%|█████████████▊                                                                   | 17/100 [00:29<02:19,  1.69s/it]
 18%|██████████████▌                                                                  | 18/100 [00:31<02:17,  1.68s/it]
 19%|███████████████▍                                                                 | 19/100 [00:32<02:16,  1.69s/it]
 20%|████████████████▏                                                                | 20/100 [00:34<02:16,  1.71s/it]
 21%|█████████████████                                                                | 21/100 [00:36<02:17,  1.73s/it]
 22%|█████████████████▊                                                               | 22/100 [00:38<02:16,  1.75s/it]
 23%|██████████████████▋                                                              | 23/100 [00:39<02:15,  1.76s/it]
 24%|███████████████████▍                                                             | 24/100 [00:41<02:14,  1.77s/it]
 25%|████████████████████▎                                                            | 25/100 [00:43<02:12,  1.77s/it]
 26%|█████████████████████                                                            | 26/100 [00:45<02:08,  1.74s/it]
 27%|█████████████████████▊                                                           | 27/100 [00:46<02:05,  1.72s/it]
 28%|██████████████████████▋                                                          | 28/100 [00:48<02:02,  1.70s/it]
 29%|███████████████████████▍                                                         | 29/100 [00:50<01:59,  1.69s/it]
 30%|████████████████████████▎                                                        | 30/100 [00:51<01:57,  1.69s/it]
 31%|█████████████████████████                                                        | 31/100 [00:53<01:56,  1.69s/it]
 32%|█████████████████████████▉                                                       | 32/100 [00:55<01:54,  1.69s/it]
 33%|██████████████████████████▋                                                      | 33/100 [00:56<01:52,  1.68s/it]
 34%|███████████████████████████▌                                                     | 34/100 [00:58<01:50,  1.68s/it]
 35%|████████████████████████████▎                                                    | 35/100 [01:00<01:48,  1.68s/it]
 36%|█████████████████████████████▏                                                   | 36/100 [01:01<01:47,  1.67s/it]
 37%|█████████████████████████████▉                                                   | 37/100 [01:03<01:45,  1.67s/it]
 38%|██████████████████████████████▊                                                  | 38/100 [01:05<01:43,  1.67s/it]
 39%|███████████████████████████████▌                                                 | 39/100 [01:06<01:41,  1.67s/it]
 40%|████████████████████████████████▍                                                | 40/100 [01:08<01:39,  1.66s/it]
 41%|█████████████████████████████████▏                                               | 41/100 [01:10<01:38,  1.66s/it]
 42%|██████████████████████████████████                                               | 42/100 [01:11<01:35,  1.65s/it]
 43%|██████████████████████████████████▊                                              | 43/100 [01:13<01:34,  1.66s/it]
 44%|███████████████████████████████████▋                                             | 44/100 [01:15<01:32,  1.66s/it]
 45%|████████████████████████████████████▍                                            | 45/100 [01:16<01:31,  1.66s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.3s finished
the number of miss classified sampels is  2984
 46%|█████████████████████████████████████▎                                           | 46/100 [03:53<43:26, 48.27s/it]
 47%|██████████████████████████████████████                                           | 47/100 [03:55<30:17, 34.29s/it]
 48%|██████████████████████████████████████▉                                          | 48/100 [03:57<21:14, 24.50s/it]
 49%|███████████████████████████████████████▋                                         | 49/100 [03:58<15:00, 17.65s/it]
 50%|████████████████████████████████████████▌                                        | 50/100 [04:00<10:42, 12.86s/it]
 51%|█████████████████████████████████████████▎                                       | 51/100 [04:02<07:45,  9.50s/it]
 52%|██████████████████████████████████████████                                       | 52/100 [04:03<05:43,  7.16s/it]
 53%|██████████████████████████████████████████▉                                      | 53/100 [04:05<04:19,  5.51s/it]
 54%|███████████████████████████████████████████▋                                     | 54/100 [04:07<03:20,  4.36s/it]
 55%|████████████████████████████████████████████▌                                    | 55/100 [04:08<02:39,  3.55s/it]
 56%|█████████████████████████████████████████████▎                                   | 56/100 [04:10<02:11,  2.99s/it]
 57%|██████████████████████████████████████████████▏                                  | 57/100 [04:12<01:51,  2.59s/it]
 58%|██████████████████████████████████████████████▉                                  | 58/100 [04:13<01:37,  2.32s/it]
 59%|███████████████████████████████████████████████▊                                 | 59/100 [04:15<01:27,  2.12s/it]
 60%|████████████████████████████████████████████████▌                                | 60/100 [04:17<01:19,  1.99s/it]
 61%|█████████████████████████████████████████████████▍                               | 61/100 [04:18<01:13,  1.90s/it]
 62%|██████████████████████████████████████████████████▏                              | 62/100 [04:20<01:09,  1.83s/it]
 63%|███████████████████████████████████████████████████                              | 63/100 [04:22<01:04,  1.75s/it]
 64%|███████████████████████████████████████████████████▊                             | 64/100 [04:23<01:01,  1.71s/it]
 65%|████████████████████████████████████████████████████▋                            | 65/100 [04:25<00:58,  1.68s/it]
 66%|█████████████████████████████████████████████████████▍                           | 66/100 [04:27<00:56,  1.66s/it]
 67%|██████████████████████████████████████████████████████▎                          | 67/100 [04:28<00:54,  1.64s/it]
 68%|███████████████████████████████████████████████████████                          | 68/100 [04:30<00:52,  1.63s/it]
 69%|███████████████████████████████████████████████████████▉                         | 69/100 [04:31<00:50,  1.62s/it]
 70%|████████████████████████████████████████████████████████▋                        | 70/100 [04:33<00:48,  1.61s/it]
 71%|█████████████████████████████████████████████████████████▌                       | 71/100 [04:34<00:46,  1.60s/it]
 72%|██████████████████████████████████████████████████████████▎                      | 72/100 [04:36<00:44,  1.60s/it]
 73%|███████████████████████████████████████████████████████████▏                     | 73/100 [04:38<00:43,  1.60s/it]
 74%|███████████████████████████████████████████████████████████▉                     | 74/100 [04:39<00:41,  1.60s/it]
 75%|████████████████████████████████████████████████████████████▊                    | 75/100 [04:41<00:40,  1.61s/it]
 76%|█████████████████████████████████████████████████████████████▌                   | 76/100 [04:43<00:38,  1.61s/it]
 77%|██████████████████████████████████████████████████████████████▎                  | 77/100 [04:44<00:36,  1.61s/it]
 78%|███████████████████████████████████████████████████████████████▏                 | 78/100 [04:46<00:35,  1.60s/it]
 79%|███████████████████████████████████████████████████████████████▉                 | 79/100 [04:47<00:33,  1.60s/it]
 80%|████████████████████████████████████████████████████████████████▊                | 80/100 [04:49<00:31,  1.59s/it]
 81%|█████████████████████████████████████████████████████████████████▌               | 81/100 [04:50<00:30,  1.60s/it]
 82%|██████████████████████████████████████████████████████████████████▍              | 82/100 [04:52<00:28,  1.61s/it]
 83%|███████████████████████████████████████████████████████████████████▏             | 83/100 [04:54<00:27,  1.61s/it]
 84%|████████████████████████████████████████████████████████████████████             | 84/100 [04:55<00:25,  1.61s/it]
 85%|████████████████████████████████████████████████████████████████████▊            | 85/100 [04:57<00:24,  1.61s/it]
 86%|█████████████████████████████████████████████████████████████████████▋           | 86/100 [04:59<00:22,  1.60s/it]
 87%|██████████████████████████████████████████████████████████████████████▍          | 87/100 [05:00<00:20,  1.59s/it]
 88%|███████████████████████████████████████████████████████████████████████▎         | 88/100 [05:02<00:19,  1.59s/it]
 89%|████████████████████████████████████████████████████████████████████████         | 89/100 [05:03<00:17,  1.59s/it]
 90%|████████████████████████████████████████████████████████████████████████▉        | 90/100 [05:05<00:15,  1.59s/it]
 91%|█████████████████████████████████████████████████████████████████████████▋       | 91/100 [05:06<00:14,  1.59s/it]
 92%|██████████████████████████████████████████████████████████████████████████▌      | 92/100 [05:08<00:12,  1.59s/it]
 93%|███████████████████████████████████████████████████████████████████████████▎     | 93/100 [05:10<00:11,  1.60s/it]
 94%|████████████████████████████████████████████████████████████████████████████▏    | 94/100 [05:11<00:09,  1.60s/it]
 95%|████████████████████████████████████████████████████████████████████████████▉    | 95/100 [05:13<00:08,  1.60s/it]
 96%|█████████████████████████████████████████████████████████████████████████████▊   | 96/100 [05:14<00:06,  1.60s/it]
 97%|██████████████████████████████████████████████████████████████████████████████▌  | 97/100 [05:16<00:04,  1.60s/it]
 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [05:18<00:03,  1.60s/it]
 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [05:19<00:01,  1.60s/it]
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [05:21<00:00,  3.21s/it]
Global model loss: 3.510807255450716; global model accuracy: 0.5873957872390747
Round 10.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
  1%|▊                                                                                 | 1/100 [00:01<02:50,  1.72s/it]
  2%|█▋                                                                                | 2/100 [00:03<02:49,  1.73s/it]
  3%|██▍                                                                               | 3/100 [00:05<02:47,  1.72s/it]
  4%|███▎                                                                              | 4/100 [00:06<02:45,  1.73s/it]
  5%|████                                                                              | 5/100 [00:08<02:43,  1.72s/it]
  6%|████▉                                                                             | 6/100 [00:10<02:40,  1.70s/it]
  7%|█████▋                                                                            | 7/100 [00:11<02:38,  1.70s/it]
  8%|██████▌                                                                           | 8/100 [00:13<02:35,  1.69s/it]
  9%|███████▍                                                                          | 9/100 [00:15<02:34,  1.70s/it]
 10%|████████                                                                         | 10/100 [00:17<02:33,  1.71s/it]
 11%|████████▉                                                                        | 11/100 [00:18<02:32,  1.72s/it]
 12%|█████████▋                                                                       | 12/100 [00:20<02:30,  1.71s/it]
 13%|██████████▌                                                                      | 13/100 [00:22<02:29,  1.72s/it]
 14%|███████████▎                                                                     | 14/100 [00:23<02:28,  1.72s/it]
 15%|████████████▏                                                                    | 15/100 [00:25<02:25,  1.72s/it]
 16%|████████████▉                                                                    | 16/100 [00:27<02:24,  1.72s/it]
 17%|█████████████▊                                                                   | 17/100 [00:29<02:23,  1.73s/it]
 18%|██████████████▌                                                                  | 18/100 [00:30<02:21,  1.73s/it]
 19%|███████████████▍                                                                 | 19/100 [00:32<02:19,  1.72s/it]
 20%|████████████████▏                                                                | 20/100 [00:34<02:16,  1.71s/it]
 21%|█████████████████                                                                | 21/100 [00:35<02:14,  1.71s/it]
 22%|█████████████████▊                                                               | 22/100 [00:37<02:12,  1.70s/it]
 23%|██████████████████▋                                                              | 23/100 [00:39<02:10,  1.70s/it]
 24%|███████████████████▍                                                             | 24/100 [00:41<02:09,  1.70s/it]
 25%|████████████████████▎                                                            | 25/100 [00:42<02:07,  1.71s/it]
 26%|█████████████████████                                                            | 26/100 [00:44<02:05,  1.70s/it]
 27%|█████████████████████▊                                                           | 27/100 [00:46<02:04,  1.70s/it]
 28%|██████████████████████▋                                                          | 28/100 [00:47<02:02,  1.70s/it]
 29%|███████████████████████▍                                                         | 29/100 [00:49<02:00,  1.69s/it]
 30%|████████████████████████▎                                                        | 30/100 [00:51<01:59,  1.71s/it]
 31%|█████████████████████████                                                        | 31/100 [00:53<01:57,  1.71s/it]
 32%|█████████████████████████▉                                                       | 32/100 [00:54<01:56,  1.71s/it]
 33%|██████████████████████████▋                                                      | 33/100 [00:56<01:54,  1.71s/it]
 34%|███████████████████████████▌                                                     | 34/100 [00:58<01:52,  1.70s/it]
 35%|████████████████████████████▎                                                    | 35/100 [00:59<01:50,  1.69s/it]
 36%|█████████████████████████████▏                                                   | 36/100 [01:01<01:48,  1.69s/it]
 37%|█████████████████████████████▉                                                   | 37/100 [01:03<01:46,  1.70s/it]
 38%|██████████████████████████████▊                                                  | 38/100 [01:04<01:44,  1.69s/it]
 39%|███████████████████████████████▌                                                 | 39/100 [01:06<01:43,  1.70s/it]
 40%|████████████████████████████████▍                                                | 40/100 [01:08<01:42,  1.70s/it]
 41%|█████████████████████████████████▏                                               | 41/100 [01:09<01:40,  1.70s/it]
 42%|██████████████████████████████████                                               | 42/100 [01:11<01:38,  1.69s/it]
 43%|██████████████████████████████████▊                                              | 43/100 [01:13<01:36,  1.69s/it]
 44%|███████████████████████████████████▋                                             | 44/100 [01:15<01:34,  1.69s/it]
 45%|████████████████████████████████████▍                                            | 45/100 [01:16<01:33,  1.70s/it]
 46%|█████████████████████████████████████▎                                           | 46/100 [01:18<01:31,  1.70s/it]
 47%|██████████████████████████████████████                                           | 47/100 [01:20<01:30,  1.70s/it]
 48%|██████████████████████████████████████▉                                          | 48/100 [01:21<01:28,  1.70s/it]
 49%|███████████████████████████████████████▋                                         | 49/100 [01:23<01:26,  1.70s/it]
 50%|████████████████████████████████████████▌                                        | 50/100 [01:25<01:24,  1.69s/it]
 51%|█████████████████████████████████████████▎                                       | 51/100 [01:26<01:22,  1.69s/it]
 52%|██████████████████████████████████████████                                       | 52/100 [01:28<01:20,  1.69s/it]
 53%|██████████████████████████████████████████▉                                      | 53/100 [01:30<01:19,  1.69s/it]
 54%|███████████████████████████████████████████▋                                     | 54/100 [01:32<01:18,  1.70s/it]
 55%|████████████████████████████████████████████▌                                    | 55/100 [01:33<01:16,  1.69s/it]
 56%|█████████████████████████████████████████████▎                                   | 56/100 [01:35<01:14,  1.70s/it]
 57%|██████████████████████████████████████████████▏                                  | 57/100 [01:37<01:13,  1.71s/it]
 58%|██████████████████████████████████████████████▉                                  | 58/100 [01:38<01:12,  1.72s/it]
 59%|███████████████████████████████████████████████▊                                 | 59/100 [01:40<01:10,  1.72s/it]
 60%|████████████████████████████████████████████████▌                                | 60/100 [01:42<01:08,  1.72s/it]
 61%|█████████████████████████████████████████████████▍                               | 61/100 [01:44<01:06,  1.72s/it]
 62%|██████████████████████████████████████████████████▏                              | 62/100 [01:45<01:04,  1.71s/it]
 63%|███████████████████████████████████████████████████                              | 63/100 [01:47<01:03,  1.71s/it]
 64%|███████████████████████████████████████████████████▊                             | 64/100 [01:49<01:02,  1.74s/it]
 65%|████████████████████████████████████████████████████▋                            | 65/100 [01:50<01:00,  1.73s/it]
 66%|█████████████████████████████████████████████████████▍                           | 66/100 [01:52<00:58,  1.73s/it]
 67%|██████████████████████████████████████████████████████▎                          | 67/100 [01:54<00:56,  1.72s/it]
 68%|███████████████████████████████████████████████████████                          | 68/100 [01:56<00:55,  1.72s/it]
 69%|███████████████████████████████████████████████████████▉                         | 69/100 [01:57<00:53,  1.73s/it]
 70%|████████████████████████████████████████████████████████▋                        | 70/100 [01:59<00:51,  1.73s/it]
 71%|█████████████████████████████████████████████████████████▌                       | 71/100 [02:01<00:50,  1.73s/it]
 72%|██████████████████████████████████████████████████████████▎                      | 72/100 [02:03<00:48,  1.72s/it]
 73%|███████████████████████████████████████████████████████████▏                     | 73/100 [02:04<00:46,  1.73s/it]
 74%|███████████████████████████████████████████████████████████▉                     | 74/100 [02:06<00:44,  1.71s/it]
 75%|████████████████████████████████████████████████████████████▊                    | 75/100 [02:08<00:42,  1.72s/it]
 76%|█████████████████████████████████████████████████████████████▌                   | 76/100 [02:09<00:41,  1.72s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.2s finished
the number of miss classified sampels is  5261
 77%|██████████████████████████████████████████████████████████████▎                  | 77/100 [06:34<30:50, 80.47s/it]
 78%|███████████████████████████████████████████████████████████████▏                 | 78/100 [06:35<20:50, 56.85s/it]
 79%|███████████████████████████████████████████████████████████████▉                 | 79/100 [06:37<14:06, 40.30s/it]
 80%|████████████████████████████████████████████████████████████████▊                | 80/100 [06:39<09:34, 28.72s/it]
 81%|█████████████████████████████████████████████████████████████████▌               | 81/100 [06:40<06:31, 20.61s/it]
 82%|██████████████████████████████████████████████████████████████████▍              | 82/100 [06:42<04:29, 14.95s/it]
 83%|███████████████████████████████████████████████████████████████████▏             | 83/100 [06:44<03:06, 10.97s/it]
 84%|████████████████████████████████████████████████████████████████████             | 84/100 [06:46<02:11,  8.19s/it]
 85%|████████████████████████████████████████████████████████████████████▊            | 85/100 [06:47<01:33,  6.24s/it]
 86%|█████████████████████████████████████████████████████████████████████▋           | 86/100 [06:49<01:08,  4.88s/it]
 87%|██████████████████████████████████████████████████████████████████████▍          | 87/100 [06:51<00:50,  3.92s/it]
 88%|███████████████████████████████████████████████████████████████████████▎         | 88/100 [06:52<00:39,  3.26s/it]
 89%|████████████████████████████████████████████████████████████████████████         | 89/100 [06:54<00:30,  2.80s/it]
 90%|████████████████████████████████████████████████████████████████████████▉        | 90/100 [06:56<00:24,  2.46s/it]
 91%|█████████████████████████████████████████████████████████████████████████▋       | 91/100 [06:57<00:20,  2.25s/it]
 92%|██████████████████████████████████████████████████████████████████████████▌      | 92/100 [06:59<00:16,  2.11s/it]
 93%|███████████████████████████████████████████████████████████████████████████▎     | 93/100 [07:01<00:14,  2.02s/it]
 94%|████████████████████████████████████████████████████████████████████████████▏    | 94/100 [07:03<00:11,  1.93s/it]
 95%|████████████████████████████████████████████████████████████████████████████▉    | 95/100 [07:05<00:09,  1.87s/it]
 96%|█████████████████████████████████████████████████████████████████████████████▊   | 96/100 [07:06<00:07,  1.82s/it]
 97%|██████████████████████████████████████████████████████████████████████████████▌  | 97/100 [07:08<00:05,  1.78s/it]
 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [07:10<00:03,  1.75s/it]
 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [07:11<00:01,  1.73s/it]
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [07:13<00:00,  4.34s/it]
Global model loss: 0.8507050739322711; global model accuracy: 0.7916574478149414
In [74]:
peers_selected
Out[74]:
[50]
In [75]:
FI_dic1
Out[75]:
{0: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803]),
 1: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803]),
 2: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803]),
 3: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803]),
 4: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803]),
 5: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803]),
 6: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803]),
 7: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803]),
 8: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803]),
 9: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
        0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
        0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
        0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
        0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
        0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
        0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
        0.14058599, 0.02268127, 0.00775547, 0.02748803])}
In [76]:
FI_dic1[9]
Out[76]:
array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
       0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
       0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
       0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
       0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
       0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
       0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
       0.14058599, 0.02268127, 0.00775547, 0.02748803])
In [77]:
FI_dic1[9]
Out[77]:
array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,
       0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,
       0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,
       0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,
       0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,
       0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,
       0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,
       0.14058599, 0.02268127, 0.00775547, 0.02748803])
In [78]:
z=FI_dic1[9].max(axis = 0)
In [79]:
z
Out[79]:
0.14358194388079848
In [80]:
names = ['temp_hand','acceleration_16_x_hand',
        'acceleration_16_y_hand','acceleration_16_z_hand','acceleration_6_x_hand',
        'acceleration_6_y_hand','acceleration_6_z_hand','gyroscope_x_hand','gyroscope_y_hand',
        'gyroscope_z_hand','magnetometer_x_hand','magnetometer_y_hand','magnetometer_z_hand',
        'temp_chest','acceleration_16_x_chest','acceleration_16_y_chest','acceleration_16_z_chest','acceleration_6_x_chest',
        'acceleration_6_y_chest','acceleration_6_z_chest','gyroscope_x_chest','gyroscope_y_chest','gyroscope_z_chest',
        'magnetometer_x_chest','magnetometer_y_chest','magnetometer_z_chest','temp_ankle','acceleration_16_x_ankle',
        'acceleration_16_y_ankle','acceleration_16_z_ankle','acceleration_6_x_ankle','acceleration_6_y_ankle',
        'acceleration_6_z_ankle','gyroscope_x_ankle','gyroscope_y_ankle','gyroscope_z_ankle','magnetometer_x_ankle',
        'magnetometer_y_ankle','magnetometer_z_ankle']
In [81]:
sort_index = np.argsort(FI_dic1[9])
In [82]:
for x in sort_index:
    print(names[x], ', ', FI_dic1[9][x])
gyroscope_x_hand ,  0.001339578209529386
acceleration_6_y_hand ,  0.001606558887748289
acceleration_16_y_hand ,  0.0018787909132091745
acceleration_16_x_chest ,  0.0022176706422468036
acceleration_16_z_ankle ,  0.0022480971625748777
acceleration_6_z_ankle ,  0.0023913839855701775
acceleration_6_x_chest ,  0.002403746179862327
gyroscope_y_hand ,  0.0025097802088887384
gyroscope_x_chest ,  0.0029898374334842324
gyroscope_y_chest ,  0.0034150962097039238
magnetometer_z_hand ,  0.003869476254612703
temp_ankle ,  0.004529408747298203
acceleration_6_y_ankle ,  0.006439416466931228
acceleration_16_y_ankle ,  0.006578886906287925
magnetometer_y_ankle ,  0.007755468044512387
magnetometer_y_hand ,  0.008436874146099699
gyroscope_z_hand ,  0.00889068295023216
acceleration_16_z_hand ,  0.009767340493272414
gyroscope_z_chest ,  0.010422927880039217
temp_chest ,  0.011298379110955319
acceleration_6_z_hand ,  0.011953481433716266
magnetometer_x_chest ,  0.012790397877654609
magnetometer_y_chest ,  0.015686044856015324
acceleration_16_x_hand ,  0.021516610095884795
magnetometer_x_ankle ,  0.022681269695119605
temp_hand ,  0.02392711201465511
acceleration_6_x_hand ,  0.02580065752957123
magnetometer_z_ankle ,  0.027488030896204812
acceleration_6_z_chest ,  0.03199341773861375
acceleration_16_z_chest ,  0.03231549779754809
gyroscope_y_ankle ,  0.03591957265048234
acceleration_16_y_chest ,  0.03796888825062477
acceleration_6_y_chest ,  0.038467231682065194
magnetometer_x_hand ,  0.05876695927124097
gyroscope_x_ankle ,  0.06767148823051926
acceleration_16_x_ankle ,  0.07049980505959738
acceleration_6_x_ankle ,  0.07939620457238372
gyroscope_z_ankle ,  0.1405859856342449
magnetometer_z_chest ,  0.14358194388079848
In [83]:
len(FI_dic1[9])
Out[83]:
39
In [84]:
len(names)
Out[84]:
39
In [ ]: