Explainable-Federated-Learn.../Attack detection via randon...

190 KiB
Raw Permalink Blame History

In [1]:
#IMPORTS

import numpy as np
import random
import tensorflow as tf
import tensorflow.keras as kr
import tensorflow.keras.backend as K
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense
from tensorflow.keras.datasets import mnist
import os
import csv

from scipy.spatial.distance import euclidean
from sklearn.metrics import confusion_matrix

from time import sleep
from tqdm import tqdm

import copy
import numpy
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
import pandas as pd
import matplotlib.pyplot as plt
import math
import seaborn as sns
from numpy.random import RandomState
import scipy as scp
from sklearn.model_selection import train_test_split
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder, LabelEncoder
from keras.models import Sequential
from keras.layers import Dense
from keras import optimizers
from keras.callbacks import EarlyStopping,ModelCheckpoint
from keras.utils import to_categorical
from keras import backend as K
from itertools import product
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from sklearn.metrics import f1_score
from sklearn.metrics import roc_auc_score
from sklearn.metrics import confusion_matrix

from sklearn import mixture

from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
%matplotlib inline
Using TensorFlow backend.
In [2]:
# Enter here the data set you want to explain (adult, activity, or synthatic)

data_set = 'activity'

# Enter here the numb er of peers you want in the experiments

n_peers = 100

# Enter here the type of the attack (Byzantine, poisoning, label_flipping)
attack_type = 'Byzantine'

# the targeted features in case the attack is poisoning attack 
feature_attacked = [3,5,8]

# Enter here the number of attacker peers you want, keep the number of attacker less that 1/2 of the n_peers
number_attackers = 3

# enter here the Number of global training epochs, the start and ending epochs of the attacks
n_rounds = 10
start_attack_round = 3
end_attack_round = 7

# the threshold for attack detection

alpha = 1.2

beta = 1/4
In [3]:
# the random state we will use in the experiments. It can be changed 

rs = RandomState(92)
In [4]:
# preprocessing adults data set

if data_set == 'adult':
    #Load dataset into a pandas DataFrame
    adult_data = pd.read_csv('adult_data.csv', na_values='?')
    # Drop all records with missing values
    adult_data.dropna(inplace=True)
    adult_data.reset_index(drop=True, inplace=True)

    # Drop fnlwgt, not interesting for ML
    adult_data.drop('fnlwgt', axis=1, inplace=True)
    adult_data.drop('education', axis=1, inplace=True)

#     merging some similar features.
    adult_data['marital-status'].replace('Married-civ-spouse', 'Married', inplace=True)
    adult_data['marital-status'].replace('Divorced', 'Unmarried', inplace=True)
    adult_data['marital-status'].replace('Never-married', 'Unmarried', inplace=True)
    adult_data['marital-status'].replace('Separated', 'Unmarried', inplace=True)
    adult_data['marital-status'].replace('Widowed', 'Unmarried', inplace=True)
    adult_data['marital-status'].replace('Married-spouse-absent', 'Married', inplace=True)
    adult_data['marital-status'].replace('Married-AF-spouse', 'Married', inplace=True)
    
    adult_data = pd.concat([adult_data,pd.get_dummies(adult_data['income'], prefix='income')],axis=1)
    adult_data.drop('income', axis=1, inplace=True)
    obj_columns = adult_data.select_dtypes(['object']).columns
    adult_data[obj_columns] = adult_data[obj_columns].astype('category')
    # Convert numerics to floats and normalize
    num_columns = adult_data.select_dtypes(['int64']).columns
    adult_data[num_columns] = adult_data[num_columns].astype('float64')
    for c in num_columns:
        #adult[c] -= adult[c].mean()
        #adult[c] /= adult[c].std()
         adult_data[c] = (adult_data[c] - adult_data[c].min()) / (adult_data[c].max()-adult_data[c].min())
    # 'workclass', 'marital-status', 'occupation', 'relationship' ,'race', 'gender', 'native-country'
    # adult_data['income'] = adult_data['income'].cat.codes
    adult_data['marital-status'] = adult_data['marital-status'].cat.codes
    adult_data['occupation'] = adult_data['occupation'].cat.codes
    adult_data['relationship'] = adult_data['relationship'].cat.codes
    adult_data['race'] = adult_data['race'].cat.codes
    adult_data['gender'] = adult_data['gender'].cat.codes
    adult_data['native-country'] = adult_data['native-country'].cat.codes
    adult_data['workclass'] = adult_data['workclass'].cat.codes

    num_columns = adult_data.select_dtypes(['int8']).columns
    adult_data[num_columns] = adult_data[num_columns].astype('float64')
    for c in num_columns:
        #adult[c] -= adult[c].mean()
        #adult[c] /= adult[c].std()
         adult_data[c] = (adult_data[c] - adult_data[c].min()) / (adult_data[c].max()-adult_data[c].min())
    display(adult_data.info())
    display(adult_data.head(10))
    
    adult_data = adult_data.to_numpy()
    
#     splite the data to train and test datasets
    X_train, X_test, y_train, y_test = train_test_split(adult_data[:,:-2],adult_data[:,-2:], test_size=0.07, random_state=rs)
#     the names of the features
    names = ['age','workclass','educational-num','marital-status','occupation',
         'relationship','race','gender','capital-gain','capital-loss','hours-per-week','native-country']
    Features_number = len(X_train[0])
In [5]:
if data_set == 'synthatic':
    #generate the data
    X, y = make_classification(n_samples=1000000, n_features=10, n_redundant=3, n_repeated=2, #n_classes=3, 
                           n_informative=5, n_clusters_per_class=4, 
                           random_state=42)
    y = pd.DataFrame(data=y, columns=["y"])
    y = pd.get_dummies(y['y'], prefix='y')
    y = y.to_numpy()
    X_train, X_test, y_train, y_test = train_test_split(X,y, test_size=0.07, random_state=rs)
    #     the names of the features
    names = ['X(0)','X(1)','X(2)','X(3)','X(4)','X(5)','X(6)','X(7)','X(8)','X(9)']
    Features_number = len(X_train[0])
In [6]:
if data_set == 'activity':
    #Load dataset into a pandas DataFrame
    activity = pd.read_csv("activity_3_original.csv", sep=',')
#      drop some features that have non value in the majority of the samples
    to_drop = ['subject', 'timestamp', 'heart_rate','activityID']
    activity.drop(axis=1, columns=to_drop, inplace=True)
#     prepare the truth
    activity = pd.concat([activity,pd.get_dummies(activity['motion'], prefix='motion')],axis=1)
    activity.drop('motion', axis=1, inplace=True)
    class_label = [ 'motion_n', 'motion_y']
    predictors = [a for a in activity.columns.values if a not in class_label]

    for p in predictors:
        activity[p].fillna(activity[p].mean(), inplace=True)

    display(predictors)
    for p in predictors:
        activity[p] = (activity[p]-activity[p].min()) / (activity[p].max() - activity[p].min())
        activity[p].astype('float32')
    activity = activity.to_numpy()
    X_train, X_test, y_train, y_test = train_test_split(activity[:,:-2],activity[:,-2:], test_size=0.07, random_state=rs)
    #     the names of the features
    names = ['temp_hand','acceleration_16_x_hand',
        'acceleration_16_y_hand','acceleration_16_z_hand','acceleration_6_x_hand',
        'acceleration_6_y_hand','acceleration_6_z_hand','gyroscope_x_hand','gyroscope_y_hand',
        'gyroscope_z_hand','magnetometer_x_hand','magnetometer_y_hand','magnetometer_z_hand',
        'temp_chest','acceleration_16_x_chest','acceleration_16_y_chest','acceleration_16_z_chest','acceleration_6_x_chest',
        'acceleration_6_y_chest','acceleration_6_z_chest','gyroscope_x_chest','gyroscope_y_chest','gyroscope_z_chest',
        'magnetometer_x_chest','magnetometer_y_chest','magnetometer_z_chest','temp_ankle','acceleration_16_x_ankle',
        'acceleration_16_y_ankle','acceleration_16_z_ankle','acceleration_6_x_ankle','acceleration_6_y_ankle',
        'acceleration_6_z_ankle','gyroscope_x_ankle','gyroscope_y_ankle','gyroscope_z_ankle','magnetometer_x_ankle',
        'magnetometer_y_ankle','magnetometer_z_ankle']
    Features_number = len(X_train[0])
['temp_hand',
 'acceleration_16_x_hand',
 'acceleration_16_y_hand',
 'acceleration_16_z_hand',
 'acceleration_6_x_hand',
 'acceleration_6_y_hand',
 'acceleration_6_z_hand',
 'gyroscope_x_hand',
 'gyroscope_y_hand',
 'gyroscope_z_hand',
 'magnetometer_x_hand',
 'magnetometer_y_hand',
 'magnetometer_z_hand',
 'temp_chest',
 'acceleration_16_x_chest',
 'acceleration_16_y_chest',
 'acceleration_16_z_chest',
 'acceleration_6_x_chest',
 'acceleration_6_y_chest',
 'acceleration_6_z_chest',
 'gyroscope_x_chest',
 'gyroscope_y_chest',
 'gyroscope_z_chest',
 'magnetometer_x_chest',
 'magnetometer_y_chest',
 'magnetometer_z_chest',
 'temp_ankle',
 'acceleration_16_x_ankle',
 'acceleration_16_y_ankle',
 'acceleration_16_z_ankle',
 'acceleration_6_x_ankle',
 'acceleration_6_y_ankle',
 'acceleration_6_z_ankle',
 'gyroscope_x_ankle',
 'gyroscope_y_ankle',
 'gyroscope_z_ankle',
 'magnetometer_x_ankle',
 'magnetometer_y_ankle',
 'magnetometer_z_ankle']
In [7]:
#begin federated

earlystopping = EarlyStopping(monitor = 'val_loss',
                              min_delta = 0.01,
                              patience = 50,
                              verbose = 0,
                              baseline = 2,
                              restore_best_weights = True)

checkpoint = ModelCheckpoint('test.h8',
                             monitor='val_loss',
                             mode='min',
                             save_best_only=True,
                             verbose=0)
    
model = Sequential()
model.add(Dense(70, input_dim=Features_number, activation='relu'))
model.add(Dense(50, activation='relu'))
model.add(Dense(50, activation='relu'))
model.add(Dense(2, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])
history = model.fit(X_train, y_train,
epochs=2,
validation_data=(X_test, y_test),
callbacks = [checkpoint, earlystopping],
shuffle=True)
Train on 1806870 samples, validate on 136002 samples
Epoch 1/2
 225888/1806870 [==>...........................] - ETA: 3:38:09 - loss: 0.6677 - accuracy: 0.718 - ETA: 6:38 - loss: 0.6753 - accuracy: 0.5789  - ETA: 4:21 - loss: 0.6609 - accuracy: 0.58 - ETA: 3:24 - loss: 0.6429 - accuracy: 0.61 - ETA: 2:57 - loss: 0.6137 - accuracy: 0.64 - ETA: 2:38 - loss: 0.5775 - accuracy: 0.68 - ETA: 2:22 - loss: 0.5492 - accuracy: 0.70 - ETA: 2:10 - loss: 0.5185 - accuracy: 0.72 - ETA: 2:04 - loss: 0.4951 - accuracy: 0.74 - ETA: 1:58 - loss: 0.4725 - accuracy: 0.76 - ETA: 1:52 - loss: 0.4519 - accuracy: 0.77 - ETA: 1:48 - loss: 0.4357 - accuracy: 0.78 - ETA: 1:46 - loss: 0.4264 - accuracy: 0.79 - ETA: 1:42 - loss: 0.4195 - accuracy: 0.79 - ETA: 1:42 - loss: 0.4110 - accuracy: 0.80 - ETA: 1:39 - loss: 0.4011 - accuracy: 0.80 - ETA: 1:38 - loss: 0.3931 - accuracy: 0.81 - ETA: 1:36 - loss: 0.3829 - accuracy: 0.82 - ETA: 1:34 - loss: 0.3757 - accuracy: 0.82 - ETA: 1:32 - loss: 0.3680 - accuracy: 0.82 - ETA: 1:31 - loss: 0.3609 - accuracy: 0.83 - ETA: 1:30 - loss: 0.3570 - accuracy: 0.83 - ETA: 1:28 - loss: 0.3509 - accuracy: 0.83 - ETA: 1:27 - loss: 0.3470 - accuracy: 0.84 - ETA: 1:26 - loss: 0.3417 - accuracy: 0.84 - ETA: 1:26 - loss: 0.3383 - accuracy: 0.84 - ETA: 1:25 - loss: 0.3346 - accuracy: 0.84 - ETA: 1:24 - loss: 0.3299 - accuracy: 0.85 - ETA: 1:23 - loss: 0.3249 - accuracy: 0.85 - ETA: 1:22 - loss: 0.3214 - accuracy: 0.85 - ETA: 1:22 - loss: 0.3169 - accuracy: 0.85 - ETA: 1:21 - loss: 0.3134 - accuracy: 0.86 - ETA: 1:20 - loss: 0.3120 - accuracy: 0.86 - ETA: 1:20 - loss: 0.3086 - accuracy: 0.86 - ETA: 1:19 - loss: 0.3042 - accuracy: 0.86 - ETA: 1:20 - loss: 0.3019 - accuracy: 0.86 - ETA: 1:20 - loss: 0.2993 - accuracy: 0.86 - ETA: 1:20 - loss: 0.2957 - accuracy: 0.86 - ETA: 1:20 - loss: 0.2940 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2916 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2897 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2882 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2866 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2841 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2828 - accuracy: 0.87 - ETA: 1:18 - loss: 0.2797 - accuracy: 0.87 - ETA: 1:18 - loss: 0.2772 - accuracy: 0.87 - ETA: 1:18 - loss: 0.2758 - accuracy: 0.88 - ETA: 1:18 - loss: 0.2737 - accuracy: 0.88 - ETA: 1:17 - loss: 0.2716 - accuracy: 0.88 - ETA: 1:17 - loss: 0.2696 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2688 - accuracy: 0.88 - ETA: 1:17 - loss: 0.2675 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2657 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2637 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2624 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2611 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2590 - accuracy: 0.88 - ETA: 1:15 - loss: 0.2577 - accuracy: 0.88 - ETA: 1:15 - loss: 0.2557 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2545 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2532 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2513 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2499 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2492 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2482 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2465 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2449 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2439 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2433 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2422 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2412 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2402 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2389 - accuracy: 0.89 - ETA: 1:12 - loss: 0.2375 - accuracy: 0.89 - ETA: 1:12 - loss: 0.2363 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2352 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2346 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2340 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2327 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2323 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2313 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2300 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2290 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2281 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2273 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2265 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2258 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2246 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2236 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2227 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2220 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2210 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2203 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2195 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2187 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2181 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2176 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2170 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2167 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2156 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2147 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2141 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2133 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2123 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2111 - accuracy: 0.91 - ETA: 1:09 - loss: 0.2104 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2097 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2093 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2089 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2083 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2078 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2076 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2073 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2068 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2063 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2056 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2050 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2043 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2038 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2030 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2024 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2016 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2012 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2006 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2001 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1995 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1990 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1985 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1980 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1976 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1973 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1968 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1965 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1959 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1954 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1954 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1948 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1943 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1939 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1937 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1930 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1924 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1917 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1911 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1906 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1900 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1892 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1889 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1884 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1878 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1873 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1871 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1869 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1866 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1862 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1859 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1856 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1852 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1847 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1844 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1843 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1840 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1834 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1827 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1823 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1819 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1815 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1811 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1807 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1802 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1796 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1791 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1787 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1785 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1781 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1778 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1774 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1768 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1764 - accuracy: 0.92 - ETA: 1:06 - loss: 0.1761 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1758 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1757 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1752 - accuracy: 0.9305
 466080/1806870 [======>.......................] - ETA: 1:07 - loss: 0.1747 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1745 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1744 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1740 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1735 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1733 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1731 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1727 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1721 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1717 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1713 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1709 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1707 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1705 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1702 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1699 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1698 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1695 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1694 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1693 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1690 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1687 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1685 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1683 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1682 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1679 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1677 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1674 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1671 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1667 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1666 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1664 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1662 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1660 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1658 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1655 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1654 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1653 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1652 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1649 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1648 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1647 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1645 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1641 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1638 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1632 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1628 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1625 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1622 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1619 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1616 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1613 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1611 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1611 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1609 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1606 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1603 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1601 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1599 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1597 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1594 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1591 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1588 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1586 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1584 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1579 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1576 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1572 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1571 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1570 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1567 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1564 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1561 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1558 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1556 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1553 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1551 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1550 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1548 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1546 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1544 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1542 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1541 - accuracy: 0.94 - ETA: 1:03 - loss: 0.1538 - accuracy: 0.94 - ETA: 1:03 - loss: 0.1536 - accuracy: 0.94 - ETA: 1:03 - loss: 0.1533 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1529 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1527 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1524 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1523 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1521 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1519 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1517 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1513 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1511 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1508 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1507 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1505 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1503 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1501 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1498 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1496 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1495 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1493 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1492 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1490 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1488 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1486 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1484 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1482 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1479 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1478 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1475 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1473 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1472 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1469 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1467 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1466 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1463 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1460 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1457 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1455 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1451 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1449 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1451 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1449 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1447 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1445 - accuracy: 0.94 - ETA: 59s - loss: 0.1444 - accuracy: 0.9444 - ETA: 59s - loss: 0.1441 - accuracy: 0.944 - ETA: 59s - loss: 0.1440 - accuracy: 0.944 - ETA: 59s - loss: 0.1438 - accuracy: 0.944 - ETA: 59s - loss: 0.1438 - accuracy: 0.944 - ETA: 59s - loss: 0.1436 - accuracy: 0.944 - ETA: 59s - loss: 0.1434 - accuracy: 0.944 - ETA: 59s - loss: 0.1433 - accuracy: 0.944 - ETA: 59s - loss: 0.1431 - accuracy: 0.945 - ETA: 59s - loss: 0.1428 - accuracy: 0.945 - ETA: 59s - loss: 0.1426 - accuracy: 0.945 - ETA: 58s - loss: 0.1423 - accuracy: 0.945 - ETA: 58s - loss: 0.1420 - accuracy: 0.945 - ETA: 58s - loss: 0.1418 - accuracy: 0.945 - ETA: 58s - loss: 0.1415 - accuracy: 0.945 - ETA: 58s - loss: 0.1412 - accuracy: 0.945 - ETA: 58s - loss: 0.1412 - accuracy: 0.945 - ETA: 58s - loss: 0.1410 - accuracy: 0.945 - ETA: 58s - loss: 0.1408 - accuracy: 0.946 - ETA: 58s - loss: 0.1406 - accuracy: 0.946 - ETA: 58s - loss: 0.1404 - accuracy: 0.946 - ETA: 57s - loss: 0.1402 - accuracy: 0.946 - ETA: 57s - loss: 0.1400 - accuracy: 0.946 - ETA: 57s - loss: 0.1399 - accuracy: 0.946 - ETA: 57s - loss: 0.1397 - accuracy: 0.946 - ETA: 57s - loss: 0.1397 - accuracy: 0.946 - ETA: 57s - loss: 0.1396 - accuracy: 0.946 - ETA: 57s - loss: 0.1393 - accuracy: 0.946 - ETA: 57s - loss: 0.1392 - accuracy: 0.946 - ETA: 57s - loss: 0.1390 - accuracy: 0.946 - ETA: 57s - loss: 0.1388 - accuracy: 0.946 - ETA: 57s - loss: 0.1386 - accuracy: 0.946 - ETA: 56s - loss: 0.1385 - accuracy: 0.947 - ETA: 56s - loss: 0.1383 - accuracy: 0.947 - ETA: 56s - loss: 0.1382 - accuracy: 0.947 - ETA: 56s - loss: 0.1381 - accuracy: 0.947 - ETA: 56s - loss: 0.1379 - accuracy: 0.947 - ETA: 56s - loss: 0.1378 - accuracy: 0.947 - ETA: 56s - loss: 0.1376 - accuracy: 0.947 - ETA: 56s - loss: 0.1375 - accuracy: 0.947 - ETA: 56s - loss: 0.1374 - accuracy: 0.947 - ETA: 56s - loss: 0.1373 - accuracy: 0.947 - ETA: 55s - loss: 0.1371 - accuracy: 0.947 - ETA: 55s - loss: 0.1370 - accuracy: 0.947 - ETA: 55s - loss: 0.1368 - accuracy: 0.947 - ETA: 55s - loss: 0.1366 - accuracy: 0.947 - ETA: 55s - loss: 0.1364 - accuracy: 0.947 - ETA: 55s - loss: 0.1362 - accuracy: 0.948 - ETA: 55s - loss: 0.1360 - accuracy: 0.948 - ETA: 55s - loss: 0.1357 - accuracy: 0.948 - ETA: 55s - loss: 0.1355 - accuracy: 0.948 - ETA: 55s - loss: 0.1355 - accuracy: 0.948 - ETA: 55s - loss: 0.1353 - accuracy: 0.948 - ETA: 55s - loss: 0.1350 - accuracy: 0.948 - ETA: 55s - loss: 0.1349 - accuracy: 0.948 - ETA: 54s - loss: 0.1349 - accuracy: 0.948 - ETA: 54s - loss: 0.1347 - accuracy: 0.948 - ETA: 54s - loss: 0.1347 - accuracy: 0.9487
 743456/1806870 [===========>..................] - ETA: 54s - loss: 0.1345 - accuracy: 0.948 - ETA: 54s - loss: 0.1343 - accuracy: 0.948 - ETA: 54s - loss: 0.1342 - accuracy: 0.948 - ETA: 54s - loss: 0.1340 - accuracy: 0.949 - ETA: 54s - loss: 0.1338 - accuracy: 0.949 - ETA: 54s - loss: 0.1337 - accuracy: 0.949 - ETA: 54s - loss: 0.1334 - accuracy: 0.949 - ETA: 54s - loss: 0.1333 - accuracy: 0.949 - ETA: 54s - loss: 0.1331 - accuracy: 0.949 - ETA: 54s - loss: 0.1329 - accuracy: 0.949 - ETA: 54s - loss: 0.1327 - accuracy: 0.949 - ETA: 53s - loss: 0.1324 - accuracy: 0.949 - ETA: 53s - loss: 0.1323 - accuracy: 0.949 - ETA: 53s - loss: 0.1323 - accuracy: 0.949 - ETA: 53s - loss: 0.1321 - accuracy: 0.949 - ETA: 53s - loss: 0.1320 - accuracy: 0.949 - ETA: 53s - loss: 0.1318 - accuracy: 0.949 - ETA: 53s - loss: 0.1317 - accuracy: 0.950 - ETA: 53s - loss: 0.1315 - accuracy: 0.950 - ETA: 53s - loss: 0.1313 - accuracy: 0.950 - ETA: 53s - loss: 0.1311 - accuracy: 0.950 - ETA: 53s - loss: 0.1310 - accuracy: 0.950 - ETA: 52s - loss: 0.1308 - accuracy: 0.950 - ETA: 52s - loss: 0.1307 - accuracy: 0.950 - ETA: 52s - loss: 0.1305 - accuracy: 0.950 - ETA: 52s - loss: 0.1303 - accuracy: 0.950 - ETA: 52s - loss: 0.1301 - accuracy: 0.950 - ETA: 52s - loss: 0.1299 - accuracy: 0.950 - ETA: 52s - loss: 0.1298 - accuracy: 0.950 - ETA: 52s - loss: 0.1296 - accuracy: 0.950 - ETA: 52s - loss: 0.1295 - accuracy: 0.950 - ETA: 52s - loss: 0.1294 - accuracy: 0.951 - ETA: 52s - loss: 0.1292 - accuracy: 0.951 - ETA: 52s - loss: 0.1290 - accuracy: 0.951 - ETA: 51s - loss: 0.1289 - accuracy: 0.951 - ETA: 51s - loss: 0.1287 - accuracy: 0.951 - ETA: 51s - loss: 0.1286 - accuracy: 0.951 - ETA: 51s - loss: 0.1285 - accuracy: 0.951 - ETA: 51s - loss: 0.1283 - accuracy: 0.951 - ETA: 51s - loss: 0.1282 - accuracy: 0.951 - ETA: 51s - loss: 0.1280 - accuracy: 0.951 - ETA: 51s - loss: 0.1279 - accuracy: 0.951 - ETA: 51s - loss: 0.1278 - accuracy: 0.951 - ETA: 51s - loss: 0.1276 - accuracy: 0.951 - ETA: 51s - loss: 0.1275 - accuracy: 0.951 - ETA: 50s - loss: 0.1274 - accuracy: 0.951 - ETA: 50s - loss: 0.1272 - accuracy: 0.951 - ETA: 50s - loss: 0.1270 - accuracy: 0.952 - ETA: 50s - loss: 0.1268 - accuracy: 0.952 - ETA: 50s - loss: 0.1267 - accuracy: 0.952 - ETA: 50s - loss: 0.1266 - accuracy: 0.952 - ETA: 50s - loss: 0.1264 - accuracy: 0.952 - ETA: 50s - loss: 0.1262 - accuracy: 0.952 - ETA: 50s - loss: 0.1261 - accuracy: 0.952 - ETA: 50s - loss: 0.1259 - accuracy: 0.952 - ETA: 50s - loss: 0.1257 - accuracy: 0.952 - ETA: 50s - loss: 0.1255 - accuracy: 0.952 - ETA: 50s - loss: 0.1254 - accuracy: 0.952 - ETA: 50s - loss: 0.1253 - accuracy: 0.952 - ETA: 49s - loss: 0.1251 - accuracy: 0.952 - ETA: 49s - loss: 0.1249 - accuracy: 0.952 - ETA: 49s - loss: 0.1247 - accuracy: 0.953 - ETA: 49s - loss: 0.1246 - accuracy: 0.953 - ETA: 49s - loss: 0.1243 - accuracy: 0.953 - ETA: 49s - loss: 0.1243 - accuracy: 0.953 - ETA: 49s - loss: 0.1241 - accuracy: 0.953 - ETA: 49s - loss: 0.1239 - accuracy: 0.953 - ETA: 49s - loss: 0.1238 - accuracy: 0.953 - ETA: 49s - loss: 0.1237 - accuracy: 0.953 - ETA: 49s - loss: 0.1235 - accuracy: 0.953 - ETA: 49s - loss: 0.1234 - accuracy: 0.953 - ETA: 49s - loss: 0.1233 - accuracy: 0.953 - ETA: 49s - loss: 0.1232 - accuracy: 0.953 - ETA: 49s - loss: 0.1230 - accuracy: 0.953 - ETA: 48s - loss: 0.1229 - accuracy: 0.953 - ETA: 48s - loss: 0.1228 - accuracy: 0.953 - ETA: 48s - loss: 0.1227 - accuracy: 0.953 - ETA: 48s - loss: 0.1226 - accuracy: 0.953 - ETA: 48s - loss: 0.1225 - accuracy: 0.953 - ETA: 48s - loss: 0.1224 - accuracy: 0.953 - ETA: 48s - loss: 0.1223 - accuracy: 0.953 - ETA: 48s - loss: 0.1221 - accuracy: 0.954 - ETA: 48s - loss: 0.1220 - accuracy: 0.954 - ETA: 48s - loss: 0.1218 - accuracy: 0.954 - ETA: 48s - loss: 0.1217 - accuracy: 0.954 - ETA: 48s - loss: 0.1216 - accuracy: 0.954 - ETA: 48s - loss: 0.1215 - accuracy: 0.954 - ETA: 48s - loss: 0.1213 - accuracy: 0.954 - ETA: 48s - loss: 0.1211 - accuracy: 0.954 - ETA: 47s - loss: 0.1209 - accuracy: 0.954 - ETA: 47s - loss: 0.1208 - accuracy: 0.954 - ETA: 47s - loss: 0.1206 - accuracy: 0.954 - ETA: 47s - loss: 0.1205 - accuracy: 0.954 - ETA: 47s - loss: 0.1203 - accuracy: 0.954 - ETA: 47s - loss: 0.1202 - accuracy: 0.954 - ETA: 47s - loss: 0.1201 - accuracy: 0.954 - ETA: 47s - loss: 0.1199 - accuracy: 0.954 - ETA: 47s - loss: 0.1198 - accuracy: 0.954 - ETA: 47s - loss: 0.1196 - accuracy: 0.955 - ETA: 47s - loss: 0.1195 - accuracy: 0.955 - ETA: 47s - loss: 0.1193 - accuracy: 0.955 - ETA: 47s - loss: 0.1192 - accuracy: 0.955 - ETA: 47s - loss: 0.1191 - accuracy: 0.955 - ETA: 47s - loss: 0.1189 - accuracy: 0.955 - ETA: 47s - loss: 0.1188 - accuracy: 0.955 - ETA: 46s - loss: 0.1187 - accuracy: 0.955 - ETA: 46s - loss: 0.1185 - accuracy: 0.955 - ETA: 46s - loss: 0.1184 - accuracy: 0.955 - ETA: 46s - loss: 0.1182 - accuracy: 0.955 - ETA: 46s - loss: 0.1181 - accuracy: 0.955 - ETA: 46s - loss: 0.1180 - accuracy: 0.955 - ETA: 46s - loss: 0.1179 - accuracy: 0.955 - ETA: 46s - loss: 0.1178 - accuracy: 0.955 - ETA: 46s - loss: 0.1177 - accuracy: 0.955 - ETA: 46s - loss: 0.1176 - accuracy: 0.955 - ETA: 46s - loss: 0.1175 - accuracy: 0.955 - ETA: 46s - loss: 0.1174 - accuracy: 0.955 - ETA: 46s - loss: 0.1172 - accuracy: 0.956 - ETA: 45s - loss: 0.1171 - accuracy: 0.956 - ETA: 45s - loss: 0.1170 - accuracy: 0.956 - ETA: 45s - loss: 0.1168 - accuracy: 0.956 - ETA: 45s - loss: 0.1167 - accuracy: 0.956 - ETA: 45s - loss: 0.1165 - accuracy: 0.956 - ETA: 45s - loss: 0.1163 - accuracy: 0.956 - ETA: 45s - loss: 0.1162 - accuracy: 0.956 - ETA: 45s - loss: 0.1160 - accuracy: 0.956 - ETA: 45s - loss: 0.1159 - accuracy: 0.956 - ETA: 45s - loss: 0.1158 - accuracy: 0.956 - ETA: 45s - loss: 0.1156 - accuracy: 0.956 - ETA: 45s - loss: 0.1155 - accuracy: 0.956 - ETA: 44s - loss: 0.1153 - accuracy: 0.956 - ETA: 44s - loss: 0.1152 - accuracy: 0.956 - ETA: 44s - loss: 0.1150 - accuracy: 0.956 - ETA: 44s - loss: 0.1149 - accuracy: 0.956 - ETA: 44s - loss: 0.1147 - accuracy: 0.957 - ETA: 44s - loss: 0.1147 - accuracy: 0.957 - ETA: 44s - loss: 0.1146 - accuracy: 0.957 - ETA: 44s - loss: 0.1144 - accuracy: 0.957 - ETA: 44s - loss: 0.1143 - accuracy: 0.957 - ETA: 44s - loss: 0.1141 - accuracy: 0.957 - ETA: 44s - loss: 0.1140 - accuracy: 0.957 - ETA: 44s - loss: 0.1139 - accuracy: 0.957 - ETA: 44s - loss: 0.1138 - accuracy: 0.957 - ETA: 43s - loss: 0.1136 - accuracy: 0.957 - ETA: 43s - loss: 0.1135 - accuracy: 0.957 - ETA: 43s - loss: 0.1134 - accuracy: 0.957 - ETA: 43s - loss: 0.1132 - accuracy: 0.957 - ETA: 43s - loss: 0.1131 - accuracy: 0.957 - ETA: 43s - loss: 0.1130 - accuracy: 0.957 - ETA: 43s - loss: 0.1128 - accuracy: 0.957 - ETA: 43s - loss: 0.1127 - accuracy: 0.957 - ETA: 43s - loss: 0.1126 - accuracy: 0.957 - ETA: 43s - loss: 0.1125 - accuracy: 0.957 - ETA: 43s - loss: 0.1123 - accuracy: 0.958 - ETA: 43s - loss: 0.1121 - accuracy: 0.958 - ETA: 42s - loss: 0.1120 - accuracy: 0.958 - ETA: 42s - loss: 0.1119 - accuracy: 0.958 - ETA: 42s - loss: 0.1118 - accuracy: 0.958 - ETA: 42s - loss: 0.1117 - accuracy: 0.958 - ETA: 42s - loss: 0.1115 - accuracy: 0.958 - ETA: 42s - loss: 0.1114 - accuracy: 0.958 - ETA: 42s - loss: 0.1113 - accuracy: 0.958 - ETA: 42s - loss: 0.1112 - accuracy: 0.958 - ETA: 42s - loss: 0.1110 - accuracy: 0.958 - ETA: 42s - loss: 0.1109 - accuracy: 0.958 - ETA: 42s - loss: 0.1108 - accuracy: 0.958 - ETA: 42s - loss: 0.1107 - accuracy: 0.958 - ETA: 42s - loss: 0.1106 - accuracy: 0.958 - ETA: 42s - loss: 0.1105 - accuracy: 0.958 - ETA: 42s - loss: 0.1104 - accuracy: 0.958 - ETA: 41s - loss: 0.1102 - accuracy: 0.958 - ETA: 41s - loss: 0.1101 - accuracy: 0.958 - ETA: 41s - loss: 0.1100 - accuracy: 0.958 - ETA: 41s - loss: 0.1098 - accuracy: 0.959 - ETA: 41s - loss: 0.1097 - accuracy: 0.959 - ETA: 41s - loss: 0.1097 - accuracy: 0.959 - ETA: 41s - loss: 0.1095 - accuracy: 0.959 - ETA: 41s - loss: 0.1094 - accuracy: 0.959 - ETA: 41s - loss: 0.1093 - accuracy: 0.959 - ETA: 41s - loss: 0.1092 - accuracy: 0.959 - ETA: 41s - loss: 0.1091 - accuracy: 0.959 - ETA: 41s - loss: 0.1090 - accuracy: 0.959 - ETA: 41s - loss: 0.1089 - accuracy: 0.959 - ETA: 40s - loss: 0.1088 - accuracy: 0.959 - ETA: 40s - loss: 0.1086 - accuracy: 0.959 - ETA: 40s - loss: 0.1084 - accuracy: 0.9595
1020928/1806870 [===============>..............] - ETA: 40s - loss: 0.1083 - accuracy: 0.959 - ETA: 40s - loss: 0.1082 - accuracy: 0.959 - ETA: 40s - loss: 0.1080 - accuracy: 0.959 - ETA: 40s - loss: 0.1079 - accuracy: 0.959 - ETA: 40s - loss: 0.1077 - accuracy: 0.959 - ETA: 40s - loss: 0.1076 - accuracy: 0.959 - ETA: 40s - loss: 0.1075 - accuracy: 0.959 - ETA: 40s - loss: 0.1074 - accuracy: 0.960 - ETA: 40s - loss: 0.1073 - accuracy: 0.960 - ETA: 40s - loss: 0.1072 - accuracy: 0.960 - ETA: 39s - loss: 0.1071 - accuracy: 0.960 - ETA: 39s - loss: 0.1070 - accuracy: 0.960 - ETA: 39s - loss: 0.1069 - accuracy: 0.960 - ETA: 39s - loss: 0.1068 - accuracy: 0.960 - ETA: 39s - loss: 0.1067 - accuracy: 0.960 - ETA: 39s - loss: 0.1068 - accuracy: 0.960 - ETA: 39s - loss: 0.1067 - accuracy: 0.960 - ETA: 39s - loss: 0.1066 - accuracy: 0.960 - ETA: 39s - loss: 0.1066 - accuracy: 0.960 - ETA: 39s - loss: 0.1065 - accuracy: 0.960 - ETA: 39s - loss: 0.1064 - accuracy: 0.960 - ETA: 39s - loss: 0.1063 - accuracy: 0.960 - ETA: 39s - loss: 0.1062 - accuracy: 0.960 - ETA: 39s - loss: 0.1061 - accuracy: 0.960 - ETA: 38s - loss: 0.1059 - accuracy: 0.960 - ETA: 38s - loss: 0.1058 - accuracy: 0.960 - ETA: 38s - loss: 0.1057 - accuracy: 0.960 - ETA: 38s - loss: 0.1056 - accuracy: 0.960 - ETA: 38s - loss: 0.1055 - accuracy: 0.960 - ETA: 38s - loss: 0.1054 - accuracy: 0.960 - ETA: 38s - loss: 0.1053 - accuracy: 0.960 - ETA: 38s - loss: 0.1052 - accuracy: 0.960 - ETA: 38s - loss: 0.1050 - accuracy: 0.960 - ETA: 38s - loss: 0.1049 - accuracy: 0.961 - ETA: 38s - loss: 0.1048 - accuracy: 0.961 - ETA: 38s - loss: 0.1047 - accuracy: 0.961 - ETA: 38s - loss: 0.1046 - accuracy: 0.961 - ETA: 38s - loss: 0.1046 - accuracy: 0.961 - ETA: 38s - loss: 0.1046 - accuracy: 0.961 - ETA: 38s - loss: 0.1045 - accuracy: 0.961 - ETA: 37s - loss: 0.1044 - accuracy: 0.961 - ETA: 37s - loss: 0.1043 - accuracy: 0.961 - ETA: 37s - loss: 0.1042 - accuracy: 0.961 - ETA: 37s - loss: 0.1041 - accuracy: 0.961 - ETA: 37s - loss: 0.1039 - accuracy: 0.961 - ETA: 37s - loss: 0.1038 - accuracy: 0.961 - ETA: 37s - loss: 0.1037 - accuracy: 0.961 - ETA: 37s - loss: 0.1036 - accuracy: 0.961 - ETA: 37s - loss: 0.1035 - accuracy: 0.961 - ETA: 37s - loss: 0.1034 - accuracy: 0.961 - ETA: 37s - loss: 0.1033 - accuracy: 0.961 - ETA: 37s - loss: 0.1032 - accuracy: 0.961 - ETA: 37s - loss: 0.1031 - accuracy: 0.961 - ETA: 37s - loss: 0.1030 - accuracy: 0.961 - ETA: 36s - loss: 0.1028 - accuracy: 0.961 - ETA: 36s - loss: 0.1027 - accuracy: 0.961 - ETA: 36s - loss: 0.1026 - accuracy: 0.961 - ETA: 36s - loss: 0.1025 - accuracy: 0.961 - ETA: 36s - loss: 0.1024 - accuracy: 0.962 - ETA: 36s - loss: 0.1023 - accuracy: 0.962 - ETA: 36s - loss: 0.1022 - accuracy: 0.962 - ETA: 36s - loss: 0.1021 - accuracy: 0.962 - ETA: 36s - loss: 0.1020 - accuracy: 0.962 - ETA: 36s - loss: 0.1019 - accuracy: 0.962 - ETA: 36s - loss: 0.1019 - accuracy: 0.962 - ETA: 36s - loss: 0.1018 - accuracy: 0.962 - ETA: 36s - loss: 0.1017 - accuracy: 0.962 - ETA: 36s - loss: 0.1016 - accuracy: 0.962 - ETA: 36s - loss: 0.1015 - accuracy: 0.962 - ETA: 36s - loss: 0.1014 - accuracy: 0.962 - ETA: 36s - loss: 0.1014 - accuracy: 0.962 - ETA: 35s - loss: 0.1013 - accuracy: 0.962 - ETA: 35s - loss: 0.1012 - accuracy: 0.962 - ETA: 35s - loss: 0.1011 - accuracy: 0.962 - ETA: 35s - loss: 0.1010 - accuracy: 0.962 - ETA: 35s - loss: 0.1009 - accuracy: 0.962 - ETA: 35s - loss: 0.1008 - accuracy: 0.962 - ETA: 35s - loss: 0.1007 - accuracy: 0.962 - ETA: 35s - loss: 0.1006 - accuracy: 0.962 - ETA: 35s - loss: 0.1005 - accuracy: 0.962 - ETA: 35s - loss: 0.1004 - accuracy: 0.962 - ETA: 35s - loss: 0.1003 - accuracy: 0.962 - ETA: 35s - loss: 0.1003 - accuracy: 0.962 - ETA: 35s - loss: 0.1002 - accuracy: 0.962 - ETA: 35s - loss: 0.1001 - accuracy: 0.962 - ETA: 35s - loss: 0.1001 - accuracy: 0.962 - ETA: 35s - loss: 0.1000 - accuracy: 0.962 - ETA: 34s - loss: 0.0999 - accuracy: 0.963 - ETA: 34s - loss: 0.0998 - accuracy: 0.963 - ETA: 34s - loss: 0.0997 - accuracy: 0.963 - ETA: 34s - loss: 0.0996 - accuracy: 0.963 - ETA: 34s - loss: 0.0995 - accuracy: 0.963 - ETA: 34s - loss: 0.0994 - accuracy: 0.963 - ETA: 34s - loss: 0.0993 - accuracy: 0.963 - ETA: 34s - loss: 0.0992 - accuracy: 0.963 - ETA: 34s - loss: 0.0991 - accuracy: 0.963 - ETA: 34s - loss: 0.0990 - accuracy: 0.963 - ETA: 34s - loss: 0.0988 - accuracy: 0.963 - ETA: 34s - loss: 0.0987 - accuracy: 0.963 - ETA: 34s - loss: 0.0987 - accuracy: 0.963 - ETA: 33s - loss: 0.0986 - accuracy: 0.963 - ETA: 33s - loss: 0.0985 - accuracy: 0.963 - ETA: 33s - loss: 0.0984 - accuracy: 0.963 - ETA: 33s - loss: 0.0983 - accuracy: 0.963 - ETA: 33s - loss: 0.0982 - accuracy: 0.963 - ETA: 33s - loss: 0.0981 - accuracy: 0.963 - ETA: 33s - loss: 0.0980 - accuracy: 0.963 - ETA: 33s - loss: 0.0980 - accuracy: 0.963 - ETA: 33s - loss: 0.0979 - accuracy: 0.963 - ETA: 33s - loss: 0.0978 - accuracy: 0.963 - ETA: 33s - loss: 0.0977 - accuracy: 0.963 - ETA: 33s - loss: 0.0976 - accuracy: 0.963 - ETA: 33s - loss: 0.0975 - accuracy: 0.963 - ETA: 33s - loss: 0.0974 - accuracy: 0.964 - ETA: 33s - loss: 0.0973 - accuracy: 0.964 - ETA: 32s - loss: 0.0972 - accuracy: 0.964 - ETA: 32s - loss: 0.0971 - accuracy: 0.964 - ETA: 32s - loss: 0.0971 - accuracy: 0.964 - ETA: 32s - loss: 0.0970 - accuracy: 0.964 - ETA: 32s - loss: 0.0969 - accuracy: 0.964 - ETA: 32s - loss: 0.0968 - accuracy: 0.964 - ETA: 32s - loss: 0.0967 - accuracy: 0.964 - ETA: 32s - loss: 0.0967 - accuracy: 0.964 - ETA: 32s - loss: 0.0966 - accuracy: 0.964 - ETA: 32s - loss: 0.0965 - accuracy: 0.964 - ETA: 32s - loss: 0.0964 - accuracy: 0.964 - ETA: 32s - loss: 0.0964 - accuracy: 0.964 - ETA: 32s - loss: 0.0963 - accuracy: 0.964 - ETA: 32s - loss: 0.0963 - accuracy: 0.964 - ETA: 32s - loss: 0.0962 - accuracy: 0.964 - ETA: 32s - loss: 0.0961 - accuracy: 0.964 - ETA: 32s - loss: 0.0961 - accuracy: 0.964 - ETA: 32s - loss: 0.0961 - accuracy: 0.964 - ETA: 32s - loss: 0.0960 - accuracy: 0.964 - ETA: 31s - loss: 0.0959 - accuracy: 0.964 - ETA: 31s - loss: 0.0959 - accuracy: 0.964 - ETA: 31s - loss: 0.0958 - accuracy: 0.964 - ETA: 31s - loss: 0.0957 - accuracy: 0.964 - ETA: 31s - loss: 0.0957 - accuracy: 0.964 - ETA: 31s - loss: 0.0956 - accuracy: 0.964 - ETA: 31s - loss: 0.0955 - accuracy: 0.964 - ETA: 31s - loss: 0.0954 - accuracy: 0.964 - ETA: 31s - loss: 0.0953 - accuracy: 0.964 - ETA: 31s - loss: 0.0952 - accuracy: 0.964 - ETA: 31s - loss: 0.0951 - accuracy: 0.964 - ETA: 31s - loss: 0.0951 - accuracy: 0.964 - ETA: 31s - loss: 0.0950 - accuracy: 0.964 - ETA: 31s - loss: 0.0950 - accuracy: 0.964 - ETA: 31s - loss: 0.0949 - accuracy: 0.965 - ETA: 31s - loss: 0.0948 - accuracy: 0.965 - ETA: 31s - loss: 0.0947 - accuracy: 0.965 - ETA: 31s - loss: 0.0946 - accuracy: 0.965 - ETA: 31s - loss: 0.0946 - accuracy: 0.965 - ETA: 31s - loss: 0.0945 - accuracy: 0.965 - ETA: 31s - loss: 0.0944 - accuracy: 0.965 - ETA: 30s - loss: 0.0943 - accuracy: 0.965 - ETA: 30s - loss: 0.0942 - accuracy: 0.965 - ETA: 30s - loss: 0.0942 - accuracy: 0.965 - ETA: 30s - loss: 0.0941 - accuracy: 0.965 - ETA: 30s - loss: 0.0940 - accuracy: 0.965 - ETA: 30s - loss: 0.0940 - accuracy: 0.965 - ETA: 30s - loss: 0.0939 - accuracy: 0.965 - ETA: 30s - loss: 0.0938 - accuracy: 0.965 - ETA: 30s - loss: 0.0938 - accuracy: 0.965 - ETA: 30s - loss: 0.0937 - accuracy: 0.965 - ETA: 30s - loss: 0.0936 - accuracy: 0.965 - ETA: 30s - loss: 0.0935 - accuracy: 0.965 - ETA: 30s - loss: 0.0935 - accuracy: 0.965 - ETA: 30s - loss: 0.0934 - accuracy: 0.965 - ETA: 30s - loss: 0.0934 - accuracy: 0.965 - ETA: 30s - loss: 0.0933 - accuracy: 0.965 - ETA: 30s - loss: 0.0932 - accuracy: 0.965 - ETA: 30s - loss: 0.0931 - accuracy: 0.965 - ETA: 30s - loss: 0.0930 - accuracy: 0.965 - ETA: 29s - loss: 0.0930 - accuracy: 0.965 - ETA: 29s - loss: 0.0930 - accuracy: 0.965 - ETA: 29s - loss: 0.0929 - accuracy: 0.965 - ETA: 29s - loss: 0.0928 - accuracy: 0.965 - ETA: 29s - loss: 0.0927 - accuracy: 0.965 - ETA: 29s - loss: 0.0926 - accuracy: 0.965 - ETA: 29s - loss: 0.0926 - accuracy: 0.965 - ETA: 29s - loss: 0.0925 - accuracy: 0.965 - ETA: 29s - loss: 0.0924 - accuracy: 0.965 - ETA: 29s - loss: 0.0924 - accuracy: 0.966 - ETA: 29s - loss: 0.0923 - accuracy: 0.966 - ETA: 29s - loss: 0.0922 - accuracy: 0.9660
1303488/1806870 [====================>.........] - ETA: 29s - loss: 0.0921 - accuracy: 0.966 - ETA: 29s - loss: 0.0921 - accuracy: 0.966 - ETA: 29s - loss: 0.0920 - accuracy: 0.966 - ETA: 29s - loss: 0.0919 - accuracy: 0.966 - ETA: 29s - loss: 0.0919 - accuracy: 0.966 - ETA: 29s - loss: 0.0918 - accuracy: 0.966 - ETA: 28s - loss: 0.0918 - accuracy: 0.966 - ETA: 28s - loss: 0.0917 - accuracy: 0.966 - ETA: 28s - loss: 0.0917 - accuracy: 0.966 - ETA: 28s - loss: 0.0916 - accuracy: 0.966 - ETA: 28s - loss: 0.0915 - accuracy: 0.966 - ETA: 28s - loss: 0.0915 - accuracy: 0.966 - ETA: 28s - loss: 0.0914 - accuracy: 0.966 - ETA: 28s - loss: 0.0913 - accuracy: 0.966 - ETA: 28s - loss: 0.0912 - accuracy: 0.966 - ETA: 28s - loss: 0.0912 - accuracy: 0.966 - ETA: 28s - loss: 0.0911 - accuracy: 0.966 - ETA: 28s - loss: 0.0910 - accuracy: 0.966 - ETA: 28s - loss: 0.0909 - accuracy: 0.966 - ETA: 28s - loss: 0.0909 - accuracy: 0.966 - ETA: 28s - loss: 0.0908 - accuracy: 0.966 - ETA: 28s - loss: 0.0908 - accuracy: 0.966 - ETA: 28s - loss: 0.0907 - accuracy: 0.966 - ETA: 27s - loss: 0.0906 - accuracy: 0.966 - ETA: 27s - loss: 0.0906 - accuracy: 0.966 - ETA: 27s - loss: 0.0905 - accuracy: 0.966 - ETA: 27s - loss: 0.0904 - accuracy: 0.966 - ETA: 27s - loss: 0.0904 - accuracy: 0.966 - ETA: 27s - loss: 0.0903 - accuracy: 0.966 - ETA: 27s - loss: 0.0903 - accuracy: 0.966 - ETA: 27s - loss: 0.0902 - accuracy: 0.966 - ETA: 27s - loss: 0.0901 - accuracy: 0.966 - ETA: 27s - loss: 0.0900 - accuracy: 0.966 - ETA: 27s - loss: 0.0899 - accuracy: 0.966 - ETA: 27s - loss: 0.0899 - accuracy: 0.966 - ETA: 27s - loss: 0.0898 - accuracy: 0.967 - ETA: 27s - loss: 0.0897 - accuracy: 0.967 - ETA: 27s - loss: 0.0897 - accuracy: 0.967 - ETA: 27s - loss: 0.0897 - accuracy: 0.967 - ETA: 27s - loss: 0.0896 - accuracy: 0.967 - ETA: 27s - loss: 0.0895 - accuracy: 0.967 - ETA: 27s - loss: 0.0895 - accuracy: 0.967 - ETA: 27s - loss: 0.0894 - accuracy: 0.967 - ETA: 26s - loss: 0.0894 - accuracy: 0.967 - ETA: 26s - loss: 0.0894 - accuracy: 0.967 - ETA: 26s - loss: 0.0894 - accuracy: 0.967 - ETA: 26s - loss: 0.0893 - accuracy: 0.967 - ETA: 26s - loss: 0.0892 - accuracy: 0.967 - ETA: 26s - loss: 0.0892 - accuracy: 0.967 - ETA: 26s - loss: 0.0891 - accuracy: 0.967 - ETA: 26s - loss: 0.0890 - accuracy: 0.967 - ETA: 26s - loss: 0.0890 - accuracy: 0.967 - ETA: 26s - loss: 0.0889 - accuracy: 0.967 - ETA: 26s - loss: 0.0888 - accuracy: 0.967 - ETA: 26s - loss: 0.0888 - accuracy: 0.967 - ETA: 26s - loss: 0.0887 - accuracy: 0.967 - ETA: 26s - loss: 0.0886 - accuracy: 0.967 - ETA: 26s - loss: 0.0885 - accuracy: 0.967 - ETA: 26s - loss: 0.0884 - accuracy: 0.967 - ETA: 25s - loss: 0.0883 - accuracy: 0.967 - ETA: 25s - loss: 0.0882 - accuracy: 0.967 - ETA: 25s - loss: 0.0882 - accuracy: 0.967 - ETA: 25s - loss: 0.0881 - accuracy: 0.967 - ETA: 25s - loss: 0.0880 - accuracy: 0.967 - ETA: 25s - loss: 0.0879 - accuracy: 0.967 - ETA: 25s - loss: 0.0879 - accuracy: 0.967 - ETA: 25s - loss: 0.0878 - accuracy: 0.967 - ETA: 25s - loss: 0.0878 - accuracy: 0.967 - ETA: 25s - loss: 0.0877 - accuracy: 0.967 - ETA: 25s - loss: 0.0877 - accuracy: 0.967 - ETA: 25s - loss: 0.0876 - accuracy: 0.967 - ETA: 25s - loss: 0.0876 - accuracy: 0.967 - ETA: 25s - loss: 0.0875 - accuracy: 0.967 - ETA: 25s - loss: 0.0874 - accuracy: 0.967 - ETA: 25s - loss: 0.0874 - accuracy: 0.967 - ETA: 25s - loss: 0.0873 - accuracy: 0.968 - ETA: 24s - loss: 0.0872 - accuracy: 0.968 - ETA: 24s - loss: 0.0871 - accuracy: 0.968 - ETA: 24s - loss: 0.0871 - accuracy: 0.968 - ETA: 24s - loss: 0.0870 - accuracy: 0.968 - ETA: 24s - loss: 0.0870 - accuracy: 0.968 - ETA: 24s - loss: 0.0869 - accuracy: 0.968 - ETA: 24s - loss: 0.0869 - accuracy: 0.968 - ETA: 24s - loss: 0.0868 - accuracy: 0.968 - ETA: 24s - loss: 0.0867 - accuracy: 0.968 - ETA: 24s - loss: 0.0866 - accuracy: 0.968 - ETA: 24s - loss: 0.0865 - accuracy: 0.968 - ETA: 24s - loss: 0.0865 - accuracy: 0.968 - ETA: 24s - loss: 0.0864 - accuracy: 0.968 - ETA: 24s - loss: 0.0864 - accuracy: 0.968 - ETA: 24s - loss: 0.0863 - accuracy: 0.968 - ETA: 24s - loss: 0.0862 - accuracy: 0.968 - ETA: 23s - loss: 0.0862 - accuracy: 0.968 - ETA: 23s - loss: 0.0861 - accuracy: 0.968 - ETA: 23s - loss: 0.0860 - accuracy: 0.968 - ETA: 23s - loss: 0.0859 - accuracy: 0.968 - ETA: 23s - loss: 0.0858 - accuracy: 0.968 - ETA: 23s - loss: 0.0858 - accuracy: 0.968 - ETA: 23s - loss: 0.0857 - accuracy: 0.968 - ETA: 23s - loss: 0.0857 - accuracy: 0.968 - ETA: 23s - loss: 0.0857 - accuracy: 0.968 - ETA: 23s - loss: 0.0856 - accuracy: 0.968 - ETA: 23s - loss: 0.0855 - accuracy: 0.968 - ETA: 23s - loss: 0.0855 - accuracy: 0.968 - ETA: 23s - loss: 0.0855 - accuracy: 0.968 - ETA: 23s - loss: 0.0854 - accuracy: 0.968 - ETA: 23s - loss: 0.0853 - accuracy: 0.968 - ETA: 23s - loss: 0.0853 - accuracy: 0.968 - ETA: 22s - loss: 0.0852 - accuracy: 0.968 - ETA: 22s - loss: 0.0852 - accuracy: 0.968 - ETA: 22s - loss: 0.0851 - accuracy: 0.968 - ETA: 22s - loss: 0.0850 - accuracy: 0.968 - ETA: 22s - loss: 0.0850 - accuracy: 0.968 - ETA: 22s - loss: 0.0849 - accuracy: 0.968 - ETA: 22s - loss: 0.0848 - accuracy: 0.968 - ETA: 22s - loss: 0.0848 - accuracy: 0.968 - ETA: 22s - loss: 0.0848 - accuracy: 0.969 - ETA: 22s - loss: 0.0847 - accuracy: 0.969 - ETA: 22s - loss: 0.0846 - accuracy: 0.969 - ETA: 22s - loss: 0.0845 - accuracy: 0.969 - ETA: 22s - loss: 0.0845 - accuracy: 0.969 - ETA: 22s - loss: 0.0844 - accuracy: 0.969 - ETA: 22s - loss: 0.0844 - accuracy: 0.969 - ETA: 22s - loss: 0.0843 - accuracy: 0.969 - ETA: 22s - loss: 0.0843 - accuracy: 0.969 - ETA: 21s - loss: 0.0842 - accuracy: 0.969 - ETA: 21s - loss: 0.0842 - accuracy: 0.969 - ETA: 21s - loss: 0.0841 - accuracy: 0.969 - ETA: 21s - loss: 0.0841 - accuracy: 0.969 - ETA: 21s - loss: 0.0840 - accuracy: 0.969 - ETA: 21s - loss: 0.0839 - accuracy: 0.969 - ETA: 21s - loss: 0.0839 - accuracy: 0.969 - ETA: 21s - loss: 0.0838 - accuracy: 0.969 - ETA: 21s - loss: 0.0838 - accuracy: 0.969 - ETA: 21s - loss: 0.0837 - accuracy: 0.969 - ETA: 21s - loss: 0.0837 - accuracy: 0.969 - ETA: 21s - loss: 0.0836 - accuracy: 0.969 - ETA: 21s - loss: 0.0835 - accuracy: 0.969 - ETA: 21s - loss: 0.0834 - accuracy: 0.969 - ETA: 21s - loss: 0.0834 - accuracy: 0.969 - ETA: 21s - loss: 0.0833 - accuracy: 0.969 - ETA: 21s - loss: 0.0832 - accuracy: 0.969 - ETA: 20s - loss: 0.0832 - accuracy: 0.969 - ETA: 20s - loss: 0.0831 - accuracy: 0.969 - ETA: 20s - loss: 0.0831 - accuracy: 0.969 - ETA: 20s - loss: 0.0831 - accuracy: 0.969 - ETA: 20s - loss: 0.0830 - accuracy: 0.969 - ETA: 20s - loss: 0.0829 - accuracy: 0.969 - ETA: 20s - loss: 0.0829 - accuracy: 0.969 - ETA: 20s - loss: 0.0828 - accuracy: 0.969 - ETA: 20s - loss: 0.0827 - accuracy: 0.969 - ETA: 20s - loss: 0.0827 - accuracy: 0.969 - ETA: 20s - loss: 0.0826 - accuracy: 0.969 - ETA: 20s - loss: 0.0826 - accuracy: 0.969 - ETA: 20s - loss: 0.0825 - accuracy: 0.969 - ETA: 20s - loss: 0.0825 - accuracy: 0.969 - ETA: 20s - loss: 0.0824 - accuracy: 0.969 - ETA: 20s - loss: 0.0824 - accuracy: 0.969 - ETA: 19s - loss: 0.0823 - accuracy: 0.969 - ETA: 19s - loss: 0.0823 - accuracy: 0.969 - ETA: 19s - loss: 0.0822 - accuracy: 0.970 - ETA: 19s - loss: 0.0821 - accuracy: 0.970 - ETA: 19s - loss: 0.0820 - accuracy: 0.970 - ETA: 19s - loss: 0.0820 - accuracy: 0.970 - ETA: 19s - loss: 0.0819 - accuracy: 0.970 - ETA: 19s - loss: 0.0819 - accuracy: 0.970 - ETA: 19s - loss: 0.0818 - accuracy: 0.970 - ETA: 19s - loss: 0.0818 - accuracy: 0.970 - ETA: 19s - loss: 0.0817 - accuracy: 0.970 - ETA: 19s - loss: 0.0816 - accuracy: 0.970 - ETA: 19s - loss: 0.0816 - accuracy: 0.970 - ETA: 19s - loss: 0.0815 - accuracy: 0.970 - ETA: 19s - loss: 0.0814 - accuracy: 0.970 - ETA: 19s - loss: 0.0814 - accuracy: 0.970 - ETA: 18s - loss: 0.0813 - accuracy: 0.970 - ETA: 18s - loss: 0.0813 - accuracy: 0.970 - ETA: 18s - loss: 0.0813 - accuracy: 0.970 - ETA: 18s - loss: 0.0813 - accuracy: 0.970 - ETA: 18s - loss: 0.0812 - accuracy: 0.970 - ETA: 18s - loss: 0.0812 - accuracy: 0.970 - ETA: 18s - loss: 0.0811 - accuracy: 0.970 - ETA: 18s - loss: 0.0810 - accuracy: 0.970 - ETA: 18s - loss: 0.0810 - accuracy: 0.970 - ETA: 18s - loss: 0.0810 - accuracy: 0.970 - ETA: 18s - loss: 0.0809 - accuracy: 0.970 - ETA: 18s - loss: 0.0808 - accuracy: 0.9705
1592480/1806870 [=========================>....] - ETA: 18s - loss: 0.0808 - accuracy: 0.970 - ETA: 18s - loss: 0.0808 - accuracy: 0.970 - ETA: 18s - loss: 0.0807 - accuracy: 0.970 - ETA: 18s - loss: 0.0807 - accuracy: 0.970 - ETA: 18s - loss: 0.0806 - accuracy: 0.970 - ETA: 18s - loss: 0.0806 - accuracy: 0.970 - ETA: 17s - loss: 0.0805 - accuracy: 0.970 - ETA: 17s - loss: 0.0804 - accuracy: 0.970 - ETA: 17s - loss: 0.0804 - accuracy: 0.970 - ETA: 17s - loss: 0.0803 - accuracy: 0.970 - ETA: 17s - loss: 0.0802 - accuracy: 0.970 - ETA: 17s - loss: 0.0802 - accuracy: 0.970 - ETA: 17s - loss: 0.0802 - accuracy: 0.970 - ETA: 17s - loss: 0.0802 - accuracy: 0.970 - ETA: 17s - loss: 0.0801 - accuracy: 0.970 - ETA: 17s - loss: 0.0801 - accuracy: 0.970 - ETA: 17s - loss: 0.0800 - accuracy: 0.970 - ETA: 17s - loss: 0.0799 - accuracy: 0.970 - ETA: 17s - loss: 0.0799 - accuracy: 0.970 - ETA: 17s - loss: 0.0798 - accuracy: 0.970 - ETA: 17s - loss: 0.0798 - accuracy: 0.970 - ETA: 17s - loss: 0.0797 - accuracy: 0.970 - ETA: 17s - loss: 0.0797 - accuracy: 0.970 - ETA: 17s - loss: 0.0796 - accuracy: 0.970 - ETA: 16s - loss: 0.0796 - accuracy: 0.970 - ETA: 16s - loss: 0.0796 - accuracy: 0.971 - ETA: 16s - loss: 0.0795 - accuracy: 0.971 - ETA: 16s - loss: 0.0795 - accuracy: 0.971 - ETA: 16s - loss: 0.0794 - accuracy: 0.971 - ETA: 16s - loss: 0.0793 - accuracy: 0.971 - ETA: 16s - loss: 0.0793 - accuracy: 0.971 - ETA: 16s - loss: 0.0792 - accuracy: 0.971 - ETA: 16s - loss: 0.0792 - accuracy: 0.971 - ETA: 16s - loss: 0.0791 - accuracy: 0.971 - ETA: 16s - loss: 0.0791 - accuracy: 0.971 - ETA: 16s - loss: 0.0790 - accuracy: 0.971 - ETA: 16s - loss: 0.0790 - accuracy: 0.971 - ETA: 16s - loss: 0.0789 - accuracy: 0.971 - ETA: 16s - loss: 0.0789 - accuracy: 0.971 - ETA: 16s - loss: 0.0788 - accuracy: 0.971 - ETA: 16s - loss: 0.0788 - accuracy: 0.971 - ETA: 15s - loss: 0.0787 - accuracy: 0.971 - ETA: 15s - loss: 0.0786 - accuracy: 0.971 - ETA: 15s - loss: 0.0786 - accuracy: 0.971 - ETA: 15s - loss: 0.0785 - accuracy: 0.971 - ETA: 15s - loss: 0.0785 - accuracy: 0.971 - ETA: 15s - loss: 0.0784 - accuracy: 0.971 - ETA: 15s - loss: 0.0784 - accuracy: 0.971 - ETA: 15s - loss: 0.0783 - accuracy: 0.971 - ETA: 15s - loss: 0.0783 - accuracy: 0.971 - ETA: 15s - loss: 0.0782 - accuracy: 0.971 - ETA: 15s - loss: 0.0782 - accuracy: 0.971 - ETA: 15s - loss: 0.0781 - accuracy: 0.971 - ETA: 15s - loss: 0.0781 - accuracy: 0.971 - ETA: 15s - loss: 0.0780 - accuracy: 0.971 - ETA: 15s - loss: 0.0780 - accuracy: 0.971 - ETA: 15s - loss: 0.0780 - accuracy: 0.971 - ETA: 14s - loss: 0.0779 - accuracy: 0.971 - ETA: 14s - loss: 0.0779 - accuracy: 0.971 - ETA: 14s - loss: 0.0779 - accuracy: 0.971 - ETA: 14s - loss: 0.0778 - accuracy: 0.971 - ETA: 14s - loss: 0.0777 - accuracy: 0.971 - ETA: 14s - loss: 0.0777 - accuracy: 0.971 - ETA: 14s - loss: 0.0777 - accuracy: 0.971 - ETA: 14s - loss: 0.0776 - accuracy: 0.971 - ETA: 14s - loss: 0.0776 - accuracy: 0.971 - ETA: 14s - loss: 0.0775 - accuracy: 0.971 - ETA: 14s - loss: 0.0774 - accuracy: 0.971 - ETA: 14s - loss: 0.0774 - accuracy: 0.971 - ETA: 14s - loss: 0.0773 - accuracy: 0.971 - ETA: 14s - loss: 0.0773 - accuracy: 0.971 - ETA: 14s - loss: 0.0772 - accuracy: 0.971 - ETA: 14s - loss: 0.0772 - accuracy: 0.971 - ETA: 14s - loss: 0.0771 - accuracy: 0.971 - ETA: 13s - loss: 0.0770 - accuracy: 0.971 - ETA: 13s - loss: 0.0770 - accuracy: 0.971 - ETA: 13s - loss: 0.0770 - accuracy: 0.972 - ETA: 13s - loss: 0.0769 - accuracy: 0.972 - ETA: 13s - loss: 0.0769 - accuracy: 0.972 - ETA: 13s - loss: 0.0768 - accuracy: 0.972 - ETA: 13s - loss: 0.0768 - accuracy: 0.972 - ETA: 13s - loss: 0.0767 - accuracy: 0.972 - ETA: 13s - loss: 0.0767 - accuracy: 0.972 - ETA: 13s - loss: 0.0766 - accuracy: 0.972 - ETA: 13s - loss: 0.0766 - accuracy: 0.972 - ETA: 13s - loss: 0.0765 - accuracy: 0.972 - ETA: 13s - loss: 0.0765 - accuracy: 0.972 - ETA: 13s - loss: 0.0765 - accuracy: 0.972 - ETA: 13s - loss: 0.0764 - accuracy: 0.972 - ETA: 13s - loss: 0.0764 - accuracy: 0.972 - ETA: 13s - loss: 0.0764 - accuracy: 0.972 - ETA: 13s - loss: 0.0763 - accuracy: 0.972 - ETA: 12s - loss: 0.0763 - accuracy: 0.972 - ETA: 12s - loss: 0.0762 - accuracy: 0.972 - ETA: 12s - loss: 0.0762 - accuracy: 0.972 - ETA: 12s - loss: 0.0761 - accuracy: 0.972 - ETA: 12s - loss: 0.0761 - accuracy: 0.972 - ETA: 12s - loss: 0.0760 - accuracy: 0.972 - ETA: 12s - loss: 0.0760 - accuracy: 0.972 - ETA: 12s - loss: 0.0760 - accuracy: 0.972 - ETA: 12s - loss: 0.0759 - accuracy: 0.972 - ETA: 12s - loss: 0.0759 - accuracy: 0.972 - ETA: 12s - loss: 0.0759 - accuracy: 0.972 - ETA: 12s - loss: 0.0758 - accuracy: 0.972 - ETA: 12s - loss: 0.0758 - accuracy: 0.972 - ETA: 12s - loss: 0.0757 - accuracy: 0.972 - ETA: 12s - loss: 0.0757 - accuracy: 0.972 - ETA: 12s - loss: 0.0756 - accuracy: 0.972 - ETA: 12s - loss: 0.0757 - accuracy: 0.972 - ETA: 12s - loss: 0.0756 - accuracy: 0.972 - ETA: 11s - loss: 0.0756 - accuracy: 0.972 - ETA: 11s - loss: 0.0755 - accuracy: 0.972 - ETA: 11s - loss: 0.0755 - accuracy: 0.972 - ETA: 11s - loss: 0.0754 - accuracy: 0.972 - ETA: 11s - loss: 0.0754 - accuracy: 0.972 - ETA: 11s - loss: 0.0753 - accuracy: 0.972 - ETA: 11s - loss: 0.0753 - accuracy: 0.972 - ETA: 11s - loss: 0.0752 - accuracy: 0.972 - ETA: 11s - loss: 0.0752 - accuracy: 0.972 - ETA: 11s - loss: 0.0752 - accuracy: 0.972 - ETA: 11s - loss: 0.0751 - accuracy: 0.972 - ETA: 11s - loss: 0.0751 - accuracy: 0.972 - ETA: 11s - loss: 0.0750 - accuracy: 0.972 - ETA: 11s - loss: 0.0750 - accuracy: 0.972 - ETA: 11s - loss: 0.0750 - accuracy: 0.972 - ETA: 11s - loss: 0.0749 - accuracy: 0.972 - ETA: 11s - loss: 0.0748 - accuracy: 0.972 - ETA: 11s - loss: 0.0748 - accuracy: 0.972 - ETA: 10s - loss: 0.0747 - accuracy: 0.972 - ETA: 10s - loss: 0.0747 - accuracy: 0.972 - ETA: 10s - loss: 0.0746 - accuracy: 0.972 - ETA: 10s - loss: 0.0746 - accuracy: 0.972 - ETA: 10s - loss: 0.0745 - accuracy: 0.972 - ETA: 10s - loss: 0.0745 - accuracy: 0.972 - ETA: 10s - loss: 0.0745 - accuracy: 0.972 - ETA: 10s - loss: 0.0744 - accuracy: 0.972 - ETA: 10s - loss: 0.0744 - accuracy: 0.972 - ETA: 10s - loss: 0.0744 - accuracy: 0.973 - ETA: 10s - loss: 0.0743 - accuracy: 0.973 - ETA: 10s - loss: 0.0743 - accuracy: 0.973 - ETA: 10s - loss: 0.0742 - accuracy: 0.973 - ETA: 10s - loss: 0.0742 - accuracy: 0.973 - ETA: 10s - loss: 0.0742 - accuracy: 0.973 - ETA: 10s - loss: 0.0741 - accuracy: 0.973 - ETA: 10s - loss: 0.0741 - accuracy: 0.973 - ETA: 10s - loss: 0.0741 - accuracy: 0.973 - ETA: 9s - loss: 0.0740 - accuracy: 0.973 - ETA: 9s - loss: 0.0740 - accuracy: 0.97 - ETA: 9s - loss: 0.0740 - accuracy: 0.97 - ETA: 9s - loss: 0.0739 - accuracy: 0.97 - ETA: 9s - loss: 0.0738 - accuracy: 0.97 - ETA: 9s - loss: 0.0738 - accuracy: 0.97 - ETA: 9s - loss: 0.0737 - accuracy: 0.97 - ETA: 9s - loss: 0.0737 - accuracy: 0.97 - ETA: 9s - loss: 0.0736 - accuracy: 0.97 - ETA: 9s - loss: 0.0736 - accuracy: 0.97 - ETA: 9s - loss: 0.0736 - accuracy: 0.97 - ETA: 9s - loss: 0.0735 - accuracy: 0.97 - ETA: 9s - loss: 0.0735 - accuracy: 0.97 - ETA: 9s - loss: 0.0734 - accuracy: 0.97 - ETA: 9s - loss: 0.0734 - accuracy: 0.97 - ETA: 9s - loss: 0.0734 - accuracy: 0.97 - ETA: 8s - loss: 0.0733 - accuracy: 0.97 - ETA: 8s - loss: 0.0732 - accuracy: 0.97 - ETA: 8s - loss: 0.0732 - accuracy: 0.97 - ETA: 8s - loss: 0.0732 - accuracy: 0.97 - ETA: 8s - loss: 0.0732 - accuracy: 0.97 - ETA: 8s - loss: 0.0731 - accuracy: 0.97 - ETA: 8s - loss: 0.0731 - accuracy: 0.97 - ETA: 8s - loss: 0.0730 - accuracy: 0.97 - ETA: 8s - loss: 0.0730 - accuracy: 0.97 - ETA: 8s - loss: 0.0729 - accuracy: 0.97 - ETA: 8s - loss: 0.0729 - accuracy: 0.97 - ETA: 8s - loss: 0.0728 - accuracy: 0.97 - ETA: 8s - loss: 0.0728 - accuracy: 0.97 - ETA: 8s - loss: 0.0728 - accuracy: 0.97 - ETA: 8s - loss: 0.0727 - accuracy: 0.97 - ETA: 8s - loss: 0.0727 - accuracy: 0.97 - ETA: 8s - loss: 0.0726 - accuracy: 0.97 - ETA: 8s - loss: 0.0726 - accuracy: 0.97 - ETA: 7s - loss: 0.0725 - accuracy: 0.97 - ETA: 7s - loss: 0.0725 - accuracy: 0.97 - ETA: 7s - loss: 0.0724 - accuracy: 0.97 - ETA: 7s - loss: 0.0724 - accuracy: 0.97 - ETA: 7s - loss: 0.0724 - accuracy: 0.97 - ETA: 7s - loss: 0.0723 - accuracy: 0.97 - ETA: 7s - loss: 0.0723 - accuracy: 0.9738
1806870/1806870 [==============================] - ETA: 7s - loss: 0.0723 - accuracy: 0.97 - ETA: 7s - loss: 0.0722 - accuracy: 0.97 - ETA: 7s - loss: 0.0722 - accuracy: 0.97 - ETA: 7s - loss: 0.0722 - accuracy: 0.97 - ETA: 7s - loss: 0.0722 - accuracy: 0.97 - ETA: 7s - loss: 0.0721 - accuracy: 0.97 - ETA: 7s - loss: 0.0721 - accuracy: 0.97 - ETA: 7s - loss: 0.0720 - accuracy: 0.97 - ETA: 7s - loss: 0.0720 - accuracy: 0.97 - ETA: 7s - loss: 0.0719 - accuracy: 0.97 - ETA: 7s - loss: 0.0719 - accuracy: 0.97 - ETA: 7s - loss: 0.0718 - accuracy: 0.97 - ETA: 6s - loss: 0.0718 - accuracy: 0.97 - ETA: 6s - loss: 0.0717 - accuracy: 0.97 - ETA: 6s - loss: 0.0717 - accuracy: 0.97 - ETA: 6s - loss: 0.0717 - accuracy: 0.97 - ETA: 6s - loss: 0.0716 - accuracy: 0.97 - ETA: 6s - loss: 0.0716 - accuracy: 0.97 - ETA: 6s - loss: 0.0715 - accuracy: 0.97 - ETA: 6s - loss: 0.0715 - accuracy: 0.97 - ETA: 6s - loss: 0.0715 - accuracy: 0.97 - ETA: 6s - loss: 0.0714 - accuracy: 0.97 - ETA: 6s - loss: 0.0714 - accuracy: 0.97 - ETA: 6s - loss: 0.0713 - accuracy: 0.97 - ETA: 6s - loss: 0.0713 - accuracy: 0.97 - ETA: 6s - loss: 0.0713 - accuracy: 0.97 - ETA: 6s - loss: 0.0712 - accuracy: 0.97 - ETA: 6s - loss: 0.0712 - accuracy: 0.97 - ETA: 6s - loss: 0.0712 - accuracy: 0.97 - ETA: 5s - loss: 0.0711 - accuracy: 0.97 - ETA: 5s - loss: 0.0711 - accuracy: 0.97 - ETA: 5s - loss: 0.0711 - accuracy: 0.97 - ETA: 5s - loss: 0.0710 - accuracy: 0.97 - ETA: 5s - loss: 0.0710 - accuracy: 0.97 - ETA: 5s - loss: 0.0709 - accuracy: 0.97 - ETA: 5s - loss: 0.0709 - accuracy: 0.97 - ETA: 5s - loss: 0.0709 - accuracy: 0.97 - ETA: 5s - loss: 0.0708 - accuracy: 0.97 - ETA: 5s - loss: 0.0708 - accuracy: 0.97 - ETA: 5s - loss: 0.0707 - accuracy: 0.97 - ETA: 5s - loss: 0.0707 - accuracy: 0.97 - ETA: 5s - loss: 0.0707 - accuracy: 0.97 - ETA: 5s - loss: 0.0706 - accuracy: 0.97 - ETA: 5s - loss: 0.0706 - accuracy: 0.97 - ETA: 5s - loss: 0.0706 - accuracy: 0.97 - ETA: 5s - loss: 0.0706 - accuracy: 0.97 - ETA: 5s - loss: 0.0705 - accuracy: 0.97 - ETA: 4s - loss: 0.0705 - accuracy: 0.97 - ETA: 4s - loss: 0.0704 - accuracy: 0.97 - ETA: 4s - loss: 0.0704 - accuracy: 0.97 - ETA: 4s - loss: 0.0704 - accuracy: 0.97 - ETA: 4s - loss: 0.0703 - accuracy: 0.97 - ETA: 4s - loss: 0.0703 - accuracy: 0.97 - ETA: 4s - loss: 0.0702 - accuracy: 0.97 - ETA: 4s - loss: 0.0702 - accuracy: 0.97 - ETA: 4s - loss: 0.0702 - accuracy: 0.97 - ETA: 4s - loss: 0.0701 - accuracy: 0.97 - ETA: 4s - loss: 0.0701 - accuracy: 0.97 - ETA: 4s - loss: 0.0700 - accuracy: 0.97 - ETA: 4s - loss: 0.0700 - accuracy: 0.97 - ETA: 4s - loss: 0.0700 - accuracy: 0.97 - ETA: 4s - loss: 0.0699 - accuracy: 0.97 - ETA: 4s - loss: 0.0699 - accuracy: 0.97 - ETA: 4s - loss: 0.0699 - accuracy: 0.97 - ETA: 4s - loss: 0.0698 - accuracy: 0.97 - ETA: 3s - loss: 0.0698 - accuracy: 0.97 - ETA: 3s - loss: 0.0698 - accuracy: 0.97 - ETA: 3s - loss: 0.0698 - accuracy: 0.97 - ETA: 3s - loss: 0.0697 - accuracy: 0.97 - ETA: 3s - loss: 0.0697 - accuracy: 0.97 - ETA: 3s - loss: 0.0696 - accuracy: 0.97 - ETA: 3s - loss: 0.0696 - accuracy: 0.97 - ETA: 3s - loss: 0.0696 - accuracy: 0.97 - ETA: 3s - loss: 0.0695 - accuracy: 0.97 - ETA: 3s - loss: 0.0695 - accuracy: 0.97 - ETA: 3s - loss: 0.0694 - accuracy: 0.97 - ETA: 3s - loss: 0.0694 - accuracy: 0.97 - ETA: 3s - loss: 0.0694 - accuracy: 0.97 - ETA: 3s - loss: 0.0693 - accuracy: 0.97 - ETA: 3s - loss: 0.0693 - accuracy: 0.97 - ETA: 3s - loss: 0.0693 - accuracy: 0.97 - ETA: 3s - loss: 0.0692 - accuracy: 0.97 - ETA: 3s - loss: 0.0692 - accuracy: 0.97 - ETA: 2s - loss: 0.0692 - accuracy: 0.97 - ETA: 2s - loss: 0.0691 - accuracy: 0.97 - ETA: 2s - loss: 0.0691 - accuracy: 0.97 - ETA: 2s - loss: 0.0691 - accuracy: 0.97 - ETA: 2s - loss: 0.0691 - accuracy: 0.97 - ETA: 2s - loss: 0.0690 - accuracy: 0.97 - ETA: 2s - loss: 0.0690 - accuracy: 0.97 - ETA: 2s - loss: 0.0689 - accuracy: 0.97 - ETA: 2s - loss: 0.0689 - accuracy: 0.97 - ETA: 2s - loss: 0.0688 - accuracy: 0.97 - ETA: 2s - loss: 0.0688 - accuracy: 0.97 - ETA: 2s - loss: 0.0688 - accuracy: 0.97 - ETA: 2s - loss: 0.0687 - accuracy: 0.97 - ETA: 2s - loss: 0.0687 - accuracy: 0.97 - ETA: 2s - loss: 0.0687 - accuracy: 0.97 - ETA: 2s - loss: 0.0686 - accuracy: 0.97 - ETA: 2s - loss: 0.0686 - accuracy: 0.97 - ETA: 2s - loss: 0.0686 - accuracy: 0.97 - ETA: 2s - loss: 0.0686 - accuracy: 0.97 - ETA: 1s - loss: 0.0686 - accuracy: 0.97 - ETA: 1s - loss: 0.0685 - accuracy: 0.97 - ETA: 1s - loss: 0.0685 - accuracy: 0.97 - ETA: 1s - loss: 0.0684 - accuracy: 0.97 - ETA: 1s - loss: 0.0684 - accuracy: 0.97 - ETA: 1s - loss: 0.0684 - accuracy: 0.97 - ETA: 1s - loss: 0.0683 - accuracy: 0.97 - ETA: 1s - loss: 0.0683 - accuracy: 0.97 - ETA: 1s - loss: 0.0683 - accuracy: 0.97 - ETA: 1s - loss: 0.0682 - accuracy: 0.97 - ETA: 1s - loss: 0.0682 - accuracy: 0.97 - ETA: 1s - loss: 0.0681 - accuracy: 0.97 - ETA: 1s - loss: 0.0681 - accuracy: 0.97 - ETA: 1s - loss: 0.0681 - accuracy: 0.97 - ETA: 1s - loss: 0.0680 - accuracy: 0.97 - ETA: 1s - loss: 0.0680 - accuracy: 0.97 - ETA: 1s - loss: 0.0679 - accuracy: 0.97 - ETA: 0s - loss: 0.0679 - accuracy: 0.97 - ETA: 0s - loss: 0.0679 - accuracy: 0.97 - ETA: 0s - loss: 0.0678 - accuracy: 0.97 - ETA: 0s - loss: 0.0678 - accuracy: 0.97 - ETA: 0s - loss: 0.0678 - accuracy: 0.97 - ETA: 0s - loss: 0.0677 - accuracy: 0.97 - ETA: 0s - loss: 0.0677 - accuracy: 0.97 - ETA: 0s - loss: 0.0677 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0675 - accuracy: 0.97 - ETA: 0s - loss: 0.0675 - accuracy: 0.97 - ETA: 0s - loss: 0.0674 - accuracy: 0.97 - ETA: 0s - loss: 0.0674 - accuracy: 0.97 - ETA: 0s - loss: 0.0674 - accuracy: 0.97 - ETA: 0s - loss: 0.0673 - accuracy: 0.97 - ETA: 0s - loss: 0.0673 - accuracy: 0.97 - 67s 37us/step - loss: 0.0673 - accuracy: 0.9757 - val_loss: 0.0188 - val_accuracy: 0.9944
Epoch 2/2
 255680/1806870 [===>..........................] - ETA: 1:52:37 - loss: 0.0028 - accuracy: 1.000 - ETA: 4:13 - loss: 0.0256 - accuracy: 0.9901  - ETA: 2:40 - loss: 0.0382 - accuracy: 0.98 - ETA: 2:10 - loss: 0.0324 - accuracy: 0.99 - ETA: 1:57 - loss: 0.0302 - accuracy: 0.99 - ETA: 1:47 - loss: 0.0321 - accuracy: 0.98 - ETA: 1:41 - loss: 0.0311 - accuracy: 0.98 - ETA: 1:36 - loss: 0.0318 - accuracy: 0.98 - ETA: 1:34 - loss: 0.0307 - accuracy: 0.98 - ETA: 1:34 - loss: 0.0307 - accuracy: 0.98 - ETA: 1:33 - loss: 0.0352 - accuracy: 0.98 - ETA: 1:32 - loss: 0.0353 - accuracy: 0.98 - ETA: 1:32 - loss: 0.0343 - accuracy: 0.98 - ETA: 1:31 - loss: 0.0347 - accuracy: 0.98 - ETA: 1:29 - loss: 0.0354 - accuracy: 0.98 - ETA: 1:28 - loss: 0.0349 - accuracy: 0.98 - ETA: 1:26 - loss: 0.0345 - accuracy: 0.98 - ETA: 1:25 - loss: 0.0337 - accuracy: 0.98 - ETA: 1:23 - loss: 0.0336 - accuracy: 0.98 - ETA: 1:22 - loss: 0.0330 - accuracy: 0.98 - ETA: 1:22 - loss: 0.0331 - accuracy: 0.98 - ETA: 1:23 - loss: 0.0327 - accuracy: 0.98 - ETA: 1:22 - loss: 0.0322 - accuracy: 0.98 - ETA: 1:21 - loss: 0.0321 - accuracy: 0.98 - ETA: 1:20 - loss: 0.0313 - accuracy: 0.98 - ETA: 1:19 - loss: 0.0306 - accuracy: 0.98 - ETA: 1:17 - loss: 0.0301 - accuracy: 0.98 - ETA: 1:17 - loss: 0.0307 - accuracy: 0.98 - ETA: 1:16 - loss: 0.0304 - accuracy: 0.98 - ETA: 1:16 - loss: 0.0310 - accuracy: 0.98 - ETA: 1:15 - loss: 0.0316 - accuracy: 0.98 - ETA: 1:15 - loss: 0.0318 - accuracy: 0.98 - ETA: 1:15 - loss: 0.0314 - accuracy: 0.98 - ETA: 1:14 - loss: 0.0309 - accuracy: 0.98 - ETA: 1:14 - loss: 0.0306 - accuracy: 0.98 - ETA: 1:13 - loss: 0.0301 - accuracy: 0.98 - ETA: 1:13 - loss: 0.0305 - accuracy: 0.98 - ETA: 1:12 - loss: 0.0301 - accuracy: 0.98 - ETA: 1:12 - loss: 0.0303 - accuracy: 0.98 - ETA: 1:11 - loss: 0.0306 - accuracy: 0.98 - ETA: 1:11 - loss: 0.0302 - accuracy: 0.98 - ETA: 1:11 - loss: 0.0301 - accuracy: 0.98 - ETA: 1:11 - loss: 0.0298 - accuracy: 0.98 - ETA: 1:10 - loss: 0.0296 - accuracy: 0.99 - ETA: 1:10 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:10 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:10 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:09 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:09 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:09 - loss: 0.0299 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0303 - accuracy: 0.98 - ETA: 1:08 - loss: 0.0300 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0301 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0299 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0299 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0297 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0298 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0295 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:05 - loss: 0.0294 - accuracy: 0.99 - ETA: 1:05 - loss: 0.0294 - accuracy: 0.99 - ETA: 1:05 - loss: 0.0295 - accuracy: 0.99 - ETA: 1:05 - loss: 0.0297 - accuracy: 0.99 - ETA: 1:05 - loss: 0.0295 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0296 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0298 - accuracy: 0.98 - ETA: 1:04 - loss: 0.0296 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0295 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0294 - accuracy: 0.98 - ETA: 1:04 - loss: 0.0295 - accuracy: 0.98 - ETA: 1:04 - loss: 0.0294 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0294 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0294 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0286 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0286 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0286 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0287 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0287 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0286 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0286 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0285 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0284 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0283 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0283 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0285 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0285 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0285 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0284 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0283 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0283 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0281 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 59s - loss: 0.0281 - accuracy: 0.9906 - ETA: 59s - loss: 0.0281 - accuracy: 0.990 - ETA: 59s - loss: 0.0280 - accuracy: 0.990 - ETA: 59s - loss: 0.0280 - accuracy: 0.990 - ETA: 59s - loss: 0.0281 - accuracy: 0.990 - ETA: 59s - loss: 0.0280 - accuracy: 0.990 - ETA: 59s - loss: 0.0280 - accuracy: 0.990 - ETA: 59s - loss: 0.0279 - accuracy: 0.990 - ETA: 59s - loss: 0.0279 - accuracy: 0.990 - ETA: 59s - loss: 0.0278 - accuracy: 0.990 - ETA: 59s - loss: 0.0278 - accuracy: 0.990 - ETA: 59s - loss: 0.0277 - accuracy: 0.990 - ETA: 59s - loss: 0.0277 - accuracy: 0.990 - ETA: 59s - loss: 0.0276 - accuracy: 0.990 - ETA: 59s - loss: 0.0276 - accuracy: 0.990 - ETA: 59s - loss: 0.0276 - accuracy: 0.990 - ETA: 59s - loss: 0.0275 - accuracy: 0.990 - ETA: 59s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0273 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0273 - accuracy: 0.990 - ETA: 58s - loss: 0.0273 - accuracy: 0.990 - ETA: 58s - loss: 0.0273 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0272 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0274 - accuracy: 0.9908
 545536/1806870 [========>.....................] - ETA: 57s - loss: 0.0274 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0274 - accuracy: 0.990 - ETA: 57s - loss: 0.0274 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0271 - accuracy: 0.990 - ETA: 56s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0272 - accuracy: 0.990 - ETA: 55s - loss: 0.0272 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.991 - ETA: 55s - loss: 0.0271 - accuracy: 0.991 - ETA: 55s - loss: 0.0270 - accuracy: 0.991 - ETA: 55s - loss: 0.0270 - accuracy: 0.991 - ETA: 55s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0269 - accuracy: 0.991 - ETA: 54s - loss: 0.0271 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0268 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0268 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0268 - accuracy: 0.991 - ETA: 52s - loss: 0.0268 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0270 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0270 - accuracy: 0.991 - ETA: 51s - loss: 0.0270 - accuracy: 0.991 - ETA: 51s - loss: 0.0270 - accuracy: 0.991 - ETA: 51s - loss: 0.0270 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0268 - accuracy: 0.991 - ETA: 51s - loss: 0.0268 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 50s - loss: 0.0273 - accuracy: 0.991 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.991 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.991 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 49s - loss: 0.0273 - accuracy: 0.991 - ETA: 49s - loss: 0.0273 - accuracy: 0.991 - ETA: 49s - loss: 0.0273 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0269 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0269 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0271 - accuracy: 0.991 - ETA: 47s - loss: 0.0271 - accuracy: 0.991 - ETA: 47s - loss: 0.0271 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0270 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 45s - loss: 0.0269 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0266 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0266 - accuracy: 0.991 - ETA: 43s - loss: 0.0266 - accuracy: 0.991 - ETA: 43s - loss: 0.0266 - accuracy: 0.9911
 827360/1806870 [============>.................] - ETA: 43s - loss: 0.0266 - accuracy: 0.991 - ETA: 43s - loss: 0.0266 - accuracy: 0.991 - ETA: 43s - loss: 0.0265 - accuracy: 0.991 - ETA: 43s - loss: 0.0265 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 42s - loss: 0.0264 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0263 - accuracy: 0.991 - ETA: 41s - loss: 0.0263 - accuracy: 0.991 - ETA: 41s - loss: 0.0263 - accuracy: 0.991 - ETA: 41s - loss: 0.0263 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 39s - loss: 0.0260 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0257 - accuracy: 0.991 - ETA: 39s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0255 - accuracy: 0.991 - ETA: 38s - loss: 0.0255 - accuracy: 0.991 - ETA: 38s - loss: 0.0255 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 37s - loss: 0.0256 - accuracy: 0.991 - ETA: 37s - loss: 0.0256 - accuracy: 0.991 - ETA: 37s - loss: 0.0256 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0255 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0255 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0252 - accuracy: 0.991 - ETA: 34s - loss: 0.0252 - accuracy: 0.991 - ETA: 34s - loss: 0.0252 - accuracy: 0.991 - ETA: 33s - loss: 0.0252 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.9916
1088864/1806870 [=================>............] - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0249 - accuracy: 0.991 - ETA: 33s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0250 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0250 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 26s - loss: 0.0243 - accuracy: 0.991 - ETA: 26s - loss: 0.0243 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0241 - accuracy: 0.991 - ETA: 26s - loss: 0.0241 - accuracy: 0.991 - ETA: 26s - loss: 0.0241 - accuracy: 0.991 - ETA: 26s - loss: 0.0241 - accuracy: 0.992 - ETA: 26s - loss: 0.0241 - accuracy: 0.992 - ETA: 26s - loss: 0.0241 - accuracy: 0.992 - ETA: 26s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.9920
1370528/1806870 [=====================>........] - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 21s - loss: 0.0237 - accuracy: 0.992 - ETA: 21s - loss: 0.0237 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0235 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 18s - loss: 0.0234 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0229 - accuracy: 0.992 - ETA: 15s - loss: 0.0229 - accuracy: 0.992 - ETA: 15s - loss: 0.0229 - accuracy: 0.992 - ETA: 15s - loss: 0.0229 - accuracy: 0.9923
1661024/1806870 [==========================>...] - ETA: 15s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 9s - loss: 0.0225 - accuracy: 0.992 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0222 - accuracy: 0.99 - ETA: 7s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.9926
1806870/1806870 [==============================] - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - 65s 36us/step - loss: 0.0216 - accuracy: 0.9928 - val_loss: 0.0080 - val_accuracy: 0.9976
In [8]:
#AUXILIARY METHODS FOR FEDERATED LEARNING

# RETURN INDICES TO LAYERS WITH WEIGHTS AND BIASES
def trainable_layers(model):
    return [i for i, layer in enumerate(model.layers) if len(layer.get_weights()) > 0]

# RETURN WEIGHTS AND BIASES OF A MODEL
def get_parameters(model):
    weights = []
    biases = []
    index = trainable_layers(model)
    for i in index:
        weights.append(copy.deepcopy(model.layers[i].get_weights()[0]))
        biases.append(copy.deepcopy(model.layers[i].get_weights()[1]))           
    
    return weights, biases
        
# SET WEIGHTS AND BIASES OF A MODEL
def set_parameters(model, weights, biases):
    index = trainable_layers(model)
    for i, j in enumerate(index):
        model.layers[j].set_weights([weights[i], biases[i]])
    
# DEPRECATED: RETURN THE GRADIENTS OF THE MODEL AFTER AN UPDATE 
def get_gradients(model, inputs, outputs):
    """ Gets gradient of model for given inputs and outputs for all weights"""
    grads = model.optimizer.get_gradients(model.total_loss, model.trainable_weights)
    symb_inputs = (model._feed_inputs + model._feed_targets + model._feed_sample_weights)
    f = K.function(symb_inputs, grads)
    x, y, sample_weight = model._standardize_user_data(inputs, outputs)
    output_grad = f(x + y + sample_weight)
    
    w_grad = [w for i,w in enumerate(output_grad) if i%2==0]
    b_grad = [w for i,w in enumerate(output_grad) if i%2==1]
    
    return w_grad, b_grad

# RETURN THE DIFFERENCE OF MODELS' WEIGHTS AND BIASES AFTER AN UPDATE 
# NOTE: LEARNING RATE IS APPLIED, SO THE UPDATE IS DIFFERENT FROM THE
# GRADIENTS. IN CASE VANILLA SGD IS USED, THE GRADIENTS ARE OBTAINED
# AS (UPDATES / LEARNING_RATE)
def get_updates(model, inputs, outputs, batch_size, epochs):
    w, b = get_parameters(model)
    #model.train_on_batch(inputs, outputs)
    model.fit(inputs, outputs, batch_size=batch_size, epochs=epochs, verbose=0)
    w_new, b_new = get_parameters(model)
    
    weight_updates = [old - new for old,new in zip(w, w_new)]
    bias_updates = [old - new for old,new in zip(b, b_new)]
    
    return weight_updates, bias_updates

# UPDATE THE MODEL'S WEIGHTS AND PARAMETERS WITH AN UPDATE
def apply_updates(model, eta, w_new, b_new):
    w, b = get_parameters(model)
    new_weights = [theta - eta*delta for theta,delta in zip(w, w_new)]
    new_biases = [theta - eta*delta for theta,delta in zip(b, b_new)]
    set_parameters(model, new_weights, new_biases)
    
# FEDERATED AGGREGATION FUNCTION
def aggregate(n_layers, n_peers, f, w_updates, b_updates):
    agg_w = [f([w_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]
    agg_b = [f([b_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]
    return agg_w, agg_b

# SOLVE NANS
def nans_to_zero(W, B):
    W0 = [np.nan_to_num(w, nan=0.0, posinf=0.0, neginf=0.0) for w in W]
    B0 = [np.nan_to_num(b, nan=0.0, posinf=0.0, neginf=0.0) for b in B]
    return W0, B0

def build_forest(X,y):
    clf=RandomForestClassifier(n_estimators=1000, max_depth=7, random_state=0, verbose = 1)
    clf.fit(X,y)
    return clf

# COMPUTE EUCLIDEAN DISTANCE OF WEIGHTS
def dist_weights(w_a, w_b):
    wf_a = flatten_weights(w_a)
    wf_b = flatten_weights(w_b)
    return euclidean(wf_a, wf_b)

# TRANSFORM ALL WEIGHT TENSORS TO 1D ARRAY
def flatten_weights(w_in):
    h = w_in[0].reshape(-1)
    for w in w_in[1:]:
        h = np.append(h, w.reshape(-1))
    return h
    
In [9]:
def byzantine_attack_data(inputs):
    attack_persentage = 40
    number_of_attacked_samples = len(inputs) * attack_persentage /100
    number_of_attacked_samples = int(number_of_attacked_samples)
    sampels_attacked =  random.sample(range(len(inputs)), number_of_attacked_samples)
    if data_set == 'adult':
        z=0
        C=0
        z=inputs.max(axis = 0)
        C=inputs.min(axis = 0)
        for i in range(len(inputs)):
            if i in sampels_attacked:
                for j in range(len(inputs[0])):
                    inputs[i][j]= random.uniform(z[j], C[j])
    return inputs
In [10]:
def poisoning_attack_data(h, feature_attacked):
    attack_persentage = 60
    number_of_attacked_samples = len(h) * attack_persentage /100
    number_of_attacked_samples = int(number_of_attacked_samples)
    sampels_attacked =  random.sample(range(len(h)), number_of_attacked_samples)
    if data_set == 'adult':
        z=0
        C=0
        z=h.max(axis = 0)
        C=h.min(axis = 0)
        for i in range(len(h)):
            if i in sampels_attacked:
                for j in range(len(feature_attacked)):
                    h[i][feature_attacked[j]]= random.uniform(z[feature_attacked[j]], C[feature_attacked[j]])
    return h
In [11]:
def label_flipping_attack_data(z):
    attack_persentage = 50
    number_of_attacked_samples = len(z) * attack_persentage /100
    number_of_attacked_samples = int(number_of_attacked_samples)
    sampels_attacked =  random.sample(range(len(z)), number_of_attacked_samples)
    if data_set == 'adult':
        for i in range(len(z)):
            if i in sampels_attacked:
                for j in range(len(z[i])):
                    if z[i][j] == 0:
                        z[i][j] = 1
                    else:
                        z[i][j] = 0
                    
    return z
In [12]:
# scan the forest for trees maches the wrong predictions of the black-box
def scan_wrong(forest_predictions, FL_predict1, forest , y_test_local, X_test_local):
    sum_feature_improtance= 0
    overal_wrong_feature_importance = 0
    counter = 0
    second_counter = 0
    never_seen = 0
    avr_wrong_importance = 0
    FL_predict1 = np.argmax(FL_predict1, axis=1)
    forest_predictions = np.argmax(forest_predictions, axis=1)
    y_test_local = np.argmax(y_test_local, axis=1)
    FL_wrong = 0
    for i in range (len(FL_predict1)):
        i_tree = 0
#         if the black-box got a wrong prediction
        if (FL_predict1[i] != y_test_local[i]):
            FL_wrong = FL_wrong + 1
#         getting the prediction of the trees one by one
            for tree_in_forest in forest.estimators_:
                sample = X_test_local[i].reshape(1, -1)
                temp = forest.estimators_[i_tree].predict(sample)
                temp =  np.argmax(temp, axis=1)
#                 print('the prediction of the t')
#                 print(temp)
                i_tree = i_tree + 1
#  if the prediction of the tree maches the predictions of the black-box
                if(FL_predict1[i] == temp):
#         getting the features importances
                    sum_feature_improtance = sum_feature_improtance + tree_in_forest.feature_importances_
                    counter = counter + 1
#         if we have trees maches the black-box predictions
        if(counter>0):
            ave_feature_importence = sum_feature_improtance/counter
            overal_wrong_feature_importance = ave_feature_importence + overal_wrong_feature_importance
            second_counter = second_counter + 1
            counter = 0
            sum_feature_improtance = 0
#             if there is no trees maches the black-box predictions
        else:
            if(FL_predict1[i] != y_test_local[i]):
                never_seen = never_seen +1

#                 getting the average features importances for all the samples that had wrong predictions.
    if(second_counter>0):
        avr_wrong_importance = overal_wrong_feature_importance / second_counter
    return avr_wrong_importance
In [13]:
trainable_layers(model)
Out[13]:
[0, 1, 2, 3]
In [14]:
get_parameters(model)
Out[14]:
([array([[ 0.08338594, -0.1254255 , -0.49211267, ...,  0.24750227,
          -0.65377104, -0.05224424],
         [-0.01734522, -0.1021487 ,  0.14053608, ..., -0.28375858,
          -0.15333907, -0.21018188],
         [ 0.0459124 ,  0.17106615, -0.2748136 , ...,  0.29032764,
          -0.12925142,  0.06985184],
         ...,
         [-0.15448453,  0.17909111,  0.18795453, ...,  0.46541557,
           0.01830631, -0.0534319 ],
         [-0.15942477,  0.09147607,  0.06007228, ...,  0.11995307,
           0.5220185 , -0.16314192],
         [ 0.09670959, -0.1825741 , -0.24682267, ...,  0.21973692,
           0.26263914,  0.1036981 ]], dtype=float32),
  array([[-0.13370994,  0.05705584, -0.01060855, ...,  0.03942909,
          -0.05360872,  0.18457419],
         [ 0.18660966,  0.02259383, -0.06519584, ...,  0.12901685,
          -0.03150385, -0.05975418],
         [-0.18495369, -0.3026115 ,  0.15492044, ...,  0.3693038 ,
           0.42332208,  0.17241667],
         ...,
         [-0.41482276, -0.14596964, -0.10000814, ..., -0.07026828,
          -0.13250498, -0.11739882],
         [-0.1614849 , -0.264906  ,  0.1812628 , ...,  0.51671666,
           0.15924722,  0.18865098],
         [-0.22190864,  0.16413453, -0.15392351, ..., -0.02723815,
          -0.19859377, -0.2072882 ]], dtype=float32),
  array([[ 0.47770685, -0.7960992 ,  0.4646107 , ...,  0.04795517,
          -0.10576709,  0.5649921 ],
         [-0.37207457, -0.25275326, -0.18699329, ...,  0.24836566,
          -0.3587133 , -0.03917937],
         [ 0.087133  , -0.14499295, -0.01567947, ..., -0.02826147,
           0.18539242, -0.2073498 ],
         ...,
         [-0.54771024, -0.56772953, -1.081203  , ...,  0.84306604,
           0.14480972,  0.12935087],
         [-1.2092329 ,  0.72066   , -0.76670545, ...,  0.3710571 ,
          -0.02872824,  0.13071369],
         [-0.24432653, -0.20408279,  0.18815796, ..., -0.24326074,
          -0.81241286, -0.38458872]], dtype=float32),
  array([[ 1.1354454 , -0.8150371 ],
         [ 0.8239793 , -0.38024253],
         [ 0.18105277, -0.22851984],
         [ 0.36343297, -0.68712914],
         [-0.14369975,  0.20164204],
         [-1.3597993 ,  1.1511468 ],
         [ 0.17626402, -0.44736794],
         [ 0.783137  , -1.2214484 ],
         [ 0.2721217 , -0.06518261],
         [ 0.8918733 , -0.77505213],
         [-0.656868  ,  1.1960154 ],
         [-1.6819351 ,  1.3945241 ],
         [ 0.61881614, -0.7248123 ],
         [-0.21632305,  0.10406036],
         [ 0.17342244,  0.00396303],
         [ 1.9393425 , -2.2392154 ],
         [ 0.25582054, -0.09821833],
         [-1.4448843 ,  1.351973  ],
         [-0.7736038 ,  1.014288  ],
         [ 1.5285544 , -1.3971529 ],
         [ 2.319214  , -1.9343866 ],
         [ 0.4609306 , -0.22342049],
         [ 0.17103793, -0.22433195],
         [ 0.84780014, -0.5512217 ],
         [ 0.16438304, -0.4931989 ],
         [ 1.15009   , -1.2441653 ],
         [-0.7512086 ,  0.69693834],
         [-1.7812865 ,  1.8653326 ],
         [ 0.9114694 , -1.0244646 ],
         [ 0.68447626, -0.45571706],
         [-0.9834174 ,  1.2978134 ],
         [ 2.1663804 , -1.6653944 ],
         [-0.2522568 , -0.24315625],
         [-0.30284685,  0.6635906 ],
         [-1.0407605 ,  0.75330424],
         [-1.5959861 ,  1.6432168 ],
         [-1.6533945 ,  1.4116566 ],
         [-1.3818094 ,  0.96237814],
         [ 0.49473518, -0.7128965 ],
         [-1.1544157 ,  1.2103665 ],
         [-0.31084502, -0.06416508],
         [ 0.07730638, -0.34380186],
         [ 0.00517065, -0.07974567],
         [ 0.76227933, -0.46143502],
         [-2.0212495 ,  2.1503792 ],
         [-1.419235  ,  1.1352861 ],
         [ 0.40954936, -0.76466006],
         [-0.592412  ,  1.0149235 ],
         [-0.9884663 ,  1.3155804 ],
         [-1.633649  ,  1.6489463 ]], dtype=float32)],
 [array([-1.58836432e-02, -1.20011568e-02, -2.30990946e-02,  0.00000000e+00,
          0.00000000e+00,  0.00000000e+00, -6.30686581e-02, -6.26324415e-02,
         -3.39236371e-02, -6.14111274e-02,  0.00000000e+00, -4.05886732e-02,
          0.00000000e+00, -6.10261457e-03, -3.06573324e-02,  0.00000000e+00,
         -1.63990387e-03,  0.00000000e+00, -1.11483254e-01, -1.81604289e-02,
         -5.61644835e-03,  1.37938242e-02,  0.00000000e+00, -2.23437846e-02,
          0.00000000e+00, -6.16606697e-02,  1.70837268e-01,  5.55998720e-02,
         -1.64261945e-02,  1.20512873e-01, -6.44331053e-03,  0.00000000e+00,
         -3.33447531e-02, -5.07780984e-02,  3.10869161e-02,  7.47342259e-02,
          7.46760815e-02, -5.52449860e-02, -8.27614740e-02, -2.39871517e-02,
         -3.01994607e-02, -4.17327993e-02, -1.15456417e-01,  8.73452723e-02,
         -7.59378746e-02,  0.00000000e+00, -3.00336909e-02,  6.94403024e-09,
         -4.00215089e-02, -1.37547646e-02, -4.61019538e-02,  5.88711686e-02,
         -6.63833246e-02, -2.25303844e-02, -1.15773613e-02,  3.89949568e-02,
         -3.27692814e-02, -1.54451123e-02,  1.45327836e-01, -7.80039234e-03,
         -3.16219591e-02,  0.00000000e+00,  1.14573305e-02, -2.14286316e-02,
         -3.89366113e-02,  1.13435954e-01, -4.89240699e-02, -1.66841432e-01,
         -3.22352685e-02, -1.43911066e-02], dtype=float32),
  array([-1.1628173e-01,  1.4605005e-01, -8.3894879e-02, -1.7480729e-02,
          9.5676459e-02, -1.5144591e-01, -8.5667908e-02, -2.8433751e-02,
         -5.2900422e-02, -4.8166331e-02,  3.3510438e-01, -9.0990268e-02,
          5.9481107e-02,  4.4261031e-02,  1.3082844e-01, -1.3972101e-01,
         -7.2492525e-02, -3.3382095e-02, -2.3739910e-01, -1.8229088e-01,
         -9.5385693e-02,  3.0102465e-02,  6.0827412e-02,  3.3912200e-01,
          7.5339697e-02, -2.1391104e-01, -1.0858524e-01,  3.6047959e-01,
         -3.7770301e-01,  9.7297080e-02, -1.0461237e-01, -7.4935108e-02,
         -8.2315236e-02,  1.9678907e-01,  1.1734279e-06, -2.2777809e-01,
         -1.2057750e-01, -2.1283591e-02,  1.3623047e-01, -1.9323155e-01,
         -3.3379752e-02, -3.5926573e-02,  2.8303096e-01,  9.3418181e-02,
         -1.3997810e-01,  9.8838598e-02, -2.3493488e-01, -1.9615906e-01,
         -1.0158879e-02, -7.1108431e-02], dtype=float32),
  array([-0.33614156, -0.19233358, -0.02823093,  0.48520288,  0.23908217,
          0.27522194,  0.01314433, -0.01497711, -0.0828162 , -0.23802279,
          0.32634258,  0.12124245,  0.18269299,  0.08887233, -0.06745057,
         -0.556095  , -0.11532133,  0.40208554, -0.01085546,  0.4505257 ,
         -0.36126408,  0.01500582,  0.07917931,  0.09815152,  0.33817917,
         -0.2813558 ,  0.35876733, -0.36583313, -0.1797759 , -0.13289435,
          0.04141649, -0.31939384, -0.10274614,  0.1994237 , -0.14661174,
         -0.42591807,  0.28506443,  0.04345732,  0.2656843 , -0.30315512,
         -0.07892313, -0.06843591, -0.09118145,  0.41733742,  0.17977186,
          0.25952908,  0.4197849 ,  0.3480925 , -0.3922683 , -0.05383924],
        dtype=float32),
  array([ 0.30379787, -0.3038005 ], dtype=float32)])
In [15]:
get_updates(model, X_train, y_train, 32, 2)
Out[15]:
([array([[ 2.8014183e-06,  0.0000000e+00, -4.5895576e-06, ...,
          -2.2205949e-02,  0.0000000e+00,  0.0000000e+00],
         [ 2.1923333e-06,  0.0000000e+00, -7.8976154e-06, ...,
           4.9105823e-02,  0.0000000e+00,  0.0000000e+00],
         [ 1.5608966e-06,  0.0000000e+00, -3.7252903e-06, ...,
           7.3180795e-03,  0.0000000e+00,  0.0000000e+00],
         ...,
         [ 1.5050173e-06,  0.0000000e+00, -7.4207783e-06, ...,
          -2.6787940e-01,  0.0000000e+00,  0.0000000e+00],
         [ 1.8626451e-06,  0.0000000e+00, -8.1695616e-06, ...,
           1.6834244e-02,  0.0000000e+00,  0.0000000e+00],
         [ 1.7285347e-06,  0.0000000e+00, -4.6193600e-06, ...,
          -5.3731605e-02,  0.0000000e+00,  0.0000000e+00]], dtype=float32),
  array([[ 0.0000000e+00,  0.0000000e+00,  0.0000000e+00, ...,
          -4.1723251e-07, -1.3783574e-07,  0.0000000e+00],
         [ 0.0000000e+00,  0.0000000e+00,  0.0000000e+00, ...,
           0.0000000e+00,  0.0000000e+00,  0.0000000e+00],
         [ 0.0000000e+00,  0.0000000e+00,  0.0000000e+00, ...,
           0.0000000e+00,  0.0000000e+00,  0.0000000e+00],
         ...,
         [-8.3480060e-02, -7.9958737e-03,  0.0000000e+00, ...,
           1.0635569e-01, -3.4096837e-04,  6.9890119e-02],
         [ 0.0000000e+00,  0.0000000e+00,  0.0000000e+00, ...,
           0.0000000e+00,  0.0000000e+00,  0.0000000e+00],
         [ 0.0000000e+00,  0.0000000e+00,  0.0000000e+00, ...,
           0.0000000e+00,  0.0000000e+00,  0.0000000e+00]], dtype=float32),
  array([[-0.04616958,  0.59762466, -0.26143235, ..., -0.06850739,
          -0.4217179 ,  0.08980078],
         [-0.17200631,  0.3219571 ,  0.27451754, ...,  0.07618259,
           0.48217025,  0.26553166],
         [ 0.        ,  0.        ,  0.        , ...,  0.        ,
           0.        ,  0.        ],
         ...,
         [ 0.37040782,  0.77656084, -0.17277646, ..., -0.04284477,
           0.27722144, -0.29050782],
         [ 0.49921095, -0.3744073 , -0.79761034, ...,  0.06160343,
           0.26952648,  0.44993538],
         [ 0.12304485,  0.08797711,  0.14218739, ...,  0.40071172,
           0.43845785,  0.22023249]], dtype=float32),
  array([[ 0.01556432, -0.01553738],
         [-0.80137116,  0.8013473 ],
         [ 0.05863538, -0.05863395],
         [ 0.09222463, -0.09223729],
         [ 0.22395234, -0.22392958],
         [-0.25126195,  0.25127405],
         [ 0.19329323, -0.1932973 ],
         [-0.42185718,  0.4218619 ],
         [-0.09126899,  0.09127331],
         [-0.13490325,  0.13491231],
         [ 0.07229513, -0.07227075],
         [-0.1858536 ,  0.18585992],
         [ 0.3444129 , -0.3444115 ],
         [-0.04673225,  0.04675576],
         [-0.744485  ,  0.7444947 ],
         [ 0.59019566, -0.5901569 ],
         [ 0.00766785, -0.0076676 ],
         [ 0.1348064 , -0.13479984],
         [ 0.19738197, -0.19736266],
         [ 0.20221901, -0.20221877],
         [ 0.2895143 , -0.28952658],
         [ 0.20282978, -0.20283176],
         [-0.76195276,  0.7619654 ],
         [ 0.26977128, -0.2697831 ],
         [ 0.3250053 , -0.32500228],
         [-0.15308547,  0.15309238],
         [-0.10742784,  0.10744089],
         [-0.15101504,  0.15102363],
         [-0.6646027 ,  0.6646162 ],
         [ 0.23995936, -0.23995501],
         [-0.30166405,  0.30165863],
         [ 0.4730208 , -0.47301006],
         [ 0.        ,  0.        ],
         [-0.02773407,  0.02774608],
         [ 0.07481575, -0.07481062],
         [-0.51466   ,  0.51466644],
         [-0.02005684,  0.02005637],
         [-0.11586785,  0.11588269],
         [ 0.16156894, -0.16156316],
         [-0.30240393,  0.3024137 ],
         [ 0.8963616 , -0.89635366],
         [-0.06791203,  0.06791377],
         [-0.17452781,  0.17454016],
         [ 0.08860135, -0.08860841],
         [-0.6942698 ,  0.69428754],
         [-0.17503893,  0.17506146],
         [ 0.16636667, -0.16637546],
         [-0.25057983,  0.25059932],
         [ 0.2041834 , -0.20417154],
         [-0.01968396,  0.01968992]], dtype=float32)],
 [array([ 3.3285469e-06,  0.0000000e+00, -1.2369826e-05,  0.0000000e+00,
          0.0000000e+00,  0.0000000e+00,  0.0000000e+00,  0.0000000e+00,
          0.0000000e+00,  0.0000000e+00,  0.0000000e+00,  8.8644736e-03,
          0.0000000e+00,  0.0000000e+00,  0.0000000e+00,  0.0000000e+00,
          0.0000000e+00,  0.0000000e+00,  6.6848241e-02,  0.0000000e+00,
          0.0000000e+00, -2.9313583e-03,  0.0000000e+00,  0.0000000e+00,
          0.0000000e+00,  0.0000000e+00, -9.5326707e-02, -3.0721266e-02,
          8.1290156e-03, -6.8495750e-02,  0.0000000e+00,  0.0000000e+00,
          1.8126052e-02,  0.0000000e+00,  0.0000000e+00, -6.3957557e-02,
         -4.2761512e-02, -4.0137991e-03,  2.5934458e-02,  0.0000000e+00,
          0.0000000e+00,  4.7971878e-02,  8.1122592e-02, -6.5605357e-02,
          0.0000000e+00,  0.0000000e+00, -1.6420111e-03,  5.2435798e-09,
          0.0000000e+00,  0.0000000e+00,  4.5959245e-02, -1.8143710e-02,
          9.4908625e-03,  4.1580014e-04,  0.0000000e+00, -6.7653313e-02,
          0.0000000e+00,  0.0000000e+00, -1.2950024e-01,  0.0000000e+00,
          0.0000000e+00,  0.0000000e+00, -1.7437223e-02,  0.0000000e+00,
         -2.3985766e-03, -8.6141318e-02,  0.0000000e+00,  1.0811789e-01,
          0.0000000e+00,  0.0000000e+00], dtype=float32),
  array([-0.00627401, -0.191559  ,  0.        ,  0.00838837,  0.0529352 ,
          0.12782945,  0.        ,  0.0156331 ,  0.        ,  0.        ,
         -0.11151084,  0.0588697 , -0.02708764,  0.07176588, -0.02906452,
          0.11350146,  0.        ,  0.06241233,  0.14389527,  0.15049464,
          0.1770727 , -0.03121255,  0.02478044, -0.2777393 , -0.09406517,
          0.12997295,  0.13041441, -0.1484769 ,  0.2955907 ,  0.00798142,
          0.        ,  0.        ,  0.0003342 ,  0.05636339,  0.        ,
          0.06773217,  0.04719278,  0.12417503, -0.039441  ,  0.14855272,
          0.03889409,  0.10604867, -0.0547139 , -0.06550588,  0.14823543,
          0.01506494,  0.08808593,  0.1861152 ,  0.01942448,  0.11284278],
        dtype=float32),
  array([ 2.54564315e-01,  1.08867824e-01,  5.61506003e-02, -1.75794929e-01,
          2.33219430e-01, -1.58185065e-02, -1.31240949e-01,  8.01615715e-02,
          2.55392641e-02,  3.14171731e-01, -1.29695922e-01, -5.60564399e-02,
         -3.62583399e-02,  3.38414431e-01,  2.22075596e-01,  1.01215661e-01,
          2.62457654e-02, -2.69618690e-01,  1.52954599e-02, -1.20112836e-01,
          2.02051371e-01, -7.11961389e-02, -3.22458982e-01,  2.04761773e-01,
          9.38781500e-02,  2.39238501e-01, -9.64630842e-02,  2.50120312e-01,
         -3.17509770e-02,  2.22122446e-01,  3.26190665e-02,  4.22861874e-02,
         -2.37673521e-06,  4.52675968e-02,  2.75943756e-01,  1.95241719e-01,
         -2.34854549e-01, -7.68669993e-02, -7.01770782e-02,  2.47255087e-01,
          2.31929541e-01,  1.45128936e-01,  4.07781303e-01, -2.12165058e-01,
          3.37380767e-02, -2.01950014e-01, -1.65518045e-01, -4.14324701e-02,
          4.91949677e-01,  2.43567675e-01], dtype=float32),
  array([-0.139424  ,  0.13942933], dtype=float32)])
In [16]:
W = get_parameters(model)[0]
B = get_parameters(model)[1]
In [17]:
# BASELINE SCENARIO
#buid the model as base line for the shards (sequential)
# Number of peers
#accordin to what we need
ss = int(len(X_train)/n_peers)
inputs_in = X_train[0*ss:0*ss+ss]
outputs_in = y_train[0*ss:0*ss+ss]
def build_model(X_t, y_t):
    model = Sequential()
    model.add(Dense(70, input_dim=Features_number, activation='relu'))
    model.add(Dense(50, activation='relu'))
    model.add(Dense(50, activation='relu'))
    model.add(Dense(2, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])
    model.fit(X_t,
              y_t,        
              batch_size=32, 
              epochs=250, 
              verbose=0,
              validation_data=((X_test, y_test)))
    return model
In [18]:
display(model.summary())
Model: "sequential_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 70)                2800      
_________________________________________________________________
dense_2 (Dense)              (None, 50)                3550      
_________________________________________________________________
dense_3 (Dense)              (None, 50)                2550      
_________________________________________________________________
dense_4 (Dense)              (None, 2)                 102       
=================================================================
Total params: 9,002
Trainable params: 9,002
Non-trainable params: 0
_________________________________________________________________
None
In [19]:
# predict probabilities for test set
yhat_probs = model.predict(X_test, verbose=0)
# predict crisp classes for test set
yhat_classes = model.predict_classes(X_test, verbose=0)
In [20]:
# accuracy: (tp + tn) / (p + n)
accuracy = accuracy_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))
print('Accuracy: %f' % accuracy)
# precision tp / (tp + fp)
precision = precision_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))
print('Precision: %f' % precision)
# recall: tp / (tp + fn)
recall = recall_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))
print('Recall: %f' % recall)
# f1: 2 tp / (2 tp + fp + fn)
f1 = f1_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))
print('F1 score: %f' % f1)
Accuracy: 0.997338
Precision: 0.996095
Recall: 0.999387
F1 score: 0.997738
In [21]:
# confusion matrix
mat = confusion_matrix(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))

display(mat)
plt.matshow(mat);
plt.colorbar()
plt.show()
array([[55804,   313],
       [   49, 79836]], dtype=int64)
No description has been provided for this image
In [22]:
# the dectinary
FI_dic1= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}
ave_FI_dic= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}
targeted_Features ={0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}
rounds_attack_detected ={0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}
In [ ]:
# select aa random peer to be the scanner peer
peers_selected = random.sample(range(n_peers), number_attackers+1)
scaner = peers_selected[0]
mal = peers_selected[1 :]
if scaner == 0:
    scaner =  random.sample(range(n_peers), 1)

# Percentage and number of peers participating at each global training epoch
percentage_participants = 1.0
n_participants = int(n_peers * percentage_participants)

# the feature you want to attack in case of a poisoning attack
feature_attacked = [3,5,8]

# Number of local training epochs per global training epoch
n_local_rounds = 5

# Local batch size
local_batch_size = 32

# Local learning rate
local_lr = 0.001

# Global learning rate or 'gain'
model_substitution_rate = 1.0

# Attack detection / prevention mechanism = {None, 'distance', 'median', 'accuracy', 'krum'}
discard_outliers = None

# Used in 'dist' attack detection, defines how far the outliers are (1.5 is a typical value)
tau = 1.5

# Used in 'accuracy' attack detection, defines the error margin for the accuracy improvement
sensitivity = 0.05

# Used in 'krum' attack detection, defines how many byzantine attackers we want to defend against
tolerance=4

# Prevent suspicious peers from participating again, only valid for 'dist' and 'accuracy'
ban_malicious = False

# Clear nans and infinites in model updates
clear_nans = True

number_for_threshold1 = numpy.empty(20, dtype=float)
number_for_threshold2 = numpy.empty(20, dtype=float)
for r in range(len(number_for_threshold1)):
    number_for_threshold1[r] = 0
    number_for_threshold2[r] = 0

########################
# ATTACK CONFIGURATION #
########################

# Percentage of malicious peers
r_malicious_peers = 0.0

# Number of malicious peers (absolute or relative to total number of peers)
n_malicious_peers = int(n_peers * r_malicious_peers)
#n_malicious_peers = 1

# Malicious peers
malicious_peer = range(n_malicious_peers)

# Target for coalitions
common_attack_target = [4,7]

# Target class of the attack, per each malicious peer
malicious_targets = dict([(p, t) for p,t in zip(malicious_peer, [common_attack_target]*n_malicious_peers)])

# Boosting parameter per each malicious peer
common_malicious_boost = 12
malicious_boost = dict([(p, b) for p,b in zip(malicious_peer, [common_malicious_boost]*n_malicious_peers)])

###########
# METRICS #
###########
metrics = {'accuracy': [],
          'atk_effectivity': [],
          'update_distances': [],
          'outliers_detected': [],

          'acc_no_target': []}

####################################
# MODEL AND NETWORK INITIALIZATION #
####################################
inputs = X_train[0*ss:0*ss+ss]
outputs = y_train[0*ss:0*ss+ss]
global_model = build_model(inputs,outputs)
n_layers = len(trainable_layers(global_model))

print('Initializing network.')
sleep(1)
network = []
for i in tqdm(range(n_peers)):
    ss = int(len(X_train)/n_peers)
    inputs = X_train[i*ss:i*ss+ss]
    outputs = y_train[i*ss:i*ss+ss]
#     network.append(build_model(inputs, outputs))
    network.append(global_model)


banned_peers = set()

##################
# BEGIN TRAINING #
##################
for t in range(n_rounds):
    print(f'Round {t+1}.')
    sleep(1)

    ## SERVER SIDE #################################################################
    # Fetch global model parameters
    global_weights, global_biases = get_parameters(global_model)

    if clear_nans:
        global_weights, global_biases = nans_to_zero(global_weights, global_biases)

    # Initialize peer update lists
    network_weight_updates = []
    network_bias_updates = []

    # Selection of participant peers in this global training epoch
    if ban_malicious:
        good_peers = list([p for i,p in enumerate(network) if i not in banned_peers])
        n_participants = n_participants if n_participants <= len(good_peers) else int(len(good_peers) * percentage_participants)
        participants = random.sample(list(enumerate(good_peers)), n_participants)
    else:
        participants = random.sample(list(enumerate(network)),n_participants)
    ################################################################################


    ## CLIENT SIDE #################################################################
    for i, local_model in tqdm(participants):

        # Update local model with global parameters 
        set_parameters(local_model, global_weights, global_biases)

        # Initialization of user data
        ss = int(len(X_train)/n_peers)
        inputs = X_train[i*ss:i*ss+ss]
        outputs = y_train[i*ss:i*ss+ss]

# the scanner peer side
        if(i == scaner):
            X_train_local, X_test_local, y_train_local, y_test_local = train_test_split(inputs,outputs, test_size=0.7, random_state=rs)
            inputs = X_train_local
            outputs = y_train_local
            if(t == 0):
                forest = build_forest(X_train_local,y_train_local)
            forest_predictions = forest.predict(X_test_local)
            acc_forest = np.mean([t==p for t,p in zip(y_test_local, forest_predictions)])
            FL_predict1 = global_model.predict(X_test_local)
            imp = scan_wrong(forest_predictions, FL_predict1, forest , y_test_local, X_test_local)
            FI_dic1[t] = imp
            if t > 0:
                ave_FI_dic[t-1] = abs(FI_dic1[t] - FI_dic1[t-1])
                average_overall_changes = 0
                if t > 1:
                    for r in range(0,t):
                        average_overall_changes = average_overall_changes + ave_FI_dic[r]
                    average_overall_changes = average_overall_changes / t
                    total_Changes=0
                    average_overall_changes_one_val = 0
                    for s in range(len(ave_FI_dic[t-1])):
                        total_Changes = total_Changes + ave_FI_dic[t-1][s]
                        average_overall_changes_one_val = average_overall_changes_one_val + average_overall_changes[s]
                    print('total_Changes in this round: ', total_Changes)
                    print('average changes: ' ,average_overall_changes_one_val)
                    threshold1 = alpha * average_overall_changes_one_val
                    print(threshold1)
                    if total_Changes >= threshold1:
                        for ra in range(len(ave_FI_dic[t-1])):
                            rounds_attack_detected[t+1] = 1
                            if ave_FI_dic[t-1][ra] > beta * total_Changes:
                                print('attack have been detected')
                                targeted_Features[t+1] =names[ra]
                                print("attack detected on feature ", names[ra])
                                
                    



# atttacker peer side


        if(t+1>=start_attack_round and t+1<=end_attack_round):    
            if (i in mal):
                print("I am peer ",i,"I started the attack, at round", t+1)
                #attack
                if attack_type == 'Byzantine':
                    inputs = byzantine_attack_data(inputs)
                elif attack_type == 'poisoning':
                    inputs = poisoning_attack_data(inputs, feature_attacked)
                elif attack_type == 'label_flipping':
                    outputs = label_flipping_attack_data(outputs)

                local_weight_updates, local_bias_updates = get_updates(local_model, 
                                                                       inputs, outputs, 
                                                                       local_batch_size, n_local_rounds)
                if clear_nans:
                    local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)



        else:
            # Benign peer
            # Train local model 
            local_weight_updates, local_bias_updates = get_updates(local_model, 
                                                                   inputs, outputs, 
                                                                   local_batch_size, n_local_rounds)
            if clear_nans:
                local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)

        # Send updates to the server
        network_weight_updates.append(local_weight_updates)
        network_bias_updates.append(local_bias_updates)


    ## END OF CLIENT SIDE ##########################################################

    ######################################
    # SERVER SIDE AGGREGATION MECHANISMS #
    ######################################


        # Aggregate client updates
    aggregated_weights, aggregated_biases = aggregate(n_layers, 
                                                      n_participants, 
                                                      np.mean, 
                                                      network_weight_updates, 
                                                      network_bias_updates)

    if clear_nans:
        aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)

    # Apply updates to global model
    apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)

    # Proceed as in first case
    aggregated_weights, aggregated_biases = aggregate(n_layers, 
                                                      n_participants, 
                                                      np.mean, 
                                                      network_weight_updates, 
                                                      network_bias_updates)
    if clear_nans:
        aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)

    apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)

    ###################
    # COMPUTE METRICS #
    ###################

    # Global model accuracy
    score = global_model.evaluate(X_test, y_test, verbose=0)
    print(f'Global model loss: {score[0]}; global model accuracy: {score[1]}')
    metrics['accuracy'].append(score[1])


    # Accuracy without the target
    score = global_model.evaluate(X_test, y_test, verbose=0)
    metrics['acc_no_target'].append(score[1])


    # Distance of individual updates to the final aggregation
    metrics['update_distances'].append([dist_weights(aggregated_weights, w_i) for w_i in network_weight_updates])
Initializing network.
100%|████████████████████████████████████████████████████████████████████████████| 100/100 [00:00<00:00, 100342.20it/s]
Round 1.
 57%|██████████████████████████████████████████████▏                                  | 57/100 [02:21<01:44,  2.44s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:   12.6s finished
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.4s finished
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [04:35<00:00,  2.75s/it]
Global model loss: 0.4784254086968245; global model accuracy: 0.8537300825119019
Round 2.
 32%|█████████████████████████▉                                                       | 32/100 [01:19<02:44,  2.42s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.5s finished
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [06:19<00:00,  3.79s/it]
Global model loss: 1.913670726969011; global model accuracy: 0.7795841097831726
Round 3.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
I am peer  58 I started the attack, at round 3
  9%|███████▍                                                                          | 9/100 [00:02<00:25,  3.57it/s]
I am peer  51 I started the attack, at round 3
 23%|██████████████████▋                                                              | 23/100 [00:05<00:19,  4.01it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.4s finished
total_Changes in this round:  0.05399442952366329
average changes:  0.058936568090674844
0.0707238817088098
 35%|████████████████████████████▎                                                    | 35/100 [03:17<05:24,  4.99s/it]
I am peer  95 I started the attack, at round 3
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [03:20<00:00,  2.00s/it]
Global model loss: 1.9800631387364644; global model accuracy: 0.6950191855430603
Round 4.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
I am peer  58 I started the attack, at round 4
  8%|██████▌                                                                           | 8/100 [00:02<00:29,  3.17it/s]
I am peer  95 I started the attack, at round 4
 42%|██████████████████████████████████                                               | 42/100 [00:05<00:14,  4.09it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.4s finished
total_Changes in this round:  0.07331074788490739
average changes:  0.06372796135541901
0.07647355362650281
 57%|██████████████████████████████████████████████▏                                  | 57/100 [04:30<03:55,  5.48s/it]
I am peer  51 I started the attack, at round 4
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [04:33<00:00,  2.73s/it]
Global model loss: 0.1299796798054893; global model accuracy: 0.9635814428329468
Round 5.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
I am peer  51 I started the attack, at round 5
 24%|███████████████████▍                                                             | 24/100 [00:02<00:08,  9.26it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.4s finished
total_Changes in this round:  0.07282977245378465
average changes:  0.06600341413001043
0.0792040969560125
 53%|██████████████████████████████████████████▉                                      | 53/100 [00:38<00:20,  2.26it/s]
I am peer  58 I started the attack, at round 5
 79%|███████████████████████████████████████████████████████████████▉                 | 79/100 [00:40<00:07,  2.92it/s]
I am peer  95 I started the attack, at round 5
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [00:43<00:00,  2.30it/s]
Global model loss: 0.04791858909110321; global model accuracy: 0.9851178526878357
Round 6.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
I am peer  51 I started the attack, at round 6
 49%|███████████████████████████████████████▋                                         | 49/100 [00:02<00:02, 18.14it/s]
I am peer  58 I started the attack, at round 6
 74%|███████████████████████████████████████████████████████████▉                     | 74/100 [00:05<00:01, 14.26it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.4s finished
total_Changes in this round:  0.047182858753608475
average changes:  0.06223930305473005
0.07468716366567606
 87%|██████████████████████████████████████████████████████████████████████▍          | 87/100 [00:21<00:05,  2.34it/s]
I am peer  95 I started the attack, at round 6
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [00:24<00:00,  4.09it/s]
Global model loss: 0.12014329470220501; global model accuracy: 0.9583682417869568
Round 7.
  0%|                                                                                          | 0/100 [00:00<?, ?it/s]
I am peer  95 I started the attack, at round 7
  4%|███▎                                                                              | 4/100 [00:02<01:01,  1.56it/s]
I am peer  51 I started the attack, at round 7
  7%|█████▋                                                                            | 7/100 [00:05<01:05,  1.43it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.4s finished
total_Changes in this round:  0.04856604792580881
average changes:  0.059960427199909835
0.0719525126398918
 55%|████████████████████████████████████████████▌                                    | 55/100 [00:44<00:33,  1.36it/s]
I am peer  58 I started the attack, at round 7
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [00:47<00:00,  2.12it/s]
Global model loss: 0.06726779270004804; global model accuracy: 0.9827722907066345
Round 8.
 56%|█████████████████████████████████████████████▎                                   | 56/100 [02:20<01:47,  2.43s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.5s finished
total_Changes in this round:  0.085633505125058
average changes:  0.06362800976064527
0.07635361171277431
100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [04:24<00:00,  2.65s/it]
Global model loss: 6.405008795226287; global model accuracy: 0.5955941677093506
Round 9.
  9%|███████▍                                                                          | 9/100 [00:22<03:47,  2.50s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed:    1.4s finished
total_Changes in this round:  0.10100569015777777
average changes:  0.06830021981028685
0.08196026377234421
 67%|██████████████████████████████████████████████████████▎                          | 67/100 [08:34<01:26,  2.61s/it]
In [ ]:
# sort the feature according to the last epoch and print it with importances

sort_index = np.argsort(FI_dic1[9])
for x in sort_index:
    print(names[x], ', ', FI_dic1[9][x])