{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Using TensorFlow backend.\n" ] } ], "source": [ "#IMPORTS\n", "\n", "import numpy as np\n", "import random\n", "import tensorflow as tf\n", "import tensorflow.keras as kr\n", "import tensorflow.keras.backend as K\n", "from tensorflow.keras.models import Model\n", "from tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense\n", "from tensorflow.keras.datasets import mnist\n", "import os\n", "import csv\n", "\n", "from scipy.spatial.distance import euclidean\n", "from sklearn.metrics import confusion_matrix\n", "\n", "from time import sleep\n", "from tqdm import tqdm\n", "\n", "import copy\n", "import numpy\n", "from sklearn.datasets import make_classification\n", "from sklearn.ensemble import RandomForestClassifier\n", "import pandas as pd\n", "import matplotlib.pyplot as plt\n", "import math\n", "import seaborn as sns\n", "from numpy.random import RandomState\n", "import scipy as scp\n", "from sklearn.model_selection import train_test_split\n", "from sklearn.compose import ColumnTransformer\n", "from sklearn.preprocessing import OneHotEncoder, LabelEncoder\n", "from keras.models import Sequential\n", "from keras.layers import Dense\n", "from keras import optimizers\n", "from keras.callbacks import EarlyStopping,ModelCheckpoint\n", "from keras.utils import to_categorical\n", "from keras import backend as K\n", "from itertools import product\n", "from sklearn.metrics import accuracy_score\n", "from sklearn.metrics import precision_score\n", "from sklearn.metrics import recall_score\n", "from sklearn.metrics import f1_score\n", "from sklearn.metrics import roc_auc_score\n", "from sklearn.metrics import confusion_matrix\n", "\n", "from sklearn import mixture\n", "\n", "from mpl_toolkits.mplot3d import Axes3D\n", "import matplotlib.pyplot as plt\n", "%matplotlib inline" ] }, { "cell_type": "code", 
"execution_count": 2, "metadata": {}, "outputs": [], "source": [ "# Enter here the data set you want to explain (adult, activity, or synthatic)\n", "\n", "data_set = 'activity'\n", "\n", "# Enter here the number of peers you want in the experiments\n", "\n", "n_peers = 100\n", "\n", "# Enter here the type of the attack (Byzantine, poisoning, label_flipping)\n", "attack_type = 'Byzantine'\n", "\n", "# the targeted features in case the attack is poisoning attack \n", "feature_attacked = [3,5,8]\n", "\n", "# Enter here the number of attacker peers you want, keep the number of attackers less than 1/2 of the n_peers\n", "number_attackers = 3\n", "\n", "# Enter here the number of global training epochs, the start and ending epochs of the attacks\n", "n_rounds = 10\n", "start_attack_round = 3\n", "end_attack_round = 7\n", "\n", "# the threshold for attack detection\n", "\n", "alpha = 1.2\n", "\n", "beta = 1/4" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "# the random state we will use in the experiments. 
It can be changed \n", "\n", "rs = RandomState(92)" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "# preprocessing adults data set\n", "\n", "if data_set == 'adult':\n", " #Load dataset into a pandas DataFrame\n", " adult_data = pd.read_csv('adult_data.csv', na_values='?')\n", " # Drop all records with missing values\n", " adult_data.dropna(inplace=True)\n", " adult_data.reset_index(drop=True, inplace=True)\n", "\n", " # Drop fnlwgt, not interesting for ML\n", " adult_data.drop('fnlwgt', axis=1, inplace=True)\n", " adult_data.drop('education', axis=1, inplace=True)\n", "\n", "# merging some similar features.\n", " adult_data['marital-status'].replace('Married-civ-spouse', 'Married', inplace=True)\n", " adult_data['marital-status'].replace('Divorced', 'Unmarried', inplace=True)\n", " adult_data['marital-status'].replace('Never-married', 'Unmarried', inplace=True)\n", " adult_data['marital-status'].replace('Separated', 'Unmarried', inplace=True)\n", " adult_data['marital-status'].replace('Widowed', 'Unmarried', inplace=True)\n", " adult_data['marital-status'].replace('Married-spouse-absent', 'Married', inplace=True)\n", " adult_data['marital-status'].replace('Married-AF-spouse', 'Married', inplace=True)\n", " \n", " adult_data = pd.concat([adult_data,pd.get_dummies(adult_data['income'], prefix='income')],axis=1)\n", " adult_data.drop('income', axis=1, inplace=True)\n", " obj_columns = adult_data.select_dtypes(['object']).columns\n", " adult_data[obj_columns] = adult_data[obj_columns].astype('category')\n", " # Convert numerics to floats and normalize\n", " num_columns = adult_data.select_dtypes(['int64']).columns\n", " adult_data[num_columns] = adult_data[num_columns].astype('float64')\n", " for c in num_columns:\n", " #adult[c] -= adult[c].mean()\n", " #adult[c] /= adult[c].std()\n", " adult_data[c] = (adult_data[c] - adult_data[c].min()) / (adult_data[c].max()-adult_data[c].min())\n", " # 'workclass', 'marital-status', 
'occupation', 'relationship' ,'race', 'gender', 'native-country'\n", " # adult_data['income'] = adult_data['income'].cat.codes\n", " adult_data['marital-status'] = adult_data['marital-status'].cat.codes\n", " adult_data['occupation'] = adult_data['occupation'].cat.codes\n", " adult_data['relationship'] = adult_data['relationship'].cat.codes\n", " adult_data['race'] = adult_data['race'].cat.codes\n", " adult_data['gender'] = adult_data['gender'].cat.codes\n", " adult_data['native-country'] = adult_data['native-country'].cat.codes\n", " adult_data['workclass'] = adult_data['workclass'].cat.codes\n", "\n", " num_columns = adult_data.select_dtypes(['int8']).columns\n", " adult_data[num_columns] = adult_data[num_columns].astype('float64')\n", " for c in num_columns:\n", " #adult[c] -= adult[c].mean()\n", " #adult[c] /= adult[c].std()\n", " adult_data[c] = (adult_data[c] - adult_data[c].min()) / (adult_data[c].max()-adult_data[c].min())\n", " display(adult_data.info())\n", " display(adult_data.head(10))\n", " \n", " adult_data = adult_data.to_numpy()\n", " \n", "# split the data into train and test datasets\n", " X_train, X_test, y_train, y_test = train_test_split(adult_data[:,:-2],adult_data[:,-2:], test_size=0.07, random_state=rs)\n", "# the names of the features\n", " names = ['age','workclass','educational-num','marital-status','occupation',\n", " 'relationship','race','gender','capital-gain','capital-loss','hours-per-week','native-country']\n", " Features_number = len(X_train[0])" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "if data_set == 'synthatic':\n", " #generate the data\n", " X, y = make_classification(n_samples=1000000, n_features=10, n_redundant=3, n_repeated=2, #n_classes=3, \n", " n_informative=5, n_clusters_per_class=4, \n", " random_state=42)\n", " y = pd.DataFrame(data=y, columns=[\"y\"])\n", " y = pd.get_dummies(y['y'], prefix='y')\n", " y = y.to_numpy()\n", " X_train, X_test, y_train, y_test = 
train_test_split(X,y, test_size=0.07, random_state=rs)\n", " # the names of the features\n", " names = ['X(0)','X(1)','X(2)','X(3)','X(4)','X(5)','X(6)','X(7)','X(8)','X(9)']\n", " Features_number = len(X_train[0])" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "['temp_hand',\n", " 'acceleration_16_x_hand',\n", " 'acceleration_16_y_hand',\n", " 'acceleration_16_z_hand',\n", " 'acceleration_6_x_hand',\n", " 'acceleration_6_y_hand',\n", " 'acceleration_6_z_hand',\n", " 'gyroscope_x_hand',\n", " 'gyroscope_y_hand',\n", " 'gyroscope_z_hand',\n", " 'magnetometer_x_hand',\n", " 'magnetometer_y_hand',\n", " 'magnetometer_z_hand',\n", " 'temp_chest',\n", " 'acceleration_16_x_chest',\n", " 'acceleration_16_y_chest',\n", " 'acceleration_16_z_chest',\n", " 'acceleration_6_x_chest',\n", " 'acceleration_6_y_chest',\n", " 'acceleration_6_z_chest',\n", " 'gyroscope_x_chest',\n", " 'gyroscope_y_chest',\n", " 'gyroscope_z_chest',\n", " 'magnetometer_x_chest',\n", " 'magnetometer_y_chest',\n", " 'magnetometer_z_chest',\n", " 'temp_ankle',\n", " 'acceleration_16_x_ankle',\n", " 'acceleration_16_y_ankle',\n", " 'acceleration_16_z_ankle',\n", " 'acceleration_6_x_ankle',\n", " 'acceleration_6_y_ankle',\n", " 'acceleration_6_z_ankle',\n", " 'gyroscope_x_ankle',\n", " 'gyroscope_y_ankle',\n", " 'gyroscope_z_ankle',\n", " 'magnetometer_x_ankle',\n", " 'magnetometer_y_ankle',\n", " 'magnetometer_z_ankle']" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "if data_set == 'activity':\n", " #Load dataset into a pandas DataFrame\n", " activity = pd.read_csv(\"activity_3_original.csv\", sep=',')\n", "# drop some features that have non value in the majority of the samples\n", " to_drop = ['subject', 'timestamp', 'heart_rate','activityID']\n", " activity.drop(axis=1, columns=to_drop, inplace=True)\n", "# prepare the truth\n", " activity = pd.concat([activity,pd.get_dummies(activity['motion'], 
prefix='motion')],axis=1)\n", " activity.drop('motion', axis=1, inplace=True)\n", " class_label = [ 'motion_n', 'motion_y']\n", " predictors = [a for a in activity.columns.values if a not in class_label]\n", "\n", " for p in predictors:\n", " activity[p].fillna(activity[p].mean(), inplace=True)\n", "\n", " display(predictors)\n", " for p in predictors:\n", " activity[p] = (activity[p]-activity[p].min()) / (activity[p].max() - activity[p].min())\n", " activity[p].astype('float32')\n", " activity = activity.to_numpy()\n", " X_train, X_test, y_train, y_test = train_test_split(activity[:,:-2],activity[:,-2:], test_size=0.07, random_state=rs)\n", " # the names of the features\n", " names = ['temp_hand','acceleration_16_x_hand',\n", " 'acceleration_16_y_hand','acceleration_16_z_hand','acceleration_6_x_hand',\n", " 'acceleration_6_y_hand','acceleration_6_z_hand','gyroscope_x_hand','gyroscope_y_hand',\n", " 'gyroscope_z_hand','magnetometer_x_hand','magnetometer_y_hand','magnetometer_z_hand',\n", " 'temp_chest','acceleration_16_x_chest','acceleration_16_y_chest','acceleration_16_z_chest','acceleration_6_x_chest',\n", " 'acceleration_6_y_chest','acceleration_6_z_chest','gyroscope_x_chest','gyroscope_y_chest','gyroscope_z_chest',\n", " 'magnetometer_x_chest','magnetometer_y_chest','magnetometer_z_chest','temp_ankle','acceleration_16_x_ankle',\n", " 'acceleration_16_y_ankle','acceleration_16_z_ankle','acceleration_6_x_ankle','acceleration_6_y_ankle',\n", " 'acceleration_6_z_ankle','gyroscope_x_ankle','gyroscope_y_ankle','gyroscope_z_ankle','magnetometer_x_ankle',\n", " 'magnetometer_y_ankle','magnetometer_z_ankle']\n", " Features_number = len(X_train[0])\n" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Train on 1806870 samples, validate on 136002 samples\n", "Epoch 1/2\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ " 225888/1806870 [==>...........................] 
- ETA: 3:38:09 - loss: 0.6677 - accuracy: 0.718 - ETA: 6:38 - loss: 0.6753 - accuracy: 0.5789 - ETA: 4:21 - loss: 0.6609 - accuracy: 0.58 - ETA: 3:24 - loss: 0.6429 - accuracy: 0.61 - ETA: 2:57 - loss: 0.6137 - accuracy: 0.64 - ETA: 2:38 - loss: 0.5775 - accuracy: 0.68 - ETA: 2:22 - loss: 0.5492 - accuracy: 0.70 - ETA: 2:10 - loss: 0.5185 - accuracy: 0.72 - ETA: 2:04 - loss: 0.4951 - accuracy: 0.74 - ETA: 1:58 - loss: 0.4725 - accuracy: 0.76 - ETA: 1:52 - loss: 0.4519 - accuracy: 0.77 - ETA: 1:48 - loss: 0.4357 - accuracy: 0.78 - ETA: 1:46 - loss: 0.4264 - accuracy: 0.79 - ETA: 1:42 - loss: 0.4195 - accuracy: 0.79 - ETA: 1:42 - loss: 0.4110 - accuracy: 0.80 - ETA: 1:39 - loss: 0.4011 - accuracy: 0.80 - ETA: 1:38 - loss: 0.3931 - accuracy: 0.81 - ETA: 1:36 - loss: 0.3829 - accuracy: 0.82 - ETA: 1:34 - loss: 0.3757 - accuracy: 0.82 - ETA: 1:32 - loss: 0.3680 - accuracy: 0.82 - ETA: 1:31 - loss: 0.3609 - accuracy: 0.83 - ETA: 1:30 - loss: 0.3570 - accuracy: 0.83 - ETA: 1:28 - loss: 0.3509 - accuracy: 0.83 - ETA: 1:27 - loss: 0.3470 - accuracy: 0.84 - ETA: 1:26 - loss: 0.3417 - accuracy: 0.84 - ETA: 1:26 - loss: 0.3383 - accuracy: 0.84 - ETA: 1:25 - loss: 0.3346 - accuracy: 0.84 - ETA: 1:24 - loss: 0.3299 - accuracy: 0.85 - ETA: 1:23 - loss: 0.3249 - accuracy: 0.85 - ETA: 1:22 - loss: 0.3214 - accuracy: 0.85 - ETA: 1:22 - loss: 0.3169 - accuracy: 0.85 - ETA: 1:21 - loss: 0.3134 - accuracy: 0.86 - ETA: 1:20 - loss: 0.3120 - accuracy: 0.86 - ETA: 1:20 - loss: 0.3086 - accuracy: 0.86 - ETA: 1:19 - loss: 0.3042 - accuracy: 0.86 - ETA: 1:20 - loss: 0.3019 - accuracy: 0.86 - ETA: 1:20 - loss: 0.2993 - accuracy: 0.86 - ETA: 1:20 - loss: 0.2957 - accuracy: 0.86 - ETA: 1:20 - loss: 0.2940 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2916 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2897 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2882 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2866 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2841 - accuracy: 0.87 - ETA: 1:19 - loss: 0.2828 - accuracy: 0.87 - ETA: 1:18 - 
loss: 0.2797 - accuracy: 0.87 - ETA: 1:18 - loss: 0.2772 - accuracy: 0.87 - ETA: 1:18 - loss: 0.2758 - accuracy: 0.88 - ETA: 1:18 - loss: 0.2737 - accuracy: 0.88 - ETA: 1:17 - loss: 0.2716 - accuracy: 0.88 - ETA: 1:17 - loss: 0.2696 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2688 - accuracy: 0.88 - ETA: 1:17 - loss: 0.2675 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2657 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2637 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2624 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2611 - accuracy: 0.88 - ETA: 1:16 - loss: 0.2590 - accuracy: 0.88 - ETA: 1:15 - loss: 0.2577 - accuracy: 0.88 - ETA: 1:15 - loss: 0.2557 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2545 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2532 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2513 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2499 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2492 - accuracy: 0.89 - ETA: 1:14 - loss: 0.2482 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2465 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2449 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2439 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2433 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2422 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2412 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2402 - accuracy: 0.89 - ETA: 1:13 - loss: 0.2389 - accuracy: 0.89 - ETA: 1:12 - loss: 0.2375 - accuracy: 0.89 - ETA: 1:12 - loss: 0.2363 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2352 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2346 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2340 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2327 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2323 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2313 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2300 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2290 - accuracy: 0.90 - ETA: 1:12 - loss: 0.2281 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2273 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2265 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2258 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2246 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2236 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2227 - 
accuracy: 0.90 - ETA: 1:11 - loss: 0.2220 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2210 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2203 - accuracy: 0.90 - ETA: 1:11 - loss: 0.2195 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2187 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2181 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2176 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2170 - accuracy: 0.90 - ETA: 1:10 - loss: 0.2167 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2156 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2147 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2141 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2133 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2123 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2111 - accuracy: 0.91 - ETA: 1:09 - loss: 0.2104 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2097 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2093 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2089 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2083 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2078 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2076 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2073 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2068 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2063 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2056 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2050 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2043 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2038 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2030 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2024 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2016 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2012 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2006 - accuracy: 0.91 - ETA: 1:10 - loss: 0.2001 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1995 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1990 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1985 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1980 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1976 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1973 - accuracy: 0.91 - ETA: 1:10 - loss: 0.1968 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1965 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1959 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1954 - accuracy: 0.92 - 
ETA: 1:10 - loss: 0.1954 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1948 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1943 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1939 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1937 - accuracy: 0.92 - ETA: 1:10 - loss: 0.1930 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1924 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1917 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1911 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1906 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1900 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1892 - accuracy: 0.92 - ETA: 1:09 - loss: 0.1889 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1884 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1878 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1873 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1871 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1869 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1866 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1862 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1859 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1856 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1852 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1847 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1844 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1843 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1840 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1834 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1827 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1823 - accuracy: 0.92 - ETA: 1:08 - loss: 0.1819 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1815 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1811 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1807 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1802 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1796 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1791 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1787 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1785 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1781 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1778 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1774 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1768 - accuracy: 0.92 - ETA: 1:07 - loss: 0.1764 - accuracy: 0.92 - ETA: 1:06 - loss: 0.1761 - accuracy: 0.93 - ETA: 1:07 - loss: 
0.1758 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1757 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1752 - accuracy: 0.9305\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b" ] }, { "name": "stdout", "output_type": "stream", "text": [ " 466080/1806870 [======>.......................] - ETA: 1:07 - loss: 0.1747 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1745 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1744 - accuracy: 0.93 - ETA: 1:07 - loss: 0.1740 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1735 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1733 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1731 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1727 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1721 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1717 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1713 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1709 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1707 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1705 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1702 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1699 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1698 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1695 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1694 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1693 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1690 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1687 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1685 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1683 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1682 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1679 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1677 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1674 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1671 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1667 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1666 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1664 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1662 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1660 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1658 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1655 - accuracy: 0.93 - 
ETA: 1:06 - loss: 0.1654 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1653 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1652 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1649 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1648 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1647 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1645 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1641 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1638 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1632 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1628 - accuracy: 0.93 - ETA: 1:06 - loss: 0.1625 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1622 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1619 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1616 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1613 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1611 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1611 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1609 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1606 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1603 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1601 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1599 - accuracy: 0.93 - ETA: 1:05 - loss: 0.1597 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1594 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1591 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1588 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1586 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1584 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1579 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1576 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1572 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1571 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1570 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1567 - accuracy: 0.93 - ETA: 1:04 - loss: 0.1564 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1561 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1558 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1556 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1553 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1551 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1550 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1548 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1546 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1544 - accuracy: 0.93 - ETA: 1:03 - loss: 
0.1542 - accuracy: 0.93 - ETA: 1:03 - loss: 0.1541 - accuracy: 0.94 - ETA: 1:03 - loss: 0.1538 - accuracy: 0.94 - ETA: 1:03 - loss: 0.1536 - accuracy: 0.94 - ETA: 1:03 - loss: 0.1533 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1529 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1527 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1524 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1523 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1521 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1519 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1517 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1513 - accuracy: 0.94 - ETA: 1:02 - loss: 0.1511 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1508 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1507 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1505 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1503 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1501 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1498 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1496 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1495 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1493 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1492 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1490 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1488 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1486 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1484 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1482 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1479 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1478 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1475 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1473 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1472 - accuracy: 0.94 - ETA: 1:01 - loss: 0.1469 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1467 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1466 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1463 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1460 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1457 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1455 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1451 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1449 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1451 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1449 - accuracy: 0.94 - ETA: 1:00 - loss: 0.1447 - accuracy: 
0.94 - ETA: 1:00 - loss: 0.1445 - accuracy: 0.94 - ETA: 59s - loss: 0.1444 - accuracy: 0.9444 - ETA: 59s - loss: 0.1441 - accuracy: 0.944 - ETA: 59s - loss: 0.1440 - accuracy: 0.944 - ETA: 59s - loss: 0.1438 - accuracy: 0.944 - ETA: 59s - loss: 0.1438 - accuracy: 0.944 - ETA: 59s - loss: 0.1436 - accuracy: 0.944 - ETA: 59s - loss: 0.1434 - accuracy: 0.944 - ETA: 59s - loss: 0.1433 - accuracy: 0.944 - ETA: 59s - loss: 0.1431 - accuracy: 0.945 - ETA: 59s - loss: 0.1428 - accuracy: 0.945 - ETA: 59s - loss: 0.1426 - accuracy: 0.945 - ETA: 58s - loss: 0.1423 - accuracy: 0.945 - ETA: 58s - loss: 0.1420 - accuracy: 0.945 - ETA: 58s - loss: 0.1418 - accuracy: 0.945 - ETA: 58s - loss: 0.1415 - accuracy: 0.945 - ETA: 58s - loss: 0.1412 - accuracy: 0.945 - ETA: 58s - loss: 0.1412 - accuracy: 0.945 - ETA: 58s - loss: 0.1410 - accuracy: 0.945 - ETA: 58s - loss: 0.1408 - accuracy: 0.946 - ETA: 58s - loss: 0.1406 - accuracy: 0.946 - ETA: 58s - loss: 0.1404 - accuracy: 0.946 - ETA: 57s - loss: 0.1402 - accuracy: 0.946 - ETA: 57s - loss: 0.1400 - accuracy: 0.946 - ETA: 57s - loss: 0.1399 - accuracy: 0.946 - ETA: 57s - loss: 0.1397 - accuracy: 0.946 - ETA: 57s - loss: 0.1397 - accuracy: 0.946 - ETA: 57s - loss: 0.1396 - accuracy: 0.946 - ETA: 57s - loss: 0.1393 - accuracy: 0.946 - ETA: 57s - loss: 0.1392 - accuracy: 0.946 - ETA: 57s - loss: 0.1390 - accuracy: 0.946 - ETA: 57s - loss: 0.1388 - accuracy: 0.946 - ETA: 57s - loss: 0.1386 - accuracy: 0.946 - ETA: 56s - loss: 0.1385 - accuracy: 0.947 - ETA: 56s - loss: 0.1383 - accuracy: 0.947 - ETA: 56s - loss: 0.1382 - accuracy: 0.947 - ETA: 56s - loss: 0.1381 - accuracy: 0.947 - ETA: 56s - loss: 0.1379 - accuracy: 0.947 - ETA: 56s - loss: 0.1378 - accuracy: 0.947 - ETA: 56s - loss: 0.1376 - accuracy: 0.947 - ETA: 56s - loss: 0.1375 - accuracy: 0.947 - ETA: 56s - loss: 0.1374 - accuracy: 0.947 - ETA: 56s - loss: 0.1373 - accuracy: 0.947 - ETA: 55s - loss: 0.1371 - accuracy: 0.947 - ETA: 55s - loss: 0.1370 - accuracy: 0.947 - ETA: 55s - 
loss: 0.1368 - accuracy: 0.947 - ETA: 55s - loss: 0.1366 - accuracy: 0.947 - ETA: 55s - loss: 0.1364 - accuracy: 0.947 - ETA: 55s - loss: 0.1362 - accuracy: 0.948 - ETA: 55s - loss: 0.1360 - accuracy: 0.948 - ETA: 55s - loss: 0.1357 - accuracy: 0.948 - ETA: 55s - loss: 0.1355 - accuracy: 0.948 - ETA: 55s - loss: 0.1355 - accuracy: 0.948 - ETA: 55s - loss: 0.1353 - accuracy: 0.948 - ETA: 55s - loss: 0.1350 - accuracy: 0.948 - ETA: 55s - loss: 0.1349 - accuracy: 0.948 - ETA: 54s - loss: 0.1349 - accuracy: 0.948 - ETA: 54s - loss: 0.1347 - accuracy: 0.948 - ETA: 54s - loss: 0.1347 - accuracy: 0.9487" ] }, { "name": "stdout", "output_type": "stream", "text": [ " 743456/1806870 [===========>..................] - ETA: 54s - loss: 0.1345 - accuracy: 0.948 - ETA: 54s - loss: 0.1343 - accuracy: 0.948 - ETA: 54s - loss: 0.1342 - accuracy: 0.948 - ETA: 54s - loss: 0.1340 - accuracy: 0.949 - ETA: 54s - loss: 0.1338 - accuracy: 0.949 - ETA: 54s - loss: 0.1337 - accuracy: 0.949 - ETA: 54s - loss: 0.1334 - accuracy: 0.949 - ETA: 54s - loss: 0.1333 - accuracy: 0.949 - ETA: 54s - loss: 0.1331 - accuracy: 0.949 - ETA: 54s - loss: 0.1329 - accuracy: 0.949 - ETA: 54s - loss: 0.1327 - accuracy: 0.949 - ETA: 53s - loss: 0.1324 - accuracy: 0.949 - ETA: 53s - loss: 0.1323 - accuracy: 0.949 - ETA: 53s - loss: 0.1323 - accuracy: 0.949 - ETA: 53s - loss: 0.1321 - accuracy: 0.949 - ETA: 53s - loss: 0.1320 - accuracy: 0.949 - ETA: 53s - loss: 0.1318 - accuracy: 0.949 - ETA: 53s - loss: 0.1317 - accuracy: 0.950 - ETA: 53s - loss: 0.1315 - accuracy: 0.950 - ETA: 53s - loss: 0.1313 - accuracy: 0.950 - ETA: 53s - loss: 0.1311 - accuracy: 0.950 - ETA: 53s - loss: 0.1310 - accuracy: 0.950 - ETA: 52s - loss: 0.1308 - accuracy: 0.950 - ETA: 52s - loss: 0.1307 - accuracy: 0.950 - ETA: 52s - loss: 0.1305 - accuracy: 0.950 - ETA: 52s - loss: 0.1303 - accuracy: 0.950 - ETA: 52s - loss: 0.1301 - accuracy: 0.950 - ETA: 52s - loss: 0.1299 - accuracy: 0.950 - ETA: 52s - loss: 0.1298 - accuracy: 0.950 - ETA: 
52s - loss: 0.1296 - accuracy: 0.950 - ETA: 52s - loss: 0.1295 - accuracy: 0.950 - ETA: 52s - loss: 0.1294 - accuracy: 0.951 - ETA: 52s - loss: 0.1292 - accuracy: 0.951 - ETA: 52s - loss: 0.1290 - accuracy: 0.951 - ETA: 51s - loss: 0.1289 - accuracy: 0.951 - ETA: 51s - loss: 0.1287 - accuracy: 0.951 - ETA: 51s - loss: 0.1286 - accuracy: 0.951 - ETA: 51s - loss: 0.1285 - accuracy: 0.951 - ETA: 51s - loss: 0.1283 - accuracy: 0.951 - ETA: 51s - loss: 0.1282 - accuracy: 0.951 - ETA: 51s - loss: 0.1280 - accuracy: 0.951 - ETA: 51s - loss: 0.1279 - accuracy: 0.951 - ETA: 51s - loss: 0.1278 - accuracy: 0.951 - ETA: 51s - loss: 0.1276 - accuracy: 0.951 - ETA: 51s - loss: 0.1275 - accuracy: 0.951 - ETA: 50s - loss: 0.1274 - accuracy: 0.951 - ETA: 50s - loss: 0.1272 - accuracy: 0.951 - ETA: 50s - loss: 0.1270 - accuracy: 0.952 - ETA: 50s - loss: 0.1268 - accuracy: 0.952 - ETA: 50s - loss: 0.1267 - accuracy: 0.952 - ETA: 50s - loss: 0.1266 - accuracy: 0.952 - ETA: 50s - loss: 0.1264 - accuracy: 0.952 - ETA: 50s - loss: 0.1262 - accuracy: 0.952 - ETA: 50s - loss: 0.1261 - accuracy: 0.952 - ETA: 50s - loss: 0.1259 - accuracy: 0.952 - ETA: 50s - loss: 0.1257 - accuracy: 0.952 - ETA: 50s - loss: 0.1255 - accuracy: 0.952 - ETA: 50s - loss: 0.1254 - accuracy: 0.952 - ETA: 50s - loss: 0.1253 - accuracy: 0.952 - ETA: 49s - loss: 0.1251 - accuracy: 0.952 - ETA: 49s - loss: 0.1249 - accuracy: 0.952 - ETA: 49s - loss: 0.1247 - accuracy: 0.953 - ETA: 49s - loss: 0.1246 - accuracy: 0.953 - ETA: 49s - loss: 0.1243 - accuracy: 0.953 - ETA: 49s - loss: 0.1243 - accuracy: 0.953 - ETA: 49s - loss: 0.1241 - accuracy: 0.953 - ETA: 49s - loss: 0.1239 - accuracy: 0.953 - ETA: 49s - loss: 0.1238 - accuracy: 0.953 - ETA: 49s - loss: 0.1237 - accuracy: 0.953 - ETA: 49s - loss: 0.1235 - accuracy: 0.953 - ETA: 49s - loss: 0.1234 - accuracy: 0.953 - ETA: 49s - loss: 0.1233 - accuracy: 0.953 - ETA: 49s - loss: 0.1232 - accuracy: 0.953 - ETA: 49s - loss: 0.1230 - accuracy: 0.953 - ETA: 48s - loss: 0.1229 
- accuracy: 0.953 - ETA: 48s - loss: 0.1228 - accuracy: 0.953 - ETA: 48s - loss: 0.1227 - accuracy: 0.953 - ETA: 48s - loss: 0.1226 - accuracy: 0.953 - ETA: 48s - loss: 0.1225 - accuracy: 0.953 - ETA: 48s - loss: 0.1224 - accuracy: 0.953 - ETA: 48s - loss: 0.1223 - accuracy: 0.953 - ETA: 48s - loss: 0.1221 - accuracy: 0.954 - ETA: 48s - loss: 0.1220 - accuracy: 0.954 - ETA: 48s - loss: 0.1218 - accuracy: 0.954 - ETA: 48s - loss: 0.1217 - accuracy: 0.954 - ETA: 48s - loss: 0.1216 - accuracy: 0.954 - ETA: 48s - loss: 0.1215 - accuracy: 0.954 - ETA: 48s - loss: 0.1213 - accuracy: 0.954 - ETA: 48s - loss: 0.1211 - accuracy: 0.954 - ETA: 47s - loss: 0.1209 - accuracy: 0.954 - ETA: 47s - loss: 0.1208 - accuracy: 0.954 - ETA: 47s - loss: 0.1206 - accuracy: 0.954 - ETA: 47s - loss: 0.1205 - accuracy: 0.954 - ETA: 47s - loss: 0.1203 - accuracy: 0.954 - ETA: 47s - loss: 0.1202 - accuracy: 0.954 - ETA: 47s - loss: 0.1201 - accuracy: 0.954 - ETA: 47s - loss: 0.1199 - accuracy: 0.954 - ETA: 47s - loss: 0.1198 - accuracy: 0.954 - ETA: 47s - loss: 0.1196 - accuracy: 0.955 - ETA: 47s - loss: 0.1195 - accuracy: 0.955 - ETA: 47s - loss: 0.1193 - accuracy: 0.955 - ETA: 47s - loss: 0.1192 - accuracy: 0.955 - ETA: 47s - loss: 0.1191 - accuracy: 0.955 - ETA: 47s - loss: 0.1189 - accuracy: 0.955 - ETA: 47s - loss: 0.1188 - accuracy: 0.955 - ETA: 46s - loss: 0.1187 - accuracy: 0.955 - ETA: 46s - loss: 0.1185 - accuracy: 0.955 - ETA: 46s - loss: 0.1184 - accuracy: 0.955 - ETA: 46s - loss: 0.1182 - accuracy: 0.955 - ETA: 46s - loss: 0.1181 - accuracy: 0.955 - ETA: 46s - loss: 0.1180 - accuracy: 0.955 - ETA: 46s - loss: 0.1179 - accuracy: 0.955 - ETA: 46s - loss: 0.1178 - accuracy: 0.955 - ETA: 46s - loss: 0.1177 - accuracy: 0.955 - ETA: 46s - loss: 0.1176 - accuracy: 0.955 - ETA: 46s - loss: 0.1175 - accuracy: 0.955 - ETA: 46s - loss: 0.1174 - accuracy: 0.955 - ETA: 46s - loss: 0.1172 - accuracy: 0.956 - ETA: 45s - loss: 0.1171 - accuracy: 0.956 - ETA: 45s - loss: 0.1170 - accuracy: 0.956 - 
ETA: 45s - loss: 0.1168 - accuracy: 0.956 - ETA: 45s - loss: 0.1167 - accuracy: 0.956 - ETA: 45s - loss: 0.1165 - accuracy: 0.956 - ETA: 45s - loss: 0.1163 - accuracy: 0.956 - ETA: 45s - loss: 0.1162 - accuracy: 0.956 - ETA: 45s - loss: 0.1160 - accuracy: 0.956 - ETA: 45s - loss: 0.1159 - accuracy: 0.956 - ETA: 45s - loss: 0.1158 - accuracy: 0.956 - ETA: 45s - loss: 0.1156 - accuracy: 0.956 - ETA: 45s - loss: 0.1155 - accuracy: 0.956 - ETA: 44s - loss: 0.1153 - accuracy: 0.956 - ETA: 44s - loss: 0.1152 - accuracy: 0.956 - ETA: 44s - loss: 0.1150 - accuracy: 0.956 - ETA: 44s - loss: 0.1149 - accuracy: 0.956 - ETA: 44s - loss: 0.1147 - accuracy: 0.957 - ETA: 44s - loss: 0.1147 - accuracy: 0.957 - ETA: 44s - loss: 0.1146 - accuracy: 0.957 - ETA: 44s - loss: 0.1144 - accuracy: 0.957 - ETA: 44s - loss: 0.1143 - accuracy: 0.957 - ETA: 44s - loss: 0.1141 - accuracy: 0.957 - ETA: 44s - loss: 0.1140 - accuracy: 0.957 - ETA: 44s - loss: 0.1139 - accuracy: 0.957 - ETA: 44s - loss: 0.1138 - accuracy: 0.957 - ETA: 43s - loss: 0.1136 - accuracy: 0.957 - ETA: 43s - loss: 0.1135 - accuracy: 0.957 - ETA: 43s - loss: 0.1134 - accuracy: 0.957 - ETA: 43s - loss: 0.1132 - accuracy: 0.957 - ETA: 43s - loss: 0.1131 - accuracy: 0.957 - ETA: 43s - loss: 0.1130 - accuracy: 0.957 - ETA: 43s - loss: 0.1128 - accuracy: 0.957 - ETA: 43s - loss: 0.1127 - accuracy: 0.957 - ETA: 43s - loss: 0.1126 - accuracy: 0.957 - ETA: 43s - loss: 0.1125 - accuracy: 0.957 - ETA: 43s - loss: 0.1123 - accuracy: 0.958 - ETA: 43s - loss: 0.1121 - accuracy: 0.958 - ETA: 42s - loss: 0.1120 - accuracy: 0.958 - ETA: 42s - loss: 0.1119 - accuracy: 0.958 - ETA: 42s - loss: 0.1118 - accuracy: 0.958 - ETA: 42s - loss: 0.1117 - accuracy: 0.958 - ETA: 42s - loss: 0.1115 - accuracy: 0.958 - ETA: 42s - loss: 0.1114 - accuracy: 0.958 - ETA: 42s - loss: 0.1113 - accuracy: 0.958 - ETA: 42s - loss: 0.1112 - accuracy: 0.958 - ETA: 42s - loss: 0.1110 - accuracy: 0.958 - ETA: 42s - loss: 0.1109 - accuracy: 0.958 - ETA: 42s - loss: 
0.1108 - accuracy: 0.958 - ETA: 42s - loss: 0.1107 - accuracy: 0.958 - ETA: 42s - loss: 0.1106 - accuracy: 0.958 - ETA: 42s - loss: 0.1105 - accuracy: 0.958 - ETA: 42s - loss: 0.1104 - accuracy: 0.958 - ETA: 41s - loss: 0.1102 - accuracy: 0.958 - ETA: 41s - loss: 0.1101 - accuracy: 0.958 - ETA: 41s - loss: 0.1100 - accuracy: 0.958 - ETA: 41s - loss: 0.1098 - accuracy: 0.959 - ETA: 41s - loss: 0.1097 - accuracy: 0.959 - ETA: 41s - loss: 0.1097 - accuracy: 0.959 - ETA: 41s - loss: 0.1095 - accuracy: 0.959 - ETA: 41s - loss: 0.1094 - accuracy: 0.959 - ETA: 41s - loss: 0.1093 - accuracy: 0.959 - ETA: 41s - loss: 0.1092 - accuracy: 0.959 - ETA: 41s - loss: 0.1091 - accuracy: 0.959 - ETA: 41s - loss: 0.1090 - accuracy: 0.959 - ETA: 41s - loss: 0.1089 - accuracy: 0.959 - ETA: 40s - loss: 0.1088 - accuracy: 0.959 - ETA: 40s - loss: 0.1086 - accuracy: 0.959 - ETA: 40s - loss: 0.1084 - accuracy: 0.9595" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1020928/1806870 [===============>..............] 
- ETA: 40s - loss: 0.1083 - accuracy: 0.959 - ETA: 40s - loss: 0.1082 - accuracy: 0.959 - ETA: 40s - loss: 0.1080 - accuracy: 0.959 - ETA: 40s - loss: 0.1079 - accuracy: 0.959 - ETA: 40s - loss: 0.1077 - accuracy: 0.959 - ETA: 40s - loss: 0.1076 - accuracy: 0.959 - ETA: 40s - loss: 0.1075 - accuracy: 0.959 - ETA: 40s - loss: 0.1074 - accuracy: 0.960 - ETA: 40s - loss: 0.1073 - accuracy: 0.960 - ETA: 40s - loss: 0.1072 - accuracy: 0.960 - ETA: 39s - loss: 0.1071 - accuracy: 0.960 - ETA: 39s - loss: 0.1070 - accuracy: 0.960 - ETA: 39s - loss: 0.1069 - accuracy: 0.960 - ETA: 39s - loss: 0.1068 - accuracy: 0.960 - ETA: 39s - loss: 0.1067 - accuracy: 0.960 - ETA: 39s - loss: 0.1068 - accuracy: 0.960 - ETA: 39s - loss: 0.1067 - accuracy: 0.960 - ETA: 39s - loss: 0.1066 - accuracy: 0.960 - ETA: 39s - loss: 0.1066 - accuracy: 0.960 - ETA: 39s - loss: 0.1065 - accuracy: 0.960 - ETA: 39s - loss: 0.1064 - accuracy: 0.960 - ETA: 39s - loss: 0.1063 - accuracy: 0.960 - ETA: 39s - loss: 0.1062 - accuracy: 0.960 - ETA: 39s - loss: 0.1061 - accuracy: 0.960 - ETA: 38s - loss: 0.1059 - accuracy: 0.960 - ETA: 38s - loss: 0.1058 - accuracy: 0.960 - ETA: 38s - loss: 0.1057 - accuracy: 0.960 - ETA: 38s - loss: 0.1056 - accuracy: 0.960 - ETA: 38s - loss: 0.1055 - accuracy: 0.960 - ETA: 38s - loss: 0.1054 - accuracy: 0.960 - ETA: 38s - loss: 0.1053 - accuracy: 0.960 - ETA: 38s - loss: 0.1052 - accuracy: 0.960 - ETA: 38s - loss: 0.1050 - accuracy: 0.960 - ETA: 38s - loss: 0.1049 - accuracy: 0.961 - ETA: 38s - loss: 0.1048 - accuracy: 0.961 - ETA: 38s - loss: 0.1047 - accuracy: 0.961 - ETA: 38s - loss: 0.1046 - accuracy: 0.961 - ETA: 38s - loss: 0.1046 - accuracy: 0.961 - ETA: 38s - loss: 0.1046 - accuracy: 0.961 - ETA: 38s - loss: 0.1045 - accuracy: 0.961 - ETA: 37s - loss: 0.1044 - accuracy: 0.961 - ETA: 37s - loss: 0.1043 - accuracy: 0.961 - ETA: 37s - loss: 0.1042 - accuracy: 0.961 - ETA: 37s - loss: 0.1041 - accuracy: 0.961 - ETA: 37s - loss: 0.1039 - accuracy: 0.961 - ETA: 37s - loss: 
0.1038 - accuracy: 0.961 - ETA: 37s - loss: 0.1037 - accuracy: 0.961 - ETA: 37s - loss: 0.1036 - accuracy: 0.961 - ETA: 37s - loss: 0.1035 - accuracy: 0.961 - ETA: 37s - loss: 0.1034 - accuracy: 0.961 - ETA: 37s - loss: 0.1033 - accuracy: 0.961 - ETA: 37s - loss: 0.1032 - accuracy: 0.961 - ETA: 37s - loss: 0.1031 - accuracy: 0.961 - ETA: 37s - loss: 0.1030 - accuracy: 0.961 - ETA: 36s - loss: 0.1028 - accuracy: 0.961 - ETA: 36s - loss: 0.1027 - accuracy: 0.961 - ETA: 36s - loss: 0.1026 - accuracy: 0.961 - ETA: 36s - loss: 0.1025 - accuracy: 0.961 - ETA: 36s - loss: 0.1024 - accuracy: 0.962 - ETA: 36s - loss: 0.1023 - accuracy: 0.962 - ETA: 36s - loss: 0.1022 - accuracy: 0.962 - ETA: 36s - loss: 0.1021 - accuracy: 0.962 - ETA: 36s - loss: 0.1020 - accuracy: 0.962 - ETA: 36s - loss: 0.1019 - accuracy: 0.962 - ETA: 36s - loss: 0.1019 - accuracy: 0.962 - ETA: 36s - loss: 0.1018 - accuracy: 0.962 - ETA: 36s - loss: 0.1017 - accuracy: 0.962 - ETA: 36s - loss: 0.1016 - accuracy: 0.962 - ETA: 36s - loss: 0.1015 - accuracy: 0.962 - ETA: 36s - loss: 0.1014 - accuracy: 0.962 - ETA: 36s - loss: 0.1014 - accuracy: 0.962 - ETA: 35s - loss: 0.1013 - accuracy: 0.962 - ETA: 35s - loss: 0.1012 - accuracy: 0.962 - ETA: 35s - loss: 0.1011 - accuracy: 0.962 - ETA: 35s - loss: 0.1010 - accuracy: 0.962 - ETA: 35s - loss: 0.1009 - accuracy: 0.962 - ETA: 35s - loss: 0.1008 - accuracy: 0.962 - ETA: 35s - loss: 0.1007 - accuracy: 0.962 - ETA: 35s - loss: 0.1006 - accuracy: 0.962 - ETA: 35s - loss: 0.1005 - accuracy: 0.962 - ETA: 35s - loss: 0.1004 - accuracy: 0.962 - ETA: 35s - loss: 0.1003 - accuracy: 0.962 - ETA: 35s - loss: 0.1003 - accuracy: 0.962 - ETA: 35s - loss: 0.1002 - accuracy: 0.962 - ETA: 35s - loss: 0.1001 - accuracy: 0.962 - ETA: 35s - loss: 0.1001 - accuracy: 0.962 - ETA: 35s - loss: 0.1000 - accuracy: 0.962 - ETA: 34s - loss: 0.0999 - accuracy: 0.963 - ETA: 34s - loss: 0.0998 - accuracy: 0.963 - ETA: 34s - loss: 0.0997 - accuracy: 0.963 - ETA: 34s - loss: 0.0996 - accuracy: 
0.963 - ETA: 34s - loss: 0.0995 - accuracy: 0.963 - ETA: 34s - loss: 0.0994 - accuracy: 0.963 - ETA: 34s - loss: 0.0993 - accuracy: 0.963 - ETA: 34s - loss: 0.0992 - accuracy: 0.963 - ETA: 34s - loss: 0.0991 - accuracy: 0.963 - ETA: 34s - loss: 0.0990 - accuracy: 0.963 - ETA: 34s - loss: 0.0988 - accuracy: 0.963 - ETA: 34s - loss: 0.0987 - accuracy: 0.963 - ETA: 34s - loss: 0.0987 - accuracy: 0.963 - ETA: 33s - loss: 0.0986 - accuracy: 0.963 - ETA: 33s - loss: 0.0985 - accuracy: 0.963 - ETA: 33s - loss: 0.0984 - accuracy: 0.963 - ETA: 33s - loss: 0.0983 - accuracy: 0.963 - ETA: 33s - loss: 0.0982 - accuracy: 0.963 - ETA: 33s - loss: 0.0981 - accuracy: 0.963 - ETA: 33s - loss: 0.0980 - accuracy: 0.963 - ETA: 33s - loss: 0.0980 - accuracy: 0.963 - ETA: 33s - loss: 0.0979 - accuracy: 0.963 - ETA: 33s - loss: 0.0978 - accuracy: 0.963 - ETA: 33s - loss: 0.0977 - accuracy: 0.963 - ETA: 33s - loss: 0.0976 - accuracy: 0.963 - ETA: 33s - loss: 0.0975 - accuracy: 0.963 - ETA: 33s - loss: 0.0974 - accuracy: 0.964 - ETA: 33s - loss: 0.0973 - accuracy: 0.964 - ETA: 32s - loss: 0.0972 - accuracy: 0.964 - ETA: 32s - loss: 0.0971 - accuracy: 0.964 - ETA: 32s - loss: 0.0971 - accuracy: 0.964 - ETA: 32s - loss: 0.0970 - accuracy: 0.964 - ETA: 32s - loss: 0.0969 - accuracy: 0.964 - ETA: 32s - loss: 0.0968 - accuracy: 0.964 - ETA: 32s - loss: 0.0967 - accuracy: 0.964 - ETA: 32s - loss: 0.0967 - accuracy: 0.964 - ETA: 32s - loss: 0.0966 - accuracy: 0.964 - ETA: 32s - loss: 0.0965 - accuracy: 0.964 - ETA: 32s - loss: 0.0964 - accuracy: 0.964 - ETA: 32s - loss: 0.0964 - accuracy: 0.964 - ETA: 32s - loss: 0.0963 - accuracy: 0.964 - ETA: 32s - loss: 0.0963 - accuracy: 0.964 - ETA: 32s - loss: 0.0962 - accuracy: 0.964 - ETA: 32s - loss: 0.0961 - accuracy: 0.964 - ETA: 32s - loss: 0.0961 - accuracy: 0.964 - ETA: 32s - loss: 0.0961 - accuracy: 0.964 - ETA: 32s - loss: 0.0960 - accuracy: 0.964 - ETA: 31s - loss: 0.0959 - accuracy: 0.964 - ETA: 31s - loss: 0.0959 - accuracy: 0.964 - ETA: 31s - 
loss: 0.0958 - accuracy: 0.964 - ETA: 31s - loss: 0.0957 - accuracy: 0.964 - ETA: 31s - loss: 0.0957 - accuracy: 0.964 - ETA: 31s - loss: 0.0956 - accuracy: 0.964 - ETA: 31s - loss: 0.0955 - accuracy: 0.964 - ETA: 31s - loss: 0.0954 - accuracy: 0.964 - ETA: 31s - loss: 0.0953 - accuracy: 0.964 - ETA: 31s - loss: 0.0952 - accuracy: 0.964 - ETA: 31s - loss: 0.0951 - accuracy: 0.964 - ETA: 31s - loss: 0.0951 - accuracy: 0.964 - ETA: 31s - loss: 0.0950 - accuracy: 0.964 - ETA: 31s - loss: 0.0950 - accuracy: 0.964 - ETA: 31s - loss: 0.0949 - accuracy: 0.965 - ETA: 31s - loss: 0.0948 - accuracy: 0.965 - ETA: 31s - loss: 0.0947 - accuracy: 0.965 - ETA: 31s - loss: 0.0946 - accuracy: 0.965 - ETA: 31s - loss: 0.0946 - accuracy: 0.965 - ETA: 31s - loss: 0.0945 - accuracy: 0.965 - ETA: 31s - loss: 0.0944 - accuracy: 0.965 - ETA: 30s - loss: 0.0943 - accuracy: 0.965 - ETA: 30s - loss: 0.0942 - accuracy: 0.965 - ETA: 30s - loss: 0.0942 - accuracy: 0.965 - ETA: 30s - loss: 0.0941 - accuracy: 0.965 - ETA: 30s - loss: 0.0940 - accuracy: 0.965 - ETA: 30s - loss: 0.0940 - accuracy: 0.965 - ETA: 30s - loss: 0.0939 - accuracy: 0.965 - ETA: 30s - loss: 0.0938 - accuracy: 0.965 - ETA: 30s - loss: 0.0938 - accuracy: 0.965 - ETA: 30s - loss: 0.0937 - accuracy: 0.965 - ETA: 30s - loss: 0.0936 - accuracy: 0.965 - ETA: 30s - loss: 0.0935 - accuracy: 0.965 - ETA: 30s - loss: 0.0935 - accuracy: 0.965 - ETA: 30s - loss: 0.0934 - accuracy: 0.965 - ETA: 30s - loss: 0.0934 - accuracy: 0.965 - ETA: 30s - loss: 0.0933 - accuracy: 0.965 - ETA: 30s - loss: 0.0932 - accuracy: 0.965 - ETA: 30s - loss: 0.0931 - accuracy: 0.965 - ETA: 30s - loss: 0.0930 - accuracy: 0.965 - ETA: 29s - loss: 0.0930 - accuracy: 0.965 - ETA: 29s - loss: 0.0930 - accuracy: 0.965 - ETA: 29s - loss: 0.0929 - accuracy: 0.965 - ETA: 29s - loss: 0.0928 - accuracy: 0.965 - ETA: 29s - loss: 0.0927 - accuracy: 0.965 - ETA: 29s - loss: 0.0926 - accuracy: 0.965 - ETA: 29s - loss: 0.0926 - accuracy: 0.965 - ETA: 29s - loss: 0.0925 - 
accuracy: 0.965 - ETA: 29s - loss: 0.0924 - accuracy: 0.965 - ETA: 29s - loss: 0.0924 - accuracy: 0.966 - ETA: 29s - loss: 0.0923 - accuracy: 0.966 - ETA: 29s - loss: 0.0922 - accuracy: 0.9660" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1303488/1806870 [====================>.........] - ETA: 29s - loss: 0.0921 - accuracy: 0.966 - ETA: 29s - loss: 0.0921 - accuracy: 0.966 - ETA: 29s - loss: 0.0920 - accuracy: 0.966 - ETA: 29s - loss: 0.0919 - accuracy: 0.966 - ETA: 29s - loss: 0.0919 - accuracy: 0.966 - ETA: 29s - loss: 0.0918 - accuracy: 0.966 - ETA: 28s - loss: 0.0918 - accuracy: 0.966 - ETA: 28s - loss: 0.0917 - accuracy: 0.966 - ETA: 28s - loss: 0.0917 - accuracy: 0.966 - ETA: 28s - loss: 0.0916 - accuracy: 0.966 - ETA: 28s - loss: 0.0915 - accuracy: 0.966 - ETA: 28s - loss: 0.0915 - accuracy: 0.966 - ETA: 28s - loss: 0.0914 - accuracy: 0.966 - ETA: 28s - loss: 0.0913 - accuracy: 0.966 - ETA: 28s - loss: 0.0912 - accuracy: 0.966 - ETA: 28s - loss: 0.0912 - accuracy: 0.966 - ETA: 28s - loss: 0.0911 - accuracy: 0.966 - ETA: 28s - loss: 0.0910 - accuracy: 0.966 - ETA: 28s - loss: 0.0909 - accuracy: 0.966 - ETA: 28s - loss: 0.0909 - accuracy: 0.966 - ETA: 28s - loss: 0.0908 - accuracy: 0.966 - ETA: 28s - loss: 0.0908 - accuracy: 0.966 - ETA: 28s - loss: 0.0907 - accuracy: 0.966 - ETA: 27s - loss: 0.0906 - accuracy: 0.966 - ETA: 27s - loss: 0.0906 - accuracy: 0.966 - ETA: 27s - loss: 0.0905 - accuracy: 0.966 - ETA: 27s - loss: 0.0904 - accuracy: 0.966 - ETA: 27s - loss: 0.0904 - accuracy: 0.966 - ETA: 27s - loss: 0.0903 - accuracy: 0.966 - ETA: 27s - loss: 0.0903 - accuracy: 0.966 - ETA: 27s - loss: 0.0902 - accuracy: 0.966 - ETA: 27s - loss: 0.0901 - accuracy: 0.966 - ETA: 27s - loss: 0.0900 - accuracy: 0.966 - ETA: 27s - loss: 0.0899 - accuracy: 0.966 - ETA: 27s - loss: 0.0899 - accuracy: 0.966 - ETA: 27s - loss: 0.0898 - accuracy: 0.967 - ETA: 27s - loss: 0.0897 - accuracy: 0.967 - ETA: 27s - loss: 0.0897 - accuracy: 0.967 - ETA: 27s - loss: 
0.0897 - accuracy: 0.967 - ETA: 27s - loss: 0.0896 - accuracy: 0.967 - ETA: 27s - loss: 0.0895 - accuracy: 0.967 - ETA: 27s - loss: 0.0895 - accuracy: 0.967 - ETA: 27s - loss: 0.0894 - accuracy: 0.967 - ETA: 26s - loss: 0.0894 - accuracy: 0.967 - ETA: 26s - loss: 0.0894 - accuracy: 0.967 - ETA: 26s - loss: 0.0894 - accuracy: 0.967 - ETA: 26s - loss: 0.0893 - accuracy: 0.967 - ETA: 26s - loss: 0.0892 - accuracy: 0.967 - ETA: 26s - loss: 0.0892 - accuracy: 0.967 - ETA: 26s - loss: 0.0891 - accuracy: 0.967 - ETA: 26s - loss: 0.0890 - accuracy: 0.967 - ETA: 26s - loss: 0.0890 - accuracy: 0.967 - ETA: 26s - loss: 0.0889 - accuracy: 0.967 - ETA: 26s - loss: 0.0888 - accuracy: 0.967 - ETA: 26s - loss: 0.0888 - accuracy: 0.967 - ETA: 26s - loss: 0.0887 - accuracy: 0.967 - ETA: 26s - loss: 0.0886 - accuracy: 0.967 - ETA: 26s - loss: 0.0885 - accuracy: 0.967 - ETA: 26s - loss: 0.0884 - accuracy: 0.967 - ETA: 25s - loss: 0.0883 - accuracy: 0.967 - ETA: 25s - loss: 0.0882 - accuracy: 0.967 - ETA: 25s - loss: 0.0882 - accuracy: 0.967 - ETA: 25s - loss: 0.0881 - accuracy: 0.967 - ETA: 25s - loss: 0.0880 - accuracy: 0.967 - ETA: 25s - loss: 0.0879 - accuracy: 0.967 - ETA: 25s - loss: 0.0879 - accuracy: 0.967 - ETA: 25s - loss: 0.0878 - accuracy: 0.967 - ETA: 25s - loss: 0.0878 - accuracy: 0.967 - ETA: 25s - loss: 0.0877 - accuracy: 0.967 - ETA: 25s - loss: 0.0877 - accuracy: 0.967 - ETA: 25s - loss: 0.0876 - accuracy: 0.967 - ETA: 25s - loss: 0.0876 - accuracy: 0.967 - ETA: 25s - loss: 0.0875 - accuracy: 0.967 - ETA: 25s - loss: 0.0874 - accuracy: 0.967 - ETA: 25s - loss: 0.0874 - accuracy: 0.967 - ETA: 25s - loss: 0.0873 - accuracy: 0.968 - ETA: 24s - loss: 0.0872 - accuracy: 0.968 - ETA: 24s - loss: 0.0871 - accuracy: 0.968 - ETA: 24s - loss: 0.0871 - accuracy: 0.968 - ETA: 24s - loss: 0.0870 - accuracy: 0.968 - ETA: 24s - loss: 0.0870 - accuracy: 0.968 - ETA: 24s - loss: 0.0869 - accuracy: 0.968 - ETA: 24s - loss: 0.0869 - accuracy: 0.968 - ETA: 24s - loss: 0.0868 - accuracy: 
0.968 - ETA: 24s - loss: 0.0867 - accuracy: 0.968 - ETA: 24s - loss: 0.0866 - accuracy: 0.968 - ETA: 24s - loss: 0.0865 - accuracy: 0.968 - ETA: 24s - loss: 0.0865 - accuracy: 0.968 - ETA: 24s - loss: 0.0864 - accuracy: 0.968 - ETA: 24s - loss: 0.0864 - accuracy: 0.968 - ETA: 24s - loss: 0.0863 - accuracy: 0.968 - ETA: 24s - loss: 0.0862 - accuracy: 0.968 - ETA: 23s - loss: 0.0862 - accuracy: 0.968 - ETA: 23s - loss: 0.0861 - accuracy: 0.968 - ETA: 23s - loss: 0.0860 - accuracy: 0.968 - ETA: 23s - loss: 0.0859 - accuracy: 0.968 - ETA: 23s - loss: 0.0858 - accuracy: 0.968 - ETA: 23s - loss: 0.0858 - accuracy: 0.968 - ETA: 23s - loss: 0.0857 - accuracy: 0.968 - ETA: 23s - loss: 0.0857 - accuracy: 0.968 - ETA: 23s - loss: 0.0857 - accuracy: 0.968 - ETA: 23s - loss: 0.0856 - accuracy: 0.968 - ETA: 23s - loss: 0.0855 - accuracy: 0.968 - ETA: 23s - loss: 0.0855 - accuracy: 0.968 - ETA: 23s - loss: 0.0855 - accuracy: 0.968 - ETA: 23s - loss: 0.0854 - accuracy: 0.968 - ETA: 23s - loss: 0.0853 - accuracy: 0.968 - ETA: 23s - loss: 0.0853 - accuracy: 0.968 - ETA: 22s - loss: 0.0852 - accuracy: 0.968 - ETA: 22s - loss: 0.0852 - accuracy: 0.968 - ETA: 22s - loss: 0.0851 - accuracy: 0.968 - ETA: 22s - loss: 0.0850 - accuracy: 0.968 - ETA: 22s - loss: 0.0850 - accuracy: 0.968 - ETA: 22s - loss: 0.0849 - accuracy: 0.968 - ETA: 22s - loss: 0.0848 - accuracy: 0.968 - ETA: 22s - loss: 0.0848 - accuracy: 0.968 - ETA: 22s - loss: 0.0848 - accuracy: 0.969 - ETA: 22s - loss: 0.0847 - accuracy: 0.969 - ETA: 22s - loss: 0.0846 - accuracy: 0.969 - ETA: 22s - loss: 0.0845 - accuracy: 0.969 - ETA: 22s - loss: 0.0845 - accuracy: 0.969 - ETA: 22s - loss: 0.0844 - accuracy: 0.969 - ETA: 22s - loss: 0.0844 - accuracy: 0.969 - ETA: 22s - loss: 0.0843 - accuracy: 0.969 - ETA: 22s - loss: 0.0843 - accuracy: 0.969 - ETA: 21s - loss: 0.0842 - accuracy: 0.969 - ETA: 21s - loss: 0.0842 - accuracy: 0.969 - ETA: 21s - loss: 0.0841 - accuracy: 0.969 - ETA: 21s - loss: 0.0841 - accuracy: 0.969 - ETA: 21s - 
loss: 0.0840 - accuracy: 0.969 - ETA: 21s - loss: 0.0839 - accuracy: 0.969 - ETA: 21s - loss: 0.0839 - accuracy: 0.969 - ETA: 21s - loss: 0.0838 - accuracy: 0.969 - ETA: 21s - loss: 0.0838 - accuracy: 0.969 - ETA: 21s - loss: 0.0837 - accuracy: 0.969 - ETA: 21s - loss: 0.0837 - accuracy: 0.969 - ETA: 21s - loss: 0.0836 - accuracy: 0.969 - ETA: 21s - loss: 0.0835 - accuracy: 0.969 - ETA: 21s - loss: 0.0834 - accuracy: 0.969 - ETA: 21s - loss: 0.0834 - accuracy: 0.969 - ETA: 21s - loss: 0.0833 - accuracy: 0.969 - ETA: 21s - loss: 0.0832 - accuracy: 0.969 - ETA: 20s - loss: 0.0832 - accuracy: 0.969 - ETA: 20s - loss: 0.0831 - accuracy: 0.969 - ETA: 20s - loss: 0.0831 - accuracy: 0.969 - ETA: 20s - loss: 0.0831 - accuracy: 0.969 - ETA: 20s - loss: 0.0830 - accuracy: 0.969 - ETA: 20s - loss: 0.0829 - accuracy: 0.969 - ETA: 20s - loss: 0.0829 - accuracy: 0.969 - ETA: 20s - loss: 0.0828 - accuracy: 0.969 - ETA: 20s - loss: 0.0827 - accuracy: 0.969 - ETA: 20s - loss: 0.0827 - accuracy: 0.969 - ETA: 20s - loss: 0.0826 - accuracy: 0.969 - ETA: 20s - loss: 0.0826 - accuracy: 0.969 - ETA: 20s - loss: 0.0825 - accuracy: 0.969 - ETA: 20s - loss: 0.0825 - accuracy: 0.969 - ETA: 20s - loss: 0.0824 - accuracy: 0.969 - ETA: 20s - loss: 0.0824 - accuracy: 0.969 - ETA: 19s - loss: 0.0823 - accuracy: 0.969 - ETA: 19s - loss: 0.0823 - accuracy: 0.969 - ETA: 19s - loss: 0.0822 - accuracy: 0.970 - ETA: 19s - loss: 0.0821 - accuracy: 0.970 - ETA: 19s - loss: 0.0820 - accuracy: 0.970 - ETA: 19s - loss: 0.0820 - accuracy: 0.970 - ETA: 19s - loss: 0.0819 - accuracy: 0.970 - ETA: 19s - loss: 0.0819 - accuracy: 0.970 - ETA: 19s - loss: 0.0818 - accuracy: 0.970 - ETA: 19s - loss: 0.0818 - accuracy: 0.970 - ETA: 19s - loss: 0.0817 - accuracy: 0.970 - ETA: 19s - loss: 0.0816 - accuracy: 0.970 - ETA: 19s - loss: 0.0816 - accuracy: 0.970 - ETA: 19s - loss: 0.0815 - accuracy: 0.970 - ETA: 19s - loss: 0.0814 - accuracy: 0.970 - ETA: 19s - loss: 0.0814 - accuracy: 0.970 - ETA: 18s - loss: 0.0813 - 
accuracy: 0.970 - ETA: 18s - loss: 0.0813 - accuracy: 0.970 - ETA: 18s - loss: 0.0813 - accuracy: 0.970 - ETA: 18s - loss: 0.0813 - accuracy: 0.970 - ETA: 18s - loss: 0.0812 - accuracy: 0.970 - ETA: 18s - loss: 0.0812 - accuracy: 0.970 - ETA: 18s - loss: 0.0811 - accuracy: 0.970 - ETA: 18s - loss: 0.0810 - accuracy: 0.970 - ETA: 18s - loss: 0.0810 - accuracy: 0.970 - ETA: 18s - loss: 0.0810 - accuracy: 0.970 - ETA: 18s - loss: 0.0809 - accuracy: 0.970 - ETA: 18s - loss: 0.0808 - accuracy: 0.9705" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1592480/1806870 [=========================>....] - ETA: 18s - loss: 0.0808 - accuracy: 0.970 - ETA: 18s - loss: 0.0808 - accuracy: 0.970 - ETA: 18s - loss: 0.0807 - accuracy: 0.970 - ETA: 18s - loss: 0.0807 - accuracy: 0.970 - ETA: 18s - loss: 0.0806 - accuracy: 0.970 - ETA: 18s - loss: 0.0806 - accuracy: 0.970 - ETA: 17s - loss: 0.0805 - accuracy: 0.970 - ETA: 17s - loss: 0.0804 - accuracy: 0.970 - ETA: 17s - loss: 0.0804 - accuracy: 0.970 - ETA: 17s - loss: 0.0803 - accuracy: 0.970 - ETA: 17s - loss: 0.0802 - accuracy: 0.970 - ETA: 17s - loss: 0.0802 - accuracy: 0.970 - ETA: 17s - loss: 0.0802 - accuracy: 0.970 - ETA: 17s - loss: 0.0802 - accuracy: 0.970 - ETA: 17s - loss: 0.0801 - accuracy: 0.970 - ETA: 17s - loss: 0.0801 - accuracy: 0.970 - ETA: 17s - loss: 0.0800 - accuracy: 0.970 - ETA: 17s - loss: 0.0799 - accuracy: 0.970 - ETA: 17s - loss: 0.0799 - accuracy: 0.970 - ETA: 17s - loss: 0.0798 - accuracy: 0.970 - ETA: 17s - loss: 0.0798 - accuracy: 0.970 - ETA: 17s - loss: 0.0797 - accuracy: 0.970 - ETA: 17s - loss: 0.0797 - accuracy: 0.970 - ETA: 17s - loss: 0.0796 - accuracy: 0.970 - ETA: 16s - loss: 0.0796 - accuracy: 0.970 - ETA: 16s - loss: 0.0796 - accuracy: 0.971 - ETA: 16s - loss: 0.0795 - accuracy: 0.971 - ETA: 16s - loss: 0.0795 - accuracy: 0.971 - ETA: 16s - loss: 0.0794 - accuracy: 0.971 - ETA: 16s - loss: 0.0793 - accuracy: 0.971 - ETA: 16s - loss: 0.0793 - accuracy: 0.971 - ETA: 16s - loss: 
0.0792 - accuracy: 0.971 - ETA: 16s - loss: 0.0792 - accuracy: 0.971 - ETA: 16s - loss: 0.0791 - accuracy: 0.971 - ETA: 16s - loss: 0.0791 - accuracy: 0.971 - ETA: 16s - loss: 0.0790 - accuracy: 0.971 - ETA: 16s - loss: 0.0790 - accuracy: 0.971 - ETA: 16s - loss: 0.0789 - accuracy: 0.971 - ETA: 16s - loss: 0.0789 - accuracy: 0.971 - ETA: 16s - loss: 0.0788 - accuracy: 0.971 - ETA: 16s - loss: 0.0788 - accuracy: 0.971 - ETA: 15s - loss: 0.0787 - accuracy: 0.971 - ETA: 15s - loss: 0.0786 - accuracy: 0.971 - ETA: 15s - loss: 0.0786 - accuracy: 0.971 - ETA: 15s - loss: 0.0785 - accuracy: 0.971 - ETA: 15s - loss: 0.0785 - accuracy: 0.971 - ETA: 15s - loss: 0.0784 - accuracy: 0.971 - ETA: 15s - loss: 0.0784 - accuracy: 0.971 - ETA: 15s - loss: 0.0783 - accuracy: 0.971 - ETA: 15s - loss: 0.0783 - accuracy: 0.971 - ETA: 15s - loss: 0.0782 - accuracy: 0.971 - ETA: 15s - loss: 0.0782 - accuracy: 0.971 - ETA: 15s - loss: 0.0781 - accuracy: 0.971 - ETA: 15s - loss: 0.0781 - accuracy: 0.971 - ETA: 15s - loss: 0.0780 - accuracy: 0.971 - ETA: 15s - loss: 0.0780 - accuracy: 0.971 - ETA: 15s - loss: 0.0780 - accuracy: 0.971 - ETA: 14s - loss: 0.0779 - accuracy: 0.971 - ETA: 14s - loss: 0.0779 - accuracy: 0.971 - ETA: 14s - loss: 0.0779 - accuracy: 0.971 - ETA: 14s - loss: 0.0778 - accuracy: 0.971 - ETA: 14s - loss: 0.0777 - accuracy: 0.971 - ETA: 14s - loss: 0.0777 - accuracy: 0.971 - ETA: 14s - loss: 0.0777 - accuracy: 0.971 - ETA: 14s - loss: 0.0776 - accuracy: 0.971 - ETA: 14s - loss: 0.0776 - accuracy: 0.971 - ETA: 14s - loss: 0.0775 - accuracy: 0.971 - ETA: 14s - loss: 0.0774 - accuracy: 0.971 - ETA: 14s - loss: 0.0774 - accuracy: 0.971 - ETA: 14s - loss: 0.0773 - accuracy: 0.971 - ETA: 14s - loss: 0.0773 - accuracy: 0.971 - ETA: 14s - loss: 0.0772 - accuracy: 0.971 - ETA: 14s - loss: 0.0772 - accuracy: 0.971 - ETA: 14s - loss: 0.0771 - accuracy: 0.971 - ETA: 13s - loss: 0.0770 - accuracy: 0.971 - ETA: 13s - loss: 0.0770 - accuracy: 0.971 - ETA: 13s - loss: 0.0770 - accuracy: 
0.972 - ETA: 13s - loss: 0.0769 - accuracy: 0.972 - ETA: 13s - loss: 0.0769 - accuracy: 0.972 - ETA: 13s - loss: 0.0768 - accuracy: 0.972 - ETA: 13s - loss: 0.0768 - accuracy: 0.972 - ETA: 13s - loss: 0.0767 - accuracy: 0.972 - ETA: 13s - loss: 0.0767 - accuracy: 0.972 - ETA: 13s - loss: 0.0766 - accuracy: 0.972 - ETA: 13s - loss: 0.0766 - accuracy: 0.972 - ETA: 13s - loss: 0.0765 - accuracy: 0.972 - ETA: 13s - loss: 0.0765 - accuracy: 0.972 - ETA: 13s - loss: 0.0765 - accuracy: 0.972 - ETA: 13s - loss: 0.0764 - accuracy: 0.972 - ETA: 13s - loss: 0.0764 - accuracy: 0.972 - ETA: 13s - loss: 0.0764 - accuracy: 0.972 - ETA: 13s - loss: 0.0763 - accuracy: 0.972 - ETA: 12s - loss: 0.0763 - accuracy: 0.972 - ETA: 12s - loss: 0.0762 - accuracy: 0.972 - ETA: 12s - loss: 0.0762 - accuracy: 0.972 - ETA: 12s - loss: 0.0761 - accuracy: 0.972 - ETA: 12s - loss: 0.0761 - accuracy: 0.972 - ETA: 12s - loss: 0.0760 - accuracy: 0.972 - ETA: 12s - loss: 0.0760 - accuracy: 0.972 - ETA: 12s - loss: 0.0760 - accuracy: 0.972 - ETA: 12s - loss: 0.0759 - accuracy: 0.972 - ETA: 12s - loss: 0.0759 - accuracy: 0.972 - ETA: 12s - loss: 0.0759 - accuracy: 0.972 - ETA: 12s - loss: 0.0758 - accuracy: 0.972 - ETA: 12s - loss: 0.0758 - accuracy: 0.972 - ETA: 12s - loss: 0.0757 - accuracy: 0.972 - ETA: 12s - loss: 0.0757 - accuracy: 0.972 - ETA: 12s - loss: 0.0756 - accuracy: 0.972 - ETA: 12s - loss: 0.0757 - accuracy: 0.972 - ETA: 12s - loss: 0.0756 - accuracy: 0.972 - ETA: 11s - loss: 0.0756 - accuracy: 0.972 - ETA: 11s - loss: 0.0755 - accuracy: 0.972 - ETA: 11s - loss: 0.0755 - accuracy: 0.972 - ETA: 11s - loss: 0.0754 - accuracy: 0.972 - ETA: 11s - loss: 0.0754 - accuracy: 0.972 - ETA: 11s - loss: 0.0753 - accuracy: 0.972 - ETA: 11s - loss: 0.0753 - accuracy: 0.972 - ETA: 11s - loss: 0.0752 - accuracy: 0.972 - ETA: 11s - loss: 0.0752 - accuracy: 0.972 - ETA: 11s - loss: 0.0752 - accuracy: 0.972 - ETA: 11s - loss: 0.0751 - accuracy: 0.972 - ETA: 11s - loss: 0.0751 - accuracy: 0.972 - ETA: 11s - 
loss: 0.0750 - accuracy: 0.972 - ETA: 11s - loss: 0.0750 - accuracy: 0.972 - ETA: 11s - loss: 0.0750 - accuracy: 0.972 - ETA: 11s - loss: 0.0749 - accuracy: 0.972 - ETA: 11s - loss: 0.0748 - accuracy: 0.972 - ETA: 11s - loss: 0.0748 - accuracy: 0.972 - ETA: 10s - loss: 0.0747 - accuracy: 0.972 - ETA: 10s - loss: 0.0747 - accuracy: 0.972 - ETA: 10s - loss: 0.0746 - accuracy: 0.972 - ETA: 10s - loss: 0.0746 - accuracy: 0.972 - ETA: 10s - loss: 0.0745 - accuracy: 0.972 - ETA: 10s - loss: 0.0745 - accuracy: 0.972 - ETA: 10s - loss: 0.0745 - accuracy: 0.972 - ETA: 10s - loss: 0.0744 - accuracy: 0.972 - ETA: 10s - loss: 0.0744 - accuracy: 0.972 - ETA: 10s - loss: 0.0744 - accuracy: 0.973 - ETA: 10s - loss: 0.0743 - accuracy: 0.973 - ETA: 10s - loss: 0.0743 - accuracy: 0.973 - ETA: 10s - loss: 0.0742 - accuracy: 0.973 - ETA: 10s - loss: 0.0742 - accuracy: 0.973 - ETA: 10s - loss: 0.0742 - accuracy: 0.973 - ETA: 10s - loss: 0.0741 - accuracy: 0.973 - ETA: 10s - loss: 0.0741 - accuracy: 0.973 - ETA: 10s - loss: 0.0741 - accuracy: 0.973 - ETA: 9s - loss: 0.0740 - accuracy: 0.973 - ETA: 9s - loss: 0.0740 - accuracy: 0.97 - ETA: 9s - loss: 0.0740 - accuracy: 0.97 - ETA: 9s - loss: 0.0739 - accuracy: 0.97 - ETA: 9s - loss: 0.0738 - accuracy: 0.97 - ETA: 9s - loss: 0.0738 - accuracy: 0.97 - ETA: 9s - loss: 0.0737 - accuracy: 0.97 - ETA: 9s - loss: 0.0737 - accuracy: 0.97 - ETA: 9s - loss: 0.0736 - accuracy: 0.97 - ETA: 9s - loss: 0.0736 - accuracy: 0.97 - ETA: 9s - loss: 0.0736 - accuracy: 0.97 - ETA: 9s - loss: 0.0735 - accuracy: 0.97 - ETA: 9s - loss: 0.0735 - accuracy: 0.97 - ETA: 9s - loss: 0.0734 - accuracy: 0.97 - ETA: 9s - loss: 0.0734 - accuracy: 0.97 - ETA: 9s - loss: 0.0734 - accuracy: 0.97 - ETA: 8s - loss: 0.0733 - accuracy: 0.97 - ETA: 8s - loss: 0.0732 - accuracy: 0.97 - ETA: 8s - loss: 0.0732 - accuracy: 0.97 - ETA: 8s - loss: 0.0732 - accuracy: 0.97 - ETA: 8s - loss: 0.0732 - accuracy: 0.97 - ETA: 8s - loss: 0.0731 - accuracy: 0.97 - ETA: 8s - loss: 0.0731 - 
accuracy: 0.97 - ETA: 8s - loss: 0.0730 - accuracy: 0.97 - ETA: 8s - loss: 0.0730 - accuracy: 0.97 - ETA: 8s - loss: 0.0729 - accuracy: 0.97 - ETA: 8s - loss: 0.0729 - accuracy: 0.97 - ETA: 8s - loss: 0.0728 - accuracy: 0.97 - ETA: 8s - loss: 0.0728 - accuracy: 0.97 - ETA: 8s - loss: 0.0728 - accuracy: 0.97 - ETA: 8s - loss: 0.0727 - accuracy: 0.97 - ETA: 8s - loss: 0.0727 - accuracy: 0.97 - ETA: 8s - loss: 0.0726 - accuracy: 0.97 - ETA: 8s - loss: 0.0726 - accuracy: 0.97 - ETA: 7s - loss: 0.0725 - accuracy: 0.97 - ETA: 7s - loss: 0.0725 - accuracy: 0.97 - ETA: 7s - loss: 0.0724 - accuracy: 0.97 - ETA: 7s - loss: 0.0724 - accuracy: 0.97 - ETA: 7s - loss: 0.0724 - accuracy: 0.97 - ETA: 7s - loss: 0.0723 - accuracy: 0.97 - ETA: 7s - loss: 0.0723 - accuracy: 0.9738" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1806870/1806870 [==============================] - ETA: 7s - loss: 0.0723 - accuracy: 0.97 - ETA: 7s - loss: 0.0722 - accuracy: 0.97 - ETA: 7s - loss: 0.0722 - accuracy: 0.97 - ETA: 7s - loss: 0.0722 - accuracy: 0.97 - ETA: 7s - loss: 0.0722 - accuracy: 0.97 - ETA: 7s - loss: 0.0721 - accuracy: 0.97 - ETA: 7s - loss: 0.0721 - accuracy: 0.97 - ETA: 7s - loss: 0.0720 - accuracy: 0.97 - ETA: 7s - loss: 0.0720 - accuracy: 0.97 - ETA: 7s - loss: 0.0719 - accuracy: 0.97 - ETA: 7s - loss: 0.0719 - accuracy: 0.97 - ETA: 7s - loss: 0.0718 - accuracy: 0.97 - ETA: 6s - loss: 0.0718 - accuracy: 0.97 - ETA: 6s - loss: 0.0717 - accuracy: 0.97 - ETA: 6s - loss: 0.0717 - accuracy: 0.97 - ETA: 6s - loss: 0.0717 - accuracy: 0.97 - ETA: 6s - loss: 0.0716 - accuracy: 0.97 - ETA: 6s - loss: 0.0716 - accuracy: 0.97 - ETA: 6s - loss: 0.0715 - accuracy: 0.97 - ETA: 6s - loss: 0.0715 - accuracy: 0.97 - ETA: 6s - loss: 0.0715 - accuracy: 0.97 - ETA: 6s - loss: 0.0714 - accuracy: 0.97 - ETA: 6s - loss: 0.0714 - accuracy: 0.97 - ETA: 6s - loss: 0.0713 - accuracy: 0.97 - ETA: 6s - loss: 0.0713 - accuracy: 0.97 - ETA: 6s - loss: 0.0713 - accuracy: 0.97 - ETA: 6s - loss: 
0.0712 - accuracy: 0.97 - ETA: 6s - loss: 0.0712 - accuracy: 0.97 - ETA: 6s - loss: 0.0712 - accuracy: 0.97 - ETA: 5s - loss: 0.0711 - accuracy: 0.97 - ETA: 5s - loss: 0.0711 - accuracy: 0.97 - ETA: 5s - loss: 0.0711 - accuracy: 0.97 - ETA: 5s - loss: 0.0710 - accuracy: 0.97 - ETA: 5s - loss: 0.0710 - accuracy: 0.97 - ETA: 5s - loss: 0.0709 - accuracy: 0.97 - ETA: 5s - loss: 0.0709 - accuracy: 0.97 - ETA: 5s - loss: 0.0709 - accuracy: 0.97 - ETA: 5s - loss: 0.0708 - accuracy: 0.97 - ETA: 5s - loss: 0.0708 - accuracy: 0.97 - ETA: 5s - loss: 0.0707 - accuracy: 0.97 - ETA: 5s - loss: 0.0707 - accuracy: 0.97 - ETA: 5s - loss: 0.0707 - accuracy: 0.97 - ETA: 5s - loss: 0.0706 - accuracy: 0.97 - ETA: 5s - loss: 0.0706 - accuracy: 0.97 - ETA: 5s - loss: 0.0706 - accuracy: 0.97 - ETA: 5s - loss: 0.0706 - accuracy: 0.97 - ETA: 5s - loss: 0.0705 - accuracy: 0.97 - ETA: 4s - loss: 0.0705 - accuracy: 0.97 - ETA: 4s - loss: 0.0704 - accuracy: 0.97 - ETA: 4s - loss: 0.0704 - accuracy: 0.97 - ETA: 4s - loss: 0.0704 - accuracy: 0.97 - ETA: 4s - loss: 0.0703 - accuracy: 0.97 - ETA: 4s - loss: 0.0703 - accuracy: 0.97 - ETA: 4s - loss: 0.0702 - accuracy: 0.97 - ETA: 4s - loss: 0.0702 - accuracy: 0.97 - ETA: 4s - loss: 0.0702 - accuracy: 0.97 - ETA: 4s - loss: 0.0701 - accuracy: 0.97 - ETA: 4s - loss: 0.0701 - accuracy: 0.97 - ETA: 4s - loss: 0.0700 - accuracy: 0.97 - ETA: 4s - loss: 0.0700 - accuracy: 0.97 - ETA: 4s - loss: 0.0700 - accuracy: 0.97 - ETA: 4s - loss: 0.0699 - accuracy: 0.97 - ETA: 4s - loss: 0.0699 - accuracy: 0.97 - ETA: 4s - loss: 0.0699 - accuracy: 0.97 - ETA: 4s - loss: 0.0698 - accuracy: 0.97 - ETA: 3s - loss: 0.0698 - accuracy: 0.97 - ETA: 3s - loss: 0.0698 - accuracy: 0.97 - ETA: 3s - loss: 0.0698 - accuracy: 0.97 - ETA: 3s - loss: 0.0697 - accuracy: 0.97 - ETA: 3s - loss: 0.0697 - accuracy: 0.97 - ETA: 3s - loss: 0.0696 - accuracy: 0.97 - ETA: 3s - loss: 0.0696 - accuracy: 0.97 - ETA: 3s - loss: 0.0696 - accuracy: 0.97 - ETA: 3s - loss: 0.0695 - accuracy: 0.97 - 
ETA: 3s - loss: 0.0695 - accuracy: 0.97 - ETA: 3s - loss: 0.0694 - accuracy: 0.97 - ETA: 3s - loss: 0.0694 - accuracy: 0.97 - ETA: 3s - loss: 0.0694 - accuracy: 0.97 - ETA: 3s - loss: 0.0693 - accuracy: 0.97 - ETA: 3s - loss: 0.0693 - accuracy: 0.97 - ETA: 3s - loss: 0.0693 - accuracy: 0.97 - ETA: 3s - loss: 0.0692 - accuracy: 0.97 - ETA: 3s - loss: 0.0692 - accuracy: 0.97 - ETA: 2s - loss: 0.0692 - accuracy: 0.97 - ETA: 2s - loss: 0.0691 - accuracy: 0.97 - ETA: 2s - loss: 0.0691 - accuracy: 0.97 - ETA: 2s - loss: 0.0691 - accuracy: 0.97 - ETA: 2s - loss: 0.0691 - accuracy: 0.97 - ETA: 2s - loss: 0.0690 - accuracy: 0.97 - ETA: 2s - loss: 0.0690 - accuracy: 0.97 - ETA: 2s - loss: 0.0689 - accuracy: 0.97 - ETA: 2s - loss: 0.0689 - accuracy: 0.97 - ETA: 2s - loss: 0.0688 - accuracy: 0.97 - ETA: 2s - loss: 0.0688 - accuracy: 0.97 - ETA: 2s - loss: 0.0688 - accuracy: 0.97 - ETA: 2s - loss: 0.0687 - accuracy: 0.97 - ETA: 2s - loss: 0.0687 - accuracy: 0.97 - ETA: 2s - loss: 0.0687 - accuracy: 0.97 - ETA: 2s - loss: 0.0686 - accuracy: 0.97 - ETA: 2s - loss: 0.0686 - accuracy: 0.97 - ETA: 2s - loss: 0.0686 - accuracy: 0.97 - ETA: 2s - loss: 0.0686 - accuracy: 0.97 - ETA: 1s - loss: 0.0686 - accuracy: 0.97 - ETA: 1s - loss: 0.0685 - accuracy: 0.97 - ETA: 1s - loss: 0.0685 - accuracy: 0.97 - ETA: 1s - loss: 0.0684 - accuracy: 0.97 - ETA: 1s - loss: 0.0684 - accuracy: 0.97 - ETA: 1s - loss: 0.0684 - accuracy: 0.97 - ETA: 1s - loss: 0.0683 - accuracy: 0.97 - ETA: 1s - loss: 0.0683 - accuracy: 0.97 - ETA: 1s - loss: 0.0683 - accuracy: 0.97 - ETA: 1s - loss: 0.0682 - accuracy: 0.97 - ETA: 1s - loss: 0.0682 - accuracy: 0.97 - ETA: 1s - loss: 0.0681 - accuracy: 0.97 - ETA: 1s - loss: 0.0681 - accuracy: 0.97 - ETA: 1s - loss: 0.0681 - accuracy: 0.97 - ETA: 1s - loss: 0.0680 - accuracy: 0.97 - ETA: 1s - loss: 0.0680 - accuracy: 0.97 - ETA: 1s - loss: 0.0679 - accuracy: 0.97 - ETA: 0s - loss: 0.0679 - accuracy: 0.97 - ETA: 0s - loss: 0.0679 - accuracy: 0.97 - ETA: 0s - loss: 0.0678 - 
accuracy: 0.97 - ETA: 0s - loss: 0.0678 - accuracy: 0.97 - ETA: 0s - loss: 0.0678 - accuracy: 0.97 - ETA: 0s - loss: 0.0677 - accuracy: 0.97 - ETA: 0s - loss: 0.0677 - accuracy: 0.97 - ETA: 0s - loss: 0.0677 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0675 - accuracy: 0.97 - ETA: 0s - loss: 0.0675 - accuracy: 0.97 - ETA: 0s - loss: 0.0674 - accuracy: 0.97 - ETA: 0s - loss: 0.0674 - accuracy: 0.97 - ETA: 0s - loss: 0.0674 - accuracy: 0.97 - ETA: 0s - loss: 0.0673 - accuracy: 0.97 - ETA: 0s - loss: 0.0673 - accuracy: 0.97 - 67s 37us/step - loss: 0.0673 - accuracy: 0.9757 - val_loss: 0.0188 - val_accuracy: 0.9944\n", "Epoch 2/2\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ " 255680/1806870 [===>..........................] - ETA: 1:52:37 - loss: 0.0028 - accuracy: 1.000 - ETA: 4:13 - loss: 0.0256 - accuracy: 0.9901 - ETA: 2:40 - loss: 0.0382 - accuracy: 0.98 - ETA: 2:10 - loss: 0.0324 - accuracy: 0.99 - ETA: 1:57 - loss: 0.0302 - accuracy: 0.99 - ETA: 1:47 - loss: 0.0321 - accuracy: 0.98 - ETA: 1:41 - loss: 0.0311 - accuracy: 0.98 - ETA: 1:36 - loss: 0.0318 - accuracy: 0.98 - ETA: 1:34 - loss: 0.0307 - accuracy: 0.98 - ETA: 1:34 - loss: 0.0307 - accuracy: 0.98 - ETA: 1:33 - loss: 0.0352 - accuracy: 0.98 - ETA: 1:32 - loss: 0.0353 - accuracy: 0.98 - ETA: 1:32 - loss: 0.0343 - accuracy: 0.98 - ETA: 1:31 - loss: 0.0347 - accuracy: 0.98 - ETA: 1:29 - loss: 0.0354 - accuracy: 0.98 - ETA: 1:28 - loss: 0.0349 - accuracy: 0.98 - ETA: 1:26 - loss: 0.0345 - accuracy: 0.98 - ETA: 1:25 - loss: 0.0337 - accuracy: 0.98 - ETA: 1:23 - loss: 0.0336 - accuracy: 0.98 - ETA: 1:22 - loss: 0.0330 - accuracy: 0.98 - ETA: 1:22 - loss: 0.0331 - accuracy: 0.98 - ETA: 1:23 - loss: 0.0327 - accuracy: 0.98 - ETA: 1:22 - loss: 0.0322 - accuracy: 0.98 - ETA: 1:21 - loss: 0.0321 - accuracy: 0.98 - ETA: 1:20 - loss: 0.0313 - 
accuracy: 0.98 - ETA: 1:19 - loss: 0.0306 - accuracy: 0.98 - ETA: 1:17 - loss: 0.0301 - accuracy: 0.98 - ETA: 1:17 - loss: 0.0307 - accuracy: 0.98 - ETA: 1:16 - loss: 0.0304 - accuracy: 0.98 - ETA: 1:16 - loss: 0.0310 - accuracy: 0.98 - ETA: 1:15 - loss: 0.0316 - accuracy: 0.98 - ETA: 1:15 - loss: 0.0318 - accuracy: 0.98 - ETA: 1:15 - loss: 0.0314 - accuracy: 0.98 - ETA: 1:14 - loss: 0.0309 - accuracy: 0.98 - ETA: 1:14 - loss: 0.0306 - accuracy: 0.98 - ETA: 1:13 - loss: 0.0301 - accuracy: 0.98 - ETA: 1:13 - loss: 0.0305 - accuracy: 0.98 - ETA: 1:12 - loss: 0.0301 - accuracy: 0.98 - ETA: 1:12 - loss: 0.0303 - accuracy: 0.98 - ETA: 1:11 - loss: 0.0306 - accuracy: 0.98 - ETA: 1:11 - loss: 0.0302 - accuracy: 0.98 - ETA: 1:11 - loss: 0.0301 - accuracy: 0.98 - ETA: 1:11 - loss: 0.0298 - accuracy: 0.98 - ETA: 1:10 - loss: 0.0296 - accuracy: 0.99 - ETA: 1:10 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:10 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:10 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:09 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:09 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:09 - loss: 0.0299 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0303 - accuracy: 0.98 - ETA: 1:08 - loss: 0.0300 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0301 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0299 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0299 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0297 - accuracy: 0.99 - ETA: 1:08 - loss: 0.0298 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0295 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:07 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:06 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:05 - loss: 0.0294 - accuracy: 0.99 - ETA: 1:05 - loss: 0.0294 - accuracy: 0.99 - 
ETA: 1:05 - loss: 0.0295 - accuracy: 0.99 - ETA: 1:05 - loss: 0.0297 - accuracy: 0.99 - ETA: 1:05 - loss: 0.0295 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0296 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0298 - accuracy: 0.98 - ETA: 1:04 - loss: 0.0296 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0295 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0294 - accuracy: 0.98 - ETA: 1:04 - loss: 0.0295 - accuracy: 0.98 - ETA: 1:04 - loss: 0.0294 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0294 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0294 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:04 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:03 - loss: 0.0290 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0286 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0286 - accuracy: 0.99 - ETA: 1:02 - loss: 
0.0286 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0293 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0292 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0291 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:02 - loss: 0.0287 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0289 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0288 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0287 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0286 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0286 - accuracy: 0.99 - ETA: 1:01 - loss: 0.0285 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0284 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0283 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0283 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0285 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0285 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0285 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0284 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0283 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0283 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0281 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 1:00 - loss: 0.0282 - accuracy: 0.99 - ETA: 59s - loss: 0.0281 - accuracy: 0.9906 - ETA: 59s - loss: 0.0281 - accuracy: 0.990 - ETA: 59s - loss: 0.0280 - accuracy: 0.990 - ETA: 59s - loss: 0.0280 - accuracy: 0.990 - ETA: 59s - loss: 0.0281 - accuracy: 0.990 - ETA: 59s - loss: 0.0280 - accuracy: 0.990 - ETA: 59s - loss: 0.0280 - accuracy: 0.990 - ETA: 59s - loss: 0.0279 - accuracy: 0.990 - ETA: 59s - loss: 0.0279 - accuracy: 0.990 - ETA: 59s - loss: 0.0278 - accuracy: 
0.990 - ETA: 59s - loss: 0.0278 - accuracy: 0.990 - ETA: 59s - loss: 0.0277 - accuracy: 0.990 - ETA: 59s - loss: 0.0277 - accuracy: 0.990 - ETA: 59s - loss: 0.0276 - accuracy: 0.990 - ETA: 59s - loss: 0.0276 - accuracy: 0.990 - ETA: 59s - loss: 0.0276 - accuracy: 0.990 - ETA: 59s - loss: 0.0275 - accuracy: 0.990 - ETA: 59s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0273 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0273 - accuracy: 0.990 - ETA: 58s - loss: 0.0273 - accuracy: 0.990 - ETA: 58s - loss: 0.0273 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 58s - loss: 0.0274 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0272 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0274 - accuracy: 0.9908" ] }, { "name": "stdout", "output_type": "stream", "text": [ " 545536/1806870 [========>.....................] 
- ETA: 57s - loss: 0.0274 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0273 - accuracy: 0.990 - ETA: 57s - loss: 0.0274 - accuracy: 0.990 - ETA: 57s - loss: 0.0274 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0272 - accuracy: 0.990 - ETA: 56s - loss: 0.0271 - accuracy: 0.990 - ETA: 56s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.990 - ETA: 55s - loss: 0.0272 - accuracy: 0.990 - ETA: 55s - loss: 0.0272 - accuracy: 0.990 - ETA: 55s - loss: 0.0271 - accuracy: 0.991 - ETA: 55s - loss: 0.0271 - accuracy: 0.991 - ETA: 55s - loss: 0.0270 - accuracy: 0.991 - ETA: 55s - loss: 0.0270 - accuracy: 0.991 - ETA: 55s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0269 - accuracy: 0.991 - ETA: 54s - loss: 0.0271 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0270 - accuracy: 0.991 - ETA: 54s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0268 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 
0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0267 - accuracy: 0.991 - ETA: 53s - loss: 0.0268 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 53s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0268 - accuracy: 0.991 - ETA: 52s - loss: 0.0268 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 52s - loss: 0.0270 - accuracy: 0.991 - ETA: 52s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0270 - accuracy: 0.991 - ETA: 51s - loss: 0.0270 - accuracy: 0.991 - ETA: 51s - loss: 0.0270 - accuracy: 0.991 - ETA: 51s - loss: 0.0270 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0269 - accuracy: 0.991 - ETA: 51s - loss: 0.0268 - accuracy: 0.991 - ETA: 51s - loss: 0.0268 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 51s - loss: 0.0267 - accuracy: 0.991 - ETA: 50s - loss: 0.0273 - accuracy: 0.991 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.991 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0275 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.991 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 
0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 50s - loss: 0.0274 - accuracy: 0.990 - ETA: 49s - loss: 0.0273 - accuracy: 0.991 - ETA: 49s - loss: 0.0273 - accuracy: 0.991 - ETA: 49s - loss: 0.0273 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0272 - accuracy: 0.991 - ETA: 49s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0271 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0269 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0270 - accuracy: 0.991 - ETA: 48s - loss: 0.0269 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0271 - accuracy: 0.991 - ETA: 47s - loss: 0.0271 - accuracy: 0.991 - ETA: 47s - loss: 0.0271 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 47s - loss: 0.0270 - accuracy: 0.991 - ETA: 46s - 
loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0270 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 46s - loss: 0.0269 - accuracy: 0.991 - ETA: 45s - loss: 0.0269 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0268 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0267 - accuracy: 0.991 - ETA: 45s - loss: 0.0266 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0267 - 
accuracy: 0.991 - ETA: 44s - loss: 0.0267 - accuracy: 0.991 - ETA: 44s - loss: 0.0266 - accuracy: 0.991 - ETA: 43s - loss: 0.0266 - accuracy: 0.991 - ETA: 43s - loss: 0.0266 - accuracy: 0.9911" ] }, { "name": "stdout", "output_type": "stream", "text": [ " 827360/1806870 [============>.................] - ETA: 43s - loss: 0.0266 - accuracy: 0.991 - ETA: 43s - loss: 0.0266 - accuracy: 0.991 - ETA: 43s - loss: 0.0265 - accuracy: 0.991 - ETA: 43s - loss: 0.0265 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 43s - loss: 0.0264 - accuracy: 0.991 - ETA: 42s - loss: 0.0264 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0263 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 42s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0263 - accuracy: 0.991 - ETA: 41s - loss: 0.0263 - accuracy: 0.991 - ETA: 41s - loss: 0.0263 - accuracy: 0.991 - ETA: 41s - loss: 0.0263 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 0.0262 - accuracy: 0.991 - ETA: 41s - loss: 
0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 41s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0261 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 40s - loss: 0.0260 - accuracy: 0.991 - ETA: 39s - loss: 0.0260 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0259 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0258 - accuracy: 0.991 - ETA: 39s - loss: 0.0257 - accuracy: 0.991 - ETA: 39s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0257 - accuracy: 
0.991 - ETA: 38s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0257 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0255 - accuracy: 0.991 - ETA: 38s - loss: 0.0255 - accuracy: 0.991 - ETA: 38s - loss: 0.0255 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 38s - loss: 0.0256 - accuracy: 0.991 - ETA: 37s - loss: 0.0256 - accuracy: 0.991 - ETA: 37s - loss: 0.0256 - accuracy: 0.991 - ETA: 37s - loss: 0.0256 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0255 - accuracy: 0.991 - ETA: 37s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - 
loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 36s - loss: 0.0254 - accuracy: 0.991 - ETA: 36s - loss: 0.0255 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0255 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0253 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0255 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 35s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0254 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0253 - accuracy: 0.991 - ETA: 34s - loss: 0.0252 - 
accuracy: 0.991 - ETA: 34s - loss: 0.0252 - accuracy: 0.991 - ETA: 34s - loss: 0.0252 - accuracy: 0.991 - ETA: 33s - loss: 0.0252 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.9916" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1088864/1806870 [=================>............] - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0251 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0250 - accuracy: 0.991 - ETA: 33s - loss: 0.0249 - accuracy: 0.991 - ETA: 33s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0250 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0250 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 
0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 32s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0249 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 31s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0248 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0247 - accuracy: 
0.991 - ETA: 30s - loss: 0.0247 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 30s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0246 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 29s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - loss: 0.0245 - accuracy: 0.991 - ETA: 28s - 
loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 28s - loss: 0.0244 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 27s - loss: 0.0243 - accuracy: 0.991 - ETA: 26s - loss: 0.0243 - accuracy: 0.991 - ETA: 26s - loss: 0.0243 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0242 - accuracy: 0.991 - ETA: 26s - loss: 0.0241 - accuracy: 0.991 - ETA: 26s - loss: 0.0241 - accuracy: 0.991 - ETA: 26s - loss: 0.0241 - accuracy: 0.991 - ETA: 26s - loss: 0.0241 - accuracy: 0.992 - ETA: 26s - loss: 0.0241 - accuracy: 0.992 - ETA: 26s - loss: 0.0241 - accuracy: 0.992 - ETA: 26s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - 
accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.991 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0241 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.992 - ETA: 25s - loss: 0.0240 - accuracy: 0.9920" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1370528/1806870 [=====================>........] - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0240 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 24s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 0.0239 - accuracy: 0.992 - ETA: 23s - loss: 
0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 23s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0238 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 22s - loss: 0.0237 - accuracy: 0.992 - ETA: 21s - loss: 0.0237 - accuracy: 0.992 - ETA: 21s - loss: 0.0237 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 
0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0236 - accuracy: 0.992 - ETA: 21s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0235 - accuracy: 0.992 - ETA: 20s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0235 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 19s - loss: 0.0234 - accuracy: 0.992 - ETA: 18s - loss: 0.0234 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - 
loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0233 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 18s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0232 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 17s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0231 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - 
accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 16s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0230 - accuracy: 0.992 - ETA: 15s - loss: 0.0229 - accuracy: 0.992 - ETA: 15s - loss: 0.0229 - accuracy: 0.992 - ETA: 15s - loss: 0.0229 - accuracy: 0.992 - ETA: 15s - loss: 0.0229 - accuracy: 0.9923" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1661024/1806870 [==========================>...] 
- ETA: 15s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0229 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 14s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0228 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 13s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 
0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0227 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 12s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 11s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0226 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 
0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 10s - loss: 0.0225 - accuracy: 0.992 - ETA: 9s - loss: 0.0225 - accuracy: 0.992 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0225 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 9s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0224 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 8s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - 
loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0223 - accuracy: 0.99 - ETA: 7s - loss: 0.0222 - accuracy: 0.99 - ETA: 7s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0222 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 6s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0221 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 
0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.99 - ETA: 5s - loss: 0.0220 - accuracy: 0.9926" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1806870/1806870 [==============================] - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0220 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 4s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - accuracy: 0.99 - ETA: 3s - loss: 0.0219 - 
accuracy: 0.99 - ETA: 3s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0218 - accuracy: 0.99 - ETA: 2s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 1s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s 
- loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - ETA: 0s - loss: 0.0216 - accuracy: 0.99 - 65s 36us/step - loss: 0.0216 - accuracy: 0.9928 - val_loss: 0.0080 - val_accuracy: 0.9976\n" ] } ], "source": [ "# BEGIN FEDERATED: BUILD AND PRE-TRAIN THE GLOBAL MODEL\n", "\n", "# STOP TRAINING WHEN val_loss STOPS IMPROVING; RESTORE THE BEST WEIGHTS SEEN\n", "earlystopping = EarlyStopping(monitor='val_loss',\n", "                              min_delta=0.01,\n", "                              patience=50,\n", "                              verbose=0,\n", "                              baseline=2,\n", "                              restore_best_weights=True)\n", "\n", "# KEEP THE BEST (LOWEST val_loss) MODEL ON DISK\n", "# NOTE(review): the '.h8' extension looks like a typo for '.h5' -- confirm\n", "checkpoint = ModelCheckpoint('test.h8',\n", "                             monitor='val_loss',\n", "                             mode='min',\n", "                             save_best_only=True,\n", "                             verbose=0)\n", "\n", "# FULLY CONNECTED CLASSIFIER: Features_number -> 70 -> 50 -> 50 -> 2 (SOFTMAX)\n", "model = Sequential()\n", "model.add(Dense(70, input_dim=Features_number, activation='relu'))\n", "model.add(Dense(50, activation='relu'))\n", "model.add(Dense(50, activation='relu'))\n", "model.add(Dense(2, activation='softmax'))\n", "model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])\n", "history = model.fit(X_train, y_train,\n", "                    epochs=2,\n", "                    validation_data=(X_test, y_test),\n", "                    callbacks=[checkpoint, earlystopping],\n", "                    shuffle=True)" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "#AUXILIARY METHODS FOR FEDERATED LEARNING\n", "\n", "# RETURN INDICES OF LAYERS THAT HAVE TRAINABLE PARAMETERS (WEIGHTS AND BIASES)\n", "def trainable_layers(model):\n", "    return [i for i, layer in enumerate(model.layers) if len(layer.get_weights()) > 0]\n", "\n", "# RETURN DEEP COPIES OF THE WEIGHTS AND BIASES OF A MODEL, ONE ENTRY PER\n", "# TRAINABLE LAYER (DEEP COPIES SO LATER TRAINING DOES NOT MUTATE THEM)\n", "def get_parameters(model):\n", "    weights = []\n", "    biases = []\n", "    for i in trainable_layers(model):\n", "        layer_params = model.layers[i].get_weights()\n", "        weights.append(copy.deepcopy(layer_params[0]))\n", "        biases.append(copy.deepcopy(layer_params[1]))\n", "    return weights, biases\n", "\n", "# SET WEIGHTS AND BIASES OF A MODEL (INVERSE OF get_parameters)\n", "def set_parameters(model, weights, biases):\n", "    for i, j in enumerate(trainable_layers(model)):\n", "        model.layers[j].set_weights([weights[i], biases[i]])\n", "\n", "# DEPRECATED: RETURN THE GRADIENTS OF THE MODEL AFTER AN UPDATE\n", "# (RELIES ON PRIVATE KERAS APIS; KEPT FOR REFERENCE ONLY)\n", "def get_gradients(model, inputs, outputs):\n", "    \"\"\" Gets gradient of model for given inputs and outputs for all weights\"\"\"\n", "    grads = model.optimizer.get_gradients(model.total_loss, model.trainable_weights)\n", "    symb_inputs = (model._feed_inputs + model._feed_targets + model._feed_sample_weights)\n", "    f = K.function(symb_inputs, grads)\n", "    x, y, sample_weight = model._standardize_user_data(inputs, outputs)\n", "    output_grad = f(x + y + sample_weight)\n", "    # EVEN POSITIONS HOLD WEIGHT GRADIENTS, ODD POSITIONS HOLD BIAS GRADIENTS\n", "    w_grad = [w for i, w in enumerate(output_grad) if i % 2 == 0]\n", "    b_grad = [w for i, w in enumerate(output_grad) if i % 2 == 1]\n", "    return w_grad, b_grad\n", "\n", "# RETURN THE DIFFERENCE OF MODELS' WEIGHTS AND BIASES AFTER AN UPDATE\n", "# NOTE: LEARNING RATE IS APPLIED, SO THE UPDATE IS DIFFERENT FROM THE\n", "# GRADIENTS. IN CASE VANILLA SGD IS USED, THE GRADIENTS ARE OBTAINED\n", "# AS (UPDATES / LEARNING_RATE)\n", "def get_updates(model, inputs, outputs, batch_size, epochs):\n", "    w, b = get_parameters(model)\n", "    model.fit(inputs, outputs, batch_size=batch_size, epochs=epochs, verbose=0)\n", "    w_new, b_new = get_parameters(model)\n", "    # SIGN CONVENTION: old - new, I.E. THE NEGATIVE OF THE APPLIED STEP;\n", "    # apply_updates BELOW SUBTRACTS eta*update, MATCHING THIS CONVENTION\n", "    weight_updates = [old - new for old, new in zip(w, w_new)]\n", "    bias_updates = [old - new for old, new in zip(b, b_new)]\n", "    return weight_updates, bias_updates\n", "\n", "# UPDATE THE MODEL'S WEIGHTS AND BIASES WITH AN UPDATE (theta <- theta - eta*delta)\n", "def apply_updates(model, eta, w_new, b_new):\n", "    w, b = get_parameters(model)\n", "    new_weights = [theta - eta * delta for theta, delta in zip(w, w_new)]\n", "    new_biases = [theta - eta * delta for theta, delta in zip(b, b_new)]\n", "    set_parameters(model, new_weights, new_biases)\n", "\n", "# FEDERATED AGGREGATION: COMBINE THE PER-PEER UPDATES LAYER BY LAYER WITH\n", "# THE AGGREGATION FUNCTION f (E.G. np.mean OR np.median, CALLED WITH axis=0)\n", "def aggregate(n_layers, n_peers, f, w_updates, b_updates):\n", "    agg_w = [f([w_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]\n", "    agg_b = [f([b_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]\n", "    return agg_w, agg_b\n", "\n", "# REPLACE NANS AND INFINITIES BY ZERO IN ALL WEIGHT/BIAS TENSORS\n", "def nans_to_zero(W, B):\n", "    W0 = [np.nan_to_num(w, nan=0.0, posinf=0.0, neginf=0.0) for w in W]\n", "    B0 = [np.nan_to_num(b, nan=0.0, posinf=0.0, neginf=0.0) for b in B]\n", "    return W0, B0\n", "\n", "# FIT THE RANDOM FOREST SURROGATE USED TO EXPLAIN THE BLACK-BOX MODEL\n", "def build_forest(X, y):\n", "    clf = RandomForestClassifier(n_estimators=1000, max_depth=7, random_state=0, verbose=1)\n", "    clf.fit(X, y)\n", "    return clf\n", "\n", "# COMPUTE EUCLIDEAN DISTANCE BETWEEN TWO LISTS OF WEIGHT TENSORS\n", "def dist_weights(w_a, w_b):\n", "    return euclidean(flatten_weights(w_a), flatten_weights(w_b))\n", "\n", "# TRANSFORM ALL WEIGHT TENSORS INTO A SINGLE 1D ARRAY\n", "# (ONE np.concatenate IS O(n) OVERALL; THE PREVIOUS REPEATED np.append\n", "# REALLOCATED THE WHOLE BUFFER EACH ITERATION, I.E. O(n^2))\n", "def flatten_weights(w_in):\n", "    return np.concatenate([w.reshape(-1) for w in w_in])" ] },
{ "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "# BYZANTINE ATTACK: REPLACE A RANDOM 40% OF THE SAMPLES WITH UNIFORM NOISE\n", "# DRAWN BETWEEN EACH FEATURE'S MIN AND MAX. MUTATES AND RETURNS `inputs`.\n", "# NOTE(review): the attack body only runs when the global data_set == 'adult';\n", "# for other data sets `inputs` is returned unchanged -- confirm this is intended.\n", "def byzantine_attack_data(inputs):\n", "    attack_persentage = 40\n", "    number_of_attacked_samples = int(len(inputs) * attack_persentage / 100)\n", "    # A SET GIVES O(1) MEMBERSHIP TESTS IN THE LOOP BELOW (A LIST WOULD BE O(k));\n", "    # ITERATION ORDER OVER ROWS IS UNCHANGED, SO RNG CONSUMPTION IS IDENTICAL\n", "    sampels_attacked = set(random.sample(range(len(inputs)), number_of_attacked_samples))\n", "    if data_set == 'adult':\n", "        z = inputs.max(axis=0)  # PER-FEATURE MAXIMA\n", "        C = inputs.min(axis=0)  # PER-FEATURE MINIMA\n", "        for i in range(len(inputs)):\n", "            if i in sampels_attacked:\n", "                for j in range(len(inputs[0])):\n", "                    # random.uniform ACCEPTS REVERSED BOUNDS; VALUE STAYS IN [min, max]\n", "                    inputs[i][j] = random.uniform(z[j], C[j])\n", "    return inputs" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": [ "# POISONING ATTACK: FOR A RANDOM 60% OF THE SAMPLES, REPLACE ONLY THE\n", "# FEATURES LISTED IN feature_attacked WITH UNIFORM NOISE BETWEEN THAT\n", "# FEATURE'S MIN AND MAX. MUTATES AND RETURNS `h`.\n", "def poisoning_attack_data(h, feature_attacked):\n", "    attack_persentage = 60\n", "    number_of_attacked_samples = int(len(h) * attack_persentage / 100)\n", "    # SET FOR O(1) MEMBERSHIP TESTS; ROW ITERATION ORDER UNCHANGED\n", "    sampels_attacked = set(random.sample(range(len(h)), number_of_attacked_samples))\n", "    if data_set == 'adult':\n", "        z = h.max(axis=0)  # PER-FEATURE MAXIMA\n", "        C = h.min(axis=0)  # PER-FEATURE MINIMA\n", "        for i in range(len(h)):\n", "            if i in sampels_attacked:\n", "                for j in range(len(feature_attacked)):\n", "                    h[i][feature_attacked[j]] = random.uniform(z[feature_attacked[j]], C[feature_attacked[j]])\n", "    return h" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "# LABEL-FLIPPING ATTACK: FLIP THE ONE-HOT LABELS (0 <-> 1) OF A RANDOM 50%\n", "# OF THE SAMPLES. MUTATES AND RETURNS `z`.\n", "def label_flipping_attack_data(z):\n", "    attack_persentage = 50\n", "    number_of_attacked_samples = int(len(z) * attack_persentage / 100)\n", "    # SET FOR O(1) MEMBERSHIP TESTS; ROW ITERATION ORDER UNCHANGED\n", "    sampels_attacked = set(random.sample(range(len(z)), number_of_attacked_samples))\n", "    if data_set == 'adult':\n", "        for i in range(len(z)):\n", "            if i in sampels_attacked:\n", "                for j in range(len(z[i])):\n", "                    z[i][j] = 1 if z[i][j] == 0 else 0\n", "    return z" ] }, { "cell_type": "code",
"execution_count": 12, "metadata": {}, "outputs": [], "source": [ "# scan the forest for trees maches the wrong predictions of the black-box\n", "def scan_wrong(forest_predictions, FL_predict1, forest , y_test_local, X_test_local):\n", " sum_feature_improtance= 0\n", " overal_wrong_feature_importance = 0\n", " counter = 0\n", " second_counter = 0\n", " never_seen = 0\n", " avr_wrong_importance = 0\n", " FL_predict1 = np.argmax(FL_predict1, axis=1)\n", " forest_predictions = np.argmax(forest_predictions, axis=1)\n", " y_test_local = np.argmax(y_test_local, axis=1)\n", " FL_wrong = 0\n", " for i in range (len(FL_predict1)):\n", " i_tree = 0\n", "# if the black-box got a wrong prediction\n", " if (FL_predict1[i] != y_test_local[i]):\n", " FL_wrong = FL_wrong + 1\n", "# getting the prediction of the trees one by one\n", " for tree_in_forest in forest.estimators_:\n", " sample = X_test_local[i].reshape(1, -1)\n", " temp = forest.estimators_[i_tree].predict(sample)\n", " temp = np.argmax(temp, axis=1)\n", "# print('the prediction of the t')\n", "# print(temp)\n", " i_tree = i_tree + 1\n", "# if the prediction of the tree maches the predictions of the black-box\n", " if(FL_predict1[i] == temp):\n", "# getting the features importances\n", " sum_feature_improtance = sum_feature_improtance + tree_in_forest.feature_importances_\n", " counter = counter + 1\n", "# if we have trees maches the black-box predictions\n", " if(counter>0):\n", " ave_feature_importence = sum_feature_improtance/counter\n", " overal_wrong_feature_importance = ave_feature_importence + overal_wrong_feature_importance\n", " second_counter = second_counter + 1\n", " counter = 0\n", " sum_feature_improtance = 0\n", "# if there is no trees maches the black-box predictions\n", " else:\n", " if(FL_predict1[i] != y_test_local[i]):\n", " never_seen = never_seen +1\n", "\n", "# getting the average features importances for all the samples that had wrong predictions.\n", " if(second_counter>0):\n", " 
avr_wrong_importance = overal_wrong_feature_importance / second_counter\n", " return avr_wrong_importance" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[0, 1, 2, 3]" ] }, "execution_count": 13, "metadata": {}, "output_type": "execute_result" } ], "source": [ "trainable_layers(model)" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "([array([[ 0.08338594, -0.1254255 , -0.49211267, ..., 0.24750227,\n", " -0.65377104, -0.05224424],\n", " [-0.01734522, -0.1021487 , 0.14053608, ..., -0.28375858,\n", " -0.15333907, -0.21018188],\n", " [ 0.0459124 , 0.17106615, -0.2748136 , ..., 0.29032764,\n", " -0.12925142, 0.06985184],\n", " ...,\n", " [-0.15448453, 0.17909111, 0.18795453, ..., 0.46541557,\n", " 0.01830631, -0.0534319 ],\n", " [-0.15942477, 0.09147607, 0.06007228, ..., 0.11995307,\n", " 0.5220185 , -0.16314192],\n", " [ 0.09670959, -0.1825741 , -0.24682267, ..., 0.21973692,\n", " 0.26263914, 0.1036981 ]], dtype=float32),\n", " array([[-0.13370994, 0.05705584, -0.01060855, ..., 0.03942909,\n", " -0.05360872, 0.18457419],\n", " [ 0.18660966, 0.02259383, -0.06519584, ..., 0.12901685,\n", " -0.03150385, -0.05975418],\n", " [-0.18495369, -0.3026115 , 0.15492044, ..., 0.3693038 ,\n", " 0.42332208, 0.17241667],\n", " ...,\n", " [-0.41482276, -0.14596964, -0.10000814, ..., -0.07026828,\n", " -0.13250498, -0.11739882],\n", " [-0.1614849 , -0.264906 , 0.1812628 , ..., 0.51671666,\n", " 0.15924722, 0.18865098],\n", " [-0.22190864, 0.16413453, -0.15392351, ..., -0.02723815,\n", " -0.19859377, -0.2072882 ]], dtype=float32),\n", " array([[ 0.47770685, -0.7960992 , 0.4646107 , ..., 0.04795517,\n", " -0.10576709, 0.5649921 ],\n", " [-0.37207457, -0.25275326, -0.18699329, ..., 0.24836566,\n", " -0.3587133 , -0.03917937],\n", " [ 0.087133 , -0.14499295, -0.01567947, ..., -0.02826147,\n", " 0.18539242, -0.2073498 ],\n", " ...,\n", " [-0.54771024, -0.56772953, 
-1.081203 , ..., 0.84306604,\n", " 0.14480972, 0.12935087],\n", " [-1.2092329 , 0.72066 , -0.76670545, ..., 0.3710571 ,\n", " -0.02872824, 0.13071369],\n", " [-0.24432653, -0.20408279, 0.18815796, ..., -0.24326074,\n", " -0.81241286, -0.38458872]], dtype=float32),\n", " array([[ 1.1354454 , -0.8150371 ],\n", " [ 0.8239793 , -0.38024253],\n", " [ 0.18105277, -0.22851984],\n", " [ 0.36343297, -0.68712914],\n", " [-0.14369975, 0.20164204],\n", " [-1.3597993 , 1.1511468 ],\n", " [ 0.17626402, -0.44736794],\n", " [ 0.783137 , -1.2214484 ],\n", " [ 0.2721217 , -0.06518261],\n", " [ 0.8918733 , -0.77505213],\n", " [-0.656868 , 1.1960154 ],\n", " [-1.6819351 , 1.3945241 ],\n", " [ 0.61881614, -0.7248123 ],\n", " [-0.21632305, 0.10406036],\n", " [ 0.17342244, 0.00396303],\n", " [ 1.9393425 , -2.2392154 ],\n", " [ 0.25582054, -0.09821833],\n", " [-1.4448843 , 1.351973 ],\n", " [-0.7736038 , 1.014288 ],\n", " [ 1.5285544 , -1.3971529 ],\n", " [ 2.319214 , -1.9343866 ],\n", " [ 0.4609306 , -0.22342049],\n", " [ 0.17103793, -0.22433195],\n", " [ 0.84780014, -0.5512217 ],\n", " [ 0.16438304, -0.4931989 ],\n", " [ 1.15009 , -1.2441653 ],\n", " [-0.7512086 , 0.69693834],\n", " [-1.7812865 , 1.8653326 ],\n", " [ 0.9114694 , -1.0244646 ],\n", " [ 0.68447626, -0.45571706],\n", " [-0.9834174 , 1.2978134 ],\n", " [ 2.1663804 , -1.6653944 ],\n", " [-0.2522568 , -0.24315625],\n", " [-0.30284685, 0.6635906 ],\n", " [-1.0407605 , 0.75330424],\n", " [-1.5959861 , 1.6432168 ],\n", " [-1.6533945 , 1.4116566 ],\n", " [-1.3818094 , 0.96237814],\n", " [ 0.49473518, -0.7128965 ],\n", " [-1.1544157 , 1.2103665 ],\n", " [-0.31084502, -0.06416508],\n", " [ 0.07730638, -0.34380186],\n", " [ 0.00517065, -0.07974567],\n", " [ 0.76227933, -0.46143502],\n", " [-2.0212495 , 2.1503792 ],\n", " [-1.419235 , 1.1352861 ],\n", " [ 0.40954936, -0.76466006],\n", " [-0.592412 , 1.0149235 ],\n", " [-0.9884663 , 1.3155804 ],\n", " [-1.633649 , 1.6489463 ]], dtype=float32)],\n", " [array([-1.58836432e-02, 
-1.20011568e-02, -2.30990946e-02, 0.00000000e+00,\n", " 0.00000000e+00, 0.00000000e+00, -6.30686581e-02, -6.26324415e-02,\n", " -3.39236371e-02, -6.14111274e-02, 0.00000000e+00, -4.05886732e-02,\n", " 0.00000000e+00, -6.10261457e-03, -3.06573324e-02, 0.00000000e+00,\n", " -1.63990387e-03, 0.00000000e+00, -1.11483254e-01, -1.81604289e-02,\n", " -5.61644835e-03, 1.37938242e-02, 0.00000000e+00, -2.23437846e-02,\n", " 0.00000000e+00, -6.16606697e-02, 1.70837268e-01, 5.55998720e-02,\n", " -1.64261945e-02, 1.20512873e-01, -6.44331053e-03, 0.00000000e+00,\n", " -3.33447531e-02, -5.07780984e-02, 3.10869161e-02, 7.47342259e-02,\n", " 7.46760815e-02, -5.52449860e-02, -8.27614740e-02, -2.39871517e-02,\n", " -3.01994607e-02, -4.17327993e-02, -1.15456417e-01, 8.73452723e-02,\n", " -7.59378746e-02, 0.00000000e+00, -3.00336909e-02, 6.94403024e-09,\n", " -4.00215089e-02, -1.37547646e-02, -4.61019538e-02, 5.88711686e-02,\n", " -6.63833246e-02, -2.25303844e-02, -1.15773613e-02, 3.89949568e-02,\n", " -3.27692814e-02, -1.54451123e-02, 1.45327836e-01, -7.80039234e-03,\n", " -3.16219591e-02, 0.00000000e+00, 1.14573305e-02, -2.14286316e-02,\n", " -3.89366113e-02, 1.13435954e-01, -4.89240699e-02, -1.66841432e-01,\n", " -3.22352685e-02, -1.43911066e-02], dtype=float32),\n", " array([-1.1628173e-01, 1.4605005e-01, -8.3894879e-02, -1.7480729e-02,\n", " 9.5676459e-02, -1.5144591e-01, -8.5667908e-02, -2.8433751e-02,\n", " -5.2900422e-02, -4.8166331e-02, 3.3510438e-01, -9.0990268e-02,\n", " 5.9481107e-02, 4.4261031e-02, 1.3082844e-01, -1.3972101e-01,\n", " -7.2492525e-02, -3.3382095e-02, -2.3739910e-01, -1.8229088e-01,\n", " -9.5385693e-02, 3.0102465e-02, 6.0827412e-02, 3.3912200e-01,\n", " 7.5339697e-02, -2.1391104e-01, -1.0858524e-01, 3.6047959e-01,\n", " -3.7770301e-01, 9.7297080e-02, -1.0461237e-01, -7.4935108e-02,\n", " -8.2315236e-02, 1.9678907e-01, 1.1734279e-06, -2.2777809e-01,\n", " -1.2057750e-01, -2.1283591e-02, 1.3623047e-01, -1.9323155e-01,\n", " -3.3379752e-02, -3.5926573e-02, 
2.8303096e-01, 9.3418181e-02,\n", " -1.3997810e-01, 9.8838598e-02, -2.3493488e-01, -1.9615906e-01,\n", " -1.0158879e-02, -7.1108431e-02], dtype=float32),\n", " array([-0.33614156, -0.19233358, -0.02823093, 0.48520288, 0.23908217,\n", " 0.27522194, 0.01314433, -0.01497711, -0.0828162 , -0.23802279,\n", " 0.32634258, 0.12124245, 0.18269299, 0.08887233, -0.06745057,\n", " -0.556095 , -0.11532133, 0.40208554, -0.01085546, 0.4505257 ,\n", " -0.36126408, 0.01500582, 0.07917931, 0.09815152, 0.33817917,\n", " -0.2813558 , 0.35876733, -0.36583313, -0.1797759 , -0.13289435,\n", " 0.04141649, -0.31939384, -0.10274614, 0.1994237 , -0.14661174,\n", " -0.42591807, 0.28506443, 0.04345732, 0.2656843 , -0.30315512,\n", " -0.07892313, -0.06843591, -0.09118145, 0.41733742, 0.17977186,\n", " 0.25952908, 0.4197849 , 0.3480925 , -0.3922683 , -0.05383924],\n", " dtype=float32),\n", " array([ 0.30379787, -0.3038005 ], dtype=float32)])" ] }, "execution_count": 14, "metadata": {}, "output_type": "execute_result" } ], "source": [ "get_parameters(model)" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "([array([[ 2.8014183e-06, 0.0000000e+00, -4.5895576e-06, ...,\n", " -2.2205949e-02, 0.0000000e+00, 0.0000000e+00],\n", " [ 2.1923333e-06, 0.0000000e+00, -7.8976154e-06, ...,\n", " 4.9105823e-02, 0.0000000e+00, 0.0000000e+00],\n", " [ 1.5608966e-06, 0.0000000e+00, -3.7252903e-06, ...,\n", " 7.3180795e-03, 0.0000000e+00, 0.0000000e+00],\n", " ...,\n", " [ 1.5050173e-06, 0.0000000e+00, -7.4207783e-06, ...,\n", " -2.6787940e-01, 0.0000000e+00, 0.0000000e+00],\n", " [ 1.8626451e-06, 0.0000000e+00, -8.1695616e-06, ...,\n", " 1.6834244e-02, 0.0000000e+00, 0.0000000e+00],\n", " [ 1.7285347e-06, 0.0000000e+00, -4.6193600e-06, ...,\n", " -5.3731605e-02, 0.0000000e+00, 0.0000000e+00]], dtype=float32),\n", " array([[ 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, ...,\n", " -4.1723251e-07, -1.3783574e-07, 0.0000000e+00],\n", " [ 0.0000000e+00, 
0.0000000e+00, 0.0000000e+00, ...,\n", " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00],\n", " [ 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, ...,\n", " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00],\n", " ...,\n", " [-8.3480060e-02, -7.9958737e-03, 0.0000000e+00, ...,\n", " 1.0635569e-01, -3.4096837e-04, 6.9890119e-02],\n", " [ 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, ...,\n", " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00],\n", " [ 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, ...,\n", " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00]], dtype=float32),\n", " array([[-0.04616958, 0.59762466, -0.26143235, ..., -0.06850739,\n", " -0.4217179 , 0.08980078],\n", " [-0.17200631, 0.3219571 , 0.27451754, ..., 0.07618259,\n", " 0.48217025, 0.26553166],\n", " [ 0. , 0. , 0. , ..., 0. ,\n", " 0. , 0. ],\n", " ...,\n", " [ 0.37040782, 0.77656084, -0.17277646, ..., -0.04284477,\n", " 0.27722144, -0.29050782],\n", " [ 0.49921095, -0.3744073 , -0.79761034, ..., 0.06160343,\n", " 0.26952648, 0.44993538],\n", " [ 0.12304485, 0.08797711, 0.14218739, ..., 0.40071172,\n", " 0.43845785, 0.22023249]], dtype=float32),\n", " array([[ 0.01556432, -0.01553738],\n", " [-0.80137116, 0.8013473 ],\n", " [ 0.05863538, -0.05863395],\n", " [ 0.09222463, -0.09223729],\n", " [ 0.22395234, -0.22392958],\n", " [-0.25126195, 0.25127405],\n", " [ 0.19329323, -0.1932973 ],\n", " [-0.42185718, 0.4218619 ],\n", " [-0.09126899, 0.09127331],\n", " [-0.13490325, 0.13491231],\n", " [ 0.07229513, -0.07227075],\n", " [-0.1858536 , 0.18585992],\n", " [ 0.3444129 , -0.3444115 ],\n", " [-0.04673225, 0.04675576],\n", " [-0.744485 , 0.7444947 ],\n", " [ 0.59019566, -0.5901569 ],\n", " [ 0.00766785, -0.0076676 ],\n", " [ 0.1348064 , -0.13479984],\n", " [ 0.19738197, -0.19736266],\n", " [ 0.20221901, -0.20221877],\n", " [ 0.2895143 , -0.28952658],\n", " [ 0.20282978, -0.20283176],\n", " [-0.76195276, 0.7619654 ],\n", " [ 0.26977128, -0.2697831 ],\n", " [ 0.3250053 , -0.32500228],\n", " [-0.15308547, 0.15309238],\n", " 
[-0.10742784, 0.10744089],\n", " [-0.15101504, 0.15102363],\n", " [-0.6646027 , 0.6646162 ],\n", " [ 0.23995936, -0.23995501],\n", " [-0.30166405, 0.30165863],\n", " [ 0.4730208 , -0.47301006],\n", " [ 0. , 0. ],\n", " [-0.02773407, 0.02774608],\n", " [ 0.07481575, -0.07481062],\n", " [-0.51466 , 0.51466644],\n", " [-0.02005684, 0.02005637],\n", " [-0.11586785, 0.11588269],\n", " [ 0.16156894, -0.16156316],\n", " [-0.30240393, 0.3024137 ],\n", " [ 0.8963616 , -0.89635366],\n", " [-0.06791203, 0.06791377],\n", " [-0.17452781, 0.17454016],\n", " [ 0.08860135, -0.08860841],\n", " [-0.6942698 , 0.69428754],\n", " [-0.17503893, 0.17506146],\n", " [ 0.16636667, -0.16637546],\n", " [-0.25057983, 0.25059932],\n", " [ 0.2041834 , -0.20417154],\n", " [-0.01968396, 0.01968992]], dtype=float32)],\n", " [array([ 3.3285469e-06, 0.0000000e+00, -1.2369826e-05, 0.0000000e+00,\n", " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,\n", " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 8.8644736e-03,\n", " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,\n", " 0.0000000e+00, 0.0000000e+00, 6.6848241e-02, 0.0000000e+00,\n", " 0.0000000e+00, -2.9313583e-03, 0.0000000e+00, 0.0000000e+00,\n", " 0.0000000e+00, 0.0000000e+00, -9.5326707e-02, -3.0721266e-02,\n", " 8.1290156e-03, -6.8495750e-02, 0.0000000e+00, 0.0000000e+00,\n", " 1.8126052e-02, 0.0000000e+00, 0.0000000e+00, -6.3957557e-02,\n", " -4.2761512e-02, -4.0137991e-03, 2.5934458e-02, 0.0000000e+00,\n", " 0.0000000e+00, 4.7971878e-02, 8.1122592e-02, -6.5605357e-02,\n", " 0.0000000e+00, 0.0000000e+00, -1.6420111e-03, 5.2435798e-09,\n", " 0.0000000e+00, 0.0000000e+00, 4.5959245e-02, -1.8143710e-02,\n", " 9.4908625e-03, 4.1580014e-04, 0.0000000e+00, -6.7653313e-02,\n", " 0.0000000e+00, 0.0000000e+00, -1.2950024e-01, 0.0000000e+00,\n", " 0.0000000e+00, 0.0000000e+00, -1.7437223e-02, 0.0000000e+00,\n", " -2.3985766e-03, -8.6141318e-02, 0.0000000e+00, 1.0811789e-01,\n", " 0.0000000e+00, 0.0000000e+00], dtype=float32),\n", 
" array([-0.00627401, -0.191559 , 0. , 0.00838837, 0.0529352 ,\n", " 0.12782945, 0. , 0.0156331 , 0. , 0. ,\n", " -0.11151084, 0.0588697 , -0.02708764, 0.07176588, -0.02906452,\n", " 0.11350146, 0. , 0.06241233, 0.14389527, 0.15049464,\n", " 0.1770727 , -0.03121255, 0.02478044, -0.2777393 , -0.09406517,\n", " 0.12997295, 0.13041441, -0.1484769 , 0.2955907 , 0.00798142,\n", " 0. , 0. , 0.0003342 , 0.05636339, 0. ,\n", " 0.06773217, 0.04719278, 0.12417503, -0.039441 , 0.14855272,\n", " 0.03889409, 0.10604867, -0.0547139 , -0.06550588, 0.14823543,\n", " 0.01506494, 0.08808593, 0.1861152 , 0.01942448, 0.11284278],\n", " dtype=float32),\n", " array([ 2.54564315e-01, 1.08867824e-01, 5.61506003e-02, -1.75794929e-01,\n", " 2.33219430e-01, -1.58185065e-02, -1.31240949e-01, 8.01615715e-02,\n", " 2.55392641e-02, 3.14171731e-01, -1.29695922e-01, -5.60564399e-02,\n", " -3.62583399e-02, 3.38414431e-01, 2.22075596e-01, 1.01215661e-01,\n", " 2.62457654e-02, -2.69618690e-01, 1.52954599e-02, -1.20112836e-01,\n", " 2.02051371e-01, -7.11961389e-02, -3.22458982e-01, 2.04761773e-01,\n", " 9.38781500e-02, 2.39238501e-01, -9.64630842e-02, 2.50120312e-01,\n", " -3.17509770e-02, 2.22122446e-01, 3.26190665e-02, 4.22861874e-02,\n", " -2.37673521e-06, 4.52675968e-02, 2.75943756e-01, 1.95241719e-01,\n", " -2.34854549e-01, -7.68669993e-02, -7.01770782e-02, 2.47255087e-01,\n", " 2.31929541e-01, 1.45128936e-01, 4.07781303e-01, -2.12165058e-01,\n", " 3.37380767e-02, -2.01950014e-01, -1.65518045e-01, -4.14324701e-02,\n", " 4.91949677e-01, 2.43567675e-01], dtype=float32),\n", " array([-0.139424 , 0.13942933], dtype=float32)])" ] }, "execution_count": 15, "metadata": {}, "output_type": "execute_result" } ], "source": [ "get_updates(model, X_train, y_train, 32, 2)" ] }, { "cell_type": "code", "execution_count": 16, "metadata": {}, "outputs": [], "source": [ "W = get_parameters(model)[0]\n", "B = get_parameters(model)[1]" ] }, { "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [], 
"source": [ "# BASELINE SCENARIO\n", "#buid the model as base line for the shards (sequential)\n", "# Number of peers\n", "#accordin to what we need\n", "ss = int(len(X_train)/n_peers)\n", "inputs_in = X_train[0*ss:0*ss+ss]\n", "outputs_in = y_train[0*ss:0*ss+ss]\n", "def build_model(X_t, y_t):\n", " model = Sequential()\n", " model.add(Dense(70, input_dim=Features_number, activation='relu'))\n", " model.add(Dense(50, activation='relu'))\n", " model.add(Dense(50, activation='relu'))\n", " model.add(Dense(2, activation='softmax'))\n", " model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])\n", " model.fit(X_t,\n", " y_t, \n", " batch_size=32, \n", " epochs=250, \n", " verbose=0,\n", " validation_data=((X_test, y_test)))\n", " return model\n" ] }, { "cell_type": "code", "execution_count": 18, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model: \"sequential_1\"\n", "_________________________________________________________________\n", "Layer (type) Output Shape Param # \n", "=================================================================\n", "dense_1 (Dense) (None, 70) 2800 \n", "_________________________________________________________________\n", "dense_2 (Dense) (None, 50) 3550 \n", "_________________________________________________________________\n", "dense_3 (Dense) (None, 50) 2550 \n", "_________________________________________________________________\n", "dense_4 (Dense) (None, 2) 102 \n", "=================================================================\n", "Total params: 9,002\n", "Trainable params: 9,002\n", "Non-trainable params: 0\n", "_________________________________________________________________\n" ] }, { "data": { "text/plain": [ "None" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "display(model.summary())" ] }, { "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [], "source": [ "# predict probabilities for test set\n", "yhat_probs 
= model.predict(X_test, verbose=0)\n", "# predict crisp classes for test set\n", "yhat_classes = model.predict_classes(X_test, verbose=0)" ] }, { "cell_type": "code", "execution_count": 20, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Accuracy: 0.997338\n", "Precision: 0.996095\n", "Recall: 0.999387\n", "F1 score: 0.997738\n" ] } ], "source": [ "# accuracy: (tp + tn) / (p + n)\n", "accuracy = accuracy_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n", "print('Accuracy: %f' % accuracy)\n", "# precision tp / (tp + fp)\n", "precision = precision_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n", "print('Precision: %f' % precision)\n", "# recall: tp / (tp + fn)\n", "recall = recall_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n", "print('Recall: %f' % recall)\n", "# f1: 2 tp / (2 tp + fp + fn)\n", "f1 = f1_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n", "print('F1 score: %f' % f1)" ] }, { "cell_type": "code", "execution_count": 21, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "array([[55804, 313],\n", " [ 49, 79836]], dtype=int64)" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQsAAADtCAYAAACoP1B5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAP6ElEQVR4nO3dbaxlVXnA8f8zw8ugdXgRRSK20nRCpSagUqQ1aVQqjLQRP9gENIUYkmkMGkybtNgvtFoT2w+1klobEqdCo1BCS2oMMp1QjWmCCChFYERGrDKFMuLgS0vk5d6nH/a6nePtvXevI+vMmb3P/5es3HPW2XffxQGerLe9nshMJKnPpnk3QNIwGCwkVTFYSKpisJBUxWAhqYrBQlKVI+bdAGkMzn/TC/P7B5aqrr373qd3Zeb2GTepOYOF1MATB5a4Y9cpVdceefK3Tpxxc2bCYCE1kSzl8rwbMVMGC6mBBJYZ925og4XUQJI8m3VzFkNlsJAaGXvPYuGXTiNie0Q8GBF7I+LKebdnbCJiZ0Tsj4j75t2WWUpgiawqQ7XQwSIiNgMfB94KnA5cHBGnz7dVo/MpYHDLhD+LZbKqDNVCBwvgbGBvZj6cmc8ANwAXzrlNo5KZXwIOzLsds5bAUmZVGapFDxYvBx6ZeL+v1ElTW64sQ7XoE5yxRt1wQ7/mJgc+H1Fj0YPFPuAVE+9PAR6dU1s0YJnw7LhjxcIHizuBbRFxKvCfwEXAO+fbJA1TsLRmR3U8FnrOIjOfA94L7AL2ADdm5v3zbdW4RMT1wO3AaRGxLyIum3ebZiGB5awrQ7XoPQsy8xbglnm3Y6wy8+J5t+FQGXvPYuGDhdRCtynLYCGpwnIaLCT1sGchqUoSPJub592MmVro1ZAVEbFj3m0Yu7F/xys9i5oyVAaLzqj/Qz5MjPw7DpZyU1UZKochUgPdSVnDDQQ1ZhIsjjr2mNzysq2zuPVMHH3Si9h62knD2i7zzWfn3YKpbOEFbI0TBvUd/4T/4Zl8unrcMOQhRo2ZBIstL9vKr/7tu2ZxaxWbzn2k/yI9L3fkbdXXZsaghxg1xv1PJx1Cy0RV6RMRp0XEPRPlRxHx/og4ISJ2R8RD5efx5fqIiKvLaW/3RsRrJ+51abn+oYi4dKL+dRHx9fI7V0dEb8MMFlIDSfBMHlFVeu+V+WBmnpmZZwKvA54CbgauBG7LzG3AbeU9dCe9bStlB/AJgIg4AbgKeD3dQU9XrQSYcs2Oid/rPc3MYCE1sDLBWVOmdC7wrcz8Dt0pbteW+muBt5fXFwLXZefLwHERcTJwPrA7Mw9k5pPAbmB7+WxrZt6emQlcN3GvdbkaIjWyVL/d+8SIuGvi/TWZec06114EXF9en5SZjwFk5mMR8dJSv96JbxvV71ujfkMGC6mBJFiq7zU8kZln9V0UEUcBbwM+0Hfpmk2avn5DDkOkRpZzU1WZwluBr2bm4+X942UIQfm5v9Svd+LbRvWnrFG/IYOF1EC33XtTVZnCxRwcggB8FlhZ0bgU+OeJ+kvKqsg5wA/LcGUXcF5EHF8mNs8DdpXPfhwR55RVkEsm7rUuhyFSA60fJIuIFwBvAX5vovojwI3ltLHvAr9T6m8BLgD20q2cvBsgMw9ExIfojo8E+GBmrqRleA9dTpdjgM+XsiGDhdRAJk03ZWXmU8CLV9V9n251ZPW1CVy+zn12AjvXqL8LePU0bTJYSE3UbbgaMoOF1ECXkWzcU4AGC6mRKScvB8dgITWQhGdwSqpjz0JSr0U4g9NgITXQZSSzZyGpgidlSeqVGfYsJNVxn4WkXt3hNw5DJPUa/4G9BgupgQSXTiX1cwenpGpmJJPUqzvPwp6FpAoOQyT16uYsHIZIqjD27d7jDoXSIZIEzy1vrio1IuK4iLgpIr4REXsi4tfMdSqNRKvEyMXHgFsz85eBM4A9mOtUGr6V1ZCa0icitgK/AXyyu3c+k5k/wFy
n0jhMMcHZl+v0F4HvAX8XEWcAdwNXYK5Tafim3MHZl+v0COC1wPsy846I+BgHhxxrMdepNCQN5yz2Afsy847y/ia64GGuU2noumP1oqr03ivzv4BHIuK0UnUu8ADmOpVGIKN6WbTS+4BPR8RRwMN0+Us3Ya5TadhaH36TmfcAa81rmOtUGjqfDZHUa2XOYsyqJjgjYntEPFi2hm60hCMtrFYTnIer3p5FRGwGPg68hW7J5c6I+GxmPjDrxklD4UlZnbOBvZn5MEBE3EC3vdRgIa1IeM5H1NfcMvr62TRHGqZFmLOoCRZVW0MjYgfdU2wcfdKLnmezpOEZe7Co6Tett2X0p2TmNZl5VmaeddSxx7RqnzQIK3MWY57grAkWdwLbIuLUspvsIrrtpZImZEZVGareYUhmPhcR76XbZ74Z2JmZ98+8ZdLAmL4QyMxb6PafS1pD5vjnLNzBKTURLC27dCqpwpDnI2oYLKQG3GchqU528xZjZrCQGnE1RFKvxDkLSVWGvTuzhsFCamR5edzBYtwLw9Ihktl2u3dE/EfJRXrPSkIic51KIzGDB8nelJlnTiQkMtepNAaZdeV5mGuuU4OF1MgUw5ATI+KuibJjrdsB/xIRd098/lO5TgFznUpDk0z1+HlfrlOAN2TmoyX58e6I+MYG15rrVBqSrCxV98p8tPzcD9xMN+dgrlNp8BJyOapKn4h4YUS8aOU1XY7S+zDXqTQODXdwngTcXFYzjwA+k5m3RsSdmOtUGr5WD5KVtBtnrFH/fcx1Kg2bz4ZIqpOAwUJSDc+zkFTHYCGpX92y6JAZLKQW0glOSbUchkiqY89CUg17FpKqGCwk9SoPko2ZwUJqxZ6FpCounUqqEfYsJPWa5hisgTJYSE2EwxBJlexZSKqyPO8GzJbBQmphAQ6/8XRvqZHIulJ9v4jNEfG1iPhceX9qRNxR8pb+Q0QcVeqPLu/3ls9fOXGPD5T6ByPi/In67aVub0Rcufpvr8VgIbXSMnFI5wpgz8T7Pwc+WnKdPglcVuovA57MzF8CPlquIyJOBy4CfoUul+nflAC0Gfg4XY7U04GLy7Ubms0w5JvPsuncR/qv089s16P3zLsJo3f2+U/N7W9HxCnAbwEfBn6/5Pd4M/DOcsm1wJ/QJTi+sLwGuAn463L9hcANmfk08O2I2EuXrAhgbzlFnIi4oVz7wEZtsmchNTLFMKQm1+lfAX/IwWnTFwM/yMznyvvJ/KT/l9O0fP7Dcv20OVA35ASn1EqjXKcR8dvA/sy8OyLeuFK91l/s+Wy9+rU6Cb0DJIOF1ELScun0DcDbIuICYAuwla6ncVxEHFF6D5P5SVdymu6LiCOAY4EDrJ/rlA3q1+UwRGqk1WpIZn4gM0/JzFfSTVD+a2a+C/gC8I5y2epcpys5UN9Rrs9Sf1FZLTkV2AZ8hS6d4bayunJU+Ruf7WuXPQupldnv4Pwj4IaI+DPga8AnS/0ngb8vE5gH6P7nJzPvj4gb6SYunwMuz8wlgIh4L13i5M3Azsy8v++PGyykVmYQLDLzi8AXy+uHObiaMXnNTziYJHn1Zx+mW1FZXX8LXULlagYLqYFpN1wNkcFCamXk270NFlIr9iwk1QifOpXUyzkLSdUMFpKqGCwk1Rj7MMTt3pKq2LOQWhl5z8JgIbWQLp1KqmXPQlKfYPwTnAYLqRWDhaRe7uCUVM1gIamGqyGS6tizkNRr+mxjg+N2b6mRVqd7R8SWiPhKRPx7RNwfEX9a6s11Ko1Cu1ynTwNvzswzgDOB7RFxDnPOdWqwkBppmDckM/O/y9sjS0m6XKc3lfprgbeX1xeW95TPz12d6zQzvw2s5Do9m5LrNDOfAVZynW7IYCG1Ut+z6M11WnoA9wD7gd3AtzDXqTR8U6YC2DDXKUBJBnRmRBwH3Ay8aq3LVv78Op81zXVqz0Jqpd2cxcFbZv6ALsnQOZRcp+WjtXKdUpnrdKMcqOsyWEiNNFwNeUnpURA
RxwC/CezBXKfSSLTbZ3EycG1ZtdgE3JiZn4uIBzDXqTQCjYJFZt4LvGaNenOdSoPnU6eSqhksJNXwqVNJVRyGSOq3AE+dGiykVgwWkvoswunevTs4I2JnROyPiPsORYOkwZrBdu/DSc1270/RPQsvaQORWVWGqncYkplfmjx5R9IaTF8oqdpwOw1VmgWLcoDHDoAtvKDVbaXBWPgJzlqZeU1mnpWZZx3J0a1uKw3HyCc4HYZILSzAg2Q1S6fXA7cDp0XEvoi4rO93pIW06D2LzLz4UDREGrJF2JTlMERqJJbHHS0MFlILAx9i1DBYSI2MfVOWp3tLrTSa4IyIV0TEFyJiT8l1ekWpPyEidpdcp7sj4vhSHxFxdclbem9EvHbiXpeW6x+KiEsn6l8XEV8vv3N1yWC2IYOF1EirVAB0J3H/QWa+ii5fyOUlF+mVwG0l1+lt5T10OUu3lbID+AR0wQW4Cng93UG/V60EmHLNjonf633+y2AhtZBAZl3pu1XmY5n51fL6x3Q5Q17OT+c0XZ3r9LqSI/XLdMmITgbOB3Zn5oHMfJIuDeL28tnWzLy95Be5buJe63LOQmpkijmLEyPiron312TmNWves3uI8zXAHcBJmfkYdAElIl5aLps2p+nLy+vV9RsyWEgNTLnPojfXKUBE/Bzwj8D7M/NHG0wrTJvrdL36DTkMkVqoHYJUnmcREUfSBYpPZ+Y/lerHyxCC8nN/qZ82p+m+8np1/YYMFlIjDXOdBl1Kwj2Z+ZcTH03mNF2d6/SSsipyDvDDMlzZBZwXEceXic3zgF3lsx9HxDnlb10yca91OQyRWmm3KesNwO8CX4+Ie0rdHwMfAW4sz2d9l4MpC28BLgD2Ak8B7wbIzAMR8SG6RMgAH8zMA+X1e+hOwTsG+HwpGzJYSI20ejYkM/+NtecVAM5d4/oELl/nXjuBnWvU3wW8epp2GSykFhLw2RBJNca+3dtgIbUy4JO7axgspEY8z0JSPx9Rl1Sj28E57mhhsJBacYJTUg17FpL6ZbrPQlIdV0Mk1XEYIqmXWdQlVbNnIanKuGOFwUJqxaVTSf0SWDJYSOoRpD0LSZUMFpKqjDxYeLq31ELSPUhWUypExM6I2B8R903UmetUGoPIrCqVPsX/zz9qrlNpFBomGcrMLwEHVlWb61QavExYrt7vXZ3rdBVznUqjUP9sSFWu0ymY61QaksZzFmsx16k0Cg3nLNZhrlNp8BpnJIuI64E30s1v7KNb1ZhrrtPIGWwkiYjvAd9pfuPZORF4Yt6NGLkhfse/kJkvqbnw2C0vy1//+Uv7LwRufegv7m48Z3FIzKRnUfsFHy4i4q4h/ssbkoX4jke+g9NhiNRCAkvjPirLYCE1kZAGi0VQsyFGz8/4v2OHIeNXuXtOz8Pov+PGqyGHI4OF1Io9C0lVDBaSemXC0tK8WzFTBgupFXsWkqoYLCT1M4u6pBoJ6aYsSVXsWUiq4pyFpF4unUqqlfUH9g6SwUJq4nkfmXfYM1hILfggmaRqLp1K6pNA2rOQ1Cs9KUtSpRz50ulMUgFIiyYibqVLd1DjiczszVp+uDFYSKpi+kJJVQwWkqoYLCRVMVhIqmKwkFTlfwEtz8FVI10/LgAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "# confusion matrix\n", "mat = confusion_matrix(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n", "\n", "display(mat)\n", "plt.matshow(mat);\n", "plt.colorbar()\n", "plt.show()\n" ] }, { "cell_type": "code", "execution_count": 22, "metadata": {}, "outputs": [], "source": [ "# the dectinary\n", "FI_dic1= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}\n", "ave_FI_dic= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}\n", "targeted_Features ={0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}\n", "rounds_attack_detected ={0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}\n", "\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Initializing network.\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "100%|████████████████████████████████████████████████████████████████████████████| 100/100 [00:00<00:00, 100342.20it/s]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Round 1.\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ " 57%|██████████████████████████████████████████████▏ | 57/100 [02:21<01:44, 2.44s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n", "[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 12.6s finished\n", "[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n", "[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.4s finished\n", "100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [04:35<00:00, 2.75s/it]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Global model loss: 0.4784254086968245; global model accuracy: 0.8537300825119019\n", "Round 2.\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ " 32%|█████████████████████████▉ | 32/100 [01:19<02:44, 
2.42s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n", "[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.5s finished\n", "100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [06:19<00:00, 3.79s/it]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Global model loss: 1.913670726969011; global model accuracy: 0.7795841097831726\n", "Round 3.\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "\r", " 0%| | 0/100 [00:00 0:\n", " ave_FI_dic[t-1] = abs(FI_dic1[t] - FI_dic1[t-1])\n", " average_overall_changes = 0\n", " if t > 1:\n", " for r in range(0,t):\n", " average_overall_changes = average_overall_changes + ave_FI_dic[r]\n", " average_overall_changes = average_overall_changes / t\n", " total_Changes=0\n", " average_overall_changes_one_val = 0\n", " for s in range(len(ave_FI_dic[t-1])):\n", " total_Changes = total_Changes + ave_FI_dic[t-1][s]\n", " average_overall_changes_one_val = average_overall_changes_one_val + average_overall_changes[s]\n", " print('total_Changes in this round: ', total_Changes)\n", " print('average changes: ' ,average_overall_changes_one_val)\n", " threshold1 = alpha * average_overall_changes_one_val\n", " print(threshold1)\n", " if total_Changes >= threshold1:\n", " for ra in range(len(ave_FI_dic[t-1])):\n", " rounds_attack_detected[t+1] = 1\n", " if ave_FI_dic[t-1][ra] > beta * total_Changes:\n", " print('attack have been detected')\n", " targeted_Features[t+1] =names[ra]\n", " print(\"attack detected on feature \", names[ra])\n", " \n", " \n", "\n", "\n", "\n", "# atttacker peer side\n", "\n", "\n", " if(t+1>=start_attack_round and t+1<=end_attack_round): \n", " if (i in mal):\n", " print(\"I am peer \",i,\"I started the attack, at round\", t+1)\n", " #attack\n", " if attack_type == 'Byzantine':\n", " inputs = byzantine_attack_data(inputs)\n", " elif attack_type == 'poisoning':\n", " inputs = poisoning_attack_data(inputs, 
feature_attacked)\n", " elif attack_type == 'label_flipping':\n", " outputs = label_flipping_attack_data(outputs)\n", "\n", " local_weight_updates, local_bias_updates = get_updates(local_model, \n", " inputs, outputs, \n", " local_batch_size, n_local_rounds)\n", " if clear_nans:\n", " local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)\n", "\n", "\n", "\n", " else:\n", " # Benign peer\n", " # Train local model \n", " local_weight_updates, local_bias_updates = get_updates(local_model, \n", " inputs, outputs, \n", " local_batch_size, n_local_rounds)\n", " if clear_nans:\n", " local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)\n", "\n", " # Send updates to the server\n", " network_weight_updates.append(local_weight_updates)\n", " network_bias_updates.append(local_bias_updates)\n", "\n", "\n", " ## END OF CLIENT SIDE ##########################################################\n", "\n", " ######################################\n", " # SERVER SIDE AGGREGATION MECHANISMS #\n", " ######################################\n", "\n", "\n", " # Aggregate client updates\n", " aggregated_weights, aggregated_biases = aggregate(n_layers, \n", " n_participants, \n", " np.mean, \n", " network_weight_updates, \n", " network_bias_updates)\n", "\n", " if clear_nans:\n", " aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)\n", "\n", " # Apply updates to global model\n", " apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)\n", "\n", " # Proceed as in first case\n", " aggregated_weights, aggregated_biases = aggregate(n_layers, \n", " n_participants, \n", " np.mean, \n", " network_weight_updates, \n", " network_bias_updates)\n", " if clear_nans:\n", " aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)\n", "\n", " apply_updates(global_model, model_substitution_rate, 
aggregated_weights, aggregated_biases)\n", "\n", " ###################\n", " # COMPUTE METRICS #\n", " ###################\n", "\n", " # Global model accuracy\n", " score = global_model.evaluate(X_test, y_test, verbose=0)\n", " print(f'Global model loss: {score[0]}; global model accuracy: {score[1]}')\n", " metrics['accuracy'].append(score[1])\n", "\n", "\n", " # Accuracy without the target\n", " score = global_model.evaluate(X_test, y_test, verbose=0)\n", " metrics['acc_no_target'].append(score[1])\n", "\n", "\n", " # Distance of individual updates to the final aggregation\n", " metrics['update_distances'].append([dist_weights(aggregated_weights, w_i) for w_i in network_weight_updates])\n", "\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# sort the feature according to the last epoch and print it with importances\n", "\n", "sort_index = np.argsort(FI_dic1[9])\n", "for x in sort_index:\n", " print(names[x], ', ', FI_dic1[9][x])" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.4" } }, "nbformat": 4, "nbformat_minor": 2 }