From eedc06c5008c40c18c41d136a4e39da69e215e6c Mon Sep 17 00:00:00 2001
From: Rami <77787226+RamiHaf@users.noreply.github.com>
Date: Tue, 26 Jan 2021 12:05:29 +0100
Subject: [PATCH] Add files via upload
---
activity_only_basic_FI.ipynb | 3515 ++++++++++++++++++++++++++++++++++
1 file changed, 3515 insertions(+)
create mode 100644 activity_only_basic_FI.ipynb
diff --git a/activity_only_basic_FI.ipynb b/activity_only_basic_FI.ipynb
new file mode 100644
index 0000000..60cc00d
--- /dev/null
+++ b/activity_only_basic_FI.ipynb
@@ -0,0 +1,3515 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 45,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#IMPORTS\n",
+ "\n",
+ "import numpy as np\n",
+ "import random\n",
+ "import tensorflow as tf\n",
+ "import tensorflow.keras as kr\n",
+ "import tensorflow.keras.backend as K\n",
+ "from tensorflow.keras.models import Model\n",
+ "from tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense\n",
+ "from tensorflow.keras.datasets import mnist\n",
+ "import os\n",
+ "import csv\n",
+ "\n",
+ "from scipy.spatial.distance import euclidean\n",
+ "from sklearn.metrics import confusion_matrix\n",
+ "\n",
+ "from time import sleep\n",
+ "from tqdm import tqdm\n",
+ "\n",
+ "import copy\n",
+ "import numpy\n",
+ "from sklearn.datasets import make_classification\n",
+ "from sklearn.ensemble import RandomForestClassifier\n",
+ "import pandas as pd\n",
+ "import matplotlib.pyplot as plt\n",
+ "import math\n",
+ "import seaborn as sns\n",
+ "from numpy.random import RandomState\n",
+ "import scipy as scp\n",
+ "from sklearn.model_selection import train_test_split\n",
+ "from sklearn.compose import ColumnTransformer\n",
+ "from sklearn.preprocessing import OneHotEncoder, LabelEncoder\n",
+ "from keras.models import Sequential\n",
+ "from keras.layers import Dense\n",
+ "from keras import optimizers\n",
+ "from keras.callbacks import EarlyStopping,ModelCheckpoint\n",
+ "from keras.utils import to_categorical\n",
+ "from keras import backend as K\n",
+ "from itertools import product\n",
+ "from sklearn.metrics import accuracy_score\n",
+ "from sklearn.metrics import precision_score\n",
+ "from sklearn.metrics import recall_score\n",
+ "from sklearn.metrics import f1_score\n",
+ "from sklearn.metrics import roc_auc_score\n",
+ "from sklearn.metrics import confusion_matrix\n",
+ "\n",
+ "from sklearn import mixture\n",
+ "\n",
+ "from mpl_toolkits.mplot3d import Axes3D\n",
+ "import matplotlib.pyplot as plt\n",
+ "%matplotlib inline"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 46,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Indices of the features to be attacked/perturbed later in the notebook — TODO confirm intended columns\n",
+ "feature_attacked = [3,5,8]\n",
+ "rs = RandomState(92) #To reproduce the same results each time we run this notebook"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 47,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Load dataset into a pandas DataFrame\n",
+ "# NOTE(review): hard-coded absolute Windows path — not portable; consider a relative path or a config variable.\n",
+ "activity = pd.read_csv(\"D:/explaineblity/activity_3_original.csv\", sep=',')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 48,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Drop identifier/metadata columns so they are not used as model features.\n",
+ "to_drop = ['subject', 'timestamp', 'heart_rate','activityID']\n",
+ "activity.drop(axis=1, columns=to_drop, inplace=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 49,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " motion | \n",
+ " temp_hand | \n",
+ " acceleration_16_x_hand | \n",
+ " acceleration_16_y_hand | \n",
+ " acceleration_16_z_hand | \n",
+ " acceleration_6_x_hand | \n",
+ " acceleration_6_y_hand | \n",
+ " acceleration_6_z_hand | \n",
+ " gyroscope_x_hand | \n",
+ " gyroscope_y_hand | \n",
+ " ... | \n",
+ " acceleration_16_z_ankle | \n",
+ " acceleration_6_x_ankle | \n",
+ " acceleration_6_y_ankle | \n",
+ " acceleration_6_z_ankle | \n",
+ " gyroscope_x_ankle | \n",
+ " gyroscope_y_ankle | \n",
+ " gyroscope_z_ankle | \n",
+ " magnetometer_x_ankle | \n",
+ " magnetometer_y_ankle | \n",
+ " magnetometer_z_ankle | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " n | \n",
+ " 30.375 | \n",
+ " 2.21530 | \n",
+ " 8.27915 | \n",
+ " 5.58753 | \n",
+ " 2.24689 | \n",
+ " 8.55387 | \n",
+ " 5.77143 | \n",
+ " -0.004750 | \n",
+ " 0.037579 | \n",
+ " ... | \n",
+ " 0.095156 | \n",
+ " 9.63162 | \n",
+ " -1.76757 | \n",
+ " 0.265761 | \n",
+ " 0.002908 | \n",
+ " -0.027714 | \n",
+ " 0.001752 | \n",
+ " -61.1081 | \n",
+ " -36.8636 | \n",
+ " -58.3696 | \n",
+ "
\n",
+ " \n",
+ " 1 | \n",
+ " n | \n",
+ " 30.375 | \n",
+ " 2.29196 | \n",
+ " 7.67288 | \n",
+ " 5.74467 | \n",
+ " 2.27373 | \n",
+ " 8.14592 | \n",
+ " 5.78739 | \n",
+ " -0.171710 | \n",
+ " 0.025479 | \n",
+ " ... | \n",
+ " -0.020804 | \n",
+ " 9.58649 | \n",
+ " -1.75247 | \n",
+ " 0.250816 | \n",
+ " 0.020882 | \n",
+ " 0.000945 | \n",
+ " 0.006007 | \n",
+ " -60.8916 | \n",
+ " -36.3197 | \n",
+ " -58.3656 | \n",
+ "
\n",
+ " \n",
+ " 2 | \n",
+ " n | \n",
+ " 30.375 | \n",
+ " 2.29090 | \n",
+ " 7.14240 | \n",
+ " 5.82342 | \n",
+ " 2.26966 | \n",
+ " 7.66268 | \n",
+ " 5.78846 | \n",
+ " -0.238241 | \n",
+ " 0.011214 | \n",
+ " ... | \n",
+ " -0.059173 | \n",
+ " 9.60196 | \n",
+ " -1.73721 | \n",
+ " 0.356632 | \n",
+ " -0.035392 | \n",
+ " -0.052422 | \n",
+ " -0.004882 | \n",
+ " -60.3407 | \n",
+ " -35.7842 | \n",
+ " -58.6119 | \n",
+ "
\n",
+ " \n",
+ " 3 | \n",
+ " n | \n",
+ " 30.375 | \n",
+ " 2.21800 | \n",
+ " 7.14365 | \n",
+ " 5.89930 | \n",
+ " 2.22177 | \n",
+ " 7.25535 | \n",
+ " 5.88000 | \n",
+ " -0.192912 | \n",
+ " 0.019053 | \n",
+ " ... | \n",
+ " 0.094385 | \n",
+ " 9.58674 | \n",
+ " -1.78264 | \n",
+ " 0.311453 | \n",
+ " -0.032514 | \n",
+ " -0.018844 | \n",
+ " 0.026950 | \n",
+ " -60.7646 | \n",
+ " -37.1028 | \n",
+ " -57.8799 | \n",
+ "
\n",
+ " \n",
+ " 4 | \n",
+ " n | \n",
+ " 30.375 | \n",
+ " 2.30106 | \n",
+ " 7.25857 | \n",
+ " 6.09259 | \n",
+ " 2.20720 | \n",
+ " 7.24042 | \n",
+ " 5.95555 | \n",
+ " -0.069961 | \n",
+ " -0.018328 | \n",
+ " ... | \n",
+ " 0.095775 | \n",
+ " 9.64677 | \n",
+ " -1.75240 | \n",
+ " 0.295902 | \n",
+ " 0.001351 | \n",
+ " -0.048878 | \n",
+ " -0.006328 | \n",
+ " -60.2040 | \n",
+ " -37.1225 | \n",
+ " -57.8847 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
5 rows × 40 columns
\n",
+ "
"
+ ],
+ "text/plain": [
+ " motion temp_hand acceleration_16_x_hand acceleration_16_y_hand \\\n",
+ "0 n 30.375 2.21530 8.27915 \n",
+ "1 n 30.375 2.29196 7.67288 \n",
+ "2 n 30.375 2.29090 7.14240 \n",
+ "3 n 30.375 2.21800 7.14365 \n",
+ "4 n 30.375 2.30106 7.25857 \n",
+ "\n",
+ " acceleration_16_z_hand acceleration_6_x_hand acceleration_6_y_hand \\\n",
+ "0 5.58753 2.24689 8.55387 \n",
+ "1 5.74467 2.27373 8.14592 \n",
+ "2 5.82342 2.26966 7.66268 \n",
+ "3 5.89930 2.22177 7.25535 \n",
+ "4 6.09259 2.20720 7.24042 \n",
+ "\n",
+ " acceleration_6_z_hand gyroscope_x_hand gyroscope_y_hand ... \\\n",
+ "0 5.77143 -0.004750 0.037579 ... \n",
+ "1 5.78739 -0.171710 0.025479 ... \n",
+ "2 5.78846 -0.238241 0.011214 ... \n",
+ "3 5.88000 -0.192912 0.019053 ... \n",
+ "4 5.95555 -0.069961 -0.018328 ... \n",
+ "\n",
+ " acceleration_16_z_ankle acceleration_6_x_ankle acceleration_6_y_ankle \\\n",
+ "0 0.095156 9.63162 -1.76757 \n",
+ "1 -0.020804 9.58649 -1.75247 \n",
+ "2 -0.059173 9.60196 -1.73721 \n",
+ "3 0.094385 9.58674 -1.78264 \n",
+ "4 0.095775 9.64677 -1.75240 \n",
+ "\n",
+ " acceleration_6_z_ankle gyroscope_x_ankle gyroscope_y_ankle \\\n",
+ "0 0.265761 0.002908 -0.027714 \n",
+ "1 0.250816 0.020882 0.000945 \n",
+ "2 0.356632 -0.035392 -0.052422 \n",
+ "3 0.311453 -0.032514 -0.018844 \n",
+ "4 0.295902 0.001351 -0.048878 \n",
+ "\n",
+ " gyroscope_z_ankle magnetometer_x_ankle magnetometer_y_ankle \\\n",
+ "0 0.001752 -61.1081 -36.8636 \n",
+ "1 0.006007 -60.8916 -36.3197 \n",
+ "2 -0.004882 -60.3407 -35.7842 \n",
+ "3 0.026950 -60.7646 -37.1028 \n",
+ "4 -0.006328 -60.2040 -37.1225 \n",
+ "\n",
+ " magnetometer_z_ankle \n",
+ "0 -58.3696 \n",
+ "1 -58.3656 \n",
+ "2 -58.6119 \n",
+ "3 -57.8799 \n",
+ "4 -57.8847 \n",
+ "\n",
+ "[5 rows x 40 columns]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "display(activity.head())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 50,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# One-hot encode the categorical 'motion' column (yields motion_n / motion_y),\n",
+ "# append the dummies, then drop the original column.\n",
+ "activity = pd.concat([activity,pd.get_dummies(activity['motion'], prefix='motion')],axis=1)\n",
+ "activity.drop('motion', axis=1, inplace=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 51,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " temp_hand | \n",
+ " acceleration_16_x_hand | \n",
+ " acceleration_16_y_hand | \n",
+ " acceleration_16_z_hand | \n",
+ " acceleration_6_x_hand | \n",
+ " acceleration_6_y_hand | \n",
+ " acceleration_6_z_hand | \n",
+ " gyroscope_x_hand | \n",
+ " gyroscope_y_hand | \n",
+ " gyroscope_z_hand | \n",
+ " ... | \n",
+ " acceleration_6_y_ankle | \n",
+ " acceleration_6_z_ankle | \n",
+ " gyroscope_x_ankle | \n",
+ " gyroscope_y_ankle | \n",
+ " gyroscope_z_ankle | \n",
+ " magnetometer_x_ankle | \n",
+ " magnetometer_y_ankle | \n",
+ " magnetometer_z_ankle | \n",
+ " motion_n | \n",
+ " motion_y | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " 30.375 | \n",
+ " 2.21530 | \n",
+ " 8.27915 | \n",
+ " 5.58753 | \n",
+ " 2.24689 | \n",
+ " 8.55387 | \n",
+ " 5.77143 | \n",
+ " -0.004750 | \n",
+ " 0.037579 | \n",
+ " -0.011145 | \n",
+ " ... | \n",
+ " -1.76757 | \n",
+ " 0.265761 | \n",
+ " 0.002908 | \n",
+ " -0.027714 | \n",
+ " 0.001752 | \n",
+ " -61.1081 | \n",
+ " -36.8636 | \n",
+ " -58.3696 | \n",
+ " 1 | \n",
+ " 0 | \n",
+ "
\n",
+ " \n",
+ " 1 | \n",
+ " 30.375 | \n",
+ " 2.29196 | \n",
+ " 7.67288 | \n",
+ " 5.74467 | \n",
+ " 2.27373 | \n",
+ " 8.14592 | \n",
+ " 5.78739 | \n",
+ " -0.171710 | \n",
+ " 0.025479 | \n",
+ " -0.009538 | \n",
+ " ... | \n",
+ " -1.75247 | \n",
+ " 0.250816 | \n",
+ " 0.020882 | \n",
+ " 0.000945 | \n",
+ " 0.006007 | \n",
+ " -60.8916 | \n",
+ " -36.3197 | \n",
+ " -58.3656 | \n",
+ " 1 | \n",
+ " 0 | \n",
+ "
\n",
+ " \n",
+ " 2 | \n",
+ " 30.375 | \n",
+ " 2.29090 | \n",
+ " 7.14240 | \n",
+ " 5.82342 | \n",
+ " 2.26966 | \n",
+ " 7.66268 | \n",
+ " 5.78846 | \n",
+ " -0.238241 | \n",
+ " 0.011214 | \n",
+ " 0.000831 | \n",
+ " ... | \n",
+ " -1.73721 | \n",
+ " 0.356632 | \n",
+ " -0.035392 | \n",
+ " -0.052422 | \n",
+ " -0.004882 | \n",
+ " -60.3407 | \n",
+ " -35.7842 | \n",
+ " -58.6119 | \n",
+ " 1 | \n",
+ " 0 | \n",
+ "
\n",
+ " \n",
+ " 3 | \n",
+ " 30.375 | \n",
+ " 2.21800 | \n",
+ " 7.14365 | \n",
+ " 5.89930 | \n",
+ " 2.22177 | \n",
+ " 7.25535 | \n",
+ " 5.88000 | \n",
+ " -0.192912 | \n",
+ " 0.019053 | \n",
+ " 0.013374 | \n",
+ " ... | \n",
+ " -1.78264 | \n",
+ " 0.311453 | \n",
+ " -0.032514 | \n",
+ " -0.018844 | \n",
+ " 0.026950 | \n",
+ " -60.7646 | \n",
+ " -37.1028 | \n",
+ " -57.8799 | \n",
+ " 1 | \n",
+ " 0 | \n",
+ "
\n",
+ " \n",
+ " 4 | \n",
+ " 30.375 | \n",
+ " 2.30106 | \n",
+ " 7.25857 | \n",
+ " 6.09259 | \n",
+ " 2.20720 | \n",
+ " 7.24042 | \n",
+ " 5.95555 | \n",
+ " -0.069961 | \n",
+ " -0.018328 | \n",
+ " 0.004582 | \n",
+ " ... | \n",
+ " -1.75240 | \n",
+ " 0.295902 | \n",
+ " 0.001351 | \n",
+ " -0.048878 | \n",
+ " -0.006328 | \n",
+ " -60.2040 | \n",
+ " -37.1225 | \n",
+ " -57.8847 | \n",
+ " 1 | \n",
+ " 0 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
5 rows × 41 columns
\n",
+ "
"
+ ],
+ "text/plain": [
+ " temp_hand acceleration_16_x_hand acceleration_16_y_hand \\\n",
+ "0 30.375 2.21530 8.27915 \n",
+ "1 30.375 2.29196 7.67288 \n",
+ "2 30.375 2.29090 7.14240 \n",
+ "3 30.375 2.21800 7.14365 \n",
+ "4 30.375 2.30106 7.25857 \n",
+ "\n",
+ " acceleration_16_z_hand acceleration_6_x_hand acceleration_6_y_hand \\\n",
+ "0 5.58753 2.24689 8.55387 \n",
+ "1 5.74467 2.27373 8.14592 \n",
+ "2 5.82342 2.26966 7.66268 \n",
+ "3 5.89930 2.22177 7.25535 \n",
+ "4 6.09259 2.20720 7.24042 \n",
+ "\n",
+ " acceleration_6_z_hand gyroscope_x_hand gyroscope_y_hand \\\n",
+ "0 5.77143 -0.004750 0.037579 \n",
+ "1 5.78739 -0.171710 0.025479 \n",
+ "2 5.78846 -0.238241 0.011214 \n",
+ "3 5.88000 -0.192912 0.019053 \n",
+ "4 5.95555 -0.069961 -0.018328 \n",
+ "\n",
+ " gyroscope_z_hand ... acceleration_6_y_ankle acceleration_6_z_ankle \\\n",
+ "0 -0.011145 ... -1.76757 0.265761 \n",
+ "1 -0.009538 ... -1.75247 0.250816 \n",
+ "2 0.000831 ... -1.73721 0.356632 \n",
+ "3 0.013374 ... -1.78264 0.311453 \n",
+ "4 0.004582 ... -1.75240 0.295902 \n",
+ "\n",
+ " gyroscope_x_ankle gyroscope_y_ankle gyroscope_z_ankle \\\n",
+ "0 0.002908 -0.027714 0.001752 \n",
+ "1 0.020882 0.000945 0.006007 \n",
+ "2 -0.035392 -0.052422 -0.004882 \n",
+ "3 -0.032514 -0.018844 0.026950 \n",
+ "4 0.001351 -0.048878 -0.006328 \n",
+ "\n",
+ " magnetometer_x_ankle magnetometer_y_ankle magnetometer_z_ankle motion_n \\\n",
+ "0 -61.1081 -36.8636 -58.3696 1 \n",
+ "1 -60.8916 -36.3197 -58.3656 1 \n",
+ "2 -60.3407 -35.7842 -58.6119 1 \n",
+ "3 -60.7646 -37.1028 -57.8799 1 \n",
+ "4 -60.2040 -37.1225 -57.8847 1 \n",
+ "\n",
+ " motion_y \n",
+ "0 0 \n",
+ "1 0 \n",
+ "2 0 \n",
+ "3 0 \n",
+ "4 0 \n",
+ "\n",
+ "[5 rows x 41 columns]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "display(activity.head())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 52,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "['temp_hand',\n",
+ " 'acceleration_16_x_hand',\n",
+ " 'acceleration_16_y_hand',\n",
+ " 'acceleration_16_z_hand',\n",
+ " 'acceleration_6_x_hand',\n",
+ " 'acceleration_6_y_hand',\n",
+ " 'acceleration_6_z_hand',\n",
+ " 'gyroscope_x_hand',\n",
+ " 'gyroscope_y_hand',\n",
+ " 'gyroscope_z_hand',\n",
+ " 'magnetometer_x_hand',\n",
+ " 'magnetometer_y_hand',\n",
+ " 'magnetometer_z_hand',\n",
+ " 'temp_chest',\n",
+ " 'acceleration_16_x_chest',\n",
+ " 'acceleration_16_y_chest',\n",
+ " 'acceleration_16_z_chest',\n",
+ " 'acceleration_6_x_chest',\n",
+ " 'acceleration_6_y_chest',\n",
+ " 'acceleration_6_z_chest',\n",
+ " 'gyroscope_x_chest',\n",
+ " 'gyroscope_y_chest',\n",
+ " 'gyroscope_z_chest',\n",
+ " 'magnetometer_x_chest',\n",
+ " 'magnetometer_y_chest',\n",
+ " 'magnetometer_z_chest',\n",
+ " 'temp_ankle',\n",
+ " 'acceleration_16_x_ankle',\n",
+ " 'acceleration_16_y_ankle',\n",
+ " 'acceleration_16_z_ankle',\n",
+ " 'acceleration_6_x_ankle',\n",
+ " 'acceleration_6_y_ankle',\n",
+ " 'acceleration_6_z_ankle',\n",
+ " 'gyroscope_x_ankle',\n",
+ " 'gyroscope_y_ankle',\n",
+ " 'gyroscope_z_ankle',\n",
+ " 'magnetometer_x_ankle',\n",
+ " 'magnetometer_y_ankle',\n",
+ " 'magnetometer_z_ankle']"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "class_label = [ 'motion_n', 'motion_y']\n",
+ "predictors = [a for a in activity.columns.values if a not in class_label]\n",
+ "\n",
+ "for p in predictors:\n",
+ " activity[p].fillna(activity[p].mean(), inplace=True)\n",
+ "\n",
+ "display(predictors)\n",
+ "for p in predictors:\n",
+ " activity[p] = (activity[p]-activity[p].min()) / (activity[p].max() - activity[p].min())\n",
+ " activity[p].astype('float32')\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 53,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Convert the DataFrame to a plain NumPy array; column names are lost from here on,\n",
+ "# so positions matter (the last two columns are the one-hot motion labels).\n",
+ "activity = activity.to_numpy()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 54,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "(1942872, 41)"
+ ]
+ },
+ "execution_count": 54,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "activity.shape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 55,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Split features (all but the last two columns) from the one-hot labels (last two\n",
+ "# columns); 7% held out for testing, seeded via rs for reproducibility.\n",
+ "X_train, X_test, y_train, y_test = train_test_split(activity[:,:-2],activity[:,-2:], test_size=0.07, random_state=rs)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 56,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Train on 1806870 samples, validate on 136002 samples\n",
+ "Epoch 1/2\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " 468416/1806870 [======>.......................] - ETA: 1:54:54 - loss: 0.6740 - accuracy: 0.625 - ETA: 2:24 - loss: 0.6243 - accuracy: 0.6502 - ETA: 1:29 - loss: 0.5579 - accuracy: 0.71 - ETA: 1:09 - loss: 0.4978 - accuracy: 0.75 - ETA: 1:00 - loss: 0.4544 - accuracy: 0.78 - ETA: 55s - loss: 0.4229 - accuracy: 0.8029 - ETA: 52s - loss: 0.3976 - accuracy: 0.816 - ETA: 49s - loss: 0.3798 - accuracy: 0.826 - ETA: 47s - loss: 0.3648 - accuracy: 0.834 - ETA: 46s - loss: 0.3515 - accuracy: 0.841 - ETA: 45s - loss: 0.3383 - accuracy: 0.847 - ETA: 44s - loss: 0.3275 - accuracy: 0.854 - ETA: 43s - loss: 0.3230 - accuracy: 0.856 - ETA: 42s - loss: 0.3164 - accuracy: 0.859 - ETA: 41s - loss: 0.3093 - accuracy: 0.863 - ETA: 41s - loss: 0.3022 - accuracy: 0.867 - ETA: 40s - loss: 0.2951 - accuracy: 0.870 - ETA: 40s - loss: 0.2895 - accuracy: 0.873 - ETA: 39s - loss: 0.2853 - accuracy: 0.875 - ETA: 39s - loss: 0.2801 - accuracy: 0.878 - ETA: 39s - loss: 0.2769 - accuracy: 0.879 - ETA: 38s - loss: 0.2723 - accuracy: 0.882 - ETA: 38s - loss: 0.2683 - accuracy: 0.884 - ETA: 38s - loss: 0.2645 - accuracy: 0.886 - ETA: 38s - loss: 0.2614 - accuracy: 0.887 - ETA: 38s - loss: 0.2584 - accuracy: 0.889 - ETA: 37s - loss: 0.2558 - accuracy: 0.890 - ETA: 37s - loss: 0.2524 - accuracy: 0.892 - ETA: 37s - loss: 0.2500 - accuracy: 0.893 - ETA: 37s - loss: 0.2470 - accuracy: 0.895 - ETA: 37s - loss: 0.2446 - accuracy: 0.896 - ETA: 37s - loss: 0.2424 - accuracy: 0.897 - ETA: 37s - loss: 0.2401 - accuracy: 0.898 - ETA: 36s - loss: 0.2375 - accuracy: 0.900 - ETA: 36s - loss: 0.2349 - accuracy: 0.901 - ETA: 36s - loss: 0.2333 - accuracy: 0.902 - ETA: 36s - loss: 0.2313 - accuracy: 0.903 - ETA: 36s - loss: 0.2291 - accuracy: 0.904 - ETA: 36s - loss: 0.2273 - accuracy: 0.905 - ETA: 36s - loss: 0.2252 - accuracy: 0.906 - ETA: 36s - loss: 0.2240 - accuracy: 0.907 - ETA: 35s - loss: 0.2229 - accuracy: 0.907 - ETA: 35s - loss: 0.2211 - accuracy: 0.908 - ETA: 35s - loss: 0.2195 - accuracy: 0.909 - 
ETA: 35s - loss: 0.2176 - accuracy: 0.910 - ETA: 35s - loss: 0.2159 - accuracy: 0.911 - ETA: 35s - loss: 0.2140 - accuracy: 0.912 - ETA: 35s - loss: 0.2125 - accuracy: 0.913 - ETA: 35s - loss: 0.2107 - accuracy: 0.913 - ETA: 35s - loss: 0.2090 - accuracy: 0.914 - ETA: 34s - loss: 0.2075 - accuracy: 0.915 - ETA: 34s - loss: 0.2062 - accuracy: 0.916 - ETA: 34s - loss: 0.2049 - accuracy: 0.917 - ETA: 34s - loss: 0.2040 - accuracy: 0.917 - ETA: 34s - loss: 0.2029 - accuracy: 0.917 - ETA: 34s - loss: 0.2018 - accuracy: 0.918 - ETA: 34s - loss: 0.2007 - accuracy: 0.919 - ETA: 34s - loss: 0.1996 - accuracy: 0.919 - ETA: 34s - loss: 0.1985 - accuracy: 0.919 - ETA: 34s - loss: 0.1973 - accuracy: 0.920 - ETA: 34s - loss: 0.1963 - accuracy: 0.921 - ETA: 33s - loss: 0.1952 - accuracy: 0.921 - ETA: 33s - loss: 0.1944 - accuracy: 0.922 - ETA: 33s - loss: 0.1932 - accuracy: 0.922 - ETA: 33s - loss: 0.1924 - accuracy: 0.922 - ETA: 33s - loss: 0.1916 - accuracy: 0.923 - ETA: 33s - loss: 0.1904 - accuracy: 0.923 - ETA: 33s - loss: 0.1892 - accuracy: 0.924 - ETA: 33s - loss: 0.1881 - accuracy: 0.925 - ETA: 33s - loss: 0.1869 - accuracy: 0.925 - ETA: 33s - loss: 0.1860 - accuracy: 0.926 - ETA: 33s - loss: 0.1852 - accuracy: 0.926 - ETA: 33s - loss: 0.1847 - accuracy: 0.926 - ETA: 33s - loss: 0.1839 - accuracy: 0.927 - ETA: 32s - loss: 0.1831 - accuracy: 0.927 - ETA: 32s - loss: 0.1824 - accuracy: 0.927 - ETA: 32s - loss: 0.1817 - accuracy: 0.928 - ETA: 32s - loss: 0.1811 - accuracy: 0.928 - ETA: 32s - loss: 0.1803 - accuracy: 0.928 - ETA: 32s - loss: 0.1796 - accuracy: 0.929 - ETA: 32s - loss: 0.1790 - accuracy: 0.929 - ETA: 32s - loss: 0.1782 - accuracy: 0.929 - ETA: 32s - loss: 0.1774 - accuracy: 0.930 - ETA: 32s - loss: 0.1766 - accuracy: 0.930 - ETA: 32s - loss: 0.1761 - accuracy: 0.930 - ETA: 32s - loss: 0.1754 - accuracy: 0.931 - ETA: 32s - loss: 0.1747 - accuracy: 0.931 - ETA: 32s - loss: 0.1740 - accuracy: 0.931 - ETA: 31s - loss: 0.1732 - accuracy: 0.932 - ETA: 31s - loss: 
0.1728 - accuracy: 0.932 - ETA: 31s - loss: 0.1722 - accuracy: 0.932 - ETA: 31s - loss: 0.1715 - accuracy: 0.932 - ETA: 31s - loss: 0.1712 - accuracy: 0.933 - ETA: 31s - loss: 0.1707 - accuracy: 0.933 - ETA: 31s - loss: 0.1702 - accuracy: 0.933 - ETA: 31s - loss: 0.1695 - accuracy: 0.933 - ETA: 31s - loss: 0.1688 - accuracy: 0.934 - ETA: 31s - loss: 0.1683 - accuracy: 0.934 - ETA: 31s - loss: 0.1676 - accuracy: 0.934 - ETA: 31s - loss: 0.1670 - accuracy: 0.935 - ETA: 31s - loss: 0.1664 - accuracy: 0.935 - ETA: 31s - loss: 0.1663 - accuracy: 0.935 - ETA: 31s - loss: 0.1660 - accuracy: 0.935 - ETA: 31s - loss: 0.1655 - accuracy: 0.935 - ETA: 30s - loss: 0.1649 - accuracy: 0.936 - ETA: 30s - loss: 0.1646 - accuracy: 0.936 - ETA: 30s - loss: 0.1640 - accuracy: 0.936 - ETA: 30s - loss: 0.1635 - accuracy: 0.936 - ETA: 30s - loss: 0.1629 - accuracy: 0.936 - ETA: 30s - loss: 0.1623 - accuracy: 0.937 - ETA: 30s - loss: 0.1620 - accuracy: 0.937 - ETA: 30s - loss: 0.1613 - accuracy: 0.937 - ETA: 30s - loss: 0.1609 - accuracy: 0.937 - ETA: 30s - loss: 0.1604 - accuracy: 0.938 - ETA: 30s - loss: 0.1599 - accuracy: 0.938 - ETA: 30s - loss: 0.1598 - accuracy: 0.938 - ETA: 30s - loss: 0.1591 - accuracy: 0.938 - ETA: 30s - loss: 0.1586 - accuracy: 0.938 - ETA: 30s - loss: 0.1583 - accuracy: 0.939 - ETA: 30s - loss: 0.1578 - accuracy: 0.939 - ETA: 30s - loss: 0.1577 - accuracy: 0.939 - ETA: 29s - loss: 0.1574 - accuracy: 0.939 - ETA: 29s - loss: 0.1571 - accuracy: 0.939 - ETA: 29s - loss: 0.1567 - accuracy: 0.939 - ETA: 29s - loss: 0.1562 - accuracy: 0.940 - ETA: 29s - loss: 0.1556 - accuracy: 0.940 - ETA: 29s - loss: 0.1552 - accuracy: 0.940 - ETA: 29s - loss: 0.1549 - accuracy: 0.940 - ETA: 29s - loss: 0.1548 - accuracy: 0.940 - ETA: 29s - loss: 0.1545 - accuracy: 0.940 - ETA: 29s - loss: 0.1540 - accuracy: 0.940 - ETA: 29s - loss: 0.1536 - accuracy: 0.941 - ETA: 29s - loss: 0.1532 - accuracy: 0.941 - ETA: 29s - loss: 0.1530 - accuracy: 0.941 - ETA: 29s - loss: 0.1526 - accuracy: 
0.941 - ETA: 29s - loss: 0.1522 - accuracy: 0.941 - ETA: 29s - loss: 0.1520 - accuracy: 0.941 - ETA: 29s - loss: 0.1517 - accuracy: 0.942 - ETA: 29s - loss: 0.1513 - accuracy: 0.942 - ETA: 29s - loss: 0.1509 - accuracy: 0.942 - ETA: 28s - loss: 0.1505 - accuracy: 0.942 - ETA: 28s - loss: 0.1501 - accuracy: 0.942 - ETA: 28s - loss: 0.1497 - accuracy: 0.942 - ETA: 28s - loss: 0.1494 - accuracy: 0.943 - ETA: 28s - loss: 0.1490 - accuracy: 0.943 - ETA: 28s - loss: 0.1487 - accuracy: 0.943 - ETA: 28s - loss: 0.1483 - accuracy: 0.943 - ETA: 28s - loss: 0.1481 - accuracy: 0.943 - ETA: 28s - loss: 0.1478 - accuracy: 0.943 - ETA: 28s - loss: 0.1475 - accuracy: 0.943 - ETA: 28s - loss: 0.1473 - accuracy: 0.943 - ETA: 28s - loss: 0.1470 - accuracy: 0.944 - ETA: 28s - loss: 0.1467 - accuracy: 0.944 - ETA: 28s - loss: 0.1465 - accuracy: 0.944 - ETA: 28s - loss: 0.1461 - accuracy: 0.944 - ETA: 28s - loss: 0.1457 - accuracy: 0.944 - ETA: 28s - loss: 0.1452 - accuracy: 0.944 - ETA: 27s - loss: 0.1449 - accuracy: 0.945 - ETA: 27s - loss: 0.1445 - accuracy: 0.945 - ETA: 27s - loss: 0.1440 - accuracy: 0.945 - ETA: 27s - loss: 0.1438 - accuracy: 0.945 - ETA: 27s - loss: 0.1436 - accuracy: 0.945 - ETA: 27s - loss: 0.1433 - accuracy: 0.945 - ETA: 27s - loss: 0.1430 - accuracy: 0.945 - ETA: 27s - loss: 0.1429 - accuracy: 0.945 - ETA: 27s - loss: 0.1426 - accuracy: 0.946 - ETA: 27s - loss: 0.1425 - accuracy: 0.946 - ETA: 27s - loss: 0.1421 - accuracy: 0.946 - ETA: 27s - loss: 0.1419 - accuracy: 0.946 - ETA: 27s - loss: 0.1417 - accuracy: 0.946 - ETA: 27s - loss: 0.1414 - accuracy: 0.946 - ETA: 27s - loss: 0.1411 - accuracy: 0.946 - ETA: 27s - loss: 0.1408 - accuracy: 0.946 - ETA: 27s - loss: 0.1405 - accuracy: 0.947 - ETA: 27s - loss: 0.1402 - accuracy: 0.947 - ETA: 26s - loss: 0.1404 - accuracy: 0.947 - ETA: 26s - loss: 0.1401 - accuracy: 0.947 - ETA: 26s - loss: 0.1398 - accuracy: 0.947 - ETA: 26s - loss: 0.1396 - accuracy: 0.947 - ETA: 26s - loss: 0.1393 - accuracy: 0.947 - ETA: 26s - 
loss: 0.1390 - accuracy: 0.947 - ETA: 26s - loss: 0.1387 - accuracy: 0.947 - ETA: 26s - loss: 0.1386 - accuracy: 0.947 - ETA: 26s - loss: 0.1383 - accuracy: 0.9479\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " 948576/1806870 [==============>...............] - ETA: 26s - loss: 0.1380 - accuracy: 0.948 - ETA: 26s - loss: 0.1379 - accuracy: 0.948 - ETA: 26s - loss: 0.1377 - accuracy: 0.948 - ETA: 26s - loss: 0.1374 - accuracy: 0.948 - ETA: 26s - loss: 0.1372 - accuracy: 0.948 - ETA: 26s - loss: 0.1369 - accuracy: 0.948 - ETA: 26s - loss: 0.1366 - accuracy: 0.948 - ETA: 26s - loss: 0.1363 - accuracy: 0.948 - ETA: 26s - loss: 0.1360 - accuracy: 0.948 - ETA: 25s - loss: 0.1357 - accuracy: 0.949 - ETA: 25s - loss: 0.1354 - accuracy: 0.949 - ETA: 25s - loss: 0.1352 - accuracy: 0.949 - ETA: 25s - loss: 0.1350 - accuracy: 0.949 - ETA: 25s - loss: 0.1347 - accuracy: 0.949 - ETA: 25s - loss: 0.1344 - accuracy: 0.949 - ETA: 25s - loss: 0.1341 - accuracy: 0.949 - ETA: 25s - loss: 0.1340 - accuracy: 0.949 - ETA: 25s - loss: 0.1337 - accuracy: 0.950 - ETA: 25s - loss: 0.1335 - accuracy: 0.950 - ETA: 25s - loss: 0.1333 - accuracy: 0.950 - ETA: 25s - loss: 0.1331 - accuracy: 0.950 - ETA: 25s - loss: 0.1328 - accuracy: 0.950 - ETA: 25s - loss: 0.1326 - accuracy: 0.950 - ETA: 25s - loss: 0.1323 - accuracy: 0.950 - ETA: 25s - loss: 0.1321 - accuracy: 0.950 - ETA: 25s - loss: 0.1320 - accuracy: 0.950 - ETA: 25s - loss: 0.1318 - accuracy: 0.950 - ETA: 25s - loss: 0.1315 - accuracy: 0.950 - ETA: 24s - loss: 0.1313 - accuracy: 0.951 - ETA: 24s - loss: 0.1311 - accuracy: 0.951 - ETA: 24s - loss: 0.1310 - accuracy: 0.951 - ETA: 24s - loss: 0.1307 - accuracy: 0.951 - ETA: 24s - loss: 0.1304 - accuracy: 0.951 - ETA: 24s - loss: 0.1302 - accuracy: 0.951 - ETA: 24s - loss: 0.1301 - accuracy: 0.951 - ETA: 24s - loss: 0.1299 - accuracy: 0.951 - ETA: 24s - loss: 0.1298 - accuracy: 0.951 - ETA: 24s - loss: 0.1296 - accuracy: 0.951 - ETA: 24s - loss: 0.1293 - accuracy: 0.951 - ETA: 24s - loss: 0.1291 - accuracy: 0.951 - ETA: 24s - loss: 0.1289 - accuracy: 0.952 - ETA: 24s - loss: 0.1286 - accuracy: 0.952 - ETA: 24s - loss: 0.1284 - accuracy: 0.952 - ETA: 24s - loss: 0.1282 - accuracy: 0.952 - ETA: 24s 
- loss: 0.1279 - accuracy: 0.952 - ETA: 24s - loss: 0.1277 - accuracy: 0.952 - ETA: 23s - loss: 0.1275 - accuracy: 0.952 - ETA: 23s - loss: 0.1273 - accuracy: 0.952 - ETA: 23s - loss: 0.1272 - accuracy: 0.952 - ETA: 23s - loss: 0.1270 - accuracy: 0.952 - ETA: 23s - loss: 0.1269 - accuracy: 0.952 - ETA: 23s - loss: 0.1267 - accuracy: 0.953 - ETA: 23s - loss: 0.1265 - accuracy: 0.953 - ETA: 23s - loss: 0.1262 - accuracy: 0.953 - ETA: 23s - loss: 0.1260 - accuracy: 0.953 - ETA: 23s - loss: 0.1258 - accuracy: 0.953 - ETA: 23s - loss: 0.1256 - accuracy: 0.953 - ETA: 23s - loss: 0.1253 - accuracy: 0.953 - ETA: 23s - loss: 0.1251 - accuracy: 0.953 - ETA: 23s - loss: 0.1249 - accuracy: 0.953 - ETA: 23s - loss: 0.1248 - accuracy: 0.953 - ETA: 23s - loss: 0.1245 - accuracy: 0.953 - ETA: 23s - loss: 0.1243 - accuracy: 0.954 - ETA: 23s - loss: 0.1241 - accuracy: 0.954 - ETA: 23s - loss: 0.1240 - accuracy: 0.954 - ETA: 22s - loss: 0.1238 - accuracy: 0.954 - ETA: 22s - loss: 0.1236 - accuracy: 0.954 - ETA: 22s - loss: 0.1234 - accuracy: 0.954 - ETA: 22s - loss: 0.1232 - accuracy: 0.954 - ETA: 22s - loss: 0.1230 - accuracy: 0.954 - ETA: 22s - loss: 0.1229 - accuracy: 0.954 - ETA: 22s - loss: 0.1227 - accuracy: 0.954 - ETA: 22s - loss: 0.1225 - accuracy: 0.954 - ETA: 22s - loss: 0.1223 - accuracy: 0.954 - ETA: 22s - loss: 0.1221 - accuracy: 0.954 - ETA: 22s - loss: 0.1219 - accuracy: 0.954 - ETA: 22s - loss: 0.1218 - accuracy: 0.955 - ETA: 22s - loss: 0.1217 - accuracy: 0.955 - ETA: 22s - loss: 0.1215 - accuracy: 0.955 - ETA: 22s - loss: 0.1214 - accuracy: 0.955 - ETA: 22s - loss: 0.1211 - accuracy: 0.955 - ETA: 22s - loss: 0.1210 - accuracy: 0.955 - ETA: 22s - loss: 0.1207 - accuracy: 0.955 - ETA: 22s - loss: 0.1206 - accuracy: 0.955 - ETA: 21s - loss: 0.1204 - accuracy: 0.955 - ETA: 21s - loss: 0.1203 - accuracy: 0.955 - ETA: 21s - loss: 0.1200 - accuracy: 0.955 - ETA: 21s - loss: 0.1199 - accuracy: 0.955 - ETA: 21s - loss: 0.1197 - accuracy: 0.955 - ETA: 21s - loss: 0.1196 - 
accuracy: 0.955 - ETA: 21s - loss: 0.1195 - accuracy: 0.956 - ETA: 21s - loss: 0.1193 - accuracy: 0.956 - ETA: 21s - loss: 0.1191 - accuracy: 0.956 - ETA: 21s - loss: 0.1189 - accuracy: 0.956 - ETA: 21s - loss: 0.1188 - accuracy: 0.956 - ETA: 21s - loss: 0.1187 - accuracy: 0.956 - ETA: 21s - loss: 0.1185 - accuracy: 0.956 - ETA: 21s - loss: 0.1184 - accuracy: 0.956 - ETA: 21s - loss: 0.1183 - accuracy: 0.956 - ETA: 21s - loss: 0.1181 - accuracy: 0.956 - ETA: 21s - loss: 0.1179 - accuracy: 0.956 - ETA: 21s - loss: 0.1177 - accuracy: 0.956 - ETA: 21s - loss: 0.1176 - accuracy: 0.956 - ETA: 21s - loss: 0.1173 - accuracy: 0.956 - ETA: 20s - loss: 0.1172 - accuracy: 0.956 - ETA: 20s - loss: 0.1171 - accuracy: 0.957 - ETA: 20s - loss: 0.1169 - accuracy: 0.957 - ETA: 20s - loss: 0.1167 - accuracy: 0.957 - ETA: 20s - loss: 0.1166 - accuracy: 0.957 - ETA: 20s - loss: 0.1165 - accuracy: 0.957 - ETA: 20s - loss: 0.1163 - accuracy: 0.957 - ETA: 20s - loss: 0.1162 - accuracy: 0.957 - ETA: 20s - loss: 0.1160 - accuracy: 0.957 - ETA: 20s - loss: 0.1158 - accuracy: 0.957 - ETA: 20s - loss: 0.1156 - accuracy: 0.957 - ETA: 20s - loss: 0.1154 - accuracy: 0.957 - ETA: 20s - loss: 0.1152 - accuracy: 0.957 - ETA: 20s - loss: 0.1150 - accuracy: 0.957 - ETA: 20s - loss: 0.1149 - accuracy: 0.957 - ETA: 20s - loss: 0.1147 - accuracy: 0.957 - ETA: 20s - loss: 0.1146 - accuracy: 0.958 - ETA: 20s - loss: 0.1145 - accuracy: 0.958 - ETA: 20s - loss: 0.1143 - accuracy: 0.958 - ETA: 19s - loss: 0.1142 - accuracy: 0.958 - ETA: 19s - loss: 0.1140 - accuracy: 0.958 - ETA: 19s - loss: 0.1139 - accuracy: 0.958 - ETA: 19s - loss: 0.1137 - accuracy: 0.958 - ETA: 19s - loss: 0.1135 - accuracy: 0.958 - ETA: 19s - loss: 0.1133 - accuracy: 0.958 - ETA: 19s - loss: 0.1132 - accuracy: 0.958 - ETA: 19s - loss: 0.1130 - accuracy: 0.958 - ETA: 19s - loss: 0.1129 - accuracy: 0.958 - ETA: 19s - loss: 0.1128 - accuracy: 0.958 - ETA: 19s - loss: 0.1127 - accuracy: 0.958 - ETA: 19s - loss: 0.1126 - accuracy: 0.958 - 
ETA: 19s - loss: 0.1124 - accuracy: 0.958 - ETA: 19s - loss: 0.1123 - accuracy: 0.958 - ETA: 19s - loss: 0.1121 - accuracy: 0.959 - ETA: 19s - loss: 0.1120 - accuracy: 0.959 - ETA: 19s - loss: 0.1118 - accuracy: 0.959 - ETA: 19s - loss: 0.1117 - accuracy: 0.959 - ETA: 19s - loss: 0.1116 - accuracy: 0.959 - ETA: 19s - loss: 0.1115 - accuracy: 0.959 - ETA: 18s - loss: 0.1113 - accuracy: 0.959 - ETA: 18s - loss: 0.1112 - accuracy: 0.959 - ETA: 18s - loss: 0.1110 - accuracy: 0.959 - ETA: 18s - loss: 0.1108 - accuracy: 0.959 - ETA: 18s - loss: 0.1107 - accuracy: 0.959 - ETA: 18s - loss: 0.1105 - accuracy: 0.959 - ETA: 18s - loss: 0.1104 - accuracy: 0.959 - ETA: 18s - loss: 0.1103 - accuracy: 0.959 - ETA: 18s - loss: 0.1101 - accuracy: 0.959 - ETA: 18s - loss: 0.1099 - accuracy: 0.959 - ETA: 18s - loss: 0.1098 - accuracy: 0.959 - ETA: 18s - loss: 0.1097 - accuracy: 0.960 - ETA: 18s - loss: 0.1096 - accuracy: 0.960 - ETA: 18s - loss: 0.1095 - accuracy: 0.960 - ETA: 18s - loss: 0.1094 - accuracy: 0.960 - ETA: 18s - loss: 0.1093 - accuracy: 0.960 - ETA: 18s - loss: 0.1092 - accuracy: 0.960 - ETA: 18s - loss: 0.1090 - accuracy: 0.960 - ETA: 18s - loss: 0.1089 - accuracy: 0.960 - ETA: 17s - loss: 0.1088 - accuracy: 0.960 - ETA: 17s - loss: 0.1086 - accuracy: 0.960 - ETA: 17s - loss: 0.1085 - accuracy: 0.960 - ETA: 17s - loss: 0.1085 - accuracy: 0.960 - ETA: 17s - loss: 0.1084 - accuracy: 0.960 - ETA: 17s - loss: 0.1082 - accuracy: 0.960 - ETA: 17s - loss: 0.1080 - accuracy: 0.960 - ETA: 17s - loss: 0.1079 - accuracy: 0.960 - ETA: 17s - loss: 0.1078 - accuracy: 0.960 - ETA: 17s - loss: 0.1077 - accuracy: 0.960 - ETA: 17s - loss: 0.1075 - accuracy: 0.960 - ETA: 17s - loss: 0.1073 - accuracy: 0.961 - ETA: 17s - loss: 0.1071 - accuracy: 0.961 - ETA: 17s - loss: 0.1070 - accuracy: 0.961 - ETA: 17s - loss: 0.1069 - accuracy: 0.961 - ETA: 17s - loss: 0.1068 - accuracy: 0.961 - ETA: 17s - loss: 0.1066 - accuracy: 0.961 - ETA: 17s - loss: 0.1066 - accuracy: 0.961 - ETA: 17s - loss: 
0.1065 - accuracy: 0.961 - ETA: 17s - loss: 0.1064 - accuracy: 0.961 - ETA: 16s - loss: 0.1062 - accuracy: 0.961 - ETA: 16s - loss: 0.1061 - accuracy: 0.961 - ETA: 16s - loss: 0.1060 - accuracy: 0.961 - ETA: 16s - loss: 0.1058 - accuracy: 0.9616"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "1435872/1806870 [======================>.......] - ETA: 16s - loss: 0.1057 - accuracy: 0.961 - ETA: 16s - loss: 0.1055 - accuracy: 0.961 - ETA: 16s - loss: 0.1055 - accuracy: 0.961 - ETA: 16s - loss: 0.1054 - accuracy: 0.961 - ETA: 16s - loss: 0.1052 - accuracy: 0.961 - ETA: 16s - loss: 0.1051 - accuracy: 0.961 - ETA: 16s - loss: 0.1050 - accuracy: 0.961 - ETA: 16s - loss: 0.1048 - accuracy: 0.961 - ETA: 16s - loss: 0.1047 - accuracy: 0.962 - ETA: 16s - loss: 0.1046 - accuracy: 0.962 - ETA: 16s - loss: 0.1045 - accuracy: 0.962 - ETA: 16s - loss: 0.1043 - accuracy: 0.962 - ETA: 16s - loss: 0.1043 - accuracy: 0.962 - ETA: 16s - loss: 0.1041 - accuracy: 0.962 - ETA: 16s - loss: 0.1041 - accuracy: 0.962 - ETA: 15s - loss: 0.1039 - accuracy: 0.962 - ETA: 15s - loss: 0.1038 - accuracy: 0.962 - ETA: 15s - loss: 0.1037 - accuracy: 0.962 - ETA: 15s - loss: 0.1035 - accuracy: 0.962 - ETA: 15s - loss: 0.1034 - accuracy: 0.962 - ETA: 15s - loss: 0.1033 - accuracy: 0.962 - ETA: 15s - loss: 0.1031 - accuracy: 0.962 - ETA: 15s - loss: 0.1030 - accuracy: 0.962 - ETA: 15s - loss: 0.1028 - accuracy: 0.962 - ETA: 15s - loss: 0.1027 - accuracy: 0.962 - ETA: 15s - loss: 0.1026 - accuracy: 0.962 - ETA: 15s - loss: 0.1025 - accuracy: 0.962 - ETA: 15s - loss: 0.1024 - accuracy: 0.962 - ETA: 15s - loss: 0.1023 - accuracy: 0.962 - ETA: 15s - loss: 0.1022 - accuracy: 0.963 - ETA: 15s - loss: 0.1021 - accuracy: 0.963 - ETA: 15s - loss: 0.1020 - accuracy: 0.963 - ETA: 15s - loss: 0.1019 - accuracy: 0.963 - ETA: 15s - loss: 0.1018 - accuracy: 0.963 - ETA: 15s - loss: 0.1017 - accuracy: 0.963 - ETA: 14s - loss: 0.1015 - accuracy: 0.963 - ETA: 14s - loss: 0.1014 - accuracy: 0.963 - ETA: 14s - loss: 0.1014 - accuracy: 0.963 - ETA: 14s - loss: 0.1013 - accuracy: 0.963 - ETA: 14s - loss: 0.1011 - accuracy: 0.963 - ETA: 14s - loss: 0.1010 - accuracy: 0.963 - ETA: 14s - loss: 0.1009 - accuracy: 0.963 - ETA: 14s - loss: 0.1008 - accuracy: 0.963 - ETA: 14s - loss: 0.1007 - accuracy: 0.963 - ETA: 14s 
- loss: 0.1006 - accuracy: 0.963 - ETA: 14s - loss: 0.1005 - accuracy: 0.963 - ETA: 14s - loss: 0.1004 - accuracy: 0.963 - ETA: 14s - loss: 0.1003 - accuracy: 0.963 - ETA: 14s - loss: 0.1002 - accuracy: 0.963 - ETA: 14s - loss: 0.1002 - accuracy: 0.963 - ETA: 14s - loss: 0.1000 - accuracy: 0.963 - ETA: 14s - loss: 0.0999 - accuracy: 0.963 - ETA: 14s - loss: 0.0998 - accuracy: 0.963 - ETA: 14s - loss: 0.0997 - accuracy: 0.963 - ETA: 13s - loss: 0.0996 - accuracy: 0.964 - ETA: 13s - loss: 0.0995 - accuracy: 0.964 - ETA: 13s - loss: 0.0994 - accuracy: 0.964 - ETA: 13s - loss: 0.0993 - accuracy: 0.964 - ETA: 13s - loss: 0.0993 - accuracy: 0.964 - ETA: 13s - loss: 0.0992 - accuracy: 0.964 - ETA: 13s - loss: 0.0990 - accuracy: 0.964 - ETA: 13s - loss: 0.0989 - accuracy: 0.964 - ETA: 13s - loss: 0.0988 - accuracy: 0.964 - ETA: 13s - loss: 0.0987 - accuracy: 0.964 - ETA: 13s - loss: 0.0986 - accuracy: 0.964 - ETA: 13s - loss: 0.0985 - accuracy: 0.964 - ETA: 13s - loss: 0.0984 - accuracy: 0.964 - ETA: 13s - loss: 0.0983 - accuracy: 0.964 - ETA: 13s - loss: 0.0982 - accuracy: 0.964 - ETA: 13s - loss: 0.0981 - accuracy: 0.964 - ETA: 13s - loss: 0.0980 - accuracy: 0.964 - ETA: 13s - loss: 0.0979 - accuracy: 0.964 - ETA: 13s - loss: 0.0978 - accuracy: 0.964 - ETA: 13s - loss: 0.0977 - accuracy: 0.964 - ETA: 12s - loss: 0.0977 - accuracy: 0.964 - ETA: 12s - loss: 0.0976 - accuracy: 0.964 - ETA: 12s - loss: 0.0974 - accuracy: 0.964 - ETA: 12s - loss: 0.0973 - accuracy: 0.964 - ETA: 12s - loss: 0.0972 - accuracy: 0.964 - ETA: 12s - loss: 0.0971 - accuracy: 0.965 - ETA: 12s - loss: 0.0970 - accuracy: 0.965 - ETA: 12s - loss: 0.0969 - accuracy: 0.965 - ETA: 12s - loss: 0.0968 - accuracy: 0.965 - ETA: 12s - loss: 0.0968 - accuracy: 0.965 - ETA: 12s - loss: 0.0967 - accuracy: 0.965 - ETA: 12s - loss: 0.0966 - accuracy: 0.965 - ETA: 12s - loss: 0.0965 - accuracy: 0.965 - ETA: 12s - loss: 0.0964 - accuracy: 0.965 - ETA: 12s - loss: 0.0963 - accuracy: 0.965 - ETA: 12s - loss: 0.0962 - 
accuracy: 0.965 - ETA: 12s - loss: 0.0961 - accuracy: 0.965 - ETA: 12s - loss: 0.0960 - accuracy: 0.965 - ETA: 12s - loss: 0.0959 - accuracy: 0.965 - ETA: 11s - loss: 0.0958 - accuracy: 0.965 - ETA: 11s - loss: 0.0957 - accuracy: 0.965 - ETA: 11s - loss: 0.0956 - accuracy: 0.965 - ETA: 11s - loss: 0.0955 - accuracy: 0.965 - ETA: 11s - loss: 0.0954 - accuracy: 0.965 - ETA: 11s - loss: 0.0953 - accuracy: 0.965 - ETA: 11s - loss: 0.0952 - accuracy: 0.965 - ETA: 11s - loss: 0.0952 - accuracy: 0.965 - ETA: 11s - loss: 0.0951 - accuracy: 0.965 - ETA: 11s - loss: 0.0950 - accuracy: 0.965 - ETA: 11s - loss: 0.0949 - accuracy: 0.965 - ETA: 11s - loss: 0.0948 - accuracy: 0.965 - ETA: 11s - loss: 0.0947 - accuracy: 0.965 - ETA: 11s - loss: 0.0946 - accuracy: 0.965 - ETA: 11s - loss: 0.0945 - accuracy: 0.966 - ETA: 11s - loss: 0.0944 - accuracy: 0.966 - ETA: 11s - loss: 0.0943 - accuracy: 0.966 - ETA: 11s - loss: 0.0942 - accuracy: 0.966 - ETA: 11s - loss: 0.0941 - accuracy: 0.966 - ETA: 11s - loss: 0.0940 - accuracy: 0.966 - ETA: 10s - loss: 0.0939 - accuracy: 0.966 - ETA: 10s - loss: 0.0938 - accuracy: 0.966 - ETA: 10s - loss: 0.0937 - accuracy: 0.966 - ETA: 10s - loss: 0.0936 - accuracy: 0.966 - ETA: 10s - loss: 0.0935 - accuracy: 0.966 - ETA: 10s - loss: 0.0934 - accuracy: 0.966 - ETA: 10s - loss: 0.0933 - accuracy: 0.966 - ETA: 10s - loss: 0.0932 - accuracy: 0.966 - ETA: 10s - loss: 0.0931 - accuracy: 0.966 - ETA: 10s - loss: 0.0930 - accuracy: 0.966 - ETA: 10s - loss: 0.0929 - accuracy: 0.966 - ETA: 10s - loss: 0.0928 - accuracy: 0.966 - ETA: 10s - loss: 0.0927 - accuracy: 0.966 - ETA: 10s - loss: 0.0926 - accuracy: 0.966 - ETA: 10s - loss: 0.0925 - accuracy: 0.966 - ETA: 10s - loss: 0.0924 - accuracy: 0.966 - ETA: 10s - loss: 0.0923 - accuracy: 0.966 - ETA: 10s - loss: 0.0923 - accuracy: 0.966 - ETA: 10s - loss: 0.0922 - accuracy: 0.966 - ETA: 10s - loss: 0.0921 - accuracy: 0.966 - ETA: 9s - loss: 0.0920 - accuracy: 0.966 - ETA: 9s - loss: 0.0920 - accuracy: 0.96 - ETA: 
9s - loss: 0.0919 - accuracy: 0.96 - ETA: 9s - loss: 0.0918 - accuracy: 0.96 - ETA: 9s - loss: 0.0917 - accuracy: 0.96 - ETA: 9s - loss: 0.0917 - accuracy: 0.96 - ETA: 9s - loss: 0.0916 - accuracy: 0.96 - ETA: 9s - loss: 0.0915 - accuracy: 0.96 - ETA: 9s - loss: 0.0914 - accuracy: 0.96 - ETA: 9s - loss: 0.0913 - accuracy: 0.96 - ETA: 9s - loss: 0.0912 - accuracy: 0.96 - ETA: 9s - loss: 0.0911 - accuracy: 0.96 - ETA: 9s - loss: 0.0910 - accuracy: 0.96 - ETA: 9s - loss: 0.0909 - accuracy: 0.96 - ETA: 9s - loss: 0.0908 - accuracy: 0.96 - ETA: 9s - loss: 0.0907 - accuracy: 0.96 - ETA: 9s - loss: 0.0906 - accuracy: 0.96 - ETA: 9s - loss: 0.0905 - accuracy: 0.96 - ETA: 9s - loss: 0.0905 - accuracy: 0.96 - ETA: 9s - loss: 0.0904 - accuracy: 0.96 - ETA: 8s - loss: 0.0904 - accuracy: 0.96 - ETA: 8s - loss: 0.0903 - accuracy: 0.96 - ETA: 8s - loss: 0.0902 - accuracy: 0.96 - ETA: 8s - loss: 0.0901 - accuracy: 0.96 - ETA: 8s - loss: 0.0900 - accuracy: 0.96 - ETA: 8s - loss: 0.0899 - accuracy: 0.96 - ETA: 8s - loss: 0.0898 - accuracy: 0.96 - ETA: 8s - loss: 0.0897 - accuracy: 0.96 - ETA: 8s - loss: 0.0896 - accuracy: 0.96 - ETA: 8s - loss: 0.0896 - accuracy: 0.96 - ETA: 8s - loss: 0.0895 - accuracy: 0.96 - ETA: 8s - loss: 0.0894 - accuracy: 0.96 - ETA: 8s - loss: 0.0893 - accuracy: 0.96 - ETA: 8s - loss: 0.0892 - accuracy: 0.96 - ETA: 8s - loss: 0.0891 - accuracy: 0.96 - ETA: 8s - loss: 0.0890 - accuracy: 0.96 - ETA: 8s - loss: 0.0889 - accuracy: 0.96 - ETA: 8s - loss: 0.0889 - accuracy: 0.96 - ETA: 8s - loss: 0.0888 - accuracy: 0.96 - ETA: 7s - loss: 0.0887 - accuracy: 0.96 - ETA: 7s - loss: 0.0886 - accuracy: 0.96 - ETA: 7s - loss: 0.0885 - accuracy: 0.96 - ETA: 7s - loss: 0.0885 - accuracy: 0.96 - ETA: 7s - loss: 0.0884 - accuracy: 0.96 - ETA: 7s - loss: 0.0883 - accuracy: 0.96 - ETA: 7s - loss: 0.0882 - accuracy: 0.96 - ETA: 7s - loss: 0.0881 - accuracy: 0.96 - ETA: 7s - loss: 0.0880 - accuracy: 0.96 - ETA: 7s - loss: 0.0879 - accuracy: 0.96 - ETA: 7s - loss: 0.0878 - 
accuracy: 0.96 - ETA: 7s - loss: 0.0878 - accuracy: 0.96 - ETA: 7s - loss: 0.0877 - accuracy: 0.96 - ETA: 7s - loss: 0.0876 - accuracy: 0.96 - ETA: 7s - loss: 0.0876 - accuracy: 0.96 - ETA: 7s - loss: 0.0875 - accuracy: 0.9687"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "1806870/1806870 [==============================] - ETA: 7s - loss: 0.0874 - accuracy: 0.96 - ETA: 7s - loss: 0.0873 - accuracy: 0.96 - ETA: 7s - loss: 0.0873 - accuracy: 0.96 - ETA: 7s - loss: 0.0872 - accuracy: 0.96 - ETA: 6s - loss: 0.0871 - accuracy: 0.96 - ETA: 6s - loss: 0.0870 - accuracy: 0.96 - ETA: 6s - loss: 0.0870 - accuracy: 0.96 - ETA: 6s - loss: 0.0869 - accuracy: 0.96 - ETA: 6s - loss: 0.0868 - accuracy: 0.96 - ETA: 6s - loss: 0.0867 - accuracy: 0.96 - ETA: 6s - loss: 0.0867 - accuracy: 0.96 - ETA: 6s - loss: 0.0866 - accuracy: 0.96 - ETA: 6s - loss: 0.0866 - accuracy: 0.96 - ETA: 6s - loss: 0.0865 - accuracy: 0.96 - ETA: 6s - loss: 0.0864 - accuracy: 0.96 - ETA: 6s - loss: 0.0863 - accuracy: 0.96 - ETA: 6s - loss: 0.0863 - accuracy: 0.96 - ETA: 6s - loss: 0.0862 - accuracy: 0.96 - ETA: 6s - loss: 0.0861 - accuracy: 0.96 - ETA: 6s - loss: 0.0860 - accuracy: 0.96 - ETA: 6s - loss: 0.0860 - accuracy: 0.96 - ETA: 6s - loss: 0.0859 - accuracy: 0.96 - ETA: 6s - loss: 0.0858 - accuracy: 0.96 - ETA: 6s - loss: 0.0857 - accuracy: 0.96 - ETA: 6s - loss: 0.0857 - accuracy: 0.96 - ETA: 5s - loss: 0.0856 - accuracy: 0.96 - ETA: 5s - loss: 0.0856 - accuracy: 0.96 - ETA: 5s - loss: 0.0855 - accuracy: 0.96 - ETA: 5s - loss: 0.0854 - accuracy: 0.96 - ETA: 5s - loss: 0.0853 - accuracy: 0.96 - ETA: 5s - loss: 0.0852 - accuracy: 0.96 - ETA: 5s - loss: 0.0852 - accuracy: 0.96 - ETA: 5s - loss: 0.0851 - accuracy: 0.96 - ETA: 5s - loss: 0.0850 - accuracy: 0.96 - ETA: 5s - loss: 0.0849 - accuracy: 0.96 - ETA: 5s - loss: 0.0849 - accuracy: 0.96 - ETA: 5s - loss: 0.0848 - accuracy: 0.96 - ETA: 5s - loss: 0.0847 - accuracy: 0.96 - ETA: 5s - loss: 0.0846 - accuracy: 0.96 - ETA: 5s - loss: 0.0846 - accuracy: 0.96 - ETA: 5s - loss: 0.0845 - accuracy: 0.96 - ETA: 5s - loss: 0.0844 - accuracy: 0.96 - ETA: 5s - loss: 0.0844 - accuracy: 0.96 - ETA: 5s - loss: 0.0843 - accuracy: 0.96 - ETA: 5s - loss: 0.0842 - accuracy: 0.96 - ETA: 4s - loss: 0.0841 - accuracy: 0.96 - ETA: 4s - 
loss: 0.0840 - accuracy: 0.97 - ETA: 4s - loss: 0.0839 - accuracy: 0.97 - ETA: 4s - loss: 0.0838 - accuracy: 0.97 - ETA: 4s - loss: 0.0838 - accuracy: 0.97 - ETA: 4s - loss: 0.0837 - accuracy: 0.97 - ETA: 4s - loss: 0.0837 - accuracy: 0.97 - ETA: 4s - loss: 0.0836 - accuracy: 0.97 - ETA: 4s - loss: 0.0835 - accuracy: 0.97 - ETA: 4s - loss: 0.0834 - accuracy: 0.97 - ETA: 4s - loss: 0.0833 - accuracy: 0.97 - ETA: 4s - loss: 0.0832 - accuracy: 0.97 - ETA: 4s - loss: 0.0832 - accuracy: 0.97 - ETA: 4s - loss: 0.0831 - accuracy: 0.97 - ETA: 4s - loss: 0.0830 - accuracy: 0.97 - ETA: 4s - loss: 0.0830 - accuracy: 0.97 - ETA: 4s - loss: 0.0829 - accuracy: 0.97 - ETA: 4s - loss: 0.0828 - accuracy: 0.97 - ETA: 4s - loss: 0.0828 - accuracy: 0.97 - ETA: 4s - loss: 0.0827 - accuracy: 0.97 - ETA: 3s - loss: 0.0826 - accuracy: 0.97 - ETA: 3s - loss: 0.0826 - accuracy: 0.97 - ETA: 3s - loss: 0.0825 - accuracy: 0.97 - ETA: 3s - loss: 0.0825 - accuracy: 0.97 - ETA: 3s - loss: 0.0824 - accuracy: 0.97 - ETA: 3s - loss: 0.0823 - accuracy: 0.97 - ETA: 3s - loss: 0.0822 - accuracy: 0.97 - ETA: 3s - loss: 0.0822 - accuracy: 0.97 - ETA: 3s - loss: 0.0821 - accuracy: 0.97 - ETA: 3s - loss: 0.0821 - accuracy: 0.97 - ETA: 3s - loss: 0.0820 - accuracy: 0.97 - ETA: 3s - loss: 0.0819 - accuracy: 0.97 - ETA: 3s - loss: 0.0819 - accuracy: 0.97 - ETA: 3s - loss: 0.0818 - accuracy: 0.97 - ETA: 3s - loss: 0.0818 - accuracy: 0.97 - ETA: 3s - loss: 0.0817 - accuracy: 0.97 - ETA: 3s - loss: 0.0816 - accuracy: 0.97 - ETA: 3s - loss: 0.0815 - accuracy: 0.97 - ETA: 3s - loss: 0.0814 - accuracy: 0.97 - ETA: 3s - loss: 0.0814 - accuracy: 0.97 - ETA: 3s - loss: 0.0813 - accuracy: 0.97 - ETA: 2s - loss: 0.0812 - accuracy: 0.97 - ETA: 2s - loss: 0.0811 - accuracy: 0.97 - ETA: 2s - loss: 0.0810 - accuracy: 0.97 - ETA: 2s - loss: 0.0809 - accuracy: 0.97 - ETA: 2s - loss: 0.0809 - accuracy: 0.97 - ETA: 2s - loss: 0.0808 - accuracy: 0.97 - ETA: 2s - loss: 0.0808 - accuracy: 0.97 - ETA: 2s - loss: 0.0807 - accuracy: 
0.97 - ETA: 2s - loss: 0.0806 - accuracy: 0.97 - ETA: 2s - loss: 0.0805 - accuracy: 0.97 - ETA: 2s - loss: 0.0805 - accuracy: 0.97 - ETA: 2s - loss: 0.0804 - accuracy: 0.97 - ETA: 2s - loss: 0.0803 - accuracy: 0.97 - ETA: 2s - loss: 0.0803 - accuracy: 0.97 - ETA: 2s - loss: 0.0802 - accuracy: 0.97 - ETA: 2s - loss: 0.0802 - accuracy: 0.97 - ETA: 2s - loss: 0.0801 - accuracy: 0.97 - ETA: 2s - loss: 0.0800 - accuracy: 0.97 - ETA: 2s - loss: 0.0800 - accuracy: 0.97 - ETA: 1s - loss: 0.0799 - accuracy: 0.97 - ETA: 1s - loss: 0.0798 - accuracy: 0.97 - ETA: 1s - loss: 0.0797 - accuracy: 0.97 - ETA: 1s - loss: 0.0797 - accuracy: 0.97 - ETA: 1s - loss: 0.0796 - accuracy: 0.97 - ETA: 1s - loss: 0.0795 - accuracy: 0.97 - ETA: 1s - loss: 0.0795 - accuracy: 0.97 - ETA: 1s - loss: 0.0794 - accuracy: 0.97 - ETA: 1s - loss: 0.0793 - accuracy: 0.97 - ETA: 1s - loss: 0.0793 - accuracy: 0.97 - ETA: 1s - loss: 0.0792 - accuracy: 0.97 - ETA: 1s - loss: 0.0791 - accuracy: 0.97 - ETA: 1s - loss: 0.0790 - accuracy: 0.97 - ETA: 1s - loss: 0.0790 - accuracy: 0.97 - ETA: 1s - loss: 0.0789 - accuracy: 0.97 - ETA: 1s - loss: 0.0788 - accuracy: 0.97 - ETA: 1s - loss: 0.0788 - accuracy: 0.97 - ETA: 1s - loss: 0.0787 - accuracy: 0.97 - ETA: 1s - loss: 0.0787 - accuracy: 0.97 - ETA: 1s - loss: 0.0786 - accuracy: 0.97 - ETA: 0s - loss: 0.0786 - accuracy: 0.97 - ETA: 0s - loss: 0.0785 - accuracy: 0.97 - ETA: 0s - loss: 0.0784 - accuracy: 0.97 - ETA: 0s - loss: 0.0783 - accuracy: 0.97 - ETA: 0s - loss: 0.0783 - accuracy: 0.97 - ETA: 0s - loss: 0.0782 - accuracy: 0.97 - ETA: 0s - loss: 0.0782 - accuracy: 0.97 - ETA: 0s - loss: 0.0781 - accuracy: 0.97 - ETA: 0s - loss: 0.0781 - accuracy: 0.97 - ETA: 0s - loss: 0.0780 - accuracy: 0.97 - ETA: 0s - loss: 0.0779 - accuracy: 0.97 - ETA: 0s - loss: 0.0779 - accuracy: 0.97 - ETA: 0s - loss: 0.0778 - accuracy: 0.97 - ETA: 0s - loss: 0.0777 - accuracy: 0.97 - ETA: 0s - loss: 0.0776 - accuracy: 0.97 - ETA: 0s - loss: 0.0776 - accuracy: 0.97 - ETA: 0s - loss: 
0.0775 - accuracy: 0.97 - ETA: 0s - loss: 0.0774 - accuracy: 0.97 - ETA: 0s - loss: 0.0774 - accuracy: 0.97 - ETA: 0s - loss: 0.0773 - accuracy: 0.97 - 37s 20us/step - loss: 0.0773 - accuracy: 0.9725 - val_loss: 0.0357 - val_accuracy: 0.9882\n",
+ "\n",
+ "Epoch 00001: val_loss improved from inf to 0.03567, saving model to test.h8\n",
+ "Epoch 2/2\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " 474496/1806870 [======>.......................] - ETA: 51:48 - loss: 0.0022 - accuracy: 1.000 - ETA: 1:13 - loss: 0.0302 - accuracy: 0.990 - ETA: 53s - loss: 0.0260 - accuracy: 0.9923 - ETA: 47s - loss: 0.0219 - accuracy: 0.993 - ETA: 43s - loss: 0.0236 - accuracy: 0.993 - ETA: 41s - loss: 0.0304 - accuracy: 0.991 - ETA: 40s - loss: 0.0327 - accuracy: 0.990 - ETA: 39s - loss: 0.0327 - accuracy: 0.990 - ETA: 38s - loss: 0.0317 - accuracy: 0.990 - ETA: 38s - loss: 0.0308 - accuracy: 0.990 - ETA: 37s - loss: 0.0303 - accuracy: 0.990 - ETA: 37s - loss: 0.0307 - accuracy: 0.990 - ETA: 37s - loss: 0.0311 - accuracy: 0.990 - ETA: 36s - loss: 0.0312 - accuracy: 0.990 - ETA: 36s - loss: 0.0306 - accuracy: 0.990 - ETA: 36s - loss: 0.0319 - accuracy: 0.989 - ETA: 36s - loss: 0.0315 - accuracy: 0.989 - ETA: 35s - loss: 0.0318 - accuracy: 0.989 - ETA: 35s - loss: 0.0317 - accuracy: 0.989 - ETA: 35s - loss: 0.0317 - accuracy: 0.989 - ETA: 35s - loss: 0.0323 - accuracy: 0.989 - ETA: 35s - loss: 0.0319 - accuracy: 0.989 - ETA: 35s - loss: 0.0317 - accuracy: 0.989 - ETA: 34s - loss: 0.0316 - accuracy: 0.989 - ETA: 34s - loss: 0.0318 - accuracy: 0.989 - ETA: 34s - loss: 0.0327 - accuracy: 0.989 - ETA: 34s - loss: 0.0325 - accuracy: 0.989 - ETA: 34s - loss: 0.0323 - accuracy: 0.989 - ETA: 34s - loss: 0.0324 - accuracy: 0.989 - ETA: 34s - loss: 0.0321 - accuracy: 0.989 - ETA: 34s - loss: 0.0323 - accuracy: 0.989 - ETA: 34s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0318 - accuracy: 0.989 - ETA: 33s - loss: 0.0318 - accuracy: 0.989 - ETA: 33s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0322 - accuracy: 0.989 - ETA: 33s - loss: 0.0321 - accuracy: 0.989 - ETA: 33s - loss: 0.0322 - accuracy: 0.989 - ETA: 33s - loss: 0.0320 - accuracy: 0.989 - ETA: 33s - loss: 0.0322 - accuracy: 0.989 - ETA: 33s - loss: 0.0324 - accuracy: 0.989 - ETA: 33s - loss: 0.0323 - accuracy: 0.989 - ETA: 
32s - loss: 0.0327 - accuracy: 0.988 - ETA: 32s - loss: 0.0325 - accuracy: 0.988 - ETA: 32s - loss: 0.0324 - accuracy: 0.989 - ETA: 32s - loss: 0.0325 - accuracy: 0.988 - ETA: 32s - loss: 0.0324 - accuracy: 0.989 - ETA: 32s - loss: 0.0326 - accuracy: 0.988 - ETA: 32s - loss: 0.0330 - accuracy: 0.988 - ETA: 32s - loss: 0.0330 - accuracy: 0.988 - ETA: 32s - loss: 0.0331 - accuracy: 0.988 - ETA: 32s - loss: 0.0334 - accuracy: 0.988 - ETA: 32s - loss: 0.0333 - accuracy: 0.988 - ETA: 32s - loss: 0.0332 - accuracy: 0.988 - ETA: 32s - loss: 0.0331 - accuracy: 0.988 - ETA: 32s - loss: 0.0329 - accuracy: 0.988 - ETA: 32s - loss: 0.0329 - accuracy: 0.988 - ETA: 32s - loss: 0.0327 - accuracy: 0.988 - ETA: 32s - loss: 0.0326 - accuracy: 0.988 - ETA: 32s - loss: 0.0328 - accuracy: 0.988 - ETA: 32s - loss: 0.0328 - accuracy: 0.988 - ETA: 31s - loss: 0.0328 - accuracy: 0.988 - ETA: 31s - loss: 0.0326 - accuracy: 0.989 - ETA: 31s - loss: 0.0327 - accuracy: 0.989 - ETA: 31s - loss: 0.0327 - accuracy: 0.989 - ETA: 31s - loss: 0.0327 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0322 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0326 - accuracy: 0.988 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0322 - accuracy: 0.989 - ETA: 31s - loss: 0.0322 - accuracy: 0.989 - ETA: 31s - loss: 0.0321 - accuracy: 0.989 - ETA: 31s - loss: 0.0320 - accuracy: 0.989 - ETA: 31s - loss: 0.0319 - accuracy: 0.989 - ETA: 31s - loss: 0.0318 - accuracy: 0.989 - ETA: 31s - loss: 0.0317 - accuracy: 0.989 - ETA: 31s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0317 
- accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0317 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0313 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0313 - accuracy: 0.989 - ETA: 30s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0311 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0309 - accuracy: 0.989 - ETA: 29s - loss: 0.0309 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - 
ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0306 - accuracy: 0.989 - ETA: 28s - loss: 0.0306 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0306 - accuracy: 0.989 - ETA: 27s - loss: 0.0306 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0303 - accuracy: 0.989 - ETA: 27s - loss: 0.0302 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 
0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.9898"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " 949216/1806870 [==============>...............] - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0301 - accuracy: 0.989 - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0298 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0298 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0298 - accuracy: 0.989 - ETA: 25s - loss: 0.0297 - accuracy: 0.989 - ETA: 25s - loss: 0.0296 - accuracy: 0.989 - ETA: 25s - loss: 0.0296 - accuracy: 0.989 - ETA: 24s - loss: 0.0297 - accuracy: 0.989 - ETA: 24s - loss: 0.0296 - accuracy: 0.989 - ETA: 24s - loss: 0.0296 - accuracy: 0.990 - ETA: 24s - loss: 0.0296 - accuracy: 0.990 - ETA: 24s - loss: 0.0296 - accuracy: 0.990 - ETA: 24s - loss: 0.0295 - accuracy: 0.990 - ETA: 24s - loss: 0.0295 - accuracy: 0.990 - ETA: 24s - loss: 0.0294 - accuracy: 0.990 - ETA: 24s - loss: 0.0294 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s 
- loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0288 - accuracy: 0.990 - ETA: 23s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0285 - accuracy: 0.990 - ETA: 22s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0284 - accuracy: 0.990 - ETA: 21s - loss: 0.0284 - accuracy: 0.990 - ETA: 21s - loss: 0.0284 - accuracy: 0.990 - ETA: 21s - loss: 0.0283 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0283 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - 
accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0280 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 20s - loss: 0.0282 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 19s - loss: 0.0278 - accuracy: 0.990 - ETA: 19s - loss: 0.0278 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - 
ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0275 - accuracy: 0.990 - ETA: 18s - loss: 0.0275 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0273 - accuracy: 0.990 - ETA: 18s - loss: 0.0273 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0270 - accuracy: 0.990 - ETA: 18s - loss: 0.0270 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0272 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0272 - accuracy: 0.990 - ETA: 17s - loss: 0.0272 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 
0.0269 - accuracy: 0.990 - ETA: 16s - loss: 0.0269 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0267 - accuracy: 0.9909"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "1434176/1806870 [======================>.......] - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0267 - accuracy: 0.991 - ETA: 16s - loss: 0.0267 - accuracy: 0.991 - ETA: 16s - loss: 0.0267 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0265 - accuracy: 0.991 - ETA: 16s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0262 - accuracy: 0.991 - ETA: 15s - loss: 0.0262 - accuracy: 0.991 - ETA: 15s - loss: 0.0261 - accuracy: 0.991 - ETA: 15s - loss: 0.0262 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s 
- loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0258 - accuracy: 0.991 - ETA: 14s - loss: 0.0258 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - 
accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0252 - accuracy: 0.991 - ETA: 11s - loss: 0.0252 - accuracy: 0.991 - ETA: 11s - loss: 0.0252 - accuracy: 0.991 - ETA: 10s - loss: 0.0252 - accuracy: 0.991 - ETA: 10s - loss: 0.0252 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 9s - loss: 0.0250 - accuracy: 0.991 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 
9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0245 - accuracy: 0.99 - ETA: 8s - loss: 0.0245 - accuracy: 0.99 - ETA: 8s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - 
accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.9918"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "1806870/1806870 [==============================] - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 6s - loss: 0.0243 - accuracy: 0.99 - ETA: 6s - loss: 0.0243 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0240 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0238 - accuracy: 0.99 - ETA: 5s - 
loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 
0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0232 - accuracy: 0.99 - ETA: 2s - loss: 0.0232 - accuracy: 0.99 - ETA: 2s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 
0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - 37s 20us/step - loss: 0.0229 - accuracy: 0.9923 - val_loss: 0.0129 - val_accuracy: 0.9955\n",
+ "\n",
+ "Epoch 00002: val_loss improved from 0.03567 to 0.01290, saving model to test.h8\n"
+ ]
+ }
+ ],
+ "source": [
+ "#begin federated\n",
+ "\n",
+ "earlystopping = EarlyStopping(monitor = 'val_loss',\n",
+ " min_delta = 0.01,\n",
+ " patience = 50,\n",
+ " verbose = 1,\n",
+ " baseline = 2,\n",
+ " restore_best_weights = True)\n",
+ "\n",
+ "checkpoint = ModelCheckpoint('test.h8',\n",
+ " monitor='val_loss',\n",
+ " mode='min',\n",
+ " save_best_only=True,\n",
+ " verbose=1)\n",
+ " \n",
+ "model = Sequential()\n",
+ "model.add(Dense(70, input_dim=39, activation='relu'))\n",
+ "model.add(Dense(50, activation='relu'))\n",
+ "model.add(Dense(50, activation='relu'))\n",
+ "model.add(Dense(2, activation='softmax'))\n",
+ "#sgd = optimizers.SGD(learning_rate=0.0001, momentum=0.9, nesterov=True)\n",
+ "model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])\n",
+ "# def train_shard(i):\n",
+ "history = model.fit(X_train, y_train,\n",
+ "epochs=2,\n",
+ "validation_data=(X_test, y_test),\n",
+ "callbacks = [checkpoint, earlystopping],\n",
+ "shuffle=True)\n",
+ "# return history\n",
+ "# for i in range(len(shard1_traintest)):\n",
+ "# train_shard(i)\n",
+ "#get_3rd_layer_output = K.function([model.layers[0].input],\n",
+ "# [model.layers[2].output])\n",
+ "#layer_output = get_3rd_layer_output(shard_traintest[i][\"X_train\"])[0]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 57,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Model: \"sequential_4\"\n",
+ "_________________________________________________________________\n",
+ "Layer (type) Output Shape Param # \n",
+ "=================================================================\n",
+ "dense_13 (Dense) (None, 70) 2800 \n",
+ "_________________________________________________________________\n",
+ "dense_14 (Dense) (None, 50) 3550 \n",
+ "_________________________________________________________________\n",
+ "dense_15 (Dense) (None, 50) 2550 \n",
+ "_________________________________________________________________\n",
+ "dense_16 (Dense) (None, 2) 102 \n",
+ "=================================================================\n",
+ "Total params: 9,002\n",
+ "Trainable params: 9,002\n",
+ "Non-trainable params: 0\n",
+ "_________________________________________________________________\n"
+ ]
+ }
+ ],
+ "source": [
+ "model.summary()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 58,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#AUXILIARY METHODS FOR FEDERATED LEARNING\n",
+ "\n",
+ "# RETURN INDICES TO LAYERS WITH WEIGHTS AND BIASES\n",
+ "def trainable_layers(model):\n",
+ " return [i for i, layer in enumerate(model.layers) if len(layer.get_weights()) > 0]\n",
+ "\n",
+ "# RETURN WEIGHTS AND BIASES OF A MODEL\n",
+ "def get_parameters(model):\n",
+ " weights = []\n",
+ " biases = []\n",
+ " index = trainable_layers(model)\n",
+ " for i in index:\n",
+ " weights.append(copy.deepcopy(model.layers[i].get_weights()[0]))\n",
+ " biases.append(copy.deepcopy(model.layers[i].get_weights()[1])) \n",
+ " \n",
+ " return weights, biases\n",
+ " \n",
+ "# SET WEIGHTS AND BIASES OF A MODEL\n",
+ "def set_parameters(model, weights, biases):\n",
+ " index = trainable_layers(model)\n",
+ " for i, j in enumerate(index):\n",
+ " model.layers[j].set_weights([weights[i], biases[i]])\n",
+ " \n",
+ "# DEPRECATED: RETURN THE GRADIENTS OF THE MODEL AFTER AN UPDATE \n",
+ "def get_gradients(model, inputs, outputs):\n",
+ " \"\"\" Gets gradient of model for given inputs and outputs for all weights\"\"\"\n",
+ " grads = model.optimizer.get_gradients(model.total_loss, model.trainable_weights)\n",
+ " symb_inputs = (model._feed_inputs + model._feed_targets + model._feed_sample_weights)\n",
+ " f = K.function(symb_inputs, grads)\n",
+ " x, y, sample_weight = model._standardize_user_data(inputs, outputs)\n",
+ " output_grad = f(x + y + sample_weight)\n",
+ " \n",
+ " w_grad = [w for i,w in enumerate(output_grad) if i%2==0]\n",
+ " b_grad = [w for i,w in enumerate(output_grad) if i%2==1]\n",
+ " \n",
+ " return w_grad, b_grad\n",
+ "\n",
+ "# RETURN THE DIFFERENCE OF MODELS' WEIGHTS AND BIASES AFTER AN UPDATE \n",
+ "# NOTE: LEARNING RATE IS APPLIED, SO THE UPDATE IS DIFFERENT FROM THE\n",
+ "# GRADIENTS. IN CASE VANILLA SGD IS USED, THE GRADIENTS ARE OBTAINED\n",
+ "# AS (UPDATES / LEARNING_RATE)\n",
+ "def get_updates(model, inputs, outputs, batch_size, epochs):\n",
+ " w, b = get_parameters(model)\n",
+ " #model.train_on_batch(inputs, outputs)\n",
+ " model.fit(inputs, outputs, batch_size=batch_size, epochs=epochs, verbose=0)\n",
+ " w_new, b_new = get_parameters(model)\n",
+ " \n",
+ " weight_updates = [old - new for old,new in zip(w, w_new)]\n",
+ " bias_updates = [old - new for old,new in zip(b, b_new)]\n",
+ " \n",
+ " return weight_updates, bias_updates\n",
+ "\n",
+ "# UPDATE THE MODEL'S WEIGHTS AND PARAMETERS WITH AN UPDATE\n",
+ "def apply_updates(model, eta, w_new, b_new):\n",
+ " w, b = get_parameters(model)\n",
+ " new_weights = [theta - eta*delta for theta,delta in zip(w, w_new)]\n",
+ " new_biases = [theta - eta*delta for theta,delta in zip(b, b_new)]\n",
+ " set_parameters(model, new_weights, new_biases)\n",
+ " \n",
+ "# FEDERATED AGGREGATION FUNCTION\n",
+ "def aggregate(n_layers, n_peers, f, w_updates, b_updates):\n",
+ " agg_w = [f([w_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]\n",
+ " agg_b = [f([b_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]\n",
+ " return agg_w, agg_b\n",
+ "\n",
+ "# SOLVE NANS\n",
+ "def nans_to_zero(W, B):\n",
+ " W0 = [np.nan_to_num(w, nan=0.0, posinf=0.0, neginf=0.0) for w in W]\n",
+ " B0 = [np.nan_to_num(b, nan=0.0, posinf=0.0, neginf=0.0) for b in B]\n",
+ " return W0, B0\n",
+ "\n",
+ "def build_forest(X,y):\n",
+ " clf=RandomForestClassifier(n_estimators=1000, max_depth=7, random_state=0, verbose = 1)\n",
+ " clf.fit(X,y)\n",
+ " return clf\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 59,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "\n",
+ "def scan_wrong(forest_predictions, FL_predict1, forest , y_test_local, X_test_local):\n",
+ " sum_feature_improtance= 0\n",
+ " overal_wrong_feature_importance = 0\n",
+ " counter = 0\n",
+ " second_counter = 0\n",
+ " never_seen = 0\n",
+ " avr_wrong_importance = 0\n",
+ " counter1 = 0\n",
+ " for i in range (len(FL_predict1)):\n",
+ " if(FL_predict1[i][0] < 0.5):\n",
+ " FL_predict1[i][0] = 0\n",
+ " FL_predict1[i][1] = 1\n",
+ " if(FL_predict1[i][0] >= 0.5):\n",
+ " FL_predict1[i][0] = 1\n",
+ " FL_predict1[i][1] = 0\n",
+ " for i in range (len(FL_predict1)):\n",
+ " i_tree = 0\n",
+ " # print(i)\n",
+ " if (FL_predict1[i][0] != y_test_local[i][0]):\n",
+ " counter1+=1\n",
+ "# print(i)\n",
+ " # print(\"the test sample number \",i ,\" have been niss classified by the blackbox\" )\n",
+ " for tree_in_forest in forest.estimators_:\n",
+ " temp = forest.estimators_[i_tree].predict([X_test_local[i]])\n",
+ " i_tree = i_tree + 1\n",
+ " inttemp = temp[0].astype(int)\n",
+ " if(FL_predict1[i][0] == inttemp[0]):\n",
+ " sum_feature_improtance = sum_feature_improtance + tree_in_forest.feature_importances_\n",
+ " counter = counter + 1\n",
+ " if(counter>0):\n",
+ " ave_feature_importence = sum_feature_improtance/counter\n",
+ " overal_wrong_feature_importance = ave_feature_importence + overal_wrong_feature_importance\n",
+ " second_counter = second_counter + 1\n",
+ "# print(ave_feature_importence)\n",
+ "# print(\"numbers of the trees predect the wrong predection as the blackbox is \", counter)\n",
+ " counter = 0\n",
+ " sum_feature_improtance = 0\n",
+ " # print(\"------------------------------------------------------------------------------------\")\n",
+ " else:\n",
+ " if(FL_predict1[i][0] != y_test_local[i][0]):\n",
+ " # print(\"the test sample number \", i,\" never have been miss classified by the forest.\")\n",
+ " never_seen = never_seen +1\n",
+ " if(second_counter>0):\n",
+ "# print(second_counter)\n",
+ " # print(\"the number of sampels that was miss classifed by the blackbox and classified correctly by the all forest is\", never_seen)\n",
+ " # print(overal_wrong_feature_importance)\n",
+ " avr_wrong_importance = overal_wrong_feature_importance / second_counter\n",
+ " # print(\"the average wrong dessition cosed by the feature\", avr_wrong_importance)\n",
+ " # print(\"=====================================================================================\")\n",
+ " print(\"the number of miss classified sampels is \", counter1)\n",
+ " return forest.feature_importances_"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 60,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def attack_data(inputs, feature_attacked):\n",
+ " z=0\n",
+ " C=0\n",
+ " z=inputs.max(axis = 0)\n",
+ " C=inputs.min(axis = 0)\n",
+ " for i in range(len(inputs)):\n",
+ " for j in range(len(inputs[0])):\n",
+ " inputs[i][j]= random.uniform(z[j], C[j])\n",
+ "# inputs[i][feature_attacked[1]]= random.uniform(z[feature_attacked[1]], C[feature_attacked[1]]) \n",
+ "# inputs[i][feature_attacked[1]]= random.uniform(z[feature_attacked[1]], C[feature_attacked[1]])\n",
+ "# inputs[i][feature_attacked] = random.randrange(z[feature_attacked]+1)\n",
+ "# print(X_test_attacked[i][att])\n",
+ "# if(X_test_attacked[i][att] == X_test[i][att]):\n",
+ "# feat_same = feat_same + 1\n",
+ " return inputs"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 61,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[0, 1, 2, 3]"
+ ]
+ },
+ "execution_count": 61,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "trainable_layers(model)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 62,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "([array([[-0.19031775, 0.06523454, -0.14931396, ..., 0. ,\n",
+ " 0. , -0.29390967],\n",
+ " [ 0.033116 , -0.08946543, 0.01096467, ..., 0. ,\n",
+ " 0. , -0.015468 ],\n",
+ " [ 0.01199894, 0.02766883, 0.01122332, ..., 0. ,\n",
+ " 0. , -0.0217886 ],\n",
+ " ...,\n",
+ " [-0.00909673, -0.04540771, -0.20296723, ..., 0. ,\n",
+ " 0. , -0.13601643],\n",
+ " [-0.2878099 , 0.23572141, 0.23246315, ..., 0. ,\n",
+ " 0. , 0.10910301],\n",
+ " [ 0.20305228, 0.28390008, 0.5178579 , ..., 0. ,\n",
+ " 0. , -0.20772421]], dtype=float32),\n",
+ " array([[ 0.03976321, 0.00334442, -0.1938284 , ..., 0.12357424,\n",
+ " 0. , -0.4720744 ],\n",
+ " [-0.05385096, -0.19503492, -0.11490272, ..., -0.06129656,\n",
+ " 0. , -0.07149667],\n",
+ " [ 0.09047867, -0.00144058, -0.30648926, ..., -0.06290518,\n",
+ " 0. , -0.20281315],\n",
+ " ...,\n",
+ " [ 0. , 0. , 0. , ..., 0. ,\n",
+ " 0. , 0. ],\n",
+ " [ 0. , 0. , 0. , ..., 0. ,\n",
+ " 0. , 0. ],\n",
+ " [ 0.03847116, 0.3318082 , -0.12002158, ..., 0.0102334 ,\n",
+ " 0. , 0.19092064]], dtype=float32),\n",
+ " array([[-2.42347077e-01, -8.76481831e-03, -3.90259176e-02, ...,\n",
+ " -6.67591989e-02, 5.05356491e-03, 1.02399185e-01],\n",
+ " [-8.08794439e-01, -3.27832878e-01, -5.26163459e-01, ...,\n",
+ " -4.11394715e-01, 0.00000000e+00, 2.40460098e-01],\n",
+ " [ 2.49932185e-01, -3.12010467e-01, -4.72681373e-01, ...,\n",
+ " 6.11434951e-02, 0.00000000e+00, -8.85864496e-02],\n",
+ " ...,\n",
+ " [-1.02717876e-02, 4.91040945e-02, 2.08511353e-01, ...,\n",
+ " 2.92169333e-01, 3.78781557e-03, -7.43160397e-02],\n",
+ " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
+ " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
+ " [-2.40534097e-01, -1.12464696e-01, -6.61374629e-02, ...,\n",
+ " -2.46955007e-01, 2.02164054e-04, -1.38975129e-01]], dtype=float32),\n",
+ " array([[-3.04001868e-01, 3.04008663e-01],\n",
+ " [-3.18321645e-01, 3.18326294e-01],\n",
+ " [-6.49204254e-02, 6.49255514e-02],\n",
+ " [ 2.38253117e-01, -2.38266587e-01],\n",
+ " [-2.33515322e-01, 2.33533710e-01],\n",
+ " [ 2.04552054e-01, -2.04541385e-01],\n",
+ " [-2.16077894e-01, 2.16096789e-01],\n",
+ " [-9.29667503e-02, 9.29654241e-02],\n",
+ " [ 8.22023153e-02, -8.21979046e-02],\n",
+ " [-1.02218628e-01, 1.02235526e-01],\n",
+ " [-2.78488606e-01, 2.78504372e-01],\n",
+ " [-2.50967979e-01, 2.50985503e-01],\n",
+ " [ 6.83438182e-02, -6.83349371e-02],\n",
+ " [ 1.24650508e-01, -1.24644592e-01],\n",
+ " [ 6.33001328e-05, -6.16163015e-05],\n",
+ " [ 1.41896963e-01, -1.41895413e-01],\n",
+ " [-1.02908731e-01, 1.02926940e-01],\n",
+ " [ 5.35694361e-02, -5.35876751e-02],\n",
+ " [ 8.15955400e-02, -8.15925598e-02],\n",
+ " [-1.03019953e-01, 1.03019953e-01],\n",
+ " [-1.25430942e-01, 1.25459164e-01],\n",
+ " [-3.38193893e-01, 3.38200092e-01],\n",
+ " [-1.09561086e-02, 1.09702498e-02],\n",
+ " [-2.82736778e-01, 2.82758176e-01],\n",
+ " [-4.44638729e-01, 4.44639683e-01],\n",
+ " [-6.57172203e-02, 6.57169819e-02],\n",
+ " [-1.72389388e-01, 1.72392488e-01],\n",
+ " [-5.41939139e-02, 5.42033315e-02],\n",
+ " [-5.90079725e-02, 5.90344965e-02],\n",
+ " [ 3.66447330e-01, -3.66433740e-01],\n",
+ " [-2.08910614e-01, 2.08919704e-01],\n",
+ " [-2.05386773e-01, 2.05394983e-01],\n",
+ " [ 2.22557023e-01, -2.22551197e-01],\n",
+ " [ 1.47694349e-01, -1.47691488e-01],\n",
+ " [-1.98568344e-01, 1.98586702e-01],\n",
+ " [ 3.86301279e-02, -3.86058390e-02],\n",
+ " [-3.03680480e-01, 3.03690165e-01],\n",
+ " [ 1.28941819e-01, -1.28929913e-01],\n",
+ " [-1.06625021e-01, 1.06641471e-01],\n",
+ " [ 2.11793751e-01, -2.11783320e-01],\n",
+ " [-1.20612228e+00, 1.20613194e+00],\n",
+ " [ 2.32780397e-01, -2.32766151e-01],\n",
+ " [ 1.07535511e-01, -1.07531980e-01],\n",
+ " [-1.68534845e-01, 1.68567598e-01],\n",
+ " [ 1.57701567e-01, -1.57669455e-01],\n",
+ " [ 2.91942716e-01, -2.91933715e-01],\n",
+ " [ 2.90191770e-02, -2.90192217e-02],\n",
+ " [ 4.56188083e-01, -4.56171900e-01],\n",
+ " [ 2.81581283e-03, -2.81581283e-03],\n",
+ " [-1.18135691e-01, 1.18147850e-01]], dtype=float32)],\n",
+ " [array([ 3.1866699e-03, -7.6422125e-02, 3.3879340e-02, 1.4779243e-01,\n",
+ " 0.0000000e+00, 0.0000000e+00, 1.3526529e-05, 1.0225922e-06,\n",
+ " -2.2294750e-03, -2.1711849e-03, -9.2923865e-02, -3.4378596e-02,\n",
+ " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 7.9754189e-02,\n",
+ " 9.6828096e-02, 0.0000000e+00, 6.9650002e-02, -5.8602124e-02,\n",
+ " -8.6331740e-05, 0.0000000e+00, 1.2667640e-01, 0.0000000e+00,\n",
+ " 0.0000000e+00, -2.6648588e-02, 0.0000000e+00, -3.3291716e-02,\n",
+ " 0.0000000e+00, 0.0000000e+00, 2.2499807e-02, 6.8744346e-02,\n",
+ " -5.0262503e-02, 0.0000000e+00, -8.9680202e-02, 0.0000000e+00,\n",
+ " -4.2678282e-02, 0.0000000e+00, -1.7710961e-02, 0.0000000e+00,\n",
+ " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, -6.1750807e-02,\n",
+ " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,\n",
+ " 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,\n",
+ " 0.0000000e+00, 0.0000000e+00, 5.8094569e-02, 1.2169727e-02,\n",
+ " 1.9261871e-10, 0.0000000e+00, -7.9979539e-02, 0.0000000e+00,\n",
+ " 0.0000000e+00, 7.6093122e-02, 0.0000000e+00, 0.0000000e+00,\n",
+ " 1.6243661e-02, 0.0000000e+00, -9.8505989e-03, 0.0000000e+00,\n",
+ " 0.0000000e+00, -4.3590821e-02], dtype=float32),\n",
+ " array([-0.10964979, 0.1273394 , 0.05826145, 0.11250992, -0.00930649,\n",
+ " 0.08082695, -0.10440034, 0.02672271, 0.14781642, 0.27572772,\n",
+ " -0.0397696 , 0.18053436, 0. , -0.04786159, 0. ,\n",
+ " 0.15616408, 0.0424803 , 0. , -0.0375067 , -0.00756753,\n",
+ " 0.01335342, -0.08301416, -0.07382136, 0.02766102, -0.21604276,\n",
+ " 0.04904766, -0.00283363, 0.01198358, 0.17403054, -0.08457427,\n",
+ " 0.06056517, -0.00864101, 0.02029612, 0.12778968, 0.14824837,\n",
+ " 0.17251332, 0.03519725, 0.05309688, 0.1472145 , -0.18282993,\n",
+ " 0.10138815, 0.01851342, 0.03132945, 0. , -0.19095713,\n",
+ " 0.07761121, 0.17995 , 0.16866425, 0. , -0.02218707],\n",
+ " dtype=float32),\n",
+ " array([ 0.34423378, 0.44879648, -0.04138201, 0.48468772, 0.27873045,\n",
+ " -0.30794245, 0.15183273, 0.08334076, 0.18456669, -0.178191 ,\n",
+ " -0.14348036, 0.57680756, 0.60798985, 0.41697726, -0.00238903,\n",
+ " -0.26822644, -0.10311142, 0.11896864, 0.29975793, 0.23435119,\n",
+ " 0.23297757, 0.00584415, 0.31422138, 0.4213791 , -0.1971436 ,\n",
+ " -0.19466306, -0.08916584, 0.11673178, 0.06989807, -0.05853122,\n",
+ " -0.16468427, 0.20225608, 0.4647019 , -0.20615612, -0.06997643,\n",
+ " -0.16313455, 0.3702965 , 0.03510106, -0.06537277, -0.03514916,\n",
+ " 0.20234883, 0.42463556, -0.00177155, 0.18248317, -0.00233091,\n",
+ " 0.16106895, 0.03673995, -0.28332692, 0.03024025, -0.32204401],\n",
+ " dtype=float32),\n",
+ " array([-0.14419317, 0.14419758], dtype=float32)])"
+ ]
+ },
+ "execution_count": 62,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "get_updates(model, X_train, y_train, 32, 2)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 63,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "W = get_parameters(model)[0]\n",
+ "B = get_parameters(model)[1]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 64,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#AUXILIARY METHODS FOR FL INSPECTION\n",
+ "\n",
+ "# TRANSFORM ALL WEIGHT TENSORS TO 1D ARRAY\n",
+ "def flatten_weights(w_in):\n",
+ " h = w_in[0].reshape(-1)\n",
+ " for w in w_in[1:]:\n",
+ " h = np.append(h, w.reshape(-1))\n",
+ " return h\n",
+ "\n",
+ "# TRANSFORM ALL BIAS TENSORS TO 1D ARRAY\n",
+ "def flatten_biases(b_in):\n",
+ " h = b_in[0].reshape(-1)\n",
+ " for b in b_in[1:]:\n",
+ " h = np.append(h, b.reshape(-1))\n",
+ " return h\n",
+ "\n",
+ "# TRANSFORM WEIGHT AND BIAS TENSORS TO 1D ARRAY\n",
+ "def flatten_parameters(w_in, b_in):\n",
+ " w = flatten_weights(w_in)\n",
+ " b = flatten_biases(b_in)\n",
+ " return w, b\n",
+ "\n",
+ "# COMPUTE EUCLIDEAN DISTANCE OF WEIGHTS\n",
+ "def dist_weights(w_a, w_b):\n",
+ " wf_a = flatten_weights(w_a)\n",
+ " wf_b = flatten_weights(w_b)\n",
+ " return euclidean(wf_a, wf_b)\n",
+ "\n",
+ "# COMPUTE EUCLIDEAN DISTANCE OF BIASES\n",
+ "def dist_biases(b_a, b_b):\n",
+ " bf_a = flatten_biases(b_a)\n",
+ " bf_b = flatten_biases(b_b)\n",
+ " return euclidean(bf_a, bf_b)\n",
+ "\n",
+ "# COMPUTE EUCLIDEAN DISTANCE OF WEIGHTS AND BIASES\n",
+ "def dist_parameters(w_a, b_a, w_b, b_b):\n",
+ " wf_a, bf_a = flatten_parameters(w_a, b_a)\n",
+ " wf_b, bf_b = flatten_parameters(w_b, b_b)\n",
+ " return euclidean(np.append(wf_a, bf_a), np.append(wf_b, bf_b))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 65,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "39"
+ ]
+ },
+ "execution_count": 65,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "len(W[0])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 66,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# BASELINE SCENARIO\n",
+ "#buid the model as base line for the shards (sequential)\n",
+ "# Number of peers\n",
+ "#accordin to what we need\n",
+ "n_peers = 100\n",
+ "ss = int(len(X_train)/n_peers)\n",
+ "inputs_in = X_train[0*ss:0*ss+ss]\n",
+ "outputs_in = y_train[0*ss:0*ss+ss]\n",
+ "def build_model(X_t, y_t):\n",
+ " model = Sequential()\n",
+ " model.add(Dense(70, input_dim=39, activation='relu'))\n",
+ " model.add(Dense(64, activation='relu'))\n",
+ " model.add(Dense(50, activation='relu'))\n",
+ " model.add(Dense(2, activation='softmax'))\n",
+ " #sgd = optimizers.SGD(learning_rate=0.0001, momentum=0.9, nesterov=True)\n",
+ " model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])\n",
+ " model.fit(X_t,\n",
+ " y_t, \n",
+ " #inputs_in, \n",
+ " #outputs_in, \n",
+ "# X_train,\n",
+ "# y_train,\n",
+ " batch_size=32, \n",
+ " epochs=100, \n",
+ " verbose=1,\n",
+ " validation_data=((X_test, y_test)))\n",
+ " return model\n",
+ "\n",
+ "# model = build_model(inputs_in, outputs_in)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 67,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Model: \"sequential_4\"\n",
+ "_________________________________________________________________\n",
+ "Layer (type) Output Shape Param # \n",
+ "=================================================================\n",
+ "dense_13 (Dense) (None, 70) 2800 \n",
+ "_________________________________________________________________\n",
+ "dense_14 (Dense) (None, 50) 3550 \n",
+ "_________________________________________________________________\n",
+ "dense_15 (Dense) (None, 50) 2550 \n",
+ "_________________________________________________________________\n",
+ "dense_16 (Dense) (None, 2) 102 \n",
+ "=================================================================\n",
+ "Total params: 9,002\n",
+ "Trainable params: 9,002\n",
+ "Non-trainable params: 0\n",
+ "_________________________________________________________________\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "None"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "display(model.summary())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 68,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# predict probabilities for test set\n",
+ "yhat_probs = model.predict(X_test, verbose=0)\n",
+ "# predict crisp classes for test set\n",
+ "yhat_classes = model.predict_classes(X_test, verbose=0)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 69,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Accuracy: 0.998316\n",
+ "Precision: 0.998423\n",
+ "Recall: 0.998711\n",
+ "F1 score: 0.998567\n"
+ ]
+ }
+ ],
+ "source": [
+ "# accuracy: (tp + tn) / (p + n)\n",
+ "accuracy = accuracy_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n",
+ "print('Accuracy: %f' % accuracy)\n",
+ "# precision tp / (tp + fp)\n",
+ "precision = precision_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n",
+ "print('Precision: %f' % precision)\n",
+ "# recall: tp / (tp + fn)\n",
+ "recall = recall_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n",
+ "print('Recall: %f' % recall)\n",
+ "# f1: 2 tp / (2 tp + fp + fn)\n",
+ "f1 = f1_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n",
+ "print('F1 score: %f' % f1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 70,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([[55991, 126],\n",
+ " [ 103, 79782]], dtype=int64)"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQsAAADtCAYAAACoP1B5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAP4klEQVR4nO3dbaxlVXnA8f/DAA5ahxdRJGIrTSdUagIiAVqTRqXCSBvxg03AphBDMo1Bg2mTFvuFVmti+6FWUmtD4lRoLEhoSYlBphOqIU0QAaUIjMiIVadQRhh8aYm83Pv0w163c7y99+51ZJ05s/f5/5KVe846++67OMCT9bbXE5mJJPU5bN4NkDQMBgtJVQwWkqoYLCRVMVhIqmKwkFTl8Hk3QBqD89/6snxq/1LVtffe/+zOzNw24yY1Z7CQGnhy/xJ37Typ6tojTvzW8TNuzkwYLKQmkqVcnncjZspgITWQwDLj3g1tsJAaSJLns27OYqgMFlIjY+9ZLPzSaURsi4iHI2JPRFw57/aMTUTsiIh9EfHAvNsySwkskVVlqBY6WETEJuCTwDuAU4GLI+LU+bZqdD4DDG6Z8GexTFaVoVroYAGcBezJzEcz8zngBuDCObdpVDLzDmD/vNsxawksZVaVoVr0YPEa4HsT7/eWOmlqy5VlqBZ9gjPWqBtu6Nfc5MDnI2oserDYC7x24v1JwGNzaosGLBOeH3esWPhgcTewNSJOBv4TuAh4z3ybpGEKltbsqI7HQs9ZZOYLwPuBncBu4MbMfHC+rRqXiLgeuBM4JSL2RsRl827TLCSwnHVlqBa9Z0Fm3grcOu92jFVmXjzvNhwsY+9ZLHywkFroNmUZLCRVWE6DhaQe9iwkVUmC53PTvJsxUwu9GrIiIrbPuw1jN/bveKVnUVOGymDRGfV/yIeIkX/HwVIeVlWGymGI1EB3UtZwA0GNmQSLI48+Kje/esssbj0Tm094OVtOOWFY22W++fy8WzCVzbyULXHcoL7jn/A/PJfPVo8bhjzEqDGTYLH51Vs4+2/dNT1T5+6ddwtG7668vfrazBj0EKPGuP/ppINomagqfSLilIi4b6L8KCI+GBHHRcSuiHik/Dy2XB8RcXU57e3+iDhj4l6XlusfiYhLJ+rfFBFfL79zdUT0NsxgITWQBM/l4VWl916ZD2fm6Zl5OvAm4BngZuBK4PbM3ArcXt5Dd9Lb1lK2A58CiIjjgKuAs+kOerpqJcCUa7ZP/F7vaWYGC6mBlQnOmjKlc4FvZeZ36E5xu7bUXwu8q7y+ELguO18GjomIE4HzgV2ZuT8znwZ2AdvKZ1sy887MTOC6iXuty9UQqZGl+u3ex0fEPRPvr8nMa9a59iLg+vL6hMx8HCAzH4+IV5X69U5826h+7xr1GzJYSA0kwVJ9r+HJzDyz76KIOBJ4J/ChvkvXbNL09RtyGCI1spyHVZUpvAP4amY+Ud4/UYYQlJ/7Sv16J75tVH/SGvUbMlhIDXTbvQ+rKlO4mANDEIBbgJUVjUuBf56ov6SsipwD/LAMV3YC50XEsWVi8zxgZ/nsxxFxTlkFuWTiXutyGCI10PpBsoh4KfB24Pcmqj8G3FhOG/su8Nul/lbgAmAP3crJewEyc39EfITu+EiAD2fmSlqG99HldDkK+EIpGzJYSA1k0nRTVmY+A7xiVd1TdKsjq69N4PJ17rMD2LFG/T3AG6Zpk8FCaqJuw9WQGSykBrqMZOOeAjRYSI1MOXk5OAYLqYEkPINTUh17FpJ6LcIZnAYLqYEuI5k9C0kVPClLUq/MsGchqY77LCT16g6/cRgiqdf4D+w1WEgNJLh0KqmfOzglVTMjmaRe3XkW9iwkVXAYIqlXN2fhMERShbFv9x53KJQOkiR4YXlTVakREcdExE0R8Y2I2B0Rv2quU2kkWiVGLj4B3JaZvwycBuzGXKfS8K2shtSUPhGxBfh14NPd
vfO5zPwB5jqVxmGKCc6+XKe/CHwf+LuIOA24F7gCc51KwzflDs6+XKeHA2cAH8jMuyLiExwYcqzFXKfSkDScs9gL7M3Mu8r7m+iCh7lOpaHrjtWLqtJ7r8z/Ar4XEaeUqnOBhzDXqTQCGdXLopU+AHw2Io4EHqXLX3oY5jqVhq314TeZeR+w1ryGuU6lofPZEEm9VuYsxqxqgjMitkXEw2Vr6EZLONLCajXBeajq7VlExCbgk8Db6ZZc7o6IWzLzoVk3ThoKT8rqnAXsycxHASLiBrrtpQYLaUXCCz6ivuaW0bNn0xxpmBZhzqImWFRtDY2I7XRPsbH5hJe/yGZJwzP2YFHTb1pvy+hPycxrMvPMzDzziKOPatU+aRBW5izGPMFZEyzuBrZGxMllN9lFdNtLJU3IjKoyVL3DkMx8ISLeT7fPfBOwIzMfnHnLpIExfSGQmbfS7T+XtIbM8c9ZuINTaiJYWnbpVFKFIc9H1DBYSA24z0JSnezmLcbMYCE14mqIpF6JcxaSqgx7d2YNg4XUyPLyuIPFuBeGpYMks+1274j4j5KL9L6VhETmOpVGYgYPkr01M0+fSEhkrlNpDDLryosw11ynBgupkSmGIcdHxD0TZftatwP+JSLunfj8p3KdAuY6lYYmmerx875cpwBvzszHSvLjXRHxjQ2uNdepNCRZWarulflY+bkPuJluzsFcp9LgJeRyVJU+EfGyiHj5ymu6HKUPYK5TaRwa7uA8Abi5rGYeDvxDZt4WEXdjrlNp+Fo9SFbSbpy2Rv1TmOtUGjafDZFUJwGDhaQanmchqY7BQlK/umXRITNYSC2kE5ySajkMkVTHnoWkGvYsJFUxWEjqVR4kGzODhdSKPQtJVVw6lVQj7FlI6jXNMVgDZbCQmgiHIZIq2bOQVGV53g2YLYOF1MICHH7j6d5SI5F1pfp+EZsi4msR8fny/uSIuKvkLf1cRBxZ6l9S3u8pn79u4h4fKvUPR8T5E/XbSt2eiLhy9d9ei8FCaqVl4pDOFcDuifd/Dny85Dp9Gris1F8GPJ2ZvwR8vFxHRJwKXAT8Cl0u078pAWgT8Em6HKmnAheXazc0m2HIN5+Hc/f2X6ef2c7H7pt3E0bvrPOfmdvfjoiTgN8EPgr8fsnv8TbgPeWSa4E/oUtwfGF5DXAT8Nfl+guBGzLzWeDbEbGHLlkRwJ5yijgRcUO59qGN2mTPQmpkimFITa7TvwL+kAPTpq8AfpCZL5T3k/lJ/y+nafn8h+X6aXOgbsgJTqmVRrlOI+K3gH2ZeW9EvGWleq2/2PPZevVrdRJ6B0gGC6mFpOXS6ZuBd0bEBcBmYAtdT+OYiDi89B4m85Ou5DTdGxGHA0cD+1k/1ykb1K/LYYjUSKvVkMz8UGaelJmvo5ug/NfM/B3gi8C7y2Wrc52u5EB9d7k+S/1FZbXkZGAr8BW6dIZby+rKkeVv3NLXLnsWUiuz38H5R8ANEfFnwNeAT5f6TwN/XyYw99P9z09mPhgRN9JNXL4AXJ6ZSwAR8X66xMmbgB2Z+WDfHzdYSK3MIFhk5peAL5XXj3JgNWPymp9wIEny6s8+Sreisrr+VrqEytUMFlID0264GiKDhdTKyLd7GyykVuxZSKoRPnUqqZdzFpKqGSwkVTFYSKox9mGI270lVbFnIbUy8p6FwUJqIV06lVTLnoWkPsH4JzgNFlIrBgtJvdzBKamawUJSDVdDJNWxZyGp1/TZxgbH7d5SI61O946IzRHxlYj494h4MCL+tNSb61QahXa5Tp8F3paZpwGnA9si4hzmnOvUYCE10jBvSGbmf5e3R5SSdLlObyr11wLvKq8vLO8pn5+7OtdpZn4bWMl1ehYl12lmPges5DrdkMFCaqW+Z9Gb67T0AO4D9gG7gG9hrlNp+KZMBbBhrlOAkgzo9Ig4BrgZeP1al638+XU+a5rr1J6F1Eq7OYsDt8z8AV2SoXMouU7LR2vlOqUy1+lGOVDXZbCQGmm4
GvLK0qMgIo4CfgPYjblOpZFot8/iRODasmpxGHBjZn4+Ih7CXKfSCDQKFpl5P/DGNerNdSoNnk+dSqpmsJBUw6dOJVVxGCKp3wI8dWqwkFoxWEjqswine/fu4IyIHRGxLyIeOBgNkgZrBtu9DyU1270/Q/csvKQNRGZVGareYUhm3jF58o6kNZi+UFK14XYaqjQLFuUAj+0Am3lpq9tKg7HwE5y1MvOazDwzM888gpe0uq00HCOf4HQYIrWwAA+S1SydXg/cCZwSEXsj4rK+35EW0qL3LDLz4oPREGnIFmFTlsMQqZFYHne0MFhILQx8iFHDYCE1MvZNWZ7uLbXSaIIzIl4bEV+MiN0l1+kVpf64iNhVcp3uiohjS31ExNUlb+n9EXHGxL0uLdc/EhGXTtS/KSK+Xn7n6pLBbEMGC6mRVqkA6E7i/oPMfD1dvpDLSy7SK4HbS67T28t76HKWbi1lO/Ap6IILcBVwNt1Bv1etBJhyzfaJ3+t9/stgIbWQQGZd6btV5uOZ+dXy+sd0OUNew0/nNF2d6/S6kiP1y3TJiE4Ezgd2Zeb+zHyaLg3itvLZlsy8s+QXuW7iXutyzkJqZIo5i+Mj4p6J99dk5jVr3rN7iPONwF3ACZn5OHQBJSJeVS6bNqfpa8rr1fUbMlhIDUy5z6I31ylARPwc8I/ABzPzRxtMK0yb63S9+g05DJFaqB2CVJ5nERFH0AWKz2bmP5XqJ8oQgvJzX6mfNqfp3vJ6df2GDBZSIw1znQZdSsLdmfmXEx9N5jRdnev0krIqcg7wwzJc2QmcFxHHlonN84Cd5bMfR8Q55W9dMnGvdTkMkVpptynrzcDvAl+PiPtK3R8DHwNuLM9nfZcDKQtvBS4A9gDPAO8FyMz9EfERukTIAB/OzP3l9fvoTsE7CvhCKRsyWEiNtHo2JDP/jbXnFQDOXeP6BC5f5147gB1r1N8DvGGadhkspBYS8NkQSTXGvt3bYCG1MuCTu2sYLKRGPM9CUj8fUZdUo9vBOe5oYbCQWnGCU1INexaS+mW6z0JSHVdDJNVxGCKpl1nUJVWzZyGpyrhjhcFCasWlU0n9ElgyWEjqEaQ9C0mVDBaSqow8WHi6t9RC0j1IVlMqRMSOiNgXEQ9M1JnrVBqDyKwqlT7D/88/aq5TaRQaJhnKzDuA/auqzXUqDV4mLFfv967OdbqKuU6lUah/NqQq1+kUzHUqDUnjOYu1mOtUGoWGcxbrMNepNHiNM5JFxPXAW+jmN/bSrWrMNddp5Aw2kkTE94HvNL/x7BwPPDnvRozcEL/jX8jMV9ZcePTmV+ev/fyl/RcCtz3yF/c2nrM4KGbSs6j9gg8VEXHPEP/lDclCfMcj38HpMERqIYGlcR+VZbCQmkhIg8UiqNkQoxdn/N+xw5Dxq9w9pxdh9N9x49WQQ5HBQmrFnoWkKgYLSb0yYWlp3q2YKYOF1Io9C0lVDBaS+plFXVKNhHRTlqQq9iwkVXHOQlIvl04l1cr6A3sHyWAhNfGij8w75BkspBZ8kExSNZdOJfVJIO1ZSOqVnpQlqVKOfOl0JqkApEUTEbfRpTuo8WRm9mYtP9QYLCRVMX2hpCoGC0lVDBaSqhgsJFUxWEiq8r+iNcFVHQEJ/gAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ "