Explainable-Federated-Learn.../activity_only_basic_FI.ipynb


{
"cells": [
{
"cell_type": "code",
"execution_count": 45,
"metadata": {},
"outputs": [],
"source": [
"#IMPORTS\n",
"\n",
"import numpy as np\n",
"import random\n",
"import tensorflow as tf\n",
"import tensorflow.keras as kr\n",
"import tensorflow.keras.backend as K\n",
"from tensorflow.keras.models import Model\n",
"from tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense\n",
"from tensorflow.keras.datasets import mnist\n",
"import os\n",
"import csv\n",
"\n",
"from scipy.spatial.distance import euclidean\n",
"from sklearn.metrics import confusion_matrix\n",
"\n",
"from time import sleep\n",
"from tqdm import tqdm\n",
"\n",
"import copy\n",
"import numpy\n",
"from sklearn.datasets import make_classification\n",
"from sklearn.ensemble import RandomForestClassifier\n",
"import pandas as pd\n",
"import matplotlib.pyplot as plt\n",
"import math\n",
"import seaborn as sns\n",
"from numpy.random import RandomState\n",
"import scipy as scp\n",
"from sklearn.model_selection import train_test_split\n",
"from sklearn.compose import ColumnTransformer\n",
"from sklearn.preprocessing import OneHotEncoder, LabelEncoder\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense\n",
"from keras import optimizers\n",
"from keras.callbacks import EarlyStopping,ModelCheckpoint\n",
"from keras.utils import to_categorical\n",
"from keras import backend as K\n",
"from itertools import product\n",
"from sklearn.metrics import accuracy_score\n",
"from sklearn.metrics import precision_score\n",
"from sklearn.metrics import recall_score\n",
"from sklearn.metrics import f1_score\n",
"from sklearn.metrics import roc_auc_score\n",
"from sklearn.metrics import confusion_matrix\n",
"\n",
"from sklearn import mixture\n",
"\n",
"from mpl_toolkits.mplot3d import Axes3D\n",
"import matplotlib.pyplot as plt\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": 46,
"metadata": {},
"outputs": [],
"source": [
"feature_attacked = [3,5,8]\n",
"rs = RandomState(92) #To reproduce the same results each time we run this notebook"
]
},
{
"cell_type": "code",
"execution_count": 47,
"metadata": {},
"outputs": [],
"source": [
"#Load dataset into a pandas DataFrame\n",
"activity = pd.read_csv(\"D:/explaineblity/activity_3_original.csv\", sep=',')"
]
},
{
"cell_type": "code",
"execution_count": 48,
"metadata": {},
"outputs": [],
"source": [
"to_drop = ['subject', 'timestamp', 'heart_rate','activityID']\n",
"activity.drop(axis=1, columns=to_drop, inplace=True)"
]
},
{
"cell_type": "code",
"execution_count": 49,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>motion</th>\n",
" <th>temp_hand</th>\n",
" <th>acceleration_16_x_hand</th>\n",
" <th>acceleration_16_y_hand</th>\n",
" <th>acceleration_16_z_hand</th>\n",
" <th>acceleration_6_x_hand</th>\n",
" <th>acceleration_6_y_hand</th>\n",
" <th>acceleration_6_z_hand</th>\n",
" <th>gyroscope_x_hand</th>\n",
" <th>gyroscope_y_hand</th>\n",
" <th>...</th>\n",
" <th>acceleration_16_z_ankle</th>\n",
" <th>acceleration_6_x_ankle</th>\n",
" <th>acceleration_6_y_ankle</th>\n",
" <th>acceleration_6_z_ankle</th>\n",
" <th>gyroscope_x_ankle</th>\n",
" <th>gyroscope_y_ankle</th>\n",
" <th>gyroscope_z_ankle</th>\n",
" <th>magnetometer_x_ankle</th>\n",
" <th>magnetometer_y_ankle</th>\n",
" <th>magnetometer_z_ankle</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>n</td>\n",
" <td>30.375</td>\n",
" <td>2.21530</td>\n",
" <td>8.27915</td>\n",
" <td>5.58753</td>\n",
" <td>2.24689</td>\n",
" <td>8.55387</td>\n",
" <td>5.77143</td>\n",
" <td>-0.004750</td>\n",
" <td>0.037579</td>\n",
" <td>...</td>\n",
" <td>0.095156</td>\n",
" <td>9.63162</td>\n",
" <td>-1.76757</td>\n",
" <td>0.265761</td>\n",
" <td>0.002908</td>\n",
" <td>-0.027714</td>\n",
" <td>0.001752</td>\n",
" <td>-61.1081</td>\n",
" <td>-36.8636</td>\n",
" <td>-58.3696</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>n</td>\n",
" <td>30.375</td>\n",
" <td>2.29196</td>\n",
" <td>7.67288</td>\n",
" <td>5.74467</td>\n",
" <td>2.27373</td>\n",
" <td>8.14592</td>\n",
" <td>5.78739</td>\n",
" <td>-0.171710</td>\n",
" <td>0.025479</td>\n",
" <td>...</td>\n",
" <td>-0.020804</td>\n",
" <td>9.58649</td>\n",
" <td>-1.75247</td>\n",
" <td>0.250816</td>\n",
" <td>0.020882</td>\n",
" <td>0.000945</td>\n",
" <td>0.006007</td>\n",
" <td>-60.8916</td>\n",
" <td>-36.3197</td>\n",
" <td>-58.3656</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>n</td>\n",
" <td>30.375</td>\n",
" <td>2.29090</td>\n",
" <td>7.14240</td>\n",
" <td>5.82342</td>\n",
" <td>2.26966</td>\n",
" <td>7.66268</td>\n",
" <td>5.78846</td>\n",
" <td>-0.238241</td>\n",
" <td>0.011214</td>\n",
" <td>...</td>\n",
" <td>-0.059173</td>\n",
" <td>9.60196</td>\n",
" <td>-1.73721</td>\n",
" <td>0.356632</td>\n",
" <td>-0.035392</td>\n",
" <td>-0.052422</td>\n",
" <td>-0.004882</td>\n",
" <td>-60.3407</td>\n",
" <td>-35.7842</td>\n",
" <td>-58.6119</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>n</td>\n",
" <td>30.375</td>\n",
" <td>2.21800</td>\n",
" <td>7.14365</td>\n",
" <td>5.89930</td>\n",
" <td>2.22177</td>\n",
" <td>7.25535</td>\n",
" <td>5.88000</td>\n",
" <td>-0.192912</td>\n",
" <td>0.019053</td>\n",
" <td>...</td>\n",
" <td>0.094385</td>\n",
" <td>9.58674</td>\n",
" <td>-1.78264</td>\n",
" <td>0.311453</td>\n",
" <td>-0.032514</td>\n",
" <td>-0.018844</td>\n",
" <td>0.026950</td>\n",
" <td>-60.7646</td>\n",
" <td>-37.1028</td>\n",
" <td>-57.8799</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>n</td>\n",
" <td>30.375</td>\n",
" <td>2.30106</td>\n",
" <td>7.25857</td>\n",
" <td>6.09259</td>\n",
" <td>2.20720</td>\n",
" <td>7.24042</td>\n",
" <td>5.95555</td>\n",
" <td>-0.069961</td>\n",
" <td>-0.018328</td>\n",
" <td>...</td>\n",
" <td>0.095775</td>\n",
" <td>9.64677</td>\n",
" <td>-1.75240</td>\n",
" <td>0.295902</td>\n",
" <td>0.001351</td>\n",
" <td>-0.048878</td>\n",
" <td>-0.006328</td>\n",
" <td>-60.2040</td>\n",
" <td>-37.1225</td>\n",
" <td>-57.8847</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>5 rows × 40 columns</p>\n",
"</div>"
],
"text/plain": [
" motion temp_hand acceleration_16_x_hand acceleration_16_y_hand \\\n",
"0 n 30.375 2.21530 8.27915 \n",
"1 n 30.375 2.29196 7.67288 \n",
"2 n 30.375 2.29090 7.14240 \n",
"3 n 30.375 2.21800 7.14365 \n",
"4 n 30.375 2.30106 7.25857 \n",
"\n",
" acceleration_16_z_hand acceleration_6_x_hand acceleration_6_y_hand \\\n",
"0 5.58753 2.24689 8.55387 \n",
"1 5.74467 2.27373 8.14592 \n",
"2 5.82342 2.26966 7.66268 \n",
"3 5.89930 2.22177 7.25535 \n",
"4 6.09259 2.20720 7.24042 \n",
"\n",
" acceleration_6_z_hand gyroscope_x_hand gyroscope_y_hand ... \\\n",
"0 5.77143 -0.004750 0.037579 ... \n",
"1 5.78739 -0.171710 0.025479 ... \n",
"2 5.78846 -0.238241 0.011214 ... \n",
"3 5.88000 -0.192912 0.019053 ... \n",
"4 5.95555 -0.069961 -0.018328 ... \n",
"\n",
" acceleration_16_z_ankle acceleration_6_x_ankle acceleration_6_y_ankle \\\n",
"0 0.095156 9.63162 -1.76757 \n",
"1 -0.020804 9.58649 -1.75247 \n",
"2 -0.059173 9.60196 -1.73721 \n",
"3 0.094385 9.58674 -1.78264 \n",
"4 0.095775 9.64677 -1.75240 \n",
"\n",
" acceleration_6_z_ankle gyroscope_x_ankle gyroscope_y_ankle \\\n",
"0 0.265761 0.002908 -0.027714 \n",
"1 0.250816 0.020882 0.000945 \n",
"2 0.356632 -0.035392 -0.052422 \n",
"3 0.311453 -0.032514 -0.018844 \n",
"4 0.295902 0.001351 -0.048878 \n",
"\n",
" gyroscope_z_ankle magnetometer_x_ankle magnetometer_y_ankle \\\n",
"0 0.001752 -61.1081 -36.8636 \n",
"1 0.006007 -60.8916 -36.3197 \n",
"2 -0.004882 -60.3407 -35.7842 \n",
"3 0.026950 -60.7646 -37.1028 \n",
"4 -0.006328 -60.2040 -37.1225 \n",
"\n",
" magnetometer_z_ankle \n",
"0 -58.3696 \n",
"1 -58.3656 \n",
"2 -58.6119 \n",
"3 -57.8799 \n",
"4 -57.8847 \n",
"\n",
"[5 rows x 40 columns]"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"display(activity.head())"
]
},
{
"cell_type": "code",
"execution_count": 50,
"metadata": {},
"outputs": [],
"source": [
"activity = pd.concat([activity,pd.get_dummies(activity['motion'], prefix='motion')],axis=1)\n",
"activity.drop('motion', axis=1, inplace=True)"
]
},
{
"cell_type": "code",
"execution_count": 51,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>temp_hand</th>\n",
" <th>acceleration_16_x_hand</th>\n",
" <th>acceleration_16_y_hand</th>\n",
" <th>acceleration_16_z_hand</th>\n",
" <th>acceleration_6_x_hand</th>\n",
" <th>acceleration_6_y_hand</th>\n",
" <th>acceleration_6_z_hand</th>\n",
" <th>gyroscope_x_hand</th>\n",
" <th>gyroscope_y_hand</th>\n",
" <th>gyroscope_z_hand</th>\n",
" <th>...</th>\n",
" <th>acceleration_6_y_ankle</th>\n",
" <th>acceleration_6_z_ankle</th>\n",
" <th>gyroscope_x_ankle</th>\n",
" <th>gyroscope_y_ankle</th>\n",
" <th>gyroscope_z_ankle</th>\n",
" <th>magnetometer_x_ankle</th>\n",
" <th>magnetometer_y_ankle</th>\n",
" <th>magnetometer_z_ankle</th>\n",
" <th>motion_n</th>\n",
" <th>motion_y</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>30.375</td>\n",
" <td>2.21530</td>\n",
" <td>8.27915</td>\n",
" <td>5.58753</td>\n",
" <td>2.24689</td>\n",
" <td>8.55387</td>\n",
" <td>5.77143</td>\n",
" <td>-0.004750</td>\n",
" <td>0.037579</td>\n",
" <td>-0.011145</td>\n",
" <td>...</td>\n",
" <td>-1.76757</td>\n",
" <td>0.265761</td>\n",
" <td>0.002908</td>\n",
" <td>-0.027714</td>\n",
" <td>0.001752</td>\n",
" <td>-61.1081</td>\n",
" <td>-36.8636</td>\n",
" <td>-58.3696</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>30.375</td>\n",
" <td>2.29196</td>\n",
" <td>7.67288</td>\n",
" <td>5.74467</td>\n",
" <td>2.27373</td>\n",
" <td>8.14592</td>\n",
" <td>5.78739</td>\n",
" <td>-0.171710</td>\n",
" <td>0.025479</td>\n",
" <td>-0.009538</td>\n",
" <td>...</td>\n",
" <td>-1.75247</td>\n",
" <td>0.250816</td>\n",
" <td>0.020882</td>\n",
" <td>0.000945</td>\n",
" <td>0.006007</td>\n",
" <td>-60.8916</td>\n",
" <td>-36.3197</td>\n",
" <td>-58.3656</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>30.375</td>\n",
" <td>2.29090</td>\n",
" <td>7.14240</td>\n",
" <td>5.82342</td>\n",
" <td>2.26966</td>\n",
" <td>7.66268</td>\n",
" <td>5.78846</td>\n",
" <td>-0.238241</td>\n",
" <td>0.011214</td>\n",
" <td>0.000831</td>\n",
" <td>...</td>\n",
" <td>-1.73721</td>\n",
" <td>0.356632</td>\n",
" <td>-0.035392</td>\n",
" <td>-0.052422</td>\n",
" <td>-0.004882</td>\n",
" <td>-60.3407</td>\n",
" <td>-35.7842</td>\n",
" <td>-58.6119</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>30.375</td>\n",
" <td>2.21800</td>\n",
" <td>7.14365</td>\n",
" <td>5.89930</td>\n",
" <td>2.22177</td>\n",
" <td>7.25535</td>\n",
" <td>5.88000</td>\n",
" <td>-0.192912</td>\n",
" <td>0.019053</td>\n",
" <td>0.013374</td>\n",
" <td>...</td>\n",
" <td>-1.78264</td>\n",
" <td>0.311453</td>\n",
" <td>-0.032514</td>\n",
" <td>-0.018844</td>\n",
" <td>0.026950</td>\n",
" <td>-60.7646</td>\n",
" <td>-37.1028</td>\n",
" <td>-57.8799</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>30.375</td>\n",
" <td>2.30106</td>\n",
" <td>7.25857</td>\n",
" <td>6.09259</td>\n",
" <td>2.20720</td>\n",
" <td>7.24042</td>\n",
" <td>5.95555</td>\n",
" <td>-0.069961</td>\n",
" <td>-0.018328</td>\n",
" <td>0.004582</td>\n",
" <td>...</td>\n",
" <td>-1.75240</td>\n",
" <td>0.295902</td>\n",
" <td>0.001351</td>\n",
" <td>-0.048878</td>\n",
" <td>-0.006328</td>\n",
" <td>-60.2040</td>\n",
" <td>-37.1225</td>\n",
" <td>-57.8847</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>5 rows × 41 columns</p>\n",
"</div>"
],
"text/plain": [
" temp_hand acceleration_16_x_hand acceleration_16_y_hand \\\n",
"0 30.375 2.21530 8.27915 \n",
"1 30.375 2.29196 7.67288 \n",
"2 30.375 2.29090 7.14240 \n",
"3 30.375 2.21800 7.14365 \n",
"4 30.375 2.30106 7.25857 \n",
"\n",
" acceleration_16_z_hand acceleration_6_x_hand acceleration_6_y_hand \\\n",
"0 5.58753 2.24689 8.55387 \n",
"1 5.74467 2.27373 8.14592 \n",
"2 5.82342 2.26966 7.66268 \n",
"3 5.89930 2.22177 7.25535 \n",
"4 6.09259 2.20720 7.24042 \n",
"\n",
" acceleration_6_z_hand gyroscope_x_hand gyroscope_y_hand \\\n",
"0 5.77143 -0.004750 0.037579 \n",
"1 5.78739 -0.171710 0.025479 \n",
"2 5.78846 -0.238241 0.011214 \n",
"3 5.88000 -0.192912 0.019053 \n",
"4 5.95555 -0.069961 -0.018328 \n",
"\n",
" gyroscope_z_hand ... acceleration_6_y_ankle acceleration_6_z_ankle \\\n",
"0 -0.011145 ... -1.76757 0.265761 \n",
"1 -0.009538 ... -1.75247 0.250816 \n",
"2 0.000831 ... -1.73721 0.356632 \n",
"3 0.013374 ... -1.78264 0.311453 \n",
"4 0.004582 ... -1.75240 0.295902 \n",
"\n",
" gyroscope_x_ankle gyroscope_y_ankle gyroscope_z_ankle \\\n",
"0 0.002908 -0.027714 0.001752 \n",
"1 0.020882 0.000945 0.006007 \n",
"2 -0.035392 -0.052422 -0.004882 \n",
"3 -0.032514 -0.018844 0.026950 \n",
"4 0.001351 -0.048878 -0.006328 \n",
"\n",
" magnetometer_x_ankle magnetometer_y_ankle magnetometer_z_ankle motion_n \\\n",
"0 -61.1081 -36.8636 -58.3696 1 \n",
"1 -60.8916 -36.3197 -58.3656 1 \n",
"2 -60.3407 -35.7842 -58.6119 1 \n",
"3 -60.7646 -37.1028 -57.8799 1 \n",
"4 -60.2040 -37.1225 -57.8847 1 \n",
"\n",
" motion_y \n",
"0 0 \n",
"1 0 \n",
"2 0 \n",
"3 0 \n",
"4 0 \n",
"\n",
"[5 rows x 41 columns]"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"display(activity.head())"
]
},
{
"cell_type": "code",
"execution_count": 52,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['temp_hand',\n",
" 'acceleration_16_x_hand',\n",
" 'acceleration_16_y_hand',\n",
" 'acceleration_16_z_hand',\n",
" 'acceleration_6_x_hand',\n",
" 'acceleration_6_y_hand',\n",
" 'acceleration_6_z_hand',\n",
" 'gyroscope_x_hand',\n",
" 'gyroscope_y_hand',\n",
" 'gyroscope_z_hand',\n",
" 'magnetometer_x_hand',\n",
" 'magnetometer_y_hand',\n",
" 'magnetometer_z_hand',\n",
" 'temp_chest',\n",
" 'acceleration_16_x_chest',\n",
" 'acceleration_16_y_chest',\n",
" 'acceleration_16_z_chest',\n",
" 'acceleration_6_x_chest',\n",
" 'acceleration_6_y_chest',\n",
" 'acceleration_6_z_chest',\n",
" 'gyroscope_x_chest',\n",
" 'gyroscope_y_chest',\n",
" 'gyroscope_z_chest',\n",
" 'magnetometer_x_chest',\n",
" 'magnetometer_y_chest',\n",
" 'magnetometer_z_chest',\n",
" 'temp_ankle',\n",
" 'acceleration_16_x_ankle',\n",
" 'acceleration_16_y_ankle',\n",
" 'acceleration_16_z_ankle',\n",
" 'acceleration_6_x_ankle',\n",
" 'acceleration_6_y_ankle',\n",
" 'acceleration_6_z_ankle',\n",
" 'gyroscope_x_ankle',\n",
" 'gyroscope_y_ankle',\n",
" 'gyroscope_z_ankle',\n",
" 'magnetometer_x_ankle',\n",
" 'magnetometer_y_ankle',\n",
" 'magnetometer_z_ankle']"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"class_label = [ 'motion_n', 'motion_y']\n",
"predictors = [a for a in activity.columns.values if a not in class_label]\n",
"\n",
"for p in predictors:\n",
" activity[p].fillna(activity[p].mean(), inplace=True)\n",
"\n",
"display(predictors)\n",
"for p in predictors:\n",
" activity[p] = (activity[p]-activity[p].min()) / (activity[p].max() - activity[p].min())\n",
" activity[p].astype('float32')\n",
" "
]
},
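{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative sanity check (added sketch, assuming the cells above ran):\n",
"# after mean imputation and min-max scaling, every predictor column\n",
"# should be NaN-free and lie in [0, 1].\n",
"assert not activity[predictors].isnull().values.any()\n",
"assert activity[predictors].min().min() >= 0.0\n",
"assert activity[predictors].max().max() <= 1.0"
]
},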
{
"cell_type": "code",
"execution_count": 53,
"metadata": {},
"outputs": [],
"source": [
"activity = activity.to_numpy()"
]
},
{
"cell_type": "code",
"execution_count": 54,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(1942872, 41)"
]
},
"execution_count": 54,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"activity.shape"
]
},
{
"cell_type": "code",
"execution_count": 55,
"metadata": {},
"outputs": [],
"source": [
"X_train, X_test, y_train, y_test = train_test_split(activity[:,:-2],activity[:,-2:], test_size=0.07, random_state=rs)\n"
]
},
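{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative check (added sketch): with test_size=0.07 the 1,942,872\n",
"# rows split into roughly 1,806,870 train and 136,002 test samples,\n",
"# matching the Keras log below.\n",
"print(X_train.shape, X_test.shape)\n",
"print(y_train.shape, y_test.shape)"
]
},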
{
"cell_type": "code",
"execution_count": 56,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Train on 1806870 samples, validate on 136002 samples\n",
"Epoch 1/2\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"1806870/1806870 [==============================] - 37s 20us/step - loss: 0.0773 - accuracy: 0.9725 - val_loss: 0.0357 - val_accuracy: 0.9882\n",
"\n",
"Epoch 00001: val_loss improved from inf to 0.03567, saving model to test.h8\n",
"Epoch 2/2\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" 474496/1806870 [======>.......................] - ETA: 51:48 - loss: 0.0022 - accuracy: 1.000 - ETA: 1:13 - loss: 0.0302 - accuracy: 0.990 - ETA: 53s - loss: 0.0260 - accuracy: 0.9923 - ETA: 47s - loss: 0.0219 - accuracy: 0.993 - ETA: 43s - loss: 0.0236 - accuracy: 0.993 - ETA: 41s - loss: 0.0304 - accuracy: 0.991 - ETA: 40s - loss: 0.0327 - accuracy: 0.990 - ETA: 39s - loss: 0.0327 - accuracy: 0.990 - ETA: 38s - loss: 0.0317 - accuracy: 0.990 - ETA: 38s - loss: 0.0308 - accuracy: 0.990 - ETA: 37s - loss: 0.0303 - accuracy: 0.990 - ETA: 37s - loss: 0.0307 - accuracy: 0.990 - ETA: 37s - loss: 0.0311 - accuracy: 0.990 - ETA: 36s - loss: 0.0312 - accuracy: 0.990 - ETA: 36s - loss: 0.0306 - accuracy: 0.990 - ETA: 36s - loss: 0.0319 - accuracy: 0.989 - ETA: 36s - loss: 0.0315 - accuracy: 0.989 - ETA: 35s - loss: 0.0318 - accuracy: 0.989 - ETA: 35s - loss: 0.0317 - accuracy: 0.989 - ETA: 35s - loss: 0.0317 - accuracy: 0.989 - ETA: 35s - loss: 0.0323 - accuracy: 0.989 - ETA: 35s - loss: 0.0319 - accuracy: 0.989 - ETA: 35s - loss: 0.0317 - accuracy: 0.989 - ETA: 34s - loss: 0.0316 - accuracy: 0.989 - ETA: 34s - loss: 0.0318 - accuracy: 0.989 - ETA: 34s - loss: 0.0327 - accuracy: 0.989 - ETA: 34s - loss: 0.0325 - accuracy: 0.989 - ETA: 34s - loss: 0.0323 - accuracy: 0.989 - ETA: 34s - loss: 0.0324 - accuracy: 0.989 - ETA: 34s - loss: 0.0321 - accuracy: 0.989 - ETA: 34s - loss: 0.0323 - accuracy: 0.989 - ETA: 34s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0318 - accuracy: 0.989 - ETA: 33s - loss: 0.0318 - accuracy: 0.989 - ETA: 33s - loss: 0.0319 - accuracy: 0.989 - ETA: 33s - loss: 0.0322 - accuracy: 0.989 - ETA: 33s - loss: 0.0321 - accuracy: 0.989 - ETA: 33s - loss: 0.0322 - accuracy: 0.989 - ETA: 33s - loss: 0.0320 - accuracy: 0.989 - ETA: 33s - loss: 0.0322 - accuracy: 0.989 - ETA: 33s - loss: 0.0324 - accuracy: 0.989 - ETA: 33s - loss: 0.0323 - accuracy: 0.989 - ETA: 32s - loss: 0.0327 - accuracy: 0.988 - ETA: 32s - loss: 0.0325 - accuracy: 0.988 - ETA: 32s - loss: 0.0324 - accuracy: 0.989 - ETA: 32s - loss: 0.0325 - accuracy: 0.988 - ETA: 32s - loss: 0.0324 - accuracy: 0.989 - ETA: 32s - loss: 0.0326 - accuracy: 0.988 - ETA: 32s - loss: 0.0330 - accuracy: 0.988 - ETA: 32s - loss: 0.0330 - accuracy: 0.988 - ETA: 32s - loss: 0.0331 - accuracy: 0.988 - ETA: 32s - loss: 0.0334 - accuracy: 0.988 - ETA: 32s - loss: 0.0333 - accuracy: 0.988 - ETA: 32s - loss: 0.0332 - accuracy: 0.988 - ETA: 32s - loss: 0.0331 - accuracy: 0.988 - ETA: 32s - loss: 0.0329 - accuracy: 0.988 - ETA: 32s - loss: 0.0329 - accuracy: 0.988 - ETA: 32s - loss: 0.0327 - accuracy: 0.988 - ETA: 32s - loss: 0.0326 - accuracy: 0.988 - ETA: 32s - loss: 0.0328 - accuracy: 0.988 - ETA: 32s - loss: 0.0328 - accuracy: 0.988 - ETA: 31s - loss: 0.0328 - accuracy: 0.988 - ETA: 31s - loss: 0.0326 - accuracy: 0.989 - ETA: 31s - loss: 0.0327 - accuracy: 0.989 - ETA: 31s - loss: 0.0327 - accuracy: 0.989 - ETA: 31s - loss: 0.0327 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0322 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0326 - accuracy: 0.988 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 0.0325 - accuracy: 0.989 - ETA: 31s - loss: 0.0324 - accuracy: 0.989 - ETA: 31s - loss: 
0.0322 - accuracy: 0.989 - ETA: 31s - loss: 0.0322 - accuracy: 0.989 - ETA: 31s - loss: 0.0321 - accuracy: 0.989 - ETA: 31s - loss: 0.0320 - accuracy: 0.989 - ETA: 31s - loss: 0.0319 - accuracy: 0.989 - ETA: 31s - loss: 0.0318 - accuracy: 0.989 - ETA: 31s - loss: 0.0317 - accuracy: 0.989 - ETA: 31s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0317 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0317 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0316 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0313 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0315 - accuracy: 0.989 - ETA: 30s - loss: 0.0314 - accuracy: 0.989 - ETA: 30s - loss: 0.0313 - accuracy: 0.989 - ETA: 30s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0313 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0312 - accuracy: 0.989 - ETA: 29s - loss: 0.0311 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0309 - accuracy: 0.989 - ETA: 29s - loss: 0.0309 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 29s - loss: 0.0310 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0308 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0307 - accuracy: 0.989 - ETA: 28s - loss: 0.0306 - accuracy: 0.989 - ETA: 28s - loss: 0.0306 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 28s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0306 - accuracy: 0.989 - ETA: 27s - loss: 0.0306 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0305 - accuracy: 0.989 - ETA: 27s - loss: 0.0304 - accuracy: 0.989 - ETA: 
27s - loss: 0.0304 - accuracy: 0.989 - ETA: 27s - loss: 0.0303 - accuracy: 0.989 - ETA: 27s - loss: 0.0302 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.989 - ETA: 26s - loss: 0.0299 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0301 - accuracy: 0.989 - ETA: 26s - loss: 0.0300 - accuracy: 0.9898"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" 949216/1806870 [==============>...............] - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0301 - accuracy: 0.989 - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0300 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0298 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0298 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0299 - accuracy: 0.989 - ETA: 25s - loss: 0.0298 - accuracy: 0.989 - ETA: 25s - loss: 0.0297 - accuracy: 0.989 - ETA: 25s - loss: 0.0296 - accuracy: 0.989 - ETA: 25s - loss: 0.0296 - accuracy: 0.989 - ETA: 24s - loss: 0.0297 - accuracy: 0.989 - ETA: 24s - loss: 0.0296 - accuracy: 0.989 - ETA: 24s - loss: 0.0296 - accuracy: 0.990 - ETA: 24s - loss: 0.0296 - accuracy: 0.990 - ETA: 24s - loss: 0.0296 - accuracy: 0.990 - ETA: 24s - loss: 0.0295 - accuracy: 0.990 - ETA: 24s - loss: 0.0295 - accuracy: 0.990 - ETA: 24s - loss: 0.0294 - accuracy: 0.990 - ETA: 24s - loss: 0.0294 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0293 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 24s - loss: 0.0292 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0291 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0290 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0289 - accuracy: 0.990 - ETA: 23s - loss: 0.0288 - accuracy: 0.990 - ETA: 23s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0288 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0287 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0286 - accuracy: 0.990 - ETA: 22s - loss: 0.0285 - accuracy: 0.990 - ETA: 22s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - 
accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0285 - accuracy: 0.990 - ETA: 21s - loss: 0.0284 - accuracy: 0.990 - ETA: 21s - loss: 0.0284 - accuracy: 0.990 - ETA: 21s - loss: 0.0284 - accuracy: 0.990 - ETA: 21s - loss: 0.0283 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0283 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0280 - accuracy: 0.990 - ETA: 21s - loss: 0.0281 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 21s - loss: 0.0282 - accuracy: 0.990 - ETA: 20s - loss: 0.0282 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0281 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0280 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 20s - loss: 0.0279 - accuracy: 0.990 - ETA: 19s - loss: 0.0278 - accuracy: 0.990 - ETA: 19s - loss: 0.0278 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0277 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0276 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 19s - loss: 0.0275 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0275 - accuracy: 0.990 - ETA: 18s - loss: 0.0275 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0274 - accuracy: 0.990 - ETA: 18s - loss: 0.0273 - accuracy: 0.990 - ETA: 18s - loss: 0.0273 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0272 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0271 - accuracy: 0.990 - ETA: 18s - loss: 0.0270 - accuracy: 0.990 - ETA: 18s - loss: 0.0270 - accuracy: 0.990 - ETA: 18s - loss: 
0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0272 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0272 - accuracy: 0.990 - ETA: 17s - loss: 0.0272 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0271 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0270 - accuracy: 0.990 - ETA: 17s - loss: 0.0269 - accuracy: 0.990 - ETA: 16s - loss: 0.0269 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0267 - accuracy: 0.9909"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"1434176/1806870 [======================>.......] - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0268 - accuracy: 0.990 - ETA: 16s - loss: 0.0267 - accuracy: 0.991 - ETA: 16s - loss: 0.0267 - accuracy: 0.991 - ETA: 16s - loss: 0.0267 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0266 - accuracy: 0.991 - ETA: 16s - loss: 0.0265 - accuracy: 0.991 - ETA: 16s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0265 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0264 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0263 - accuracy: 0.991 - ETA: 15s - loss: 0.0262 - accuracy: 0.991 - ETA: 15s - loss: 0.0262 - accuracy: 0.991 - ETA: 15s - loss: 0.0261 - accuracy: 0.991 - ETA: 15s - loss: 0.0262 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0261 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0260 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0258 - accuracy: 0.991 - ETA: 14s - loss: 0.0258 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 14s - loss: 0.0259 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0258 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0256 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 13s - loss: 0.0257 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0256 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0255 - 
accuracy: 0.991 - ETA: 12s - loss: 0.0255 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0254 - accuracy: 0.991 - ETA: 12s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0255 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0254 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0253 - accuracy: 0.991 - ETA: 11s - loss: 0.0252 - accuracy: 0.991 - ETA: 11s - loss: 0.0252 - accuracy: 0.991 - ETA: 11s - loss: 0.0252 - accuracy: 0.991 - ETA: 10s - loss: 0.0252 - accuracy: 0.991 - ETA: 10s - loss: 0.0252 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0251 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 10s - loss: 0.0250 - accuracy: 0.991 - ETA: 9s - loss: 0.0250 - accuracy: 0.991 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0250 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0249 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 9s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0248 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - 
accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0247 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0246 - accuracy: 0.99 - ETA: 8s - loss: 0.0245 - accuracy: 0.99 - ETA: 8s - loss: 0.0245 - accuracy: 0.99 - ETA: 8s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0245 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0244 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.9918"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"1806870/1806870 [==============================] - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 7s - loss: 0.0243 - accuracy: 0.99 - ETA: 6s - loss: 0.0243 - accuracy: 0.99 - ETA: 6s - loss: 0.0243 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0242 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0240 - accuracy: 0.99 - ETA: 6s - loss: 0.0241 - accuracy: 0.99 - ETA: 6s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0240 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0239 - accuracy: 0.99 - ETA: 5s - loss: 0.0238 - accuracy: 0.99 - ETA: 5s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0238 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0237 - accuracy: 0.99 - ETA: 4s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0236 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 0.0235 - accuracy: 0.99 - ETA: 3s - loss: 
0.0234 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 3s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0234 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0233 - accuracy: 0.99 - ETA: 2s - loss: 0.0232 - accuracy: 0.99 - ETA: 2s - loss: 0.0232 - accuracy: 0.99 - ETA: 2s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0232 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 1s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - 37s 20us/step - loss: 0.0229 - accuracy: 0.9923 - val_loss: 0.0129 - val_accuracy: 0.9955\n",
"\n",
"Epoch 00002: val_loss improved from 0.03567 to 0.01290, saving model to test.h8\n"
]
}
],
"source": [
"#begin federated\n",
"\n",
"earlystopping = EarlyStopping(monitor = 'val_loss',\n",
" min_delta = 0.01,\n",
" patience = 50,\n",
" verbose = 1,\n",
" baseline = 2,\n",
" restore_best_weights = True)\n",
"\n",
"checkpoint = ModelCheckpoint('test.h8',\n",
" monitor='val_loss',\n",
" mode='min',\n",
" save_best_only=True,\n",
" verbose=1)\n",
" \n",
"model = Sequential()\n",
"model.add(Dense(70, input_dim=39, activation='relu'))\n",
"model.add(Dense(50, activation='relu'))\n",
"model.add(Dense(50, activation='relu'))\n",
"model.add(Dense(2, activation='softmax'))\n",
"#sgd = optimizers.SGD(learning_rate=0.0001, momentum=0.9, nesterov=True)\n",
"model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])\n",
"# def train_shard(i):\n",
"history = model.fit(X_train, y_train,\n",
"epochs=2,\n",
"validation_data=(X_test, y_test),\n",
"callbacks = [checkpoint, earlystopping],\n",
"shuffle=True)\n",
"# return history\n",
"# for i in range(len(shard1_traintest)):\n",
"# train_shard(i)\n",
"#get_3rd_layer_output = K.function([model.layers[0].input],\n",
"# [model.layers[2].output])\n",
"#layer_output = get_3rd_layer_output(shard_traintest[i][\"X_train\"])[0]"
]
},
{
"cell_type": "code",
"execution_count": 57,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"sequential_4\"\n",
"_________________________________________________________________\n",
"Layer (type) Output Shape Param # \n",
"=================================================================\n",
"dense_13 (Dense) (None, 70) 2800 \n",
"_________________________________________________________________\n",
"dense_14 (Dense) (None, 50) 3550 \n",
"_________________________________________________________________\n",
"dense_15 (Dense) (None, 50) 2550 \n",
"_________________________________________________________________\n",
"dense_16 (Dense) (None, 2) 102 \n",
"=================================================================\n",
"Total params: 9,002\n",
"Trainable params: 9,002\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n"
]
}
],
"source": [
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 58,
"metadata": {},
"outputs": [],
"source": [
"#AUXILIARY METHODS FOR FEDERATED LEARNING\n",
"\n",
"# RETURN INDICES TO LAYERS WITH WEIGHTS AND BIASES\n",
"def trainable_layers(model):\n",
" return [i for i, layer in enumerate(model.layers) if len(layer.get_weights()) > 0]\n",
"\n",
"# RETURN WEIGHTS AND BIASES OF A MODEL\n",
"def get_parameters(model):\n",
" weights = []\n",
" biases = []\n",
" index = trainable_layers(model)\n",
" for i in index:\n",
" weights.append(copy.deepcopy(model.layers[i].get_weights()[0]))\n",
" biases.append(copy.deepcopy(model.layers[i].get_weights()[1])) \n",
" \n",
" return weights, biases\n",
" \n",
"# SET WEIGHTS AND BIASES OF A MODEL\n",
"def set_parameters(model, weights, biases):\n",
" index = trainable_layers(model)\n",
" for i, j in enumerate(index):\n",
" model.layers[j].set_weights([weights[i], biases[i]])\n",
" \n",
"# DEPRECATED: RETURN THE GRADIENTS OF THE MODEL AFTER AN UPDATE \n",
"def get_gradients(model, inputs, outputs):\n",
" \"\"\" Gets gradient of model for given inputs and outputs for all weights\"\"\"\n",
" grads = model.optimizer.get_gradients(model.total_loss, model.trainable_weights)\n",
" symb_inputs = (model._feed_inputs + model._feed_targets + model._feed_sample_weights)\n",
" f = K.function(symb_inputs, grads)\n",
" x, y, sample_weight = model._standardize_user_data(inputs, outputs)\n",
" output_grad = f(x + y + sample_weight)\n",
" \n",
" w_grad = [w for i,w in enumerate(output_grad) if i%2==0]\n",
" b_grad = [w for i,w in enumerate(output_grad) if i%2==1]\n",
" \n",
" return w_grad, b_grad\n",
"\n",
"# RETURN THE DIFFERENCE OF MODELS' WEIGHTS AND BIASES AFTER AN UPDATE \n",
"# NOTE: LEARNING RATE IS APPLIED, SO THE UPDATE IS DIFFERENT FROM THE\n",
"# GRADIENTS. IN CASE VANILLA SGD IS USED, THE GRADIENTS ARE OBTAINED\n",
"# AS (UPDATES / LEARNING_RATE)\n",
"def get_updates(model, inputs, outputs, batch_size, epochs):\n",
" w, b = get_parameters(model)\n",
" #model.train_on_batch(inputs, outputs)\n",
" model.fit(inputs, outputs, batch_size=batch_size, epochs=epochs, verbose=0)\n",
" w_new, b_new = get_parameters(model)\n",
" \n",
" weight_updates = [old - new for old,new in zip(w, w_new)]\n",
" bias_updates = [old - new for old,new in zip(b, b_new)]\n",
" \n",
" return weight_updates, bias_updates\n",
"\n",
"# UPDATE THE MODEL'S WEIGHTS AND PARAMETERS WITH AN UPDATE\n",
"def apply_updates(model, eta, w_new, b_new):\n",
" w, b = get_parameters(model)\n",
" new_weights = [theta - eta*delta for theta,delta in zip(w, w_new)]\n",
" new_biases = [theta - eta*delta for theta,delta in zip(b, b_new)]\n",
" set_parameters(model, new_weights, new_biases)\n",
" \n",
"# FEDERATED AGGREGATION FUNCTION\n",
"def aggregate(n_layers, n_peers, f, w_updates, b_updates):\n",
" agg_w = [f([w_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]\n",
" agg_b = [f([b_updates[j][i] for j in range(n_peers)], axis=0) for i in range(n_layers)]\n",
" return agg_w, agg_b\n",
"\n",
"# SOLVE NANS\n",
"def nans_to_zero(W, B):\n",
" W0 = [np.nan_to_num(w, nan=0.0, posinf=0.0, neginf=0.0) for w in W]\n",
" B0 = [np.nan_to_num(b, nan=0.0, posinf=0.0, neginf=0.0) for b in B]\n",
" return W0, B0\n",
"\n",
"def build_forest(X,y):\n",
" clf=RandomForestClassifier(n_estimators=1000, max_depth=7, random_state=0, verbose = 1)\n",
" clf.fit(X,y)\n",
" return clf\n",
" "
]
},
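{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# A minimal sketch (not part of the original experiment) of how the helpers\n",
"# above compose into one FedAvg-style round. The peer count, equal shard\n",
"# slicing, and eta below are illustrative assumptions, not values used\n",
"# elsewhere in this notebook.\n",
"def federated_round(model, X, y, n_peers=5, eta=1.0, batch_size=32, epochs=1):\n",
"    n_layers = len(trainable_layers(model))\n",
"    ss = len(X) // n_peers\n",
"    w0, b0 = get_parameters(model)      # current global parameters\n",
"    w_updates, b_updates = [], []\n",
"    for p in range(n_peers):\n",
"        set_parameters(model, w0, b0)   # every peer starts from the global model\n",
"        wu, bu = get_updates(model, X[p*ss:(p+1)*ss], y[p*ss:(p+1)*ss], batch_size, epochs)\n",
"        w_updates.append(wu)\n",
"        b_updates.append(bu)\n",
"    # mean-aggregate the peers' updates, then move the global model by them\n",
"    agg_w, agg_b = aggregate(n_layers, n_peers, np.mean, w_updates, b_updates)\n",
"    set_parameters(model, w0, b0)\n",
"    apply_updates(model, eta, agg_w, agg_b)"
]
},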
{
"cell_type": "code",
"execution_count": 59,
"metadata": {},
"outputs": [],
"source": [
"\n",
"def scan_wrong(forest_predictions, FL_predict1, forest , y_test_local, X_test_local):\n",
" sum_feature_improtance= 0\n",
" overal_wrong_feature_importance = 0\n",
" counter = 0\n",
" second_counter = 0\n",
" never_seen = 0\n",
" avr_wrong_importance = 0\n",
" counter1 = 0\n",
" for i in range (len(FL_predict1)):\n",
" if(FL_predict1[i][0] < 0.5):\n",
" FL_predict1[i][0] = 0\n",
" FL_predict1[i][1] = 1\n",
" if(FL_predict1[i][0] >= 0.5):\n",
" FL_predict1[i][0] = 1\n",
" FL_predict1[i][1] = 0\n",
" for i in range (len(FL_predict1)):\n",
" i_tree = 0\n",
" # print(i)\n",
" if (FL_predict1[i][0] != y_test_local[i][0]):\n",
" counter1+=1\n",
"# print(i)\n",
" # print(\"the test sample number \",i ,\" have been niss classified by the blackbox\" )\n",
" for tree_in_forest in forest.estimators_:\n",
" temp = forest.estimators_[i_tree].predict([X_test_local[i]])\n",
" i_tree = i_tree + 1\n",
" inttemp = temp[0].astype(int)\n",
" if(FL_predict1[i][0] == inttemp[0]):\n",
" sum_feature_improtance = sum_feature_improtance + tree_in_forest.feature_importances_\n",
" counter = counter + 1\n",
" if(counter>0):\n",
" ave_feature_importence = sum_feature_improtance/counter\n",
" overal_wrong_feature_importance = ave_feature_importence + overal_wrong_feature_importance\n",
" second_counter = second_counter + 1\n",
"# print(ave_feature_importence)\n",
"# print(\"numbers of the trees predect the wrong predection as the blackbox is \", counter)\n",
" counter = 0\n",
" sum_feature_improtance = 0\n",
" # print(\"------------------------------------------------------------------------------------\")\n",
" else:\n",
" if(FL_predict1[i][0] != y_test_local[i][0]):\n",
" # print(\"the test sample number \", i,\" never have been miss classified by the forest.\")\n",
" never_seen = never_seen +1\n",
" if(second_counter>0):\n",
"# print(second_counter)\n",
" # print(\"the number of sampels that was miss classifed by the blackbox and classified correctly by the all forest is\", never_seen)\n",
" # print(overal_wrong_feature_importance)\n",
" avr_wrong_importance = overal_wrong_feature_importance / second_counter\n",
" # print(\"the average wrong dessition cosed by the feature\", avr_wrong_importance)\n",
" # print(\"=====================================================================================\")\n",
" print(\"the number of miss classified sampels is \", counter1)\n",
" return forest.feature_importances_"
]
},
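{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hedged usage sketch (the subsample sizes are illustrative assumptions).\n",
"# The forest surrogate is fit on the one-hot targets, so each tree predicts a\n",
"# two-column output, which is what scan_wrong's inttemp[0] indexing expects.\n",
"forest = build_forest(X_train[:5000], y_train[:5000])\n",
"FL_predict1 = model.predict(X_test[:1000])\n",
"forest_predictions = forest.predict(X_test[:1000])\n",
"fi = scan_wrong(forest_predictions, FL_predict1, forest, y_test[:1000], X_test[:1000])"
]
},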
{
"cell_type": "code",
"execution_count": 60,
"metadata": {},
"outputs": [],
"source": [
"def attack_data(inputs, feature_attacked):\n",
" z=0\n",
" C=0\n",
" z=inputs.max(axis = 0)\n",
" C=inputs.min(axis = 0)\n",
" for i in range(len(inputs)):\n",
" for j in range(len(inputs[0])):\n",
" inputs[i][j]= random.uniform(z[j], C[j])\n",
"# inputs[i][feature_attacked[1]]= random.uniform(z[feature_attacked[1]], C[feature_attacked[1]]) \n",
"# inputs[i][feature_attacked[1]]= random.uniform(z[feature_attacked[1]], C[feature_attacked[1]])\n",
"# inputs[i][feature_attacked] = random.randrange(z[feature_attacked]+1)\n",
"# print(X_test_attacked[i][att])\n",
"# if(X_test_attacked[i][att] == X_test[i][att]):\n",
"# feat_same = feat_same + 1\n",
" return inputs"
]
},
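{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Minimal demo (an assumption, not a step from the original run): attack the\n",
"# test inputs while keeping the clean copy intact by attacking np.copy(X_test).\n",
"X_test_attacked = attack_data(np.copy(X_test), feature_attacked)"
]
},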
{
"cell_type": "code",
"execution_count": 61,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[0, 1, 2, 3]"
]
},
"execution_count": 61,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"trainable_layers(model)"
]
},
{
"cell_type": "code",
"execution_count": 62,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"([array([[-0.19031775, 0.06523454, -0.14931396, ..., 0. ,\n",
" 0. , -0.29390967],\n",
" [ 0.033116 , -0.08946543, 0.01096467, ..., 0. ,\n",
" 0. , -0.015468 ],\n",
" [ 0.01199894, 0.02766883, 0.01122332, ..., 0. ,\n",
" 0. , -0.0217886 ],\n",
" ...,\n",
" [-0.00909673, -0.04540771, -0.20296723, ..., 0. ,\n",
" 0. , -0.13601643],\n",
" [-0.2878099 , 0.23572141, 0.23246315, ..., 0. ,\n",
" 0. , 0.10910301],\n",
" [ 0.20305228, 0.28390008, 0.5178579 , ..., 0. ,\n",
" 0. , -0.20772421]], dtype=float32),\n",
" array([[ 0.03976321, 0.00334442, -0.1938284 , ..., 0.12357424,\n",
" 0. , -0.4720744 ],\n",
" [-0.05385096, -0.19503492, -0.11490272, ..., -0.06129656,\n",
" 0. , -0.07149667],\n",
" [ 0.09047867, -0.00144058, -0.30648926, ..., -0.06290518,\n",
" 0. , -0.20281315],\n",
" ...,\n",
" [ 0. , 0. , 0. , ..., 0. ,\n",
" 0. , 0. ],\n",
" [ 0. , 0. , 0. , ..., 0. ,\n",
" 0. , 0. ],\n",
" [ 0.03847116, 0.3318082 , -0.12002158, ..., 0.0102334 ,\n",
" 0. , 0.19092064]], dtype=float32),\n",
" array([[-2.42347077e-01, -8.76481831e-03, -3.90259176e-02, ...,\n",
" -6.67591989e-02, 5.05356491e-03, 1.02399185e-01],\n",
" [-8.08794439e-01, -3.27832878e-01, -5.26163459e-01, ...,\n",
" -4.11394715e-01, 0.00000000e+00, 2.40460098e-01],\n",
" [ 2.49932185e-01, -3.12010467e-01, -4.72681373e-01, ...,\n",
" 6.11434951e-02, 0.00000000e+00, -8.85864496e-02],\n",
" ...,\n",
" [-1.02717876e-02, 4.91040945e-02, 2.08511353e-01, ...,\n",
" 2.92169333e-01, 3.78781557e-03, -7.43160397e-02],\n",
" [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
" 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
" [-2.40534097e-01, -1.12464696e-01, -6.61374629e-02, ...,\n",
" -2.46955007e-01, 2.02164054e-04, -1.38975129e-01]], dtype=float32),\n",
" array([[-3.04001868e-01, 3.04008663e-01],\n",
" [-3.18321645e-01, 3.18326294e-01],\n",
" [-6.49204254e-02, 6.49255514e-02],\n",
" [ 2.38253117e-01, -2.38266587e-01],\n",
" [-2.33515322e-01, 2.33533710e-01],\n",
" [ 2.04552054e-01, -2.04541385e-01],\n",
" [-2.16077894e-01, 2.16096789e-01],\n",
" [-9.29667503e-02, 9.29654241e-02],\n",
" [ 8.22023153e-02, -8.21979046e-02],\n",
" [-1.02218628e-01, 1.02235526e-01],\n",
" [-2.78488606e-01, 2.78504372e-01],\n",
" [-2.50967979e-01, 2.50985503e-01],\n",
" [ 6.83438182e-02, -6.83349371e-02],\n",
" [ 1.24650508e-01, -1.24644592e-01],\n",
" [ 6.33001328e-05, -6.16163015e-05],\n",
" [ 1.41896963e-01, -1.41895413e-01],\n",
" [-1.02908731e-01, 1.02926940e-01],\n",
" [ 5.35694361e-02, -5.35876751e-02],\n",
" [ 8.15955400e-02, -8.15925598e-02],\n",
" [-1.03019953e-01, 1.03019953e-01],\n",
" [-1.25430942e-01, 1.25459164e-01],\n",
" [-3.38193893e-01, 3.38200092e-01],\n",
" [-1.09561086e-02, 1.09702498e-02],\n",
" [-2.82736778e-01, 2.82758176e-01],\n",
" [-4.44638729e-01, 4.44639683e-01],\n",
" [-6.57172203e-02, 6.57169819e-02],\n",
" [-1.72389388e-01, 1.72392488e-01],\n",
" [-5.41939139e-02, 5.42033315e-02],\n",
" [-5.90079725e-02, 5.90344965e-02],\n",
" [ 3.66447330e-01, -3.66433740e-01],\n",
" [-2.08910614e-01, 2.08919704e-01],\n",
" [-2.05386773e-01, 2.05394983e-01],\n",
" [ 2.22557023e-01, -2.22551197e-01],\n",
" [ 1.47694349e-01, -1.47691488e-01],\n",
" [-1.98568344e-01, 1.98586702e-01],\n",
" [ 3.86301279e-02, -3.86058390e-02],\n",
" [-3.03680480e-01, 3.03690165e-01],\n",
" [ 1.28941819e-01, -1.28929913e-01],\n",
" [-1.06625021e-01, 1.06641471e-01],\n",
" [ 2.11793751e-01, -2.11783320e-01],\n",
" [-1.20612228e+00, 1.20613194e+00],\n",
" [ 2.32780397e-01, -2.32766151e-01],\n",
" [ 1.07535511e-01, -1.07531980e-01],\n",
" [-1.68534845e-01, 1.68567598e-01],\n",
" [ 1.57701567e-01, -1.57669455e-01],\n",
" [ 2.91942716e-01, -2.91933715e-01],\n",
" [ 2.90191770e-02, -2.90192217e-02],\n",
" [ 4.56188083e-01, -4.56171900e-01],\n",
" [ 2.81581283e-03, -2.81581283e-03],\n",
" [-1.18135691e-01, 1.18147850e-01]], dtype=float32)],\n",
" [array([ 3.1866699e-03, -7.6422125e-02, 3.3879340e-02, 1.4779243e-01,\n",
" 0.0000000e+00, 0.0000000e+00, 1.3526529e-05, 1.0225922e-06,\n",
" -2.2294750e-03, -2.1711849e-03, -9.2923865e-02, -3.4378596e-02,\n",
" 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 7.9754189e-02,\n",
" 9.6828096e-02, 0.0000000e+00, 6.9650002e-02, -5.8602124e-02,\n",
" -8.6331740e-05, 0.0000000e+00, 1.2667640e-01, 0.0000000e+00,\n",
" 0.0000000e+00, -2.6648588e-02, 0.0000000e+00, -3.3291716e-02,\n",
" 0.0000000e+00, 0.0000000e+00, 2.2499807e-02, 6.8744346e-02,\n",
" -5.0262503e-02, 0.0000000e+00, -8.9680202e-02, 0.0000000e+00,\n",
" -4.2678282e-02, 0.0000000e+00, -1.7710961e-02, 0.0000000e+00,\n",
" 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, -6.1750807e-02,\n",
" 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,\n",
" 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,\n",
" 0.0000000e+00, 0.0000000e+00, 5.8094569e-02, 1.2169727e-02,\n",
" 1.9261871e-10, 0.0000000e+00, -7.9979539e-02, 0.0000000e+00,\n",
" 0.0000000e+00, 7.6093122e-02, 0.0000000e+00, 0.0000000e+00,\n",
" 1.6243661e-02, 0.0000000e+00, -9.8505989e-03, 0.0000000e+00,\n",
" 0.0000000e+00, -4.3590821e-02], dtype=float32),\n",
" array([-0.10964979, 0.1273394 , 0.05826145, 0.11250992, -0.00930649,\n",
" 0.08082695, -0.10440034, 0.02672271, 0.14781642, 0.27572772,\n",
" -0.0397696 , 0.18053436, 0. , -0.04786159, 0. ,\n",
" 0.15616408, 0.0424803 , 0. , -0.0375067 , -0.00756753,\n",
" 0.01335342, -0.08301416, -0.07382136, 0.02766102, -0.21604276,\n",
" 0.04904766, -0.00283363, 0.01198358, 0.17403054, -0.08457427,\n",
" 0.06056517, -0.00864101, 0.02029612, 0.12778968, 0.14824837,\n",
" 0.17251332, 0.03519725, 0.05309688, 0.1472145 , -0.18282993,\n",
" 0.10138815, 0.01851342, 0.03132945, 0. , -0.19095713,\n",
" 0.07761121, 0.17995 , 0.16866425, 0. , -0.02218707],\n",
" dtype=float32),\n",
" array([ 0.34423378, 0.44879648, -0.04138201, 0.48468772, 0.27873045,\n",
" -0.30794245, 0.15183273, 0.08334076, 0.18456669, -0.178191 ,\n",
" -0.14348036, 0.57680756, 0.60798985, 0.41697726, -0.00238903,\n",
" -0.26822644, -0.10311142, 0.11896864, 0.29975793, 0.23435119,\n",
" 0.23297757, 0.00584415, 0.31422138, 0.4213791 , -0.1971436 ,\n",
" -0.19466306, -0.08916584, 0.11673178, 0.06989807, -0.05853122,\n",
" -0.16468427, 0.20225608, 0.4647019 , -0.20615612, -0.06997643,\n",
" -0.16313455, 0.3702965 , 0.03510106, -0.06537277, -0.03514916,\n",
" 0.20234883, 0.42463556, -0.00177155, 0.18248317, -0.00233091,\n",
" 0.16106895, 0.03673995, -0.28332692, 0.03024025, -0.32204401],\n",
" dtype=float32),\n",
" array([-0.14419317, 0.14419758], dtype=float32)])"
]
},
"execution_count": 62,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"get_updates(model, X_train, y_train, 32, 2)"
]
},
{
"cell_type": "code",
"execution_count": 63,
"metadata": {},
"outputs": [],
"source": [
"W = get_parameters(model)[0]\n",
"B = get_parameters(model)[1]"
]
},
{
"cell_type": "code",
"execution_count": 64,
"metadata": {},
"outputs": [],
"source": [
"#AUXILIARY METHODS FOR FL INSPECTION\n",
"\n",
"# TRANSFORM ALL WEIGHT TENSORS TO 1D ARRAY\n",
"def flatten_weights(w_in):\n",
" h = w_in[0].reshape(-1)\n",
" for w in w_in[1:]:\n",
" h = np.append(h, w.reshape(-1))\n",
" return h\n",
"\n",
"# TRANSFORM ALL BIAS TENSORS TO 1D ARRAY\n",
"def flatten_biases(b_in):\n",
" h = b_in[0].reshape(-1)\n",
" for b in b_in[1:]:\n",
" h = np.append(h, b.reshape(-1))\n",
" return h\n",
"\n",
"# TRANSFORM WEIGHT AND BIAS TENSORS TO 1D ARRAY\n",
"def flatten_parameters(w_in, b_in):\n",
" w = flatten_weights(w_in)\n",
" b = flatten_biases(b_in)\n",
" return w, b\n",
"\n",
"# COMPUTE EUCLIDEAN DISTANCE OF WEIGHTS\n",
"def dist_weights(w_a, w_b):\n",
" wf_a = flatten_weights(w_a)\n",
" wf_b = flatten_weights(w_b)\n",
" return euclidean(wf_a, wf_b)\n",
"\n",
"# COMPUTE EUCLIDEAN DISTANCE OF BIASES\n",
"def dist_biases(b_a, b_b):\n",
" bf_a = flatten_biases(b_a)\n",
" bf_b = flatten_biases(b_b)\n",
" return euclidean(bf_a, bf_b)\n",
"\n",
"# COMPUTE EUCLIDEAN DISTANCE OF WEIGHTS AND BIASES\n",
"def dist_parameters(w_a, b_a, w_b, b_b):\n",
" wf_a, bf_a = flatten_parameters(w_a, b_a)\n",
" wf_b, bf_b = flatten_parameters(w_b, b_b)\n",
" return euclidean(np.append(wf_a, bf_a), np.append(wf_b, bf_b))"
]
},
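{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hedged example of the inspection helpers: measure how far a perturbed copy\n",
"# of the parameters drifts from the W and B obtained above. The noise scale\n",
"# and the perturbed copies are assumptions made for illustration only.\n",
"W_noisy = [w + 0.01 * np.random.randn(*w.shape).astype(w.dtype) for w in W]\n",
"B_noisy = [b + 0.01 * np.random.randn(*b.shape).astype(b.dtype) for b in B]\n",
"print('weight distance:', dist_weights(W, W_noisy))\n",
"print('bias distance:  ', dist_biases(B, B_noisy))\n",
"print('joint distance: ', dist_parameters(W, B, W_noisy, B_noisy))"
]
},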
{
"cell_type": "code",
"execution_count": 65,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"39"
]
},
"execution_count": 65,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(W[0])"
]
},
{
"cell_type": "code",
"execution_count": 66,
"metadata": {},
"outputs": [],
"source": [
"# BASELINE SCENARIO\n",
"#buid the model as base line for the shards (sequential)\n",
"# Number of peers\n",
"#accordin to what we need\n",
"n_peers = 100\n",
"ss = int(len(X_train)/n_peers)\n",
"inputs_in = X_train[0*ss:0*ss+ss]\n",
"outputs_in = y_train[0*ss:0*ss+ss]\n",
"def build_model(X_t, y_t):\n",
" model = Sequential()\n",
" model.add(Dense(70, input_dim=39, activation='relu'))\n",
" model.add(Dense(64, activation='relu'))\n",
" model.add(Dense(50, activation='relu'))\n",
" model.add(Dense(2, activation='softmax'))\n",
" #sgd = optimizers.SGD(learning_rate=0.0001, momentum=0.9, nesterov=True)\n",
" model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])\n",
" model.fit(X_t,\n",
" y_t, \n",
" #inputs_in, \n",
" #outputs_in, \n",
"# X_train,\n",
"# y_train,\n",
" batch_size=32, \n",
" epochs=100, \n",
" verbose=1,\n",
" validation_data=((X_test, y_test)))\n",
" return model\n",
"\n",
"# model = build_model(inputs_in, outputs_in)"
]
},
{
"cell_type": "code",
"execution_count": 67,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"sequential_4\"\n",
"_________________________________________________________________\n",
"Layer (type) Output Shape Param # \n",
"=================================================================\n",
"dense_13 (Dense) (None, 70) 2800 \n",
"_________________________________________________________________\n",
"dense_14 (Dense) (None, 50) 3550 \n",
"_________________________________________________________________\n",
"dense_15 (Dense) (None, 50) 2550 \n",
"_________________________________________________________________\n",
"dense_16 (Dense) (None, 2) 102 \n",
"=================================================================\n",
"Total params: 9,002\n",
"Trainable params: 9,002\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n"
]
},
{
"data": {
"text/plain": [
"None"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"display(model.summary())"
]
},
{
"cell_type": "code",
"execution_count": 68,
"metadata": {},
"outputs": [],
"source": [
"# predict probabilities for test set\n",
"yhat_probs = model.predict(X_test, verbose=0)\n",
"# predict crisp classes for test set\n",
"yhat_classes = model.predict_classes(X_test, verbose=0)"
]
},
{
"cell_type": "code",
"execution_count": 69,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Accuracy: 0.998316\n",
"Precision: 0.998423\n",
"Recall: 0.998711\n",
"F1 score: 0.998567\n"
]
}
],
"source": [
"# accuracy: (tp + tn) / (p + n)\n",
"accuracy = accuracy_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n",
"print('Accuracy: %f' % accuracy)\n",
"# precision tp / (tp + fp)\n",
"precision = precision_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n",
"print('Precision: %f' % precision)\n",
"# recall: tp / (tp + fn)\n",
"recall = recall_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n",
"print('Recall: %f' % recall)\n",
"# f1: 2 tp / (2 tp + fp + fn)\n",
"f1 = f1_score(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n",
"print('F1 score: %f' % f1)"
]
},
{
"cell_type": "code",
"execution_count": 70,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[55991, 126],\n",
" [ 103, 79782]], dtype=int64)"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAQsAAADtCAYAAACoP1B5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAP4klEQVR4nO3dbaxlVXnA8f/DAA5ahxdRJGIrTSdUagIiAVqTRqXCSBvxg03AphBDMo1Bg2mTFvuFVmti+6FWUmtD4lRoLEhoSYlBphOqIU0QAaUIjMiIVadQRhh8aYm83Pv0w163c7y99+51ZJ05s/f5/5KVe846++67OMCT9bbXE5mJJPU5bN4NkDQMBgtJVQwWkqoYLCRVMVhIqmKwkFTl8Hk3QBqD89/6snxq/1LVtffe/+zOzNw24yY1Z7CQGnhy/xJ37Typ6tojTvzW8TNuzkwYLKQmkqVcnncjZspgITWQwDLj3g1tsJAaSJLns27OYqgMFlIjY+9ZLPzSaURsi4iHI2JPRFw57/aMTUTsiIh9EfHAvNsySwkskVVlqBY6WETEJuCTwDuAU4GLI+LU+bZqdD4DDG6Z8GexTFaVoVroYAGcBezJzEcz8zngBuDCObdpVDLzDmD/vNsxawksZVaVoVr0YPEa4HsT7/eWOmlqy5VlqBZ9gjPWqBtu6Nfc5MDnI2oserDYC7x24v1JwGNzaosGLBOeH3esWPhgcTewNSJOBv4TuAh4z3ybpGEKltbsqI7HQs9ZZOYLwPuBncBu4MbMfHC+rRqXiLgeuBM4JSL2RsRl827TLCSwnHVlqBa9Z0Fm3grcOu92jFVmXjzvNhwsY+9ZLHywkFroNmUZLCRVWE6DhaQe9iwkVUmC53PTvJsxUwu9GrIiIrbPuw1jN/bveKVnUVOGymDRGfV/yIeIkX/HwVIeVlWGymGI1EB3UtZwA0GNmQSLI48+Kje/esssbj0Tm094OVtOOWFY22W++fy8WzCVzbyULXHcoL7jn/A/PJfPVo8bhjzEqDGTYLH51Vs4+2/dNT1T5+6ddwtG7668vfrazBj0EKPGuP/ppINomagqfSLilIi4b6L8KCI+GBHHRcSuiHik/Dy2XB8RcXU57e3+iDhj4l6XlusfiYhLJ+rfFBFfL79zdUT0NsxgITWQBM/l4VWl916ZD2fm6Zl5OvAm4BngZuBK4PbM3ArcXt5Dd9Lb1lK2A58CiIjjgKuAs+kOerpqJcCUa7ZP/F7vaWYGC6mBlQnOmjKlc4FvZeZ36E5xu7bUXwu8q7y+ELguO18GjomIE4HzgV2ZuT8znwZ2AdvKZ1sy887MTOC6iXuty9UQqZGl+u3ex0fEPRPvr8nMa9a59iLg+vL6hMx8HCAzH4+IV5X69U5826h+7xr1GzJYSA0kwVJ9r+HJzDyz76KIOBJ4J/ChvkvXbNL09RtyGCI1spyHVZUpvAP4amY+Ud4/UYYQlJ/7Sv16J75tVH/SGvUbMlhIDXTbvQ+rKlO4mANDEIBbgJUVjUuBf56ov6SsipwD/LAMV3YC50XEsWVi8zxgZ/nsxxFxTlkFuWTiXutyGCI10PpBsoh4KfB24Pcmqj8G3FhOG/su8Nul/lbgAmAP3crJewEyc39EfITu+EiAD2fmSlqG99HldDkK+EIpGzJYSA1k0nRTVmY+A7xiVd1TdKsjq69N4PJ17rMD2LFG/T3AG6Zpk8FCaqJuw9WQGSykBrqMZOOeAjRYSI1MOXk5OAYLqYEkPINTUh17FpJ6LcIZnAYLqYEuI5k9C0kVPClLUq/MsGchqY77LCT16g6/cRgiqdf4D+w1WEgNJLh0KqmfOzglVTMjmaRe3XkW9iwkVXAYIqlXN2fhMERShbFv9x53KJQOkiR4YXlTVakREcdExE0R8Y2I2B0Rv2quU2kkWiVGLj4B3JaZvwycBuzGXKfS8K2shtSUPhGxBfh14NPdvfO5zPwB5jqVxmGKCc6+XKe/CHwf+LuIOA24F7gCc51KwzflDs6+XKeHA2cAH8jMuyLiExwYcqzFXKfSkDScs9gL7M3Mu8r7m+iCh7lOpaHrjtWLqtJ7r8z/Ar4XEaeUqnOBhzDXqTQCGdXLopU+AHw2Io4EHqXLX3oY5jqVhq314TeZeR+w1ryGuU6lofPZEEm9VuYsxqxqgjMitkXEw2Vr6EZLONLCajXBeajq7VlExCbgk8Db6ZZc7o6IWzLzoVk3ThoKT8rqnAXsycxHASLiBrrtpQYLaUXCCz6ivuaW0bNn0xxpmBZhzqImWFRtDY2I7XRPsbH5hJe/yGZJwzP2YFHTb1pvy+hPycxrMvPMzDzziKOPatU+aRBW5izGPMFZEyzuBrZGxMllN9lFdNtLJU3IjKoyVL3DkMx8ISLeT7fPfBOwIzMfnHnLpIExfSGQmbfS7T+XtIbM8c9ZuINTaiJYWnbpVFKFIc9H1DBYSA24z0JSnezmLcbMYCE14mqIpF6JcxaSqgx7d2YNg4XUyPLyuIPFuBeGpYMks+1274j4j5KL9L6VhETmOpVGYgYPkr01M0+fSEhkrlNpDDLryosw11ynBgupkSmGIcdHxD0TZftatwP+JSLunfj8p3KdAuY6lYYmmerx875cpwBvzszHSvLjXRHxjQ2uNdepNCRZWarulflY+bkPuJluzsFcp9LgJeRyVJU+EfGyiHj5ymu6HKUPYK5TaRwa7uA8Abi5rGYeDvxDZt4WEXdjrlNp+Fo9SFbSbpy2Rv1TmOtUGjafDZFUJwGDhaQanmchqY7BQlK/umXRITNYSC2kE5ySajkMkVTHnoWkGvYsJFUxWEjqVR4kGzODhdSKPQtJVVw6lVQj7FlI6jXNMVgDZbCQmgiHIZIq2bOQVGV53g2YLYOF1MICHH7j6d5SI5F1pfp+EZsi4msR8fny/uSIuKvkLf1cRBxZ6l9S3u8pn79u4h4fKvUPR8T5E/XbSt2eiLhy9d9ei8FCaqVl4pDOFcDuifd/Dny85Dp9Gris1F8GPJ2ZvwR8vFxHRJwKXAT8Cl0u078pAWgT8Em6HKmnAheXazc0m2HIN5+Hc/f2X6ef2c7H7pt3E0bvrPOfmdvfjoiTgN8EPgr8fsnv8TbgPeWSa4E/oUtwfGF5DXAT8Nfl+guBGzLzWeDbEbGHLlkRwJ5yijgRcUO59qGN2mTPQmpkimFITa7TvwL+kAPTpq8AfpCZL5T3k/lJ/y+nafn8h+X6aXOgbsgJTqmVRrlOI+K3gH2ZeW9EvGWleq2/2PPZevVrdRJ6B0gGC6mFpOXS6ZuBd0bEBcBmYAtdT+OYiDi89B4m85Ou5DTdGxGHA0cD+1k/1ykb1K/LYYjUSKvVkMz8UGaelJmvo5ug/NfM/B3gi8C7y2Wrc52u5EB9d7k+S/1FZbXkZGAr8BW6dIZby+rKkeVv3NLXLnsWUiuz38H5R8ANEfFnwNeAT5f6TwN/XyYw99P9z09mPhgRN9JNXL4AXJ6ZSwAR8X66xMmbgB2Z+WDfHzdYSK3MIFhk5
peAL5XXj3JgNWPymp9wIEny6s8+Sreisrr+VrqEytUMFlID0264GiKDhdTKyLd7GyykVuxZSKoRPnUqqZdzFpKqGSwkVTFYSKox9mGI270lVbFnIbUy8p6FwUJqIV06lVTLnoWkPsH4JzgNFlIrBgtJvdzBKamawUJSDVdDJNWxZyGp1/TZxgbH7d5SI61O946IzRHxlYj494h4MCL+tNSb61QahXa5Tp8F3paZpwGnA9si4hzmnOvUYCE10jBvSGbmf5e3R5SSdLlObyr11wLvKq8vLO8pn5+7OtdpZn4bWMl1ehYl12lmPges5DrdkMFCaqW+Z9Gb67T0AO4D9gG7gG9hrlNp+KZMBbBhrlOAkgzo9Ig4BrgZeP1al638+XU+a5rr1J6F1Eq7OYsDt8z8AV2SoXMouU7LR2vlOqUy1+lGOVDXZbCQGmm4GvLK0qMgIo4CfgPYjblOpZFot8/iRODasmpxGHBjZn4+Ih7CXKfSCDQKFpl5P/DGNerNdSoNnk+dSqpmsJBUw6dOJVVxGCKp3wI8dWqwkFoxWEjqswine/fu4IyIHRGxLyIeOBgNkgZrBtu9DyU1270/Q/csvKQNRGZVGareYUhm3jF58o6kNZi+UFK14XYaqjQLFuUAj+0Am3lpq9tKg7HwE5y1MvOazDwzM888gpe0uq00HCOf4HQYIrWwAA+S1SydXg/cCZwSEXsj4rK+35EW0qL3LDLz4oPREGnIFmFTlsMQqZFYHne0MFhILQx8iFHDYCE1MvZNWZ7uLbXSaIIzIl4bEV+MiN0l1+kVpf64iNhVcp3uiohjS31ExNUlb+n9EXHGxL0uLdc/EhGXTtS/KSK+Xn7n6pLBbEMGC6mRVqkA6E7i/oPMfD1dvpDLSy7SK4HbS67T28t76HKWbi1lO/Ap6IILcBVwNt1Bv1etBJhyzfaJ3+t9/stgIbWQQGZd6btV5uOZ+dXy+sd0OUNew0/nNF2d6/S6kiP1y3TJiE4Ezgd2Zeb+zHyaLg3itvLZlsy8s+QXuW7iXutyzkJqZIo5i+Mj4p6J99dk5jVr3rN7iPONwF3ACZn5OHQBJSJeVS6bNqfpa8rr1fUbMlhIDUy5z6I31ylARPwc8I/ABzPzRxtMK0yb63S9+g05DJFaqB2CVJ5nERFH0AWKz2bmP5XqJ8oQgvJzX6mfNqfp3vJ6df2GDBZSIw1znQZdSsLdmfmXEx9N5jRdnev0krIqcg7wwzJc2QmcFxHHlonN84Cd5bMfR8Q55W9dMnGvdTkMkVpptynrzcDvAl+PiPtK3R8DHwNuLM9nfZcDKQtvBS4A9gDPAO8FyMz9EfERukTIAB/OzP3l9fvoTsE7CvhCKRsyWEiNtHo2JDP/jbXnFQDOXeP6BC5f5147gB1r1N8DvGGadhkspBYS8NkQSTXGvt3bYCG1MuCTu2sYLKRGPM9CUj8fUZdUo9vBOe5oYbCQWnGCU1INexaS+mW6z0JSHVdDJNVxGCKpl1nUJVWzZyGpyrhjhcFCasWlU0n9ElgyWEjqEaQ9C0mVDBaSqow8WHi6t9RC0j1IVlMqRMSOiNgXEQ9M1JnrVBqDyKwqlT7D/88/aq5TaRQaJhnKzDuA/auqzXUqDV4mLFfv967OdbqKuU6lUah/NqQq1+kUzHUqDUnjOYu1mOtUGoWGcxbrMNepNHiNM5JFxPXAW+jmN/bSrWrMNddp5Aw2kkTE94HvNL/x7BwPPDnvRozcEL/jX8jMV9ZcePTmV+ev/fyl/RcCtz3yF/c2nrM4KGbSs6j9gg8VEXHPEP/lDclCfMcj38HpMERqIYGlcR+VZbCQmkhIg8UiqNkQoxdn/N+xw5Dxq9w9pxdh9N9x49WQQ5HBQmrFnoWkKgYLSb0yYWlp3q2YKYOF1Io9C0lVDBaS+plFXVKNhHRTlqQq9iwkVXHOQlIvl04l1cr6A3sHyWAhNfGij8w75BkspBZ8kExSNZdOJfVJIO1ZSOqVnpQlqVKOfOl0JqkApEUTEbfRpTuo8WRm9mYtP9QYLCRVMX2hpCoGC0lVDBaSqhgsJFUxWEiq8r+iNcFVHQEJ/gAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 288x288 with 2 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"# ROC AUC\n",
"# auc = roc_auc_score(shard1_traintest[i][\"y_test\"], yhat_probs)\n",
"# print('ROC AUC: %f' % auc)\n",
"# confusion matrix\n",
"mat = confusion_matrix(np.argmax(y_test, axis=1), np.argmax(model.predict(X_test), axis=1))\n",
"\n",
"display(mat)\n",
"plt.matshow(mat);\n",
"plt.colorbar()\n",
"plt.show()\n"
]
},
{
"cell_type": "code",
"execution_count": 71,
"metadata": {},
"outputs": [],
"source": [
"def savecsv(lists, filename):\n",
" #print lists\n",
" if os.path.isfile(filename):\n",
" os.remove(filename)\n",
" with open(filename, 'a') as csvfile:\n",
" w = csv.DictWriter(csvfile, lists.keys())\n",
" w.writeheader()\n",
" w.writerow(lists)\n",
"# fwriter = csv.writer(csvfile, delimiter=',',lineterminator='\\n')\n",
"# fwriter.writerows(lists)\n",
" csvfile.close()\n",
" \n",
" \n",
"# import csv\n",
"\n",
"# my_dict = {\"test\": 1, \"testing\": 2}\n",
"\n",
"# with open('mycsvfile.csv', 'wb') as f: # Just use 'w' mode in 3.x\n",
"# w = csv.DictWriter(f, my_dict.keys())\n",
"# w.writeheader()\n",
"# w.writerow(my_dict)"
]
},
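{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Usage sketch with a hypothetical dict and file name: savecsv stores one\n",
"# dict as a single-row CSV whose header is the dict's keys.\n",
"savecsv({'feature_0': 0.12, 'feature_1': 0.08}, 'example_feature_importance.csv')"
]
},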
{
"cell_type": "code",
"execution_count": 72,
"metadata": {},
"outputs": [],
"source": [
"FI_dic1= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}#,10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"FI_dic2= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[]}#,10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"# FI_dic3= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"# FI_dic4= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"# FI_dic5= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"# FI_dic6= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"# FI_dic7= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"# FI_dic8= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"# FI_dic9= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"# FI_dic10= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[]}\n",
"# FI_dic11= {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[],18:[],19:[],20:[],\n",
"# 21:[],22:[],23:[],24:[],25:[],26:[],27:[],28:[],29:[],30:[]}\n",
"dic = {0:[],1:[],2:[],3:[],4:[],5:[],6:[],7:[],8:[],9:[],10:[],11:[],12:[],13:[],14:[],15:[],16:[],17:[]}"
]
},
{
"cell_type": "code",
"execution_count": 73,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Train on 18068 samples, validate on 136002 samples\n",
"Epoch 1/100\n",
"18068/18068 [==============================] - ETA: 39s - loss: 0.6788 - accuracy: 0.562 - ETA: 0s - loss: 0.6006 - accuracy: 0.691 - ETA: 0s - loss: 0.5092 - accuracy: 0.76 - ETA: 0s - loss: 0.4488 - accuracy: 0.79 - ETA: 0s - loss: 0.4155 - accuracy: 0.81 - ETA: 0s - loss: 0.3891 - accuracy: 0.82 - ETA: 0s - loss: 0.3593 - accuracy: 0.84 - ETA: 0s - loss: 0.3467 - accuracy: 0.85 - 2s 105us/step - loss: 0.3436 - accuracy: 0.8538 - val_loss: 0.2597 - val_accuracy: 0.8811\n",
"Epoch 2/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.2291 - accuracy: 0.90 - ETA: 0s - loss: 0.2141 - accuracy: 0.91 - ETA: 0s - loss: 0.2275 - accuracy: 0.90 - ETA: 0s - loss: 0.2265 - accuracy: 0.90 - ETA: 0s - loss: 0.2190 - accuracy: 0.91 - ETA: 0s - loss: 0.2150 - accuracy: 0.91 - ETA: 0s - loss: 0.2104 - accuracy: 0.91 - ETA: 0s - loss: 0.2042 - accuracy: 0.91 - 2s 101us/step - loss: 0.2036 - accuracy: 0.9200 - val_loss: 0.1762 - val_accuracy: 0.9202\n",
"Epoch 3/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1665 - accuracy: 0.93 - ETA: 0s - loss: 0.1756 - accuracy: 0.93 - ETA: 0s - loss: 0.1940 - accuracy: 0.92 - ETA: 0s - loss: 0.1856 - accuracy: 0.92 - ETA: 0s - loss: 0.1829 - accuracy: 0.92 - ETA: 0s - loss: 0.1831 - accuracy: 0.92 - ETA: 0s - loss: 0.1887 - accuracy: 0.92 - ETA: 0s - loss: 0.1854 - accuracy: 0.92 - 2s 102us/step - loss: 0.1833 - accuracy: 0.9264 - val_loss: 0.1747 - val_accuracy: 0.9309\n",
"Epoch 4/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.3563 - accuracy: 0.78 - ETA: 0s - loss: 0.1465 - accuracy: 0.94 - ETA: 0s - loss: 0.1444 - accuracy: 0.94 - ETA: 0s - loss: 0.1452 - accuracy: 0.94 - ETA: 0s - loss: 0.1484 - accuracy: 0.94 - ETA: 0s - loss: 0.1492 - accuracy: 0.94 - ETA: 0s - loss: 0.1500 - accuracy: 0.94 - ETA: 0s - loss: 0.1510 - accuracy: 0.94 - 2s 96us/step - loss: 0.1506 - accuracy: 0.9438 - val_loss: 0.1533 - val_accuracy: 0.9412\n",
"Epoch 5/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1071 - accuracy: 0.93 - ETA: 0s - loss: 0.1414 - accuracy: 0.95 - ETA: 0s - loss: 0.1402 - accuracy: 0.95 - ETA: 0s - loss: 0.1363 - accuracy: 0.95 - ETA: 0s - loss: 0.1369 - accuracy: 0.95 - ETA: 0s - loss: 0.1350 - accuracy: 0.95 - ETA: 0s - loss: 0.1391 - accuracy: 0.94 - 2s 99us/step - loss: 0.1414 - accuracy: 0.9487 - val_loss: 0.1322 - val_accuracy: 0.9524\n",
"Epoch 6/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1931 - accuracy: 0.90 - ETA: 0s - loss: 0.1383 - accuracy: 0.94 - ETA: 0s - loss: 0.1314 - accuracy: 0.95 - ETA: 0s - loss: 0.1356 - accuracy: 0.95 - ETA: 0s - loss: 0.1363 - accuracy: 0.95 - ETA: 0s - loss: 0.1372 - accuracy: 0.95 - ETA: 0s - loss: 0.1372 - accuracy: 0.95 - 2s 100us/step - loss: 0.1405 - accuracy: 0.9487 - val_loss: 0.1310 - val_accuracy: 0.9576\n",
"Epoch 7/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0796 - accuracy: 1.00 - ETA: 0s - loss: 0.1566 - accuracy: 0.94 - ETA: 0s - loss: 0.1336 - accuracy: 0.95 - ETA: 0s - loss: 0.1296 - accuracy: 0.95 - ETA: 0s - loss: 0.1308 - accuracy: 0.95 - ETA: 0s - loss: 0.1323 - accuracy: 0.95 - ETA: 0s - loss: 0.1299 - accuracy: 0.95 - ETA: 0s - loss: 0.1291 - accuracy: 0.95 - 2s 100us/step - loss: 0.1291 - accuracy: 0.9533 - val_loss: 0.1146 - val_accuracy: 0.9584\n",
"Epoch 8/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0266 - accuracy: 1.00 - ETA: 0s - loss: 0.1207 - accuracy: 0.96 - ETA: 0s - loss: 0.1266 - accuracy: 0.95 - ETA: 0s - loss: 0.1208 - accuracy: 0.96 - ETA: 0s - loss: 0.1163 - accuracy: 0.96 - ETA: 0s - loss: 0.1196 - accuracy: 0.95 - ETA: 0s - loss: 0.1160 - accuracy: 0.96 - ETA: 0s - loss: 0.1160 - accuracy: 0.96 - 2s 97us/step - loss: 0.1161 - accuracy: 0.9610 - val_loss: 0.1006 - val_accuracy: 0.9674\n",
"Epoch 9/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0423 - accuracy: 1.00 - ETA: 0s - loss: 0.1142 - accuracy: 0.96 - ETA: 0s - loss: 0.1189 - accuracy: 0.95 - ETA: 0s - loss: 0.1210 - accuracy: 0.95 - ETA: 0s - loss: 0.1186 - accuracy: 0.95 - ETA: 0s - loss: 0.1148 - accuracy: 0.95 - ETA: 0s - loss: 0.1155 - accuracy: 0.95 - 2s 99us/step - loss: 0.1134 - accuracy: 0.9593 - val_loss: 0.1345 - val_accuracy: 0.9470\n",
"Epoch 10/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.1500 - accuracy: 0.93 - ETA: 0s - loss: 0.1266 - accuracy: 0.94 - ETA: 0s - loss: 0.1323 - accuracy: 0.94 - ETA: 0s - loss: 0.1268 - accuracy: 0.95 - ETA: 0s - loss: 0.1307 - accuracy: 0.95 - ETA: 0s - loss: 0.1280 - accuracy: 0.95 - ETA: 0s - loss: 0.1250 - accuracy: 0.95 - ETA: 0s - loss: 0.1211 - accuracy: 0.95 - 2s 100us/step - loss: 0.1208 - accuracy: 0.9557 - val_loss: 0.0943 - val_accuracy: 0.9679\n",
"Epoch 11/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0425 - accuracy: 1.00 - ETA: 0s - loss: 0.1098 - accuracy: 0.95 - ETA: 0s - loss: 0.0987 - accuracy: 0.96 - ETA: 0s - loss: 0.1052 - accuracy: 0.96 - ETA: 0s - loss: 0.1071 - accuracy: 0.96 - ETA: 0s - loss: 0.1099 - accuracy: 0.95 - ETA: 0s - loss: 0.1124 - accuracy: 0.95 - 2s 100us/step - loss: 0.1155 - accuracy: 0.9574 - val_loss: 0.1408 - val_accuracy: 0.9464\n",
"Epoch 12/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0991 - accuracy: 0.96 - ETA: 0s - loss: 0.1338 - accuracy: 0.95 - ETA: 0s - loss: 0.1271 - accuracy: 0.95 - ETA: 0s - loss: 0.1174 - accuracy: 0.95 - ETA: 0s - loss: 0.1149 - accuracy: 0.95 - ETA: 0s - loss: 0.1109 - accuracy: 0.96 - ETA: 0s - loss: 0.1107 - accuracy: 0.96 - ETA: 0s - loss: 0.1114 - accuracy: 0.96 - 2s 104us/step - loss: 0.1107 - accuracy: 0.9619 - val_loss: 0.0843 - val_accuracy: 0.9707\n",
"Epoch 13/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.1949 - accuracy: 0.90 - ETA: 0s - loss: 0.0911 - accuracy: 0.96 - ETA: 0s - loss: 0.0872 - accuracy: 0.96 - ETA: 0s - loss: 0.0943 - accuracy: 0.96 - ETA: 0s - loss: 0.1002 - accuracy: 0.96 - ETA: 0s - loss: 0.1068 - accuracy: 0.96 - ETA: 0s - loss: 0.1112 - accuracy: 0.95 - 2s 102us/step - loss: 0.1098 - accuracy: 0.9592 - val_loss: 0.0916 - val_accuracy: 0.9700\n",
"Epoch 14/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0711 - accuracy: 0.96 - ETA: 0s - loss: 0.0996 - accuracy: 0.96 - ETA: 0s - loss: 0.1074 - accuracy: 0.96 - ETA: 0s - loss: 0.1204 - accuracy: 0.95 - ETA: 0s - loss: 0.1184 - accuracy: 0.95 - ETA: 0s - loss: 0.1118 - accuracy: 0.95 - ETA: 0s - loss: 0.1081 - accuracy: 0.96 - 2s 97us/step - loss: 0.1091 - accuracy: 0.9604 - val_loss: 0.1071 - val_accuracy: 0.9602\n",
"Epoch 15/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0172 - accuracy: 1.00 - ETA: 0s - loss: 0.1060 - accuracy: 0.96 - ETA: 0s - loss: 0.1036 - accuracy: 0.95 - ETA: 0s - loss: 0.1007 - accuracy: 0.96 - ETA: 0s - loss: 0.0968 - accuracy: 0.96 - ETA: 0s - loss: 0.0999 - accuracy: 0.96 - ETA: 0s - loss: 0.1008 - accuracy: 0.96 - 2s 98us/step - loss: 0.0995 - accuracy: 0.9649 - val_loss: 0.1024 - val_accuracy: 0.9661\n",
"Epoch 16/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0327 - accuracy: 1.00 - ETA: 0s - loss: 0.0968 - accuracy: 0.96 - ETA: 0s - loss: 0.1199 - accuracy: 0.95 - ETA: 0s - loss: 0.1218 - accuracy: 0.95 - ETA: 0s - loss: 0.1172 - accuracy: 0.95 - ETA: 0s - loss: 0.1132 - accuracy: 0.95 - ETA: 0s - loss: 0.1132 - accuracy: 0.96 - 2s 99us/step - loss: 0.1109 - accuracy: 0.9613 - val_loss: 0.0845 - val_accuracy: 0.9723\n",
"Epoch 17/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0223 - accuracy: 1.00 - ETA: 0s - loss: 0.0942 - accuracy: 0.96 - ETA: 0s - loss: 0.0892 - accuracy: 0.96 - ETA: 0s - loss: 0.0889 - accuracy: 0.96 - ETA: 0s - loss: 0.0900 - accuracy: 0.96 - ETA: 0s - loss: 0.0925 - accuracy: 0.96 - ETA: 0s - loss: 0.0982 - accuracy: 0.96 - ETA: 0s - loss: 0.0954 - accuracy: 0.96 - 2s 99us/step - loss: 0.0955 - accuracy: 0.9666 - val_loss: 0.0854 - val_accuracy: 0.9719\n",
"Epoch 18/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"18068/18068 [==============================] - ETA: 0s - loss: 0.0278 - accuracy: 1.00 - ETA: 0s - loss: 0.0835 - accuracy: 0.97 - ETA: 0s - loss: 0.0847 - accuracy: 0.97 - ETA: 0s - loss: 0.0903 - accuracy: 0.96 - ETA: 0s - loss: 0.0930 - accuracy: 0.96 - ETA: 0s - loss: 0.0942 - accuracy: 0.96 - ETA: 0s - loss: 0.0909 - accuracy: 0.96 - 2s 100us/step - loss: 0.0904 - accuracy: 0.9677 - val_loss: 0.1059 - val_accuracy: 0.9670\n",
"Epoch 19/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1972 - accuracy: 0.93 - ETA: 0s - loss: 0.0773 - accuracy: 0.97 - ETA: 0s - loss: 0.0849 - accuracy: 0.97 - ETA: 0s - loss: 0.0869 - accuracy: 0.97 - ETA: 0s - loss: 0.0817 - accuracy: 0.97 - ETA: 0s - loss: 0.0803 - accuracy: 0.97 - ETA: 0s - loss: 0.0845 - accuracy: 0.97 - 2s 97us/step - loss: 0.0852 - accuracy: 0.9716 - val_loss: 0.1073 - val_accuracy: 0.9576\n",
"Epoch 20/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1471 - accuracy: 0.93 - ETA: 0s - loss: 0.0899 - accuracy: 0.96 - ETA: 0s - loss: 0.0802 - accuracy: 0.97 - ETA: 0s - loss: 0.0826 - accuracy: 0.97 - ETA: 0s - loss: 0.0898 - accuracy: 0.96 - ETA: 0s - loss: 0.0912 - accuracy: 0.96 - ETA: 0s - loss: 0.0898 - accuracy: 0.96 - 2s 99us/step - loss: 0.0899 - accuracy: 0.9690 - val_loss: 0.0760 - val_accuracy: 0.9753\n",
"Epoch 21/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1002 - accuracy: 0.96 - ETA: 0s - loss: 0.0704 - accuracy: 0.97 - ETA: 0s - loss: 0.0741 - accuracy: 0.97 - ETA: 0s - loss: 0.0735 - accuracy: 0.97 - ETA: 0s - loss: 0.0729 - accuracy: 0.97 - ETA: 0s - loss: 0.0734 - accuracy: 0.97 - ETA: 0s - loss: 0.0755 - accuracy: 0.97 - 2s 102us/step - loss: 0.0755 - accuracy: 0.9735 - val_loss: 0.0788 - val_accuracy: 0.9719\n",
"Epoch 22/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.2126 - accuracy: 0.90 - ETA: 0s - loss: 0.0985 - accuracy: 0.96 - ETA: 0s - loss: 0.0866 - accuracy: 0.96 - ETA: 0s - loss: 0.0771 - accuracy: 0.97 - ETA: 0s - loss: 0.0784 - accuracy: 0.97 - ETA: 0s - loss: 0.0791 - accuracy: 0.97 - ETA: 0s - loss: 0.0772 - accuracy: 0.97 - 2s 97us/step - loss: 0.0767 - accuracy: 0.9727 - val_loss: 0.0837 - val_accuracy: 0.9757\n",
"Epoch 23/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0565 - accuracy: 0.96 - ETA: 0s - loss: 0.0798 - accuracy: 0.97 - ETA: 0s - loss: 0.0660 - accuracy: 0.97 - ETA: 0s - loss: 0.0723 - accuracy: 0.97 - ETA: 0s - loss: 0.0793 - accuracy: 0.97 - ETA: 0s - loss: 0.0781 - accuracy: 0.97 - ETA: 0s - loss: 0.0759 - accuracy: 0.97 - 2s 98us/step - loss: 0.0767 - accuracy: 0.9733 - val_loss: 0.0879 - val_accuracy: 0.9695\n",
"Epoch 24/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0170 - accuracy: 1.00 - ETA: 0s - loss: 0.0718 - accuracy: 0.97 - ETA: 0s - loss: 0.0800 - accuracy: 0.97 - ETA: 0s - loss: 0.0783 - accuracy: 0.97 - ETA: 0s - loss: 0.0785 - accuracy: 0.97 - ETA: 0s - loss: 0.0784 - accuracy: 0.97 - ETA: 0s - loss: 0.0799 - accuracy: 0.97 - 2s 98us/step - loss: 0.0807 - accuracy: 0.9716 - val_loss: 0.0583 - val_accuracy: 0.9794\n",
"Epoch 25/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0030 - accuracy: 1.00 - ETA: 0s - loss: 0.0636 - accuracy: 0.97 - ETA: 0s - loss: 0.0630 - accuracy: 0.97 - ETA: 0s - loss: 0.0804 - accuracy: 0.97 - ETA: 0s - loss: 0.0770 - accuracy: 0.97 - ETA: 0s - loss: 0.0748 - accuracy: 0.97 - ETA: 0s - loss: 0.0725 - accuracy: 0.97 - ETA: 0s - loss: 0.0742 - accuracy: 0.97 - 2s 97us/step - loss: 0.0745 - accuracy: 0.9740 - val_loss: 0.0661 - val_accuracy: 0.9816\n",
"Epoch 26/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0948 - accuracy: 0.93 - ETA: 0s - loss: 0.0584 - accuracy: 0.97 - ETA: 0s - loss: 0.0578 - accuracy: 0.97 - ETA: 0s - loss: 0.0676 - accuracy: 0.97 - ETA: 0s - loss: 0.0804 - accuracy: 0.96 - ETA: 0s - loss: 0.0886 - accuracy: 0.96 - ETA: 0s - loss: 0.0872 - accuracy: 0.96 - 2s 98us/step - loss: 0.0849 - accuracy: 0.9677 - val_loss: 0.0790 - val_accuracy: 0.9766\n",
"Epoch 27/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0358 - accuracy: 1.00 - ETA: 0s - loss: 0.0517 - accuracy: 0.98 - ETA: 0s - loss: 0.0588 - accuracy: 0.98 - ETA: 0s - loss: 0.0719 - accuracy: 0.97 - ETA: 0s - loss: 0.0696 - accuracy: 0.97 - ETA: 0s - loss: 0.0659 - accuracy: 0.97 - ETA: 0s - loss: 0.0649 - accuracy: 0.97 - 2s 99us/step - loss: 0.0643 - accuracy: 0.9778 - val_loss: 0.0584 - val_accuracy: 0.9804\n",
"Epoch 28/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.1423 - accuracy: 0.93 - ETA: 0s - loss: 0.0831 - accuracy: 0.96 - ETA: 0s - loss: 0.0703 - accuracy: 0.97 - ETA: 0s - loss: 0.0680 - accuracy: 0.97 - ETA: 0s - loss: 0.0644 - accuracy: 0.97 - ETA: 0s - loss: 0.0650 - accuracy: 0.97 - ETA: 0s - loss: 0.0664 - accuracy: 0.97 - 2s 100us/step - loss: 0.0649 - accuracy: 0.9760 - val_loss: 0.0610 - val_accuracy: 0.9821\n",
"Epoch 29/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0349 - accuracy: 0.96 - ETA: 0s - loss: 0.0645 - accuracy: 0.97 - ETA: 0s - loss: 0.0565 - accuracy: 0.98 - ETA: 0s - loss: 0.0675 - accuracy: 0.97 - ETA: 0s - loss: 0.0673 - accuracy: 0.97 - ETA: 0s - loss: 0.0667 - accuracy: 0.97 - ETA: 0s - loss: 0.0663 - accuracy: 0.97 - 2s 97us/step - loss: 0.0650 - accuracy: 0.9775 - val_loss: 0.0835 - val_accuracy: 0.9671\n",
"Epoch 30/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0869 - accuracy: 0.96 - ETA: 0s - loss: 0.0555 - accuracy: 0.98 - ETA: 0s - loss: 0.0522 - accuracy: 0.98 - ETA: 0s - loss: 0.0543 - accuracy: 0.98 - ETA: 0s - loss: 0.0567 - accuracy: 0.98 - ETA: 0s - loss: 0.0611 - accuracy: 0.97 - ETA: 0s - loss: 0.0642 - accuracy: 0.97 - 2s 100us/step - loss: 0.0638 - accuracy: 0.9774 - val_loss: 0.0530 - val_accuracy: 0.9830\n",
"Epoch 31/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0032 - accuracy: 1.00 - ETA: 0s - loss: 0.0556 - accuracy: 0.98 - ETA: 0s - loss: 0.0615 - accuracy: 0.97 - ETA: 0s - loss: 0.0574 - accuracy: 0.98 - ETA: 0s - loss: 0.0574 - accuracy: 0.98 - ETA: 0s - loss: 0.0583 - accuracy: 0.98 - ETA: 0s - loss: 0.0581 - accuracy: 0.98 - 2s 95us/step - loss: 0.0563 - accuracy: 0.9813 - val_loss: 0.0531 - val_accuracy: 0.9849\n",
"Epoch 32/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0303 - accuracy: 1.00 - ETA: 0s - loss: 0.0350 - accuracy: 0.98 - ETA: 0s - loss: 0.0485 - accuracy: 0.98 - ETA: 0s - loss: 0.0524 - accuracy: 0.98 - ETA: 0s - loss: 0.0515 - accuracy: 0.98 - ETA: 0s - loss: 0.0588 - accuracy: 0.97 - ETA: 0s - loss: 0.0606 - accuracy: 0.97 - 2s 99us/step - loss: 0.0622 - accuracy: 0.9787 - val_loss: 0.0634 - val_accuracy: 0.9815\n",
"Epoch 33/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0507 - accuracy: 1.00 - ETA: 0s - loss: 0.0457 - accuracy: 0.98 - ETA: 0s - loss: 0.0464 - accuracy: 0.98 - ETA: 0s - loss: 0.0504 - accuracy: 0.98 - ETA: 0s - loss: 0.0511 - accuracy: 0.98 - ETA: 0s - loss: 0.0512 - accuracy: 0.98 - ETA: 0s - loss: 0.0510 - accuracy: 0.98 - 2s 101us/step - loss: 0.0508 - accuracy: 0.9833 - val_loss: 0.0499 - val_accuracy: 0.9864\n",
"Epoch 34/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0097 - accuracy: 1.00 - ETA: 0s - loss: 0.0488 - accuracy: 0.98 - ETA: 0s - loss: 0.0614 - accuracy: 0.98 - ETA: 0s - loss: 0.0627 - accuracy: 0.98 - ETA: 0s - loss: 0.0614 - accuracy: 0.98 - ETA: 0s - loss: 0.0615 - accuracy: 0.98 - ETA: 0s - loss: 0.0592 - accuracy: 0.98 - 2s 93us/step - loss: 0.0572 - accuracy: 0.9815 - val_loss: 0.0420 - val_accuracy: 0.9871\n",
"Epoch 35/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0795 - accuracy: 0.96 - ETA: 0s - loss: 0.0474 - accuracy: 0.98 - ETA: 0s - loss: 0.0459 - accuracy: 0.98 - ETA: 0s - loss: 0.0480 - accuracy: 0.98 - ETA: 0s - loss: 0.0458 - accuracy: 0.98 - ETA: 0s - loss: 0.0477 - accuracy: 0.98 - ETA: 0s - loss: 0.0560 - accuracy: 0.98 - 2s 95us/step - loss: 0.0579 - accuracy: 0.9807 - val_loss: 0.0606 - val_accuracy: 0.9777\n",
"Epoch 36/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"18068/18068 [==============================] - ETA: 0s - loss: 0.0130 - accuracy: 1.00 - ETA: 0s - loss: 0.0689 - accuracy: 0.97 - ETA: 0s - loss: 0.0682 - accuracy: 0.97 - ETA: 0s - loss: 0.0658 - accuracy: 0.97 - ETA: 0s - loss: 0.0601 - accuracy: 0.97 - ETA: 0s - loss: 0.0572 - accuracy: 0.98 - ETA: 0s - loss: 0.0540 - accuracy: 0.98 - 2s 93us/step - loss: 0.0523 - accuracy: 0.9823 - val_loss: 0.0444 - val_accuracy: 0.9877\n",
"Epoch 37/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0141 - accuracy: 1.00 - ETA: 0s - loss: 0.0639 - accuracy: 0.97 - ETA: 0s - loss: 0.0546 - accuracy: 0.97 - ETA: 0s - loss: 0.0501 - accuracy: 0.98 - ETA: 0s - loss: 0.0509 - accuracy: 0.98 - ETA: 0s - loss: 0.0528 - accuracy: 0.98 - ETA: 0s - loss: 0.0533 - accuracy: 0.98 - 2s 95us/step - loss: 0.0527 - accuracy: 0.9806 - val_loss: 0.0399 - val_accuracy: 0.9892\n",
"Epoch 38/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0120 - accuracy: 1.00 - ETA: 0s - loss: 0.0439 - accuracy: 0.98 - ETA: 0s - loss: 0.0460 - accuracy: 0.98 - ETA: 0s - loss: 0.0474 - accuracy: 0.98 - ETA: 0s - loss: 0.0488 - accuracy: 0.98 - ETA: 0s - loss: 0.0470 - accuracy: 0.98 - ETA: 0s - loss: 0.0468 - accuracy: 0.98 - 2s 97us/step - loss: 0.0490 - accuracy: 0.9841 - val_loss: 0.0560 - val_accuracy: 0.9819\n",
"Epoch 39/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0037 - accuracy: 1.00 - ETA: 0s - loss: 0.0482 - accuracy: 0.98 - ETA: 0s - loss: 0.0398 - accuracy: 0.98 - ETA: 0s - loss: 0.0396 - accuracy: 0.98 - ETA: 0s - loss: 0.0402 - accuracy: 0.98 - ETA: 0s - loss: 0.0419 - accuracy: 0.98 - ETA: 0s - loss: 0.0441 - accuracy: 0.98 - 2s 98us/step - loss: 0.0457 - accuracy: 0.9843 - val_loss: 0.0492 - val_accuracy: 0.9844\n",
"Epoch 40/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0412 - accuracy: 0.96 - ETA: 0s - loss: 0.0411 - accuracy: 0.98 - ETA: 0s - loss: 0.0500 - accuracy: 0.98 - ETA: 0s - loss: 0.0472 - accuracy: 0.98 - ETA: 0s - loss: 0.0477 - accuracy: 0.98 - ETA: 0s - loss: 0.0470 - accuracy: 0.98 - ETA: 0s - loss: 0.0469 - accuracy: 0.98 - 2s 97us/step - loss: 0.0465 - accuracy: 0.9842 - val_loss: 0.0679 - val_accuracy: 0.9757\n",
"Epoch 41/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0218 - accuracy: 1.00 - ETA: 0s - loss: 0.0453 - accuracy: 0.98 - ETA: 0s - loss: 0.0404 - accuracy: 0.98 - ETA: 0s - loss: 0.0391 - accuracy: 0.98 - ETA: 0s - loss: 0.0435 - accuracy: 0.98 - ETA: 0s - loss: 0.0439 - accuracy: 0.98 - ETA: 0s - loss: 0.0423 - accuracy: 0.98 - 2s 95us/step - loss: 0.0461 - accuracy: 0.9846 - val_loss: 0.0624 - val_accuracy: 0.9818\n",
"Epoch 42/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.1395 - accuracy: 0.96 - ETA: 0s - loss: 0.0579 - accuracy: 0.97 - ETA: 0s - loss: 0.0520 - accuracy: 0.98 - ETA: 0s - loss: 0.0567 - accuracy: 0.97 - ETA: 0s - loss: 0.0531 - accuracy: 0.98 - ETA: 0s - loss: 0.0481 - accuracy: 0.98 - ETA: 0s - loss: 0.0461 - accuracy: 0.98 - 2s 97us/step - loss: 0.0461 - accuracy: 0.9842 - val_loss: 0.0568 - val_accuracy: 0.9820\n",
"Epoch 43/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1796 - accuracy: 0.93 - ETA: 0s - loss: 0.0395 - accuracy: 0.98 - ETA: 0s - loss: 0.0388 - accuracy: 0.98 - ETA: 0s - loss: 0.0365 - accuracy: 0.98 - ETA: 0s - loss: 0.0386 - accuracy: 0.98 - ETA: 0s - loss: 0.0428 - accuracy: 0.98 - ETA: 0s - loss: 0.0423 - accuracy: 0.98 - ETA: 0s - loss: 0.0420 - accuracy: 0.98 - 2s 95us/step - loss: 0.0415 - accuracy: 0.9852 - val_loss: 0.0502 - val_accuracy: 0.9832\n",
"Epoch 44/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0091 - accuracy: 1.00 - ETA: 0s - loss: 0.0421 - accuracy: 0.98 - ETA: 0s - loss: 0.0521 - accuracy: 0.98 - ETA: 0s - loss: 0.0472 - accuracy: 0.98 - ETA: 0s - loss: 0.0458 - accuracy: 0.98 - ETA: 0s - loss: 0.0460 - accuracy: 0.98 - ETA: 0s - loss: 0.0442 - accuracy: 0.98 - 2s 97us/step - loss: 0.0442 - accuracy: 0.9846 - val_loss: 0.0770 - val_accuracy: 0.9756\n",
"Epoch 45/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1832 - accuracy: 0.90 - ETA: 0s - loss: 0.0467 - accuracy: 0.98 - ETA: 0s - loss: 0.0432 - accuracy: 0.98 - ETA: 0s - loss: 0.0455 - accuracy: 0.98 - ETA: 0s - loss: 0.0492 - accuracy: 0.98 - ETA: 0s - loss: 0.0501 - accuracy: 0.98 - ETA: 0s - loss: 0.0489 - accuracy: 0.98 - 2s 95us/step - loss: 0.0478 - accuracy: 0.9835 - val_loss: 0.0404 - val_accuracy: 0.9876\n",
"Epoch 46/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1283 - accuracy: 0.93 - ETA: 0s - loss: 0.0580 - accuracy: 0.98 - ETA: 0s - loss: 0.0460 - accuracy: 0.98 - ETA: 0s - loss: 0.0409 - accuracy: 0.98 - ETA: 0s - loss: 0.0432 - accuracy: 0.98 - ETA: 0s - loss: 0.0453 - accuracy: 0.98 - ETA: 0s - loss: 0.0464 - accuracy: 0.98 - 2s 97us/step - loss: 0.0454 - accuracy: 0.9852 - val_loss: 0.0507 - val_accuracy: 0.9834\n",
"Epoch 47/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0083 - accuracy: 1.00 - ETA: 0s - loss: 0.0326 - accuracy: 0.99 - ETA: 0s - loss: 0.0341 - accuracy: 0.98 - ETA: 0s - loss: 0.0374 - accuracy: 0.98 - ETA: 0s - loss: 0.0397 - accuracy: 0.98 - ETA: 0s - loss: 0.0426 - accuracy: 0.98 - ETA: 0s - loss: 0.0426 - accuracy: 0.98 - 2s 95us/step - loss: 0.0431 - accuracy: 0.9860 - val_loss: 0.0376 - val_accuracy: 0.9880\n",
"Epoch 48/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0127 - accuracy: 1.00 - ETA: 0s - loss: 0.0370 - accuracy: 0.98 - ETA: 0s - loss: 0.0380 - accuracy: 0.98 - ETA: 0s - loss: 0.0364 - accuracy: 0.98 - ETA: 0s - loss: 0.0375 - accuracy: 0.98 - ETA: 0s - loss: 0.0429 - accuracy: 0.98 - ETA: 0s - loss: 0.0431 - accuracy: 0.98 - 2s 93us/step - loss: 0.0416 - accuracy: 0.9862 - val_loss: 0.0383 - val_accuracy: 0.9888\n",
"Epoch 49/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0018 - accuracy: 1.00 - ETA: 0s - loss: 0.0304 - accuracy: 0.98 - ETA: 0s - loss: 0.0320 - accuracy: 0.98 - ETA: 0s - loss: 0.0301 - accuracy: 0.98 - ETA: 0s - loss: 0.0392 - accuracy: 0.98 - ETA: 0s - loss: 0.0393 - accuracy: 0.98 - ETA: 0s - loss: 0.0391 - accuracy: 0.98 - 2s 97us/step - loss: 0.0398 - accuracy: 0.9867 - val_loss: 0.0483 - val_accuracy: 0.9870\n",
"Epoch 50/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0272 - accuracy: 1.00 - ETA: 0s - loss: 0.0242 - accuracy: 0.99 - ETA: 0s - loss: 0.0285 - accuracy: 0.99 - ETA: 0s - loss: 0.0330 - accuracy: 0.98 - ETA: 0s - loss: 0.0439 - accuracy: 0.98 - ETA: 0s - loss: 0.0465 - accuracy: 0.98 - ETA: 0s - loss: 0.0441 - accuracy: 0.98 - 2s 96us/step - loss: 0.0455 - accuracy: 0.9846 - val_loss: 0.1553 - val_accuracy: 0.9496\n",
"Epoch 51/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.2229 - accuracy: 0.96 - ETA: 0s - loss: 0.0702 - accuracy: 0.97 - ETA: 0s - loss: 0.0525 - accuracy: 0.98 - ETA: 0s - loss: 0.0484 - accuracy: 0.98 - ETA: 0s - loss: 0.0464 - accuracy: 0.98 - ETA: 0s - loss: 0.0452 - accuracy: 0.98 - ETA: 0s - loss: 0.0438 - accuracy: 0.98 - 2s 98us/step - loss: 0.0427 - accuracy: 0.9851 - val_loss: 0.0405 - val_accuracy: 0.9872\n",
"Epoch 52/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0044 - accuracy: 1.00 - ETA: 0s - loss: 0.0345 - accuracy: 0.98 - ETA: 0s - loss: 0.0323 - accuracy: 0.98 - ETA: 0s - loss: 0.0305 - accuracy: 0.98 - ETA: 0s - loss: 0.0303 - accuracy: 0.98 - ETA: 0s - loss: 0.0313 - accuracy: 0.98 - ETA: 0s - loss: 0.0353 - accuracy: 0.98 - 2s 101us/step - loss: 0.0372 - accuracy: 0.9867 - val_loss: 0.0507 - val_accuracy: 0.9849\n",
"Epoch 53/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0992 - accuracy: 0.96 - ETA: 0s - loss: 0.0372 - accuracy: 0.98 - ETA: 0s - loss: 0.0325 - accuracy: 0.98 - ETA: 0s - loss: 0.0338 - accuracy: 0.98 - ETA: 0s - loss: 0.0350 - accuracy: 0.98 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - ETA: 0s - loss: 0.0330 - accuracy: 0.98 - 2s 96us/step - loss: 0.0341 - accuracy: 0.9890 - val_loss: 0.0706 - val_accuracy: 0.9775\n",
"Epoch 54/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"18068/18068 [==============================] - ETA: 0s - loss: 0.0255 - accuracy: 1.00 - ETA: 0s - loss: 0.0471 - accuracy: 0.98 - ETA: 0s - loss: 0.0442 - accuracy: 0.98 - ETA: 0s - loss: 0.0541 - accuracy: 0.98 - ETA: 0s - loss: 0.0506 - accuracy: 0.98 - ETA: 0s - loss: 0.0458 - accuracy: 0.98 - ETA: 0s - loss: 0.0431 - accuracy: 0.98 - 2s 98us/step - loss: 0.0431 - accuracy: 0.9851 - val_loss: 0.0332 - val_accuracy: 0.9903\n",
"Epoch 55/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0017 - accuracy: 1.00 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0270 - accuracy: 0.99 - ETA: 0s - loss: 0.0321 - accuracy: 0.98 - ETA: 0s - loss: 0.0424 - accuracy: 0.98 - ETA: 0s - loss: 0.0421 - accuracy: 0.98 - ETA: 0s - loss: 0.0391 - accuracy: 0.98 - 2s 98us/step - loss: 0.0380 - accuracy: 0.9869 - val_loss: 0.0261 - val_accuracy: 0.9926\n",
"Epoch 56/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0202 - accuracy: 1.00 - ETA: 0s - loss: 0.0348 - accuracy: 0.98 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - ETA: 0s - loss: 0.0280 - accuracy: 0.99 - ETA: 0s - loss: 0.0316 - accuracy: 0.99 - ETA: 0s - loss: 0.0324 - accuracy: 0.98 - ETA: 0s - loss: 0.0369 - accuracy: 0.98 - 2s 100us/step - loss: 0.0365 - accuracy: 0.9878 - val_loss: 0.0421 - val_accuracy: 0.9857\n",
"Epoch 57/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0309 - accuracy: 1.00 - ETA: 0s - loss: 0.0217 - accuracy: 0.99 - ETA: 0s - loss: 0.0268 - accuracy: 0.99 - ETA: 0s - loss: 0.0281 - accuracy: 0.99 - ETA: 0s - loss: 0.0261 - accuracy: 0.99 - ETA: 0s - loss: 0.0272 - accuracy: 0.99 - ETA: 0s - loss: 0.0348 - accuracy: 0.98 - 2s 100us/step - loss: 0.0378 - accuracy: 0.9872 - val_loss: 0.0563 - val_accuracy: 0.9826\n",
"Epoch 58/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0666 - accuracy: 0.96 - ETA: 0s - loss: 0.0312 - accuracy: 0.99 - ETA: 0s - loss: 0.0239 - accuracy: 0.99 - ETA: 0s - loss: 0.0267 - accuracy: 0.99 - ETA: 0s - loss: 0.0328 - accuracy: 0.98 - ETA: 0s - loss: 0.0355 - accuracy: 0.98 - ETA: 0s - loss: 0.0363 - accuracy: 0.98 - 2s 100us/step - loss: 0.0372 - accuracy: 0.9870 - val_loss: 0.1505 - val_accuracy: 0.9517\n",
"Epoch 59/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1813 - accuracy: 0.96 - ETA: 0s - loss: 0.0483 - accuracy: 0.98 - ETA: 0s - loss: 0.0370 - accuracy: 0.98 - ETA: 0s - loss: 0.0355 - accuracy: 0.98 - ETA: 0s - loss: 0.0349 - accuracy: 0.98 - ETA: 0s - loss: 0.0339 - accuracy: 0.98 - ETA: 0s - loss: 0.0337 - accuracy: 0.98 - 2s 102us/step - loss: 0.0329 - accuracy: 0.9886 - val_loss: 0.0502 - val_accuracy: 0.9844\n",
"Epoch 60/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0191 - accuracy: 1.00 - ETA: 0s - loss: 0.0256 - accuracy: 0.99 - ETA: 0s - loss: 0.0333 - accuracy: 0.98 - ETA: 0s - loss: 0.0374 - accuracy: 0.98 - ETA: 0s - loss: 0.0342 - accuracy: 0.98 - ETA: 0s - loss: 0.0349 - accuracy: 0.98 - ETA: 0s - loss: 0.0343 - accuracy: 0.98 - 2s 97us/step - loss: 0.0381 - accuracy: 0.9862 - val_loss: 0.0817 - val_accuracy: 0.9748\n",
"Epoch 61/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1110 - accuracy: 0.96 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - ETA: 0s - loss: 0.0375 - accuracy: 0.98 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - ETA: 0s - loss: 0.0310 - accuracy: 0.98 - ETA: 0s - loss: 0.0304 - accuracy: 0.98 - ETA: 0s - loss: 0.0320 - accuracy: 0.98 - 2s 94us/step - loss: 0.0326 - accuracy: 0.9883 - val_loss: 0.0324 - val_accuracy: 0.9908\n",
"Epoch 62/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0520 - accuracy: 0.96 - ETA: 0s - loss: 0.0428 - accuracy: 0.98 - ETA: 0s - loss: 0.0428 - accuracy: 0.98 - ETA: 0s - loss: 0.0391 - accuracy: 0.98 - ETA: 0s - loss: 0.0362 - accuracy: 0.98 - ETA: 0s - loss: 0.0350 - accuracy: 0.98 - ETA: 0s - loss: 0.0353 - accuracy: 0.98 - 2s 94us/step - loss: 0.0344 - accuracy: 0.9882 - val_loss: 0.0375 - val_accuracy: 0.9880\n",
"Epoch 63/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0866 - accuracy: 0.96 - ETA: 0s - loss: 0.0334 - accuracy: 0.98 - ETA: 0s - loss: 0.0292 - accuracy: 0.98 - ETA: 0s - loss: 0.0268 - accuracy: 0.99 - ETA: 0s - loss: 0.0276 - accuracy: 0.99 - ETA: 0s - loss: 0.0418 - accuracy: 0.98 - ETA: 0s - loss: 0.0397 - accuracy: 0.98 - 2s 97us/step - loss: 0.0384 - accuracy: 0.9880 - val_loss: 0.0308 - val_accuracy: 0.9910\n",
"Epoch 64/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0635 - accuracy: 0.96 - ETA: 0s - loss: 0.0280 - accuracy: 0.99 - ETA: 0s - loss: 0.0262 - accuracy: 0.99 - ETA: 0s - loss: 0.0331 - accuracy: 0.98 - ETA: 0s - loss: 0.0329 - accuracy: 0.98 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - ETA: 0s - loss: 0.0309 - accuracy: 0.99 - 2s 98us/step - loss: 0.0302 - accuracy: 0.9904 - val_loss: 0.0318 - val_accuracy: 0.9913\n",
"Epoch 65/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0202 - accuracy: 1.00 - ETA: 0s - loss: 0.0188 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - ETA: 0s - loss: 0.0267 - accuracy: 0.99 - ETA: 0s - loss: 0.0278 - accuracy: 0.99 - ETA: 0s - loss: 0.0303 - accuracy: 0.99 - 2s 98us/step - loss: 0.0297 - accuracy: 0.9904 - val_loss: 0.0412 - val_accuracy: 0.9882\n",
"Epoch 66/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0062 - accuracy: 1.00 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - ETA: 0s - loss: 0.0294 - accuracy: 0.98 - ETA: 0s - loss: 0.0310 - accuracy: 0.98 - ETA: 0s - loss: 0.0311 - accuracy: 0.99 - ETA: 0s - loss: 0.0308 - accuracy: 0.98 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - 2s 99us/step - loss: 0.0329 - accuracy: 0.9893 - val_loss: 0.0662 - val_accuracy: 0.9754\n",
"Epoch 67/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0316 - accuracy: 0.96 - ETA: 0s - loss: 0.0315 - accuracy: 0.98 - ETA: 0s - loss: 0.0266 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0263 - accuracy: 0.99 - ETA: 0s - loss: 0.0264 - accuracy: 0.99 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - 2s 95us/step - loss: 0.0269 - accuracy: 0.9915 - val_loss: 0.0258 - val_accuracy: 0.9924\n",
"Epoch 68/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0184 - accuracy: 1.00 - ETA: 0s - loss: 0.0239 - accuracy: 0.99 - ETA: 0s - loss: 0.0245 - accuracy: 0.99 - ETA: 0s - loss: 0.0271 - accuracy: 0.99 - ETA: 0s - loss: 0.0286 - accuracy: 0.99 - ETA: 0s - loss: 0.0297 - accuracy: 0.98 - ETA: 0s - loss: 0.0333 - accuracy: 0.98 - 2s 97us/step - loss: 0.0330 - accuracy: 0.9892 - val_loss: 0.0395 - val_accuracy: 0.9862\n",
"Epoch 69/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0023 - accuracy: 1.00 - ETA: 0s - loss: 0.0443 - accuracy: 0.98 - ETA: 0s - loss: 0.0333 - accuracy: 0.98 - ETA: 0s - loss: 0.0322 - accuracy: 0.98 - ETA: 0s - loss: 0.0344 - accuracy: 0.98 - ETA: 0s - loss: 0.0348 - accuracy: 0.98 - ETA: 0s - loss: 0.0343 - accuracy: 0.98 - 2s 101us/step - loss: 0.0318 - accuracy: 0.9893 - val_loss: 0.0282 - val_accuracy: 0.9916\n",
"Epoch 70/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0045 - accuracy: 1.00 - ETA: 0s - loss: 0.0306 - accuracy: 0.98 - ETA: 0s - loss: 0.0301 - accuracy: 0.98 - ETA: 0s - loss: 0.0355 - accuracy: 0.98 - ETA: 0s - loss: 0.0358 - accuracy: 0.98 - ETA: 0s - loss: 0.0386 - accuracy: 0.98 - ETA: 0s - loss: 0.0364 - accuracy: 0.98 - 2s 101us/step - loss: 0.0350 - accuracy: 0.9886 - val_loss: 0.0411 - val_accuracy: 0.9877\n",
"Epoch 71/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1661 - accuracy: 0.96 - ETA: 0s - loss: 0.0207 - accuracy: 0.99 - ETA: 0s - loss: 0.0219 - accuracy: 0.99 - ETA: 0s - loss: 0.0219 - accuracy: 0.99 - ETA: 0s - loss: 0.0232 - accuracy: 0.99 - ETA: 0s - loss: 0.0214 - accuracy: 0.99 - ETA: 0s - loss: 0.0215 - accuracy: 0.99 - 2s 97us/step - loss: 0.0235 - accuracy: 0.9923 - val_loss: 0.0373 - val_accuracy: 0.9881\n",
"Epoch 72/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"18068/18068 [==============================] - ETA: 0s - loss: 0.0066 - accuracy: 1.00 - ETA: 0s - loss: 0.0264 - accuracy: 0.99 - ETA: 0s - loss: 0.0317 - accuracy: 0.98 - ETA: 0s - loss: 0.0312 - accuracy: 0.98 - ETA: 0s - loss: 0.0347 - accuracy: 0.98 - ETA: 0s - loss: 0.0337 - accuracy: 0.98 - ETA: 0s - loss: 0.0301 - accuracy: 0.98 - 2s 95us/step - loss: 0.0316 - accuracy: 0.9888 - val_loss: 0.0365 - val_accuracy: 0.9889\n",
"Epoch 73/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0025 - accuracy: 1.00 - ETA: 0s - loss: 0.0276 - accuracy: 0.98 - ETA: 0s - loss: 0.0266 - accuracy: 0.99 - ETA: 0s - loss: 0.0272 - accuracy: 0.99 - ETA: 0s - loss: 0.0278 - accuracy: 0.99 - ETA: 0s - loss: 0.0266 - accuracy: 0.99 - ETA: 0s - loss: 0.0274 - accuracy: 0.99 - 2s 97us/step - loss: 0.0304 - accuracy: 0.9897 - val_loss: 0.1574 - val_accuracy: 0.9417\n",
"Epoch 74/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.1581 - accuracy: 0.93 - ETA: 0s - loss: 0.0238 - accuracy: 0.99 - ETA: 0s - loss: 0.0226 - accuracy: 0.99 - ETA: 0s - loss: 0.0264 - accuracy: 0.99 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - ETA: 0s - loss: 0.0273 - accuracy: 0.99 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - 2s 97us/step - loss: 0.0281 - accuracy: 0.9904 - val_loss: 0.0361 - val_accuracy: 0.9866\n",
"Epoch 75/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.1132 - accuracy: 0.96 - ETA: 0s - loss: 0.0242 - accuracy: 0.99 - ETA: 0s - loss: 0.0260 - accuracy: 0.99 - ETA: 0s - loss: 0.0269 - accuracy: 0.99 - ETA: 0s - loss: 0.0292 - accuracy: 0.99 - ETA: 0s - loss: 0.0281 - accuracy: 0.99 - ETA: 0s - loss: 0.0274 - accuracy: 0.99 - 2s 98us/step - loss: 0.0262 - accuracy: 0.9918 - val_loss: 0.0262 - val_accuracy: 0.9920\n",
"Epoch 76/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0058 - accuracy: 1.00 - ETA: 0s - loss: 0.0158 - accuracy: 0.99 - ETA: 0s - loss: 0.0210 - accuracy: 0.99 - ETA: 0s - loss: 0.0223 - accuracy: 0.99 - ETA: 0s - loss: 0.0250 - accuracy: 0.99 - ETA: 0s - loss: 0.0251 - accuracy: 0.99 - ETA: 0s - loss: 0.0277 - accuracy: 0.99 - 2s 94us/step - loss: 0.0280 - accuracy: 0.9905 - val_loss: 0.0269 - val_accuracy: 0.9918\n",
"Epoch 77/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0045 - accuracy: 1.00 - ETA: 0s - loss: 0.0264 - accuracy: 0.99 - ETA: 0s - loss: 0.0226 - accuracy: 0.99 - ETA: 0s - loss: 0.0178 - accuracy: 0.99 - ETA: 0s - loss: 0.0208 - accuracy: 0.99 - ETA: 0s - loss: 0.0248 - accuracy: 0.99 - ETA: 0s - loss: 0.0244 - accuracy: 0.99 - 2s 98us/step - loss: 0.0261 - accuracy: 0.9912 - val_loss: 0.0540 - val_accuracy: 0.9809\n",
"Epoch 78/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0312 - accuracy: 1.00 - ETA: 0s - loss: 0.0224 - accuracy: 0.99 - ETA: 0s - loss: 0.0276 - accuracy: 0.99 - ETA: 0s - loss: 0.0277 - accuracy: 0.99 - ETA: 0s - loss: 0.0288 - accuracy: 0.99 - ETA: 0s - loss: 0.0275 - accuracy: 0.99 - ETA: 0s - loss: 0.0261 - accuracy: 0.99 - 2s 97us/step - loss: 0.0268 - accuracy: 0.9911 - val_loss: 0.0334 - val_accuracy: 0.9908\n",
"Epoch 79/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0066 - accuracy: 1.00 - ETA: 0s - loss: 0.0261 - accuracy: 0.98 - ETA: 0s - loss: 0.0284 - accuracy: 0.98 - ETA: 0s - loss: 0.0250 - accuracy: 0.99 - ETA: 0s - loss: 0.0323 - accuracy: 0.98 - ETA: 0s - loss: 0.0301 - accuracy: 0.99 - ETA: 0s - loss: 0.0281 - accuracy: 0.99 - 2s 94us/step - loss: 0.0277 - accuracy: 0.9910 - val_loss: 0.0764 - val_accuracy: 0.9735\n",
"Epoch 80/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0497 - accuracy: 0.96 - ETA: 0s - loss: 0.0316 - accuracy: 0.98 - ETA: 0s - loss: 0.0396 - accuracy: 0.98 - ETA: 0s - loss: 0.0420 - accuracy: 0.98 - ETA: 0s - loss: 0.0374 - accuracy: 0.98 - ETA: 0s - loss: 0.0370 - accuracy: 0.98 - ETA: 0s - loss: 0.0343 - accuracy: 0.98 - 2s 93us/step - loss: 0.0327 - accuracy: 0.9884 - val_loss: 0.0308 - val_accuracy: 0.9921\n",
"Epoch 81/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0094 - accuracy: 1.00 - ETA: 0s - loss: 0.0156 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - ETA: 0s - loss: 0.0249 - accuracy: 0.99 - ETA: 0s - loss: 0.0268 - accuracy: 0.99 - ETA: 0s - loss: 0.0242 - accuracy: 0.99 - ETA: 0s - loss: 0.0229 - accuracy: 0.99 - 2s 98us/step - loss: 0.0220 - accuracy: 0.9931 - val_loss: 0.0310 - val_accuracy: 0.9896\n",
"Epoch 82/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 5.9038e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0228 - accuracy: 0.9914 - ETA: 0s - loss: 0.0214 - accuracy: 0.99 - ETA: 0s - loss: 0.0204 - accuracy: 0.99 - ETA: 0s - loss: 0.0228 - accuracy: 0.99 - ETA: 0s - loss: 0.0257 - accuracy: 0.99 - ETA: 0s - loss: 0.0246 - accuracy: 0.99 - 2s 94us/step - loss: 0.0246 - accuracy: 0.9911 - val_loss: 0.0409 - val_accuracy: 0.9863\n",
"Epoch 83/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 6.2001e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0330 - accuracy: 0.9878 - ETA: 0s - loss: 0.0410 - accuracy: 0.98 - ETA: 0s - loss: 0.0346 - accuracy: 0.98 - ETA: 0s - loss: 0.0317 - accuracy: 0.98 - ETA: 0s - loss: 0.0321 - accuracy: 0.98 - ETA: 0s - loss: 0.0305 - accuracy: 0.98 - 2s 97us/step - loss: 0.0286 - accuracy: 0.9901 - val_loss: 0.0233 - val_accuracy: 0.9931\n",
"Epoch 84/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0143 - accuracy: 1.00 - ETA: 0s - loss: 0.0310 - accuracy: 0.98 - ETA: 0s - loss: 0.0233 - accuracy: 0.99 - ETA: 0s - loss: 0.0224 - accuracy: 0.99 - ETA: 0s - loss: 0.0250 - accuracy: 0.99 - ETA: 0s - loss: 0.0323 - accuracy: 0.98 - ETA: 0s - loss: 0.0354 - accuracy: 0.98 - 2s 100us/step - loss: 0.0329 - accuracy: 0.9888 - val_loss: 0.0236 - val_accuracy: 0.9920\n",
"Epoch 85/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.1124 - accuracy: 0.93 - ETA: 0s - loss: 0.0144 - accuracy: 0.99 - ETA: 0s - loss: 0.0157 - accuracy: 0.99 - ETA: 0s - loss: 0.0179 - accuracy: 0.99 - ETA: 0s - loss: 0.0184 - accuracy: 0.99 - ETA: 0s - loss: 0.0179 - accuracy: 0.99 - ETA: 0s - loss: 0.0191 - accuracy: 0.99 - 2s 100us/step - loss: 0.0184 - accuracy: 0.9936 - val_loss: 0.0257 - val_accuracy: 0.9923\n",
"Epoch 86/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.1240 - accuracy: 0.93 - ETA: 0s - loss: 0.0351 - accuracy: 0.98 - ETA: 0s - loss: 0.0290 - accuracy: 0.98 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - ETA: 0s - loss: 0.0213 - accuracy: 0.99 - ETA: 0s - loss: 0.0206 - accuracy: 0.99 - ETA: 0s - loss: 0.0288 - accuracy: 0.99 - 2s 93us/step - loss: 0.0303 - accuracy: 0.9896 - val_loss: 0.0298 - val_accuracy: 0.9899\n",
"Epoch 87/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0170 - accuracy: 1.00 - ETA: 0s - loss: 0.0520 - accuracy: 0.98 - ETA: 0s - loss: 0.0377 - accuracy: 0.98 - ETA: 0s - loss: 0.0303 - accuracy: 0.98 - ETA: 0s - loss: 0.0280 - accuracy: 0.99 - ETA: 0s - loss: 0.0256 - accuracy: 0.99 - ETA: 0s - loss: 0.0247 - accuracy: 0.99 - 2s 94us/step - loss: 0.0242 - accuracy: 0.9916 - val_loss: 0.0368 - val_accuracy: 0.9900\n",
"Epoch 88/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0029 - accuracy: 1.00 - ETA: 0s - loss: 0.0234 - accuracy: 0.99 - ETA: 0s - loss: 0.0309 - accuracy: 0.99 - ETA: 0s - loss: 0.0336 - accuracy: 0.98 - ETA: 0s - loss: 0.0293 - accuracy: 0.99 - ETA: 0s - loss: 0.0262 - accuracy: 0.99 - ETA: 0s - loss: 0.0245 - accuracy: 0.99 - 2s 98us/step - loss: 0.0235 - accuracy: 0.9919 - val_loss: 0.0353 - val_accuracy: 0.9898\n",
"Epoch 89/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0204 - accuracy: 1.00 - ETA: 0s - loss: 0.0228 - accuracy: 0.99 - ETA: 0s - loss: 0.0310 - accuracy: 0.98 - ETA: 0s - loss: 0.0281 - accuracy: 0.99 - ETA: 0s - loss: 0.0277 - accuracy: 0.99 - ETA: 0s - loss: 0.0273 - accuracy: 0.99 - ETA: 0s - loss: 0.0280 - accuracy: 0.98 - 2s 96us/step - loss: 0.0293 - accuracy: 0.9896 - val_loss: 0.0315 - val_accuracy: 0.9903\n",
"Epoch 90/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"18068/18068 [==============================] - ETA: 1s - loss: 0.0029 - accuracy: 1.00 - ETA: 0s - loss: 0.0310 - accuracy: 0.99 - ETA: 0s - loss: 0.0258 - accuracy: 0.99 - ETA: 0s - loss: 0.0239 - accuracy: 0.99 - ETA: 0s - loss: 0.0220 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - ETA: 0s - loss: 0.0259 - accuracy: 0.99 - 2s 92us/step - loss: 0.0248 - accuracy: 0.9925 - val_loss: 0.0330 - val_accuracy: 0.9895\n",
"Epoch 91/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0244 - accuracy: 1.00 - ETA: 0s - loss: 0.0134 - accuracy: 0.99 - ETA: 0s - loss: 0.0164 - accuracy: 0.99 - ETA: 0s - loss: 0.0225 - accuracy: 0.99 - ETA: 0s - loss: 0.0231 - accuracy: 0.99 - ETA: 0s - loss: 0.0249 - accuracy: 0.99 - ETA: 0s - loss: 0.0236 - accuracy: 0.99 - 2s 98us/step - loss: 0.0228 - accuracy: 0.9924 - val_loss: 0.0276 - val_accuracy: 0.9914\n",
"Epoch 92/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0057 - accuracy: 1.00 - ETA: 0s - loss: 0.0239 - accuracy: 0.99 - ETA: 0s - loss: 0.0238 - accuracy: 0.99 - ETA: 0s - loss: 0.0214 - accuracy: 0.99 - ETA: 0s - loss: 0.0207 - accuracy: 0.99 - ETA: 0s - loss: 0.0205 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - 2s 99us/step - loss: 0.0213 - accuracy: 0.9927 - val_loss: 0.0323 - val_accuracy: 0.9899\n",
"Epoch 93/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 2.3066e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0247 - accuracy: 0.9918 - ETA: 0s - loss: 0.0236 - accuracy: 0.99 - ETA: 0s - loss: 0.0215 - accuracy: 0.99 - ETA: 0s - loss: 0.0220 - accuracy: 0.99 - ETA: 0s - loss: 0.0200 - accuracy: 0.99 - ETA: 0s - loss: 0.0198 - accuracy: 0.99 - 2s 95us/step - loss: 0.0250 - accuracy: 0.9923 - val_loss: 0.1360 - val_accuracy: 0.9603\n",
"Epoch 94/100\n",
"18068/18068 [==============================] - ETA: 0s - loss: 0.0579 - accuracy: 0.96 - ETA: 0s - loss: 0.0235 - accuracy: 0.99 - ETA: 0s - loss: 0.0210 - accuracy: 0.99 - ETA: 0s - loss: 0.0253 - accuracy: 0.99 - ETA: 0s - loss: 0.0286 - accuracy: 0.99 - ETA: 0s - loss: 0.0270 - accuracy: 0.99 - ETA: 0s - loss: 0.0251 - accuracy: 0.99 - 2s 97us/step - loss: 0.0238 - accuracy: 0.9920 - val_loss: 0.0203 - val_accuracy: 0.9939\n",
"Epoch 95/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0031 - accuracy: 1.00 - ETA: 0s - loss: 0.0116 - accuracy: 0.99 - ETA: 0s - loss: 0.0159 - accuracy: 0.99 - ETA: 0s - loss: 0.0166 - accuracy: 0.99 - ETA: 0s - loss: 0.0207 - accuracy: 0.99 - ETA: 0s - loss: 0.0207 - accuracy: 0.99 - ETA: 0s - loss: 0.0192 - accuracy: 0.99 - 2s 96us/step - loss: 0.0189 - accuracy: 0.9934 - val_loss: 0.0234 - val_accuracy: 0.9933\n",
"Epoch 96/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 1.9295e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0450 - accuracy: 0.9853 - ETA: 0s - loss: 0.0335 - accuracy: 0.98 - ETA: 0s - loss: 0.0266 - accuracy: 0.99 - ETA: 0s - loss: 0.0320 - accuracy: 0.98 - ETA: 0s - loss: 0.0289 - accuracy: 0.99 - ETA: 0s - loss: 0.0261 - accuracy: 0.99 - 2s 95us/step - loss: 0.0259 - accuracy: 0.9911 - val_loss: 0.0221 - val_accuracy: 0.9936\n",
"Epoch 97/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0011 - accuracy: 1.00 - ETA: 0s - loss: 0.0193 - accuracy: 0.99 - ETA: 0s - loss: 0.0295 - accuracy: 0.98 - ETA: 0s - loss: 0.0255 - accuracy: 0.99 - ETA: 0s - loss: 0.0218 - accuracy: 0.99 - ETA: 0s - loss: 0.0220 - accuracy: 0.99 - ETA: 0s - loss: 0.0205 - accuracy: 0.99 - 2s 96us/step - loss: 0.0215 - accuracy: 0.9926 - val_loss: 0.0838 - val_accuracy: 0.9744\n",
"Epoch 98/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 6.6428e-04 - accuracy: 1.00 - ETA: 0s - loss: 0.0324 - accuracy: 0.9899 - ETA: 0s - loss: 0.0235 - accuracy: 0.99 - ETA: 0s - loss: 0.0230 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - ETA: 0s - loss: 0.0212 - accuracy: 0.99 - ETA: 0s - loss: 0.0256 - accuracy: 0.99 - 2s 94us/step - loss: 0.0280 - accuracy: 0.9908 - val_loss: 0.0490 - val_accuracy: 0.9835\n",
"Epoch 99/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0147 - accuracy: 1.00 - ETA: 0s - loss: 0.0185 - accuracy: 0.99 - ETA: 0s - loss: 0.0153 - accuracy: 0.99 - ETA: 0s - loss: 0.0169 - accuracy: 0.99 - ETA: 0s - loss: 0.0163 - accuracy: 0.99 - ETA: 0s - loss: 0.0156 - accuracy: 0.99 - ETA: 0s - loss: 0.0170 - accuracy: 0.99 - 2s 97us/step - loss: 0.0165 - accuracy: 0.9946 - val_loss: 0.0241 - val_accuracy: 0.9921\n",
"Epoch 100/100\n",
"18068/18068 [==============================] - ETA: 1s - loss: 0.0017 - accuracy: 1.00 - ETA: 0s - loss: 0.0244 - accuracy: 0.99 - ETA: 0s - loss: 0.0232 - accuracy: 0.99 - ETA: 0s - loss: 0.0245 - accuracy: 0.99 - ETA: 0s - loss: 0.0256 - accuracy: 0.99 - ETA: 0s - loss: 0.0261 - accuracy: 0.99 - ETA: 0s - loss: 0.0238 - accuracy: 0.99 - ETA: 0s - loss: 0.0228 - accuracy: 0.99 - 2s 101us/step - loss: 0.0228 - accuracy: 0.9919 - val_loss: 0.0591 - val_accuracy: 0.9803\n",
"Initializing network.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"100%|████████████████████████████████████████████████████████████████████████████████████████| 100/100 [00:00<?, ?it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Round 1.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 0%| | 0/100 [00:00<?, ?it/s]\n",
" 1%|▊ | 1/100 [00:01<02:52, 1.74s/it]\n",
" 2%|█▋ | 2/100 [00:03<02:51, 1.75s/it]\n",
" 3%|██▍ | 3/100 [00:05<02:49, 1.75s/it]\n",
" 4%|███▎ | 4/100 [00:07<02:48, 1.75s/it]\n",
" 5%|████ | 5/100 [00:08<02:46, 1.75s/it]\n",
" 6%|████▉ | 6/100 [00:10<02:43, 1.74s/it]\n",
" 7%|█████▋ | 7/100 [00:12<02:42, 1.74s/it]\n",
" 8%|██████▌ | 8/100 [00:13<02:39, 1.74s/it]\n",
" 9%|███████▍ | 9/100 [00:15<02:36, 1.72s/it]\n",
" 10%|████████ | 10/100 [00:17<02:33, 1.70s/it]\n",
" 11%|████████▉ | 11/100 [00:18<02:30, 1.69s/it]\n",
" 12%|█████████▋ | 12/100 [00:20<02:28, 1.69s/it]\n",
" 13%|██████████▌ | 13/100 [00:22<02:25, 1.68s/it]\n",
" 14%|███████████▎ | 14/100 [00:23<02:24, 1.68s/it]\n",
" 15%|████████████▏ | 15/100 [00:25<02:22, 1.68s/it]\n",
" 16%|████████████▉ | 16/100 [00:27<02:20, 1.68s/it]\n",
" 17%|█████████████▊ | 17/100 [00:29<02:21, 1.70s/it]\n",
" 18%|██████████████▌ | 18/100 [00:30<02:18, 1.69s/it]\n",
" 19%|███████████████▍ | 19/100 [00:32<02:16, 1.68s/it]\n",
" 20%|████████████████▏ | 20/100 [00:34<02:14, 1.68s/it]\n",
" 21%|█████████████████ | 21/100 [00:35<02:12, 1.68s/it]\n",
" 22%|█████████████████▊ | 22/100 [00:37<02:12, 1.69s/it]\n",
" 23%|██████████████████▋ | 23/100 [00:39<02:09, 1.69s/it]\n",
" 24%|███████████████████▍ | 24/100 [00:40<02:07, 1.68s/it]\n",
" 25%|████████████████████▎ | 25/100 [00:42<02:05, 1.68s/it]\n",
" 26%|█████████████████████ | 26/100 [00:44<02:04, 1.68s/it]\n",
" 27%|█████████████████████▊ | 27/100 [00:45<02:02, 1.68s/it]\n",
" 28%|██████████████████████▋ | 28/100 [00:47<02:00, 1.68s/it]\n",
" 29%|███████████████████████▍ | 29/100 [00:49<01:59, 1.68s/it]\n",
" 30%|████████████████████████▎ | 30/100 [00:50<01:57, 1.68s/it]\n",
" 31%|█████████████████████████ | 31/100 [00:52<01:55, 1.68s/it]\n",
" 32%|█████████████████████████▉ | 32/100 [00:54<01:53, 1.67s/it]\n",
" 33%|██████████████████████████▋ | 33/100 [00:55<01:51, 1.67s/it]\n",
" 34%|███████████████████████████▌ | 34/100 [00:57<01:50, 1.67s/it]\n",
" 35%|████████████████████████████▎ | 35/100 [00:59<01:48, 1.67s/it]\n",
" 36%|█████████████████████████████▏ | 36/100 [01:00<01:46, 1.67s/it]\n",
" 37%|█████████████████████████████▉ | 37/100 [01:02<01:45, 1.67s/it]\n",
" 38%|██████████████████████████████▊ | 38/100 [01:04<01:44, 1.68s/it]\n",
" 39%|███████████████████████████████▌ | 39/100 [01:05<01:42, 1.68s/it]\n",
" 40%|████████████████████████████████▍ | 40/100 [01:07<01:40, 1.67s/it]\n",
" 41%|█████████████████████████████████▏ | 41/100 [01:09<01:38, 1.67s/it]\n",
" 42%|██████████████████████████████████ | 42/100 [01:10<01:36, 1.67s/it]\n",
" 43%|██████████████████████████████████▊ | 43/100 [01:12<01:35, 1.68s/it]\n",
" 44%|███████████████████████████████████▋ | 44/100 [01:14<01:33, 1.67s/it]\n",
" 45%|████████████████████████████████████▍ | 45/100 [01:15<01:32, 1.68s/it]\n",
" 46%|█████████████████████████████████████▎ | 46/100 [01:17<01:30, 1.67s/it]\n",
" 47%|██████████████████████████████████████ | 47/100 [01:19<01:28, 1.68s/it]\n",
" 48%|██████████████████████████████████████▉ | 48/100 [01:20<01:27, 1.67s/it]\n",
" 49%|███████████████████████████████████████▋ | 49/100 [01:22<01:25, 1.67s/it]\n",
" 50%|████████████████████████████████████████▌ | 50/100 [01:24<01:23, 1.67s/it]\n",
" 51%|█████████████████████████████████████████▎ | 51/100 [01:26<01:21, 1.67s/it]\n",
" 52%|██████████████████████████████████████████ | 52/100 [01:27<01:20, 1.67s/it]\n",
" 53%|██████████████████████████████████████████▉ | 53/100 [01:29<01:18, 1.68s/it]\n",
" 54%|███████████████████████████████████████████▋ | 54/100 [01:31<01:17, 1.68s/it]\n",
" 55%|████████████████████████████████████████████▌ | 55/100 [01:32<01:15, 1.67s/it]\n",
" 56%|█████████████████████████████████████████████▎ | 56/100 [01:34<01:13, 1.67s/it]\n",
" 57%|██████████████████████████████████████████████▏ | 57/100 [01:36<01:11, 1.67s/it]\n",
" 58%|██████████████████████████████████████████████▉ | 58/100 [01:37<01:10, 1.67s/it]\n",
" 59%|███████████████████████████████████████████████▊ | 59/100 [01:39<01:08, 1.68s/it]\n",
" 60%|████████████████████████████████████████████████▌ | 60/100 [01:41<01:07, 1.68s/it]\n",
" 61%|█████████████████████████████████████████████████▍ | 61/100 [01:42<01:05, 1.68s/it]\n",
" 62%|██████████████████████████████████████████████████▏ | 62/100 [01:44<01:03, 1.67s/it]\n",
" 63%|███████████████████████████████████████████████████ | 63/100 [01:46<01:02, 1.68s/it]\n",
" 64%|███████████████████████████████████████████████████▊ | 64/100 [01:47<01:00, 1.67s/it]\n",
" 65%|████████████████████████████████████████████████████▋ | 65/100 [01:49<00:58, 1.67s/it]\n",
" 66%|█████████████████████████████████████████████████████▍ | 66/100 [01:51<00:56, 1.67s/it]\n",
" 67%|██████████████████████████████████████████████████████▎ | 67/100 [01:52<00:55, 1.69s/it]\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 68%|███████████████████████████████████████████████████████ | 68/100 [01:54<00:54, 1.69s/it]\n",
" 69%|███████████████████████████████████████████████████████▉ | 69/100 [01:56<00:53, 1.72s/it]\n",
" 70%|████████████████████████████████████████████████████████▋ | 70/100 [01:58<00:51, 1.72s/it]\n",
" 71%|█████████████████████████████████████████████████████████▌ | 71/100 [01:59<00:49, 1.72s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 12.0s finished\n",
"[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.2s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 236\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 72%|██████████████████████████████████████████████████████████▎ | 72/100 [02:26<04:21, 9.34s/it]\n",
" 73%|███████████████████████████████████████████████████████████▏ | 73/100 [02:28<03:10, 7.04s/it]\n",
" 74%|███████████████████████████████████████████████████████████▉ | 74/100 [02:30<02:21, 5.43s/it]\n",
" 75%|████████████████████████████████████████████████████████████▊ | 75/100 [02:31<01:47, 4.31s/it]\n",
" 76%|█████████████████████████████████████████████████████████████▌ | 76/100 [02:33<01:24, 3.51s/it]\n",
" 77%|██████████████████████████████████████████████████████████████▎ | 77/100 [02:35<01:08, 2.96s/it]\n",
" 78%|███████████████████████████████████████████████████████████████▏ | 78/100 [02:36<00:56, 2.58s/it]\n",
" 79%|███████████████████████████████████████████████████████████████▉ | 79/100 [02:38<00:48, 2.30s/it]\n",
" 80%|████████████████████████████████████████████████████████████████▊ | 80/100 [02:40<00:42, 2.11s/it]\n",
" 81%|█████████████████████████████████████████████████████████████████▌ | 81/100 [02:41<00:37, 1.98s/it]\n",
" 82%|██████████████████████████████████████████████████████████████████▍ | 82/100 [02:43<00:33, 1.89s/it]\n",
" 83%|███████████████████████████████████████████████████████████████████▏ | 83/100 [02:45<00:30, 1.82s/it]\n",
" 84%|████████████████████████████████████████████████████████████████████ | 84/100 [02:46<00:28, 1.77s/it]\n",
" 85%|████████████████████████████████████████████████████████████████████▊ | 85/100 [02:48<00:26, 1.74s/it]\n",
" 86%|█████████████████████████████████████████████████████████████████████▋ | 86/100 [02:50<00:24, 1.72s/it]\n",
" 87%|██████████████████████████████████████████████████████████████████████▍ | 87/100 [02:51<00:22, 1.70s/it]\n",
" 88%|███████████████████████████████████████████████████████████████████████▎ | 88/100 [02:53<00:20, 1.70s/it]\n",
" 89%|████████████████████████████████████████████████████████████████████████ | 89/100 [02:55<00:18, 1.69s/it]\n",
" 90%|████████████████████████████████████████████████████████████████████████▉ | 90/100 [02:56<00:16, 1.69s/it]\n",
" 91%|█████████████████████████████████████████████████████████████████████████▋ | 91/100 [02:58<00:15, 1.68s/it]\n",
" 92%|██████████████████████████████████████████████████████████████████████████▌ | 92/100 [03:00<00:13, 1.68s/it]\n",
" 93%|███████████████████████████████████████████████████████████████████████████▎ | 93/100 [03:01<00:11, 1.68s/it]\n",
" 94%|████████████████████████████████████████████████████████████████████████████▏ | 94/100 [03:03<00:10, 1.67s/it]\n",
" 95%|████████████████████████████████████████████████████████████████████████████▉ | 95/100 [03:05<00:08, 1.67s/it]\n",
" 96%|█████████████████████████████████████████████████████████████████████████████▊ | 96/100 [03:06<00:06, 1.67s/it]\n",
" 97%|██████████████████████████████████████████████████████████████████████████████▌ | 97/100 [03:08<00:04, 1.67s/it]\n",
" 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [03:10<00:03, 1.66s/it]\n",
" 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [03:11<00:01, 1.67s/it]\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [03:13<00:00, 1.94s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 0.19349391767446147; global model accuracy: 0.9243025779724121\n",
"Round 2.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 0%| | 0/100 [00:00<?, ?it/s]\n",
" 1%|▊ | 1/100 [00:01<02:43, 1.66s/it]\n",
" 2%|█▋ | 2/100 [00:03<02:42, 1.66s/it]\n",
" 3%|██▍ | 3/100 [00:04<02:40, 1.66s/it]\n",
" 4%|███▎ | 4/100 [00:06<02:39, 1.66s/it]\n",
" 5%|████ | 5/100 [00:08<02:37, 1.66s/it]\n",
" 6%|████▉ | 6/100 [00:09<02:36, 1.66s/it]\n",
" 7%|█████▋ | 7/100 [00:11<02:34, 1.66s/it]\n",
" 8%|██████▌ | 8/100 [00:13<02:32, 1.66s/it]\n",
" 9%|███████▍ | 9/100 [00:14<02:30, 1.65s/it]\n",
" 10%|████████ | 10/100 [00:16<02:28, 1.65s/it]\n",
" 11%|████████▉ | 11/100 [00:18<02:27, 1.66s/it]\n",
" 12%|█████████▋ | 12/100 [00:19<02:26, 1.66s/it]\n",
" 13%|██████████▌ | 13/100 [00:21<02:24, 1.66s/it]\n",
" 14%|███████████▎ | 14/100 [00:23<02:22, 1.66s/it]\n",
" 15%|████████████▏ | 15/100 [00:24<02:21, 1.66s/it]\n",
" 16%|████████████▉ | 16/100 [00:26<02:19, 1.66s/it]\n",
" 17%|█████████████▊ | 17/100 [00:28<02:18, 1.67s/it]\n",
" 18%|██████████████▌ | 18/100 [00:29<02:16, 1.67s/it]\n",
" 19%|███████████████▍ | 19/100 [00:31<02:14, 1.66s/it]\n",
" 20%|████████████████▏ | 20/100 [00:33<02:13, 1.66s/it]\n",
" 21%|█████████████████ | 21/100 [00:34<02:11, 1.66s/it]\n",
" 22%|█████████████████▊ | 22/100 [00:36<02:09, 1.66s/it]\n",
" 23%|██████████████████▋ | 23/100 [00:38<02:07, 1.66s/it]\n",
" 24%|███████████████████▍ | 24/100 [00:39<02:05, 1.66s/it]\n",
" 25%|████████████████████▎ | 25/100 [00:41<02:04, 1.66s/it]\n",
" 26%|█████████████████████ | 26/100 [00:43<02:03, 1.66s/it]\n",
" 27%|█████████████████████▊ | 27/100 [00:44<02:01, 1.66s/it]\n",
" 28%|██████████████████████▋ | 28/100 [00:46<01:59, 1.66s/it]\n",
" 29%|███████████████████████▍ | 29/100 [00:48<01:57, 1.66s/it]\n",
" 30%|████████████████████████▎ | 30/100 [00:49<01:55, 1.66s/it]\n",
" 31%|█████████████████████████ | 31/100 [00:51<01:54, 1.66s/it]\n",
" 32%|█████████████████████████▉ | 32/100 [00:53<01:52, 1.65s/it]\n",
" 33%|██████████████████████████▋ | 33/100 [00:54<01:50, 1.65s/it]\n",
" 34%|███████████████████████████▌ | 34/100 [00:56<01:49, 1.65s/it]\n",
" 35%|████████████████████████████▎ | 35/100 [00:58<01:47, 1.65s/it]\n",
" 36%|█████████████████████████████▏ | 36/100 [00:59<01:46, 1.66s/it]\n",
" 37%|█████████████████████████████▉ | 37/100 [01:01<01:44, 1.67s/it]\n",
" 38%|██████████████████████████████▊ | 38/100 [01:03<01:43, 1.67s/it]\n",
" 39%|███████████████████████████████▌ | 39/100 [01:04<01:42, 1.67s/it]\n",
" 40%|████████████████████████████████▍ | 40/100 [01:06<01:40, 1.67s/it]\n",
" 41%|█████████████████████████████████▏ | 41/100 [01:08<01:38, 1.66s/it]\n",
" 42%|██████████████████████████████████ | 42/100 [01:09<01:36, 1.66s/it]\n",
" 43%|██████████████████████████████████▊ | 43/100 [01:11<01:34, 1.65s/it]\n",
" 44%|███████████████████████████████████▋ | 44/100 [01:13<01:32, 1.65s/it]\n",
" 45%|████████████████████████████████████▍ | 45/100 [01:14<01:30, 1.65s/it]\n",
" 46%|█████████████████████████████████████▎ | 46/100 [01:16<01:29, 1.65s/it]\n",
" 47%|██████████████████████████████████████ | 47/100 [01:17<01:27, 1.65s/it]\n",
" 48%|██████████████████████████████████████▉ | 48/100 [01:19<01:26, 1.66s/it]\n",
" 49%|███████████████████████████████████████▋ | 49/100 [01:21<01:24, 1.66s/it]\n",
" 50%|████████████████████████████████████████▌ | 50/100 [01:22<01:23, 1.66s/it]\n",
" 51%|█████████████████████████████████████████▎ | 51/100 [01:24<01:21, 1.66s/it]\n",
" 52%|██████████████████████████████████████████ | 52/100 [01:26<01:19, 1.66s/it]\n",
" 53%|██████████████████████████████████████████▉ | 53/100 [01:27<01:18, 1.67s/it]\n",
" 54%|███████████████████████████████████████████▋ | 54/100 [01:29<01:16, 1.66s/it]\n",
" 55%|████████████████████████████████████████████▌ | 55/100 [01:31<01:14, 1.66s/it]\n",
" 56%|█████████████████████████████████████████████▎ | 56/100 [01:32<01:13, 1.66s/it]\n",
" 57%|██████████████████████████████████████████████▏ | 57/100 [01:34<01:11, 1.66s/it]\n",
" 58%|██████████████████████████████████████████████▉ | 58/100 [01:36<01:09, 1.66s/it]\n",
" 59%|███████████████████████████████████████████████▊ | 59/100 [01:37<01:07, 1.66s/it]\n",
" 60%|████████████████████████████████████████████████▌ | 60/100 [01:39<01:06, 1.66s/it]\n",
" 61%|█████████████████████████████████████████████████▍ | 61/100 [01:41<01:04, 1.66s/it]\n",
" 62%|██████████████████████████████████████████████████▏ | 62/100 [01:42<01:03, 1.66s/it]\n",
" 63%|███████████████████████████████████████████████████ | 63/100 [01:44<01:01, 1.66s/it]\n",
" 64%|███████████████████████████████████████████████████▊ | 64/100 [01:46<00:59, 1.66s/it]\n",
" 65%|████████████████████████████████████████████████████▋ | 65/100 [01:47<00:58, 1.66s/it]\n",
" 66%|█████████████████████████████████████████████████████▍ | 66/100 [01:49<00:56, 1.66s/it]\n",
" 67%|██████████████████████████████████████████████████████▎ | 67/100 [01:51<00:54, 1.66s/it]\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 68%|███████████████████████████████████████████████████████ | 68/100 [01:52<00:52, 1.66s/it]\n",
" 69%|███████████████████████████████████████████████████████▉ | 69/100 [01:54<00:51, 1.67s/it]\n",
" 70%|████████████████████████████████████████████████████████▋ | 70/100 [01:56<00:49, 1.67s/it]\n",
" 71%|█████████████████████████████████████████████████████████▌ | 71/100 [01:57<00:48, 1.67s/it]\n",
" 72%|██████████████████████████████████████████████████████████▎ | 72/100 [01:59<00:46, 1.67s/it]\n",
" 73%|███████████████████████████████████████████████████████████▏ | 73/100 [02:01<00:44, 1.66s/it]\n",
" 74%|███████████████████████████████████████████████████████████▉ | 74/100 [02:02<00:43, 1.66s/it]\n",
" 75%|████████████████████████████████████████████████████████████▊ | 75/100 [02:04<00:41, 1.66s/it]\n",
" 76%|█████████████████████████████████████████████████████████████▌ | 76/100 [02:06<00:39, 1.66s/it]\n",
" 77%|██████████████████████████████████████████████████████████████▎ | 77/100 [02:07<00:38, 1.66s/it]\n",
" 78%|███████████████████████████████████████████████████████████████▏ | 78/100 [02:09<00:36, 1.66s/it]\n",
" 79%|███████████████████████████████████████████████████████████████▉ | 79/100 [02:11<00:34, 1.66s/it]\n",
" 80%|████████████████████████████████████████████████████████████████▊ | 80/100 [02:12<00:33, 1.66s/it]\n",
" 81%|█████████████████████████████████████████████████████████████████▌ | 81/100 [02:14<00:31, 1.66s/it]\n",
" 82%|██████████████████████████████████████████████████████████████████▍ | 82/100 [02:16<00:29, 1.66s/it]\n",
" 83%|███████████████████████████████████████████████████████████████████▏ | 83/100 [02:17<00:28, 1.66s/it]\n",
" 84%|████████████████████████████████████████████████████████████████████ | 84/100 [02:19<00:26, 1.66s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.2s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 1014\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 85%|████████████████████████████████████████████████████████████████████▊ | 85/100 [03:13<04:22, 17.51s/it]\n",
" 86%|█████████████████████████████████████████████████████████████████████▋ | 86/100 [03:15<02:58, 12.75s/it]\n",
" 87%|██████████████████████████████████████████████████████████████████████▍ | 87/100 [03:17<02:02, 9.43s/it]\n",
" 88%|███████████████████████████████████████████████████████████████████████▎ | 88/100 [03:18<01:25, 7.10s/it]\n",
" 89%|████████████████████████████████████████████████████████████████████████ | 89/100 [03:20<01:00, 5.47s/it]\n",
" 90%|████████████████████████████████████████████████████████████████████████▉ | 90/100 [03:22<00:43, 4.32s/it]\n",
" 91%|█████████████████████████████████████████████████████████████████████████▋ | 91/100 [03:23<00:31, 3.52s/it]\n",
" 92%|██████████████████████████████████████████████████████████████████████████▌ | 92/100 [03:25<00:23, 2.96s/it]\n",
" 93%|███████████████████████████████████████████████████████████████████████████▎ | 93/100 [03:27<00:17, 2.57s/it]\n",
" 94%|████████████████████████████████████████████████████████████████████████████▏ | 94/100 [03:28<00:13, 2.29s/it]\n",
" 95%|████████████████████████████████████████████████████████████████████████████▉ | 95/100 [03:30<00:10, 2.10s/it]\n",
" 96%|█████████████████████████████████████████████████████████████████████████████▊ | 96/100 [03:32<00:07, 1.98s/it]\n",
" 97%|██████████████████████████████████████████████████████████████████████████████▌ | 97/100 [03:33<00:05, 1.88s/it]\n",
" 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [03:35<00:03, 1.81s/it]\n",
" 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [03:37<00:01, 1.76s/it]\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [03:38<00:00, 2.19s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 0.056399353472181495; global model accuracy: 0.9811399579048157\n",
"Round 3.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 27%|█████████████████████▊ | 27/100 [00:44<02:01, 1.66s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.2s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 230\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 28%|██████████████████████▋ | 28/100 [00:59<06:31, 5.44s/it]\n",
" 29%|███████████████████████▍ | 29/100 [01:00<05:05, 4.30s/it]\n",
" 30%|████████████████████████▎ | 30/100 [01:02<04:06, 3.51s/it]\n",
" 31%|█████████████████████████ | 31/100 [01:04<03:23, 2.96s/it]\n",
" 32%|█████████████████████████▉ | 32/100 [01:05<02:54, 2.57s/it]\n",
" 33%|██████████████████████████▋ | 33/100 [01:07<02:33, 2.30s/it]\n",
" 34%|███████████████████████████▌ | 34/100 [01:08<02:18, 2.10s/it]\n",
" 35%|████████████████████████████▎ | 35/100 [01:10<02:07, 1.97s/it]\n",
" 36%|█████████████████████████████▏ | 36/100 [01:12<02:00, 1.89s/it]\n",
" 37%|█████████████████████████████▉ | 37/100 [01:14<01:54, 1.82s/it]\n",
" 38%|██████████████████████████████▊ | 38/100 [01:15<01:49, 1.77s/it]\n",
" 39%|███████████████████████████████▌ | 39/100 [01:17<01:45, 1.73s/it]\n",
" 40%|████████████████████████████████▍ | 40/100 [01:18<01:41, 1.70s/it]\n",
" 41%|█████████████████████████████████▏ | 41/100 [01:20<01:39, 1.69s/it]\n",
" 42%|██████████████████████████████████ | 42/100 [01:22<01:36, 1.67s/it]\n",
" 43%|██████████████████████████████████▊ | 43/100 [01:23<01:34, 1.66s/it]\n",
" 44%|███████████████████████████████████▋ | 44/100 [01:25<01:32, 1.66s/it]\n",
" 45%|████████████████████████████████████▍ | 45/100 [01:27<01:32, 1.68s/it]\n",
" 46%|█████████████████████████████████████▎ | 46/100 [01:28<01:30, 1.67s/it]\n",
" 47%|██████████████████████████████████████ | 47/100 [01:30<01:28, 1.67s/it]\n",
" 48%|██████████████████████████████████████▉ | 48/100 [01:32<01:26, 1.66s/it]\n",
" 49%|███████████████████████████████████████▋ | 49/100 [01:33<01:24, 1.65s/it]\n",
" 50%|████████████████████████████████████████▌ | 50/100 [01:35<01:22, 1.65s/it]\n",
" 51%|█████████████████████████████████████████▎ | 51/100 [01:37<01:21, 1.66s/it]\n",
" 52%|██████████████████████████████████████████ | 52/100 [01:38<01:19, 1.65s/it]\n",
" 53%|██████████████████████████████████████████▉ | 53/100 [01:40<01:17, 1.65s/it]\n",
" 54%|███████████████████████████████████████████▋ | 54/100 [01:42<01:15, 1.65s/it]\n",
" 55%|████████████████████████████████████████████▌ | 55/100 [01:43<01:14, 1.65s/it]\n",
" 56%|█████████████████████████████████████████████▎ | 56/100 [01:45<01:12, 1.65s/it]\n",
" 57%|██████████████████████████████████████████████▏ | 57/100 [01:46<01:10, 1.64s/it]\n",
" 58%|██████████████████████████████████████████████▉ | 58/100 [01:48<01:08, 1.64s/it]\n",
" 59%|███████████████████████████████████████████████▊ | 59/100 [01:50<01:07, 1.64s/it]\n",
" 60%|████████████████████████████████████████████████▌ | 60/100 [01:51<01:05, 1.64s/it]\n",
" 61%|█████████████████████████████████████████████████▍ | 61/100 [01:53<01:04, 1.65s/it]\n",
" 62%|██████████████████████████████████████████████████▏ | 62/100 [01:55<01:02, 1.65s/it]\n",
" 63%|███████████████████████████████████████████████████ | 63/100 [01:56<01:00, 1.64s/it]\n",
" 64%|███████████████████████████████████████████████████▊ | 64/100 [01:58<00:59, 1.64s/it]\n",
" 65%|████████████████████████████████████████████████████▋ | 65/100 [02:00<00:57, 1.64s/it]\n",
" 66%|█████████████████████████████████████████████████████▍ | 66/100 [02:01<00:55, 1.64s/it]\n",
" 67%|██████████████████████████████████████████████████████▎ | 67/100 [02:03<00:54, 1.64s/it]\n",
" 68%|███████████████████████████████████████████████████████ | 68/100 [02:05<00:52, 1.64s/it]\n",
" 69%|███████████████████████████████████████████████████████▉ | 69/100 [02:06<00:50, 1.64s/it]\n",
" 70%|████████████████████████████████████████████████████████▋ | 70/100 [02:08<00:49, 1.65s/it]\n",
" 71%|█████████████████████████████████████████████████████████▌ | 71/100 [02:10<00:47, 1.65s/it]\n",
" 72%|██████████████████████████████████████████████████████████▎ | 72/100 [02:11<00:46, 1.65s/it]\n",
" 73%|███████████████████████████████████████████████████████████▏ | 73/100 [02:13<00:44, 1.64s/it]\n",
" 74%|███████████████████████████████████████████████████████████▉ | 74/100 [02:14<00:42, 1.64s/it]\n",
" 75%|████████████████████████████████████████████████████████████▊ | 75/100 [02:16<00:41, 1.65s/it]\n",
" 76%|█████████████████████████████████████████████████████████████▌ | 76/100 [02:18<00:39, 1.65s/it]\n",
" 77%|██████████████████████████████████████████████████████████████▎ | 77/100 [02:19<00:37, 1.64s/it]\n",
" 78%|███████████████████████████████████████████████████████████████▏ | 78/100 [02:21<00:36, 1.65s/it]\n",
" 79%|███████████████████████████████████████████████████████████████▉ | 79/100 [02:23<00:34, 1.65s/it]\n",
" 80%|████████████████████████████████████████████████████████████████▊ | 80/100 [02:24<00:32, 1.65s/it]\n",
" 81%|█████████████████████████████████████████████████████████████████▌ | 81/100 [02:26<00:31, 1.65s/it]\n",
" 82%|██████████████████████████████████████████████████████████████████▍ | 82/100 [02:28<00:29, 1.65s/it]\n",
" 83%|███████████████████████████████████████████████████████████████████▏ | 83/100 [02:29<00:27, 1.65s/it]\n",
" 84%|████████████████████████████████████████████████████████████████████ | 84/100 [02:31<00:26, 1.64s/it]\n",
" 85%|████████████████████████████████████████████████████████████████████▊ | 85/100 [02:33<00:24, 1.65s/it]\n",
" 86%|█████████████████████████████████████████████████████████████████████▋ | 86/100 [02:34<00:23, 1.65s/it]\n",
" 87%|██████████████████████████████████████████████████████████████████████▍ | 87/100 [02:36<00:21, 1.65s/it]\n",
" 88%|███████████████████████████████████████████████████████████████████████▎ | 88/100 [02:38<00:19, 1.65s/it]\n",
" 89%|████████████████████████████████████████████████████████████████████████ | 89/100 [02:39<00:18, 1.65s/it]\n",
" 90%|████████████████████████████████████████████████████████████████████████▉ | 90/100 [02:41<00:16, 1.65s/it]\n",
" 91%|█████████████████████████████████████████████████████████████████████████▋ | 91/100 [02:42<00:14, 1.65s/it]\n",
" 92%|██████████████████████████████████████████████████████████████████████████▌ | 92/100 [02:44<00:13, 1.65s/it]\n",
" 93%|███████████████████████████████████████████████████████████████████████████▎ | 93/100 [02:46<00:11, 1.65s/it]\n",
" 94%|████████████████████████████████████████████████████████████████████████████▏ | 94/100 [02:47<00:09, 1.65s/it]\n",
" 95%|████████████████████████████████████████████████████████████████████████████▉ | 95/100 [02:49<00:08, 1.65s/it]\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 96%|█████████████████████████████████████████████████████████████████████████████▊ | 96/100 [02:51<00:06, 1.65s/it]\n",
" 97%|██████████████████████████████████████████████████████████████████████████████▌ | 97/100 [02:52<00:04, 1.65s/it]\n",
" 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [02:54<00:03, 1.65s/it]\n",
" 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [02:56<00:01, 1.65s/it]\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [02:57<00:00, 1.78s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 0.08008340218102815; global model accuracy: 0.9698019027709961\n",
"Round 4.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 0%| | 0/100 [00:00<?, ?it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.2s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 391\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [00:22<00:00, 4.48it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 0.805769821976991; global model accuracy: 0.7652534246444702\n",
"Round 5.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 0%| | 0/100 [00:00<?, ?it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.2s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 2932\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [02:28<00:00, 1.49s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 2.228477299086053; global model accuracy: 0.646946370601654\n",
"Round 6.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 0%| | 0/100 [00:00<?, ?it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.2s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 4490\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [03:46<00:00, 2.26s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 4.57180075867313; global model accuracy: 0.5873737335205078\n",
"Round 7.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 0%| | 0/100 [00:00<?, ?it/s][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.2s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 5231\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [04:32<00:00, 2.72s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 7.691132786578953; global model accuracy: 0.5873737335205078\n",
"Round 8.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 67%|██████████████████████████████████████████████████████▎ | 67/100 [01:58<00:57, 1.76s/it]\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 68%|███████████████████████████████████████████████████████ | 68/100 [02:00<00:55, 1.75s/it]\n",
" 69%|███████████████████████████████████████████████████████▉ | 69/100 [02:01<00:54, 1.75s/it]\n",
" 70%|████████████████████████████████████████████████████████▋ | 70/100 [02:03<00:52, 1.75s/it]\n",
" 71%|█████████████████████████████████████████████████████████▌ | 71/100 [02:05<00:50, 1.75s/it]\n",
" 72%|██████████████████████████████████████████████████████████▎ | 72/100 [02:07<00:49, 1.75s/it]\n",
" 73%|███████████████████████████████████████████████████████████▏ | 73/100 [02:08<00:47, 1.75s/it]\n",
" 74%|███████████████████████████████████████████████████████████▉ | 74/100 [02:10<00:45, 1.74s/it]\n",
" 75%|████████████████████████████████████████████████████████████▊ | 75/100 [02:12<00:43, 1.75s/it]\n",
" 76%|█████████████████████████████████████████████████████████████▌ | 76/100 [02:14<00:41, 1.75s/it]\n",
" 77%|██████████████████████████████████████████████████████████████▎ | 77/100 [02:15<00:39, 1.71s/it]\n",
" 78%|███████████████████████████████████████████████████████████████▏ | 78/100 [02:17<00:37, 1.69s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.2s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 5199\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 79%|███████████████████████████████████████████████████████████████▉ | 79/100 [06:44<28:29, 81.39s/it]\n",
" 80%|████████████████████████████████████████████████████████████████▊ | 80/100 [06:46<19:09, 57.49s/it]\n",
" 81%|█████████████████████████████████████████████████████████████████▌ | 81/100 [06:48<12:54, 40.74s/it]\n",
" 82%|██████████████████████████████████████████████████████████████████▍ | 82/100 [06:49<08:42, 29.02s/it]\n",
" 83%|███████████████████████████████████████████████████████████████████▏ | 83/100 [06:51<05:53, 20.82s/it]\n",
" 84%|████████████████████████████████████████████████████████████████████ | 84/100 [06:53<04:01, 15.08s/it]\n",
" 85%|████████████████████████████████████████████████████████████████████▊ | 85/100 [06:54<02:45, 11.05s/it]\n",
" 86%|█████████████████████████████████████████████████████████████████████▋ | 86/100 [06:56<01:55, 8.24s/it]\n",
" 87%|██████████████████████████████████████████████████████████████████████▍ | 87/100 [06:58<01:21, 6.27s/it]\n",
" 88%|███████████████████████████████████████████████████████████████████████▎ | 88/100 [06:59<00:58, 4.89s/it]\n",
" 89%|████████████████████████████████████████████████████████████████████████ | 89/100 [07:01<00:43, 3.93s/it]\n",
" 90%|████████████████████████████████████████████████████████████████████████▉ | 90/100 [07:03<00:32, 3.24s/it]\n",
" 91%|█████████████████████████████████████████████████████████████████████████▋ | 91/100 [07:04<00:24, 2.77s/it]\n",
" 92%|██████████████████████████████████████████████████████████████████████████▌ | 92/100 [07:06<00:19, 2.44s/it]\n",
" 93%|███████████████████████████████████████████████████████████████████████████▎ | 93/100 [07:08<00:15, 2.21s/it]\n",
" 94%|████████████████████████████████████████████████████████████████████████████▏ | 94/100 [07:09<00:12, 2.05s/it]\n",
" 95%|████████████████████████████████████████████████████████████████████████████▉ | 95/100 [07:11<00:09, 1.94s/it]\n",
" 96%|█████████████████████████████████████████████████████████████████████████████▊ | 96/100 [07:13<00:07, 1.86s/it]\n",
" 97%|██████████████████████████████████████████████████████████████████████████████▌ | 97/100 [07:14<00:05, 1.81s/it]\n",
" 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [07:16<00:03, 1.77s/it]\n",
" 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [07:18<00:01, 1.74s/it]\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [07:19<00:00, 4.40s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 1.8344421140176637; global model accuracy: 0.7665107846260071\n",
"Round 9.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 45%|████████████████████████████████████▍ | 45/100 [01:16<01:31, 1.66s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.3s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 2984\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 46%|█████████████████████████████████████▎ | 46/100 [03:53<43:26, 48.27s/it]\n",
" 47%|██████████████████████████████████████ | 47/100 [03:55<30:17, 34.29s/it]\n",
" 48%|██████████████████████████████████████▉ | 48/100 [03:57<21:14, 24.50s/it]\n",
" 49%|███████████████████████████████████████▋ | 49/100 [03:58<15:00, 17.65s/it]\n",
" 50%|████████████████████████████████████████▌ | 50/100 [04:00<10:42, 12.86s/it]\n",
" 51%|█████████████████████████████████████████▎ | 51/100 [04:02<07:45, 9.50s/it]\n",
" 52%|██████████████████████████████████████████ | 52/100 [04:03<05:43, 7.16s/it]\n",
" 53%|██████████████████████████████████████████▉ | 53/100 [04:05<04:19, 5.51s/it]\n",
" 54%|███████████████████████████████████████████▋ | 54/100 [04:07<03:20, 4.36s/it]\n",
" 55%|████████████████████████████████████████████▌ | 55/100 [04:08<02:39, 3.55s/it]\n",
" 56%|█████████████████████████████████████████████▎ | 56/100 [04:10<02:11, 2.99s/it]\n",
" 57%|██████████████████████████████████████████████▏ | 57/100 [04:12<01:51, 2.59s/it]\n",
" 58%|██████████████████████████████████████████████▉ | 58/100 [04:13<01:37, 2.32s/it]\n",
" 59%|███████████████████████████████████████████████▊ | 59/100 [04:15<01:27, 2.12s/it]\n",
" 60%|████████████████████████████████████████████████▌ | 60/100 [04:17<01:19, 1.99s/it]\n",
" 61%|█████████████████████████████████████████████████▍ | 61/100 [04:18<01:13, 1.90s/it]\n",
" 62%|██████████████████████████████████████████████████▏ | 62/100 [04:20<01:09, 1.83s/it]\n",
" 63%|███████████████████████████████████████████████████ | 63/100 [04:22<01:04, 1.75s/it]\n",
" 64%|███████████████████████████████████████████████████▊ | 64/100 [04:23<01:01, 1.71s/it]\n",
" 65%|████████████████████████████████████████████████████▋ | 65/100 [04:25<00:58, 1.68s/it]\n",
" 66%|█████████████████████████████████████████████████████▍ | 66/100 [04:27<00:56, 1.66s/it]\n",
" 67%|██████████████████████████████████████████████████████▎ | 67/100 [04:28<00:54, 1.64s/it]\n",
" 68%|███████████████████████████████████████████████████████ | 68/100 [04:30<00:52, 1.63s/it]\n",
" 69%|███████████████████████████████████████████████████████▉ | 69/100 [04:31<00:50, 1.62s/it]\n",
" 70%|████████████████████████████████████████████████████████▋ | 70/100 [04:33<00:48, 1.61s/it]\n",
" 71%|█████████████████████████████████████████████████████████▌ | 71/100 [04:34<00:46, 1.60s/it]\n",
" 72%|██████████████████████████████████████████████████████████▎ | 72/100 [04:36<00:44, 1.60s/it]\n",
" 73%|███████████████████████████████████████████████████████████▏ | 73/100 [04:38<00:43, 1.60s/it]\n",
" 74%|███████████████████████████████████████████████████████████▉ | 74/100 [04:39<00:41, 1.60s/it]\n",
" 75%|████████████████████████████████████████████████████████████▊ | 75/100 [04:41<00:40, 1.61s/it]\n",
" 76%|█████████████████████████████████████████████████████████████▌ | 76/100 [04:43<00:38, 1.61s/it]\n",
" 77%|██████████████████████████████████████████████████████████████▎ | 77/100 [04:44<00:36, 1.61s/it]\n",
" 78%|███████████████████████████████████████████████████████████████▏ | 78/100 [04:46<00:35, 1.60s/it]\n",
" 79%|███████████████████████████████████████████████████████████████▉ | 79/100 [04:47<00:33, 1.60s/it]\n",
" 80%|████████████████████████████████████████████████████████████████▊ | 80/100 [04:49<00:31, 1.59s/it]\n",
" 81%|█████████████████████████████████████████████████████████████████▌ | 81/100 [04:50<00:30, 1.60s/it]\n",
" 82%|██████████████████████████████████████████████████████████████████▍ | 82/100 [04:52<00:28, 1.61s/it]\n",
" 83%|███████████████████████████████████████████████████████████████████▏ | 83/100 [04:54<00:27, 1.61s/it]\n",
" 84%|████████████████████████████████████████████████████████████████████ | 84/100 [04:55<00:25, 1.61s/it]\n",
" 85%|████████████████████████████████████████████████████████████████████▊ | 85/100 [04:57<00:24, 1.61s/it]\n",
" 86%|█████████████████████████████████████████████████████████████████████▋ | 86/100 [04:59<00:22, 1.60s/it]\n",
" 87%|██████████████████████████████████████████████████████████████████████▍ | 87/100 [05:00<00:20, 1.59s/it]\n",
" 88%|███████████████████████████████████████████████████████████████████████▎ | 88/100 [05:02<00:19, 1.59s/it]\n",
" 89%|████████████████████████████████████████████████████████████████████████ | 89/100 [05:03<00:17, 1.59s/it]\n",
" 90%|████████████████████████████████████████████████████████████████████████▉ | 90/100 [05:05<00:15, 1.59s/it]\n",
" 91%|█████████████████████████████████████████████████████████████████████████▋ | 91/100 [05:06<00:14, 1.59s/it]\n",
" 92%|██████████████████████████████████████████████████████████████████████████▌ | 92/100 [05:08<00:12, 1.59s/it]\n",
" 93%|███████████████████████████████████████████████████████████████████████████▎ | 93/100 [05:10<00:11, 1.60s/it]\n",
" 94%|████████████████████████████████████████████████████████████████████████████▏ | 94/100 [05:11<00:09, 1.60s/it]\n",
" 95%|████████████████████████████████████████████████████████████████████████████▉ | 95/100 [05:13<00:08, 1.60s/it]\n",
" 96%|█████████████████████████████████████████████████████████████████████████████▊ | 96/100 [05:14<00:06, 1.60s/it]\n",
" 97%|██████████████████████████████████████████████████████████████████████████████▌ | 97/100 [05:16<00:04, 1.60s/it]\n",
" 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [05:18<00:03, 1.60s/it]\n",
" 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [05:19<00:01, 1.60s/it]\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [05:21<00:00, 3.21s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 3.510807255450716; global model accuracy: 0.5873957872390747\n",
"Round 10.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 67%|██████████████████████████████████████████████████████▎ | 67/100 [01:54<00:56, 1.72s/it]\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 68%|███████████████████████████████████████████████████████ | 68/100 [01:56<00:55, 1.72s/it]\n",
" 69%|███████████████████████████████████████████████████████▉ | 69/100 [01:57<00:53, 1.73s/it]\n",
" 70%|████████████████████████████████████████████████████████▋ | 70/100 [01:59<00:51, 1.73s/it]\n",
" 71%|█████████████████████████████████████████████████████████▌ | 71/100 [02:01<00:50, 1.73s/it]\n",
" 72%|██████████████████████████████████████████████████████████▎ | 72/100 [02:03<00:48, 1.72s/it]\n",
" 73%|███████████████████████████████████████████████████████████▏ | 73/100 [02:04<00:46, 1.73s/it]\n",
" 74%|███████████████████████████████████████████████████████████▉ | 74/100 [02:06<00:44, 1.71s/it]\n",
" 75%|████████████████████████████████████████████████████████████▊ | 75/100 [02:08<00:42, 1.72s/it]\n",
" 76%|█████████████████████████████████████████████████████████████▌ | 76/100 [02:09<00:41, 1.72s/it][Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
"[Parallel(n_jobs=1)]: Done 1000 out of 1000 | elapsed: 1.2s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"the number of miss classified sampels is 5261\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" 77%|██████████████████████████████████████████████████████████████▎ | 77/100 [06:34<30:50, 80.47s/it]\n",
" 78%|███████████████████████████████████████████████████████████████▏ | 78/100 [06:35<20:50, 56.85s/it]\n",
" 79%|███████████████████████████████████████████████████████████████▉ | 79/100 [06:37<14:06, 40.30s/it]\n",
" 80%|████████████████████████████████████████████████████████████████▊ | 80/100 [06:39<09:34, 28.72s/it]\n",
" 81%|█████████████████████████████████████████████████████████████████▌ | 81/100 [06:40<06:31, 20.61s/it]\n",
" 82%|██████████████████████████████████████████████████████████████████▍ | 82/100 [06:42<04:29, 14.95s/it]\n",
" 83%|███████████████████████████████████████████████████████████████████▏ | 83/100 [06:44<03:06, 10.97s/it]\n",
" 84%|████████████████████████████████████████████████████████████████████ | 84/100 [06:46<02:11, 8.19s/it]\n",
" 85%|████████████████████████████████████████████████████████████████████▊ | 85/100 [06:47<01:33, 6.24s/it]\n",
" 86%|█████████████████████████████████████████████████████████████████████▋ | 86/100 [06:49<01:08, 4.88s/it]\n",
" 87%|██████████████████████████████████████████████████████████████████████▍ | 87/100 [06:51<00:50, 3.92s/it]\n",
" 88%|███████████████████████████████████████████████████████████████████████▎ | 88/100 [06:52<00:39, 3.26s/it]\n",
" 89%|████████████████████████████████████████████████████████████████████████ | 89/100 [06:54<00:30, 2.80s/it]\n",
" 90%|████████████████████████████████████████████████████████████████████████▉ | 90/100 [06:56<00:24, 2.46s/it]\n",
" 91%|█████████████████████████████████████████████████████████████████████████▋ | 91/100 [06:57<00:20, 2.25s/it]\n",
" 92%|██████████████████████████████████████████████████████████████████████████▌ | 92/100 [06:59<00:16, 2.11s/it]\n",
" 93%|███████████████████████████████████████████████████████████████████████████▎ | 93/100 [07:01<00:14, 2.02s/it]\n",
" 94%|████████████████████████████████████████████████████████████████████████████▏ | 94/100 [07:03<00:11, 1.93s/it]\n",
" 95%|████████████████████████████████████████████████████████████████████████████▉ | 95/100 [07:05<00:09, 1.87s/it]\n",
" 96%|█████████████████████████████████████████████████████████████████████████████▊ | 96/100 [07:06<00:07, 1.82s/it]\n",
" 97%|██████████████████████████████████████████████████████████████████████████████▌ | 97/100 [07:08<00:05, 1.78s/it]\n",
" 98%|███████████████████████████████████████████████████████████████████████████████▍ | 98/100 [07:10<00:03, 1.75s/it]\n",
" 99%|████████████████████████████████████████████████████████████████████████████████▏| 99/100 [07:11<00:01, 1.73s/it]\n",
"100%|████████████████████████████████████████████████████████████████████████████████| 100/100 [07:13<00:00, 4.34s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Global model loss: 0.8507050739322711; global model accuracy: 0.7916574478149414\n"
]
}
],
"source": [
"# FL Begain\n",
"# TESTBED\n",
"\n",
"#####################\n",
"# SYSTEM PARAMETERS #\n",
"#####################\n",
"for c in range(1):\n",
" number_attackers = c\n",
" # threshold = 0.011\n",
" threshold =0.00034\n",
" counter = 0 \n",
" peers_selected=random.sample(range(n_peers), number_attackers+1)\n",
" scaner = peers_selected[0]\n",
" mal = peers_selected[1 :]\n",
" # Percentage and number of peers participating at each global training epoch\n",
" percentage_participants = 1.0\n",
" n_participants = int(n_peers * percentage_participants)\n",
"\n",
" # Number of global training epochs\n",
" n_rounds = 10\n",
" start_attack_round = 4\n",
" end_attack_round = 7\n",
" # Number of local training epochs per global training epoch\n",
" n_local_rounds = 5\n",
"\n",
" # Local batch size\n",
" local_batch_size = 32\n",
"\n",
" # Local learning rate\n",
" local_lr = 0.001\n",
"\n",
" # Global learning rate or 'gain'\n",
" model_substitution_rate = 1.0\n",
"\n",
" # Attack detection / prevention mechanism = {None, 'distance', 'median', 'accuracy', 'krum'}\n",
" discard_outliers = None\n",
"\n",
" # Used in 'dist' attack detection, defines how far the outliers are (1.5 is a typical value)\n",
" tau = 1.5\n",
"\n",
" # Used in 'accuracy' attack detection, defines the error margin for the accuracy improvement\n",
" sensitivity = 0.05\n",
"\n",
" # Used in 'krum' attack detection, defines how many byzantine attackers we want to defend against\n",
" tolerance=4\n",
"\n",
" # Prevent suspicious peers from participating again, only valid for 'dist' and 'accuracy'\n",
" ban_malicious = False\n",
"\n",
" # Clear nans and infinites in model updates\n",
" clear_nans = True\n",
"\n",
" number_for_threshold1 = numpy.empty(20, dtype=float)\n",
" number_for_threshold2 = numpy.empty(20, dtype=float)\n",
" for r in range(len(number_for_threshold1)):\n",
" number_for_threshold1[r] = 0\n",
" number_for_threshold2[r] = 0\n",
"\n",
" ########################\n",
" # ATTACK CONFIGURATION #\n",
" ########################\n",
"\n",
" # Percentage of malicious peers\n",
" r_malicious_peers = 0.0\n",
"\n",
" # Number of malicious peers (absolute or relative to total number of peers)\n",
" n_malicious_peers = int(n_peers * r_malicious_peers)\n",
" #n_malicious_peers = 1\n",
"\n",
" # Malicious peers\n",
" malicious_peer = range(n_malicious_peers)\n",
"\n",
" # Target for coalitions\n",
" common_attack_target = [4,7]\n",
"\n",
" # Target class of the attack, per each malicious peer\n",
" malicious_targets = dict([(p, t) for p,t in zip(malicious_peer, [common_attack_target]*n_malicious_peers)])\n",
"\n",
" # Boosting parameter per each malicious peer\n",
" common_malicious_boost = 12\n",
" malicious_boost = dict([(p, b) for p,b in zip(malicious_peer, [common_malicious_boost]*n_malicious_peers)])\n",
"\n",
" ###########\n",
" # METRICS #\n",
" ###########\n",
" metrics = {'accuracy': [],\n",
" 'atk_effectivity': [],\n",
" 'update_distances': [],\n",
" 'outliers_detected': [],\n",
"\n",
" 'acc_no_target': []}\n",
"\n",
" ####################################\n",
" # MODEL AND NETWORK INITIALIZATION #\n",
" ####################################\n",
" inputs = X_train[0*ss:0*ss+ss]\n",
" outputs = y_train[0*ss:0*ss+ss]\n",
" global_model = build_model(inputs,outputs)\n",
" n_layers = len(trainable_layers(global_model))\n",
"\n",
" print('Initializing network.')\n",
" sleep(1)\n",
" network = []\n",
" for i in tqdm(range(n_peers)):\n",
" ss = int(len(X_train)/n_peers)\n",
" inputs = X_train[i*ss:i*ss+ss]\n",
" outputs = y_train[i*ss:i*ss+ss]\n",
" # network.append(build_model(inputs, outputs))\n",
" network.append(global_model)\n",
"\n",
"\n",
" banned_peers = set()\n",
"\n",
" ##################\n",
" # BEGIN TRAINING #\n",
" ##################\n",
" for t in range(n_rounds):\n",
" print(f'Round {t+1}.')\n",
" sleep(1)\n",
"\n",
" ## SERVER SIDE #################################################################\n",
" # Fetch global model parameters\n",
" global_weights, global_biases = get_parameters(global_model)\n",
"\n",
" if clear_nans:\n",
" global_weights, global_biases = nans_to_zero(global_weights, global_biases)\n",
"\n",
" # Initialize peer update lists\n",
" network_weight_updates = []\n",
" network_bias_updates = []\n",
"\n",
" # Selection of participant peers in this global training epoch\n",
" if ban_malicious:\n",
" good_peers = list([p for i,p in enumerate(network) if i not in banned_peers])\n",
" n_participants = n_participants if n_participants <= len(good_peers) else int(len(good_peers) * percentage_participants)\n",
" participants = random.sample(list(enumerate(good_peers)), n_participants)\n",
" else:\n",
" participants = random.sample(list(enumerate(network)),n_participants)\n",
" ################################################################################\n",
"\n",
"\n",
" ## CLIENT SIDE #################################################################\n",
" for i, local_model in tqdm(participants):\n",
"\n",
" # Update local model with global parameters \n",
" set_parameters(local_model, global_weights, global_biases)\n",
"\n",
" # Initialization of user data\n",
" ss = int(len(X_train)/n_peers)\n",
" inputs = X_train[i*ss:i*ss+ss]\n",
" outputs = y_train[i*ss:i*ss+ss]\n",
"\n",
" # print(\"worker number \", i,\" from \", n_peers)\n",
" # print(\" number of data in worker \", i ,\" is \", len(inputs))\n",
"\n",
"\n",
" # do the forest here\n",
"\n",
"\n",
"\n",
"\n",
" # counter = counter+1\n",
"\n",
" if(i == scaner):\n",
" X_train_local, X_test_local, y_train_local, y_test_local = train_test_split(inputs,outputs, test_size=0.7, random_state=rs)\n",
" inputs = X_train_local\n",
" outputs = y_train_local\n",
" if(t == 0):\n",
" forest = build_forest(X_train_local,y_train_local)\n",
" forest_predictions = forest.predict(X_test_local)\n",
" acc_forest = np.mean([t==p for t,p in zip(y_test_local, forest_predictions)])\n",
" # imp = forest.feature_importances_\n",
" # FI_dic1[t] = imp\n",
" FL_predict1 = global_model.predict(X_test_local)\n",
" imp = scan_wrong(forest_predictions, FL_predict1, forest , y_test_local, X_test_local)\n",
" FI_dic1[t] = imp\n",
"# if(t > 0):\n",
"# different_rouneds = FI_dic1[t-1] - FI_dic1[t]\n",
"# different_rouneds = abs(different_rouneds)\n",
"# number_for_threshold=0\n",
"# print(\"lenght of different \",len(different_rouneds))\n",
"# for H in range(len(different_rouneds)):\n",
"# number_for_threshold1[t] = number_for_threshold1[t] + different_rouneds[H]\n",
"# number_for_threshold = number_for_threshold1[t] - number_for_threshold1[t-1]\n",
"# if(t > 1):\n",
"# print(number_for_threshold)\n",
"# dic[c].append(abs(number_for_threshold))\n",
"# if(abs(number_for_threshold)>threshold):\n",
"# print(\"---------------------------------------------------------\")\n",
"# print(\"attack happened , in the round before which is \", t+1)\n",
"# print(\"from peer \", i)\n",
"# print(different_rouneds)\n",
"# print(number_for_threshold)\n",
"# print(\"---------------------------------------------------------\")\n",
"\n",
" # number_for_threshold1 = numpy.empty(19, dtype=float)\n",
" # for i in range(len(number_for_threshold1)):\n",
" # number_for_threshold1[i] = 0\n",
" # for j in range(len(FI_dic1)-1):\n",
" # number_for_threshold1 = numpy.empty(19, dtype=float)\n",
" # different_rouneds = FI_dic2[j] - FI_dic2[j]\n",
" # different_rouneds = abs(different_rouneds)\n",
" # # number_for_threshold=0\n",
" # for i in range(len(different_rouneds)):\n",
" # number_for_threshold1[j] = number_for_threshold1[j] + different_rouneds[i]\n",
"\n",
"\n",
"\n",
"\n",
"\n",
" # if(i == 12):\n",
" # X_train_local, X_test_local, y_train_local, y_test_local = train_test_split(inputs,outputs, test_size=0.7, random_state=rs)\n",
" # inputs = X_train_local\n",
" # outputs = y_train_local\n",
" # forest = build_forest(X_train_local,y_train_local)\n",
" # forest_predictions = forest.predict(X_test_local)\n",
" # acc_forest = np.mean([t==p for t,p in zip(y_test_local, forest_predictions)])\n",
" # imp = forest.feature_importances_\n",
" # # FI_dic2[t] = imp\n",
" # # FL_predict2 = global_model.predict(X_test_local)\n",
" # FL_predict2 = global_model.predict(X_test_local)\n",
" # imp = scan_wrong(forest_predictions, FL_predict2, forest , y_test_local, X_test_local)\n",
" # FI_dic2[t] = imp\n",
" # if(t > 0):\n",
" # different_rouneds = FI_dic2[t-1] - FI_dic2[t]\n",
" # different_rouneds = abs(different_rouneds)\n",
" # number_for_threshold=0\n",
" # for H in range(len(different_rouneds)):\n",
" # number_for_threshold2[t] = number_for_threshold2[t] + different_rouneds[H]\n",
" # number_for_threshold = number_for_threshold1[t] - number_for_threshold1[t-1]\n",
" # if(t>1):\n",
" # if(abs(number_for_threshold)>threshold):\n",
" # print(\"---------------------------------------------------------\")\n",
" # print(\"attack happened, in the round before which is \", t)\n",
" # print(\"from peer \", i)\n",
" # print(different_rouneds)\n",
" # print(number_for_threshold)\n",
" # print(\"---------------------------------------------------------\")\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
" # Local model update\n",
" if i in malicious_peer:\n",
" # Malicious peer\n",
" # Modify target class labels\n",
" outputs = poison(outputs, malicious_targets[i][0], malicious_targets[i][1])\n",
"\n",
" # Train local model with benign and malicious data\n",
" local_weight_updates, local_bias_updates = get_updates(local_model, \n",
" inputs, outputs, \n",
" local_batch_size, n_local_rounds)\n",
"\n",
" # Boost update\n",
" local_weight_updates, local_bias_updates = boosting(local_weight_updates, local_bias_updates, malicious_boost[i])\n",
"\n",
" if clear_nans:\n",
" local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)\n",
"\n",
"\n",
"\n",
" #here atttack done by Rami\n",
"\n",
"\n",
"\n",
" elif(t+1>=start_attack_round and t+1<=end_attack_round): \n",
" if (i in mal):\n",
" # print(\"peer \",i, \"round\", t)\n",
" print(\"I am peer \",i,\"I started the attack, at round\", t+1)\n",
" #attack\n",
" inputs = attack_data(inputs, feature_attacked)\n",
"\n",
" local_weight_updates, local_bias_updates = get_updates(local_model, \n",
" inputs, outputs, \n",
" local_batch_size, n_local_rounds)\n",
" if clear_nans:\n",
" local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)\n",
"\n",
"\n",
"\n",
" else:\n",
" # Benign peer\n",
" # Train local model \n",
" local_weight_updates, local_bias_updates = get_updates(local_model, \n",
" inputs, outputs, \n",
" local_batch_size, n_local_rounds)\n",
" if clear_nans:\n",
" local_weight_updates, local_bias_updates = nans_to_zero(local_weight_updates, local_bias_updates)\n",
"\n",
" # Send updates to the server\n",
" network_weight_updates.append(local_weight_updates)\n",
" network_bias_updates.append(local_bias_updates)\n",
"\n",
" ## END OF CLIENT SIDE ##########################################################\n",
"\n",
" ######################################\n",
" # SERVER SIDE AGGREGATION MECHANISMS #\n",
" ######################################\n",
"\n",
" # No detection of outliers\n",
" if discard_outliers == None:\n",
" # Aggregate client updates\n",
" aggregated_weights, aggregated_biases = aggregate(n_layers, \n",
" n_participants, \n",
" np.mean, \n",
" network_weight_updates, \n",
" network_bias_updates)\n",
"\n",
" if clear_nans:\n",
" aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)\n",
"\n",
" # Apply updates to global model\n",
" apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)\n",
"\n",
" # Detection of outliers via distance metric\n",
" elif discard_outliers == 'distance':\n",
" # Compute the provisional aggregate\n",
" prov_agg_w, prov_agg_b = aggregate(n_layers, \n",
" n_participants, \n",
" np.mean, \n",
" network_weight_updates, \n",
" network_bias_updates)\n",
"\n",
" # Compute distances and IQR of individual updates to the provisional aggregate\n",
" distances = [dist_weights(prov_agg_w, w_i) for w_i in network_weight_updates]\n",
" q1 = np.percentile(distances, 25)\n",
" q3 = np.percentile(distances, 75)\n",
" iqr = q3 - q1\n",
" low = q1 - tau * iqr\n",
" high = q3 + tau * iqr\n",
"\n",
" # Discard outliers\n",
" good_updates = [i for i,v in enumerate(distances) if low <= v <= high]\n",
" agg_participants = len(good_updates)\n",
" network_weight_updates = [w for i,w in enumerate(network_weight_updates) if i in good_updates]\n",
" network_bias_updates = [b for i,b in enumerate(network_bias_updates) if i in good_updates]\n",
"\n",
" bad_participants = [i for i in range(n_participants) if i not in good_updates]\n",
" bad_participants = [participants[i][0] for i in bad_participants]\n",
"\n",
" # Flag offenders\n",
" banned_peers.update(bad_participants)\n",
"\n",
" metrics['outliers_detected'].append(bad_participants)\n",
"\n",
" # Compute definitive update\n",
" aggregated_weights, aggregated_biases = aggregate(n_layers, \n",
" agg_participants, \n",
" np.mean, \n",
" network_weight_updates, \n",
" network_bias_updates)\n",
" if clear_nans:\n",
" aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)\n",
"\n",
" # Apply update\n",
" apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)\n",
"\n",
" #Detection via GMM\n",
" #elif discard_otliers == 'gmm':\n",
" #flatten_parameters = [flatten_parameters(w, b)[\n",
" #for w,b in zip(network_weight_updates, network_bias_updates)\n",
" #]]\n",
"\n",
" # Detection of outliers via accuracy metrics\n",
" elif discard_outliers == 'accuracy':\n",
" if t == 0:\n",
" # In the first epoch, all contributions are accepted\n",
" aggregated_weights, aggregated_biases = aggregate(n_layers, \n",
" n_participants, \n",
" np.mean, \n",
" network_weight_updates, \n",
" network_bias_updates)\n",
" if clear_nans:\n",
" aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)\n",
"\n",
" apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)\n",
" else: \n",
" # Check the change in accuracy for every contribution\n",
" test_accuracies = []\n",
" previous_epoch_global_accuracy = metrics['accuracy'][-1]\n",
" for k in range(len(local_weight_updates)):\n",
" test_model = build_model(local_lr)\n",
" set_parameters(test_model, global_weights, global_biases)\n",
" apply_updates(test_model, model_substitution_rate, network_weight_updates[k], network_bias_updates[k])\n",
" _, test_accuracy = test_model.evaluate(x_test, y_test, verbose=0)\n",
" test_accuracies.append(test_accuracy - previous_epoch_global_accuracy)\n",
"\n",
"\n",
" # An update is good if it improves (with some margin) the accuracy of the\n",
" # global model\n",
" good_updates = [i for i,v in enumerate(test_accuracies) if v + sensitivity >= 0.0]\n",
" agg_participants = len(good_updates)\n",
" network_weight_updates = [w for i,w in enumerate(network_weight_updates) if i in good_updates]\n",
" network_bias_updates = [b for i,b in enumerate(network_bias_updates) if i in good_updates]\n",
"\n",
" bad_participants = [i for i in range(n_participants) if i not in good_updates]\n",
" bad_participants = [participants[i][0] for i in bad_participants]\n",
"\n",
" # Flag offenders\n",
" banned_peers.update(bad_participants)\n",
"\n",
" metrics['outliers_detected'].append(bad_participants)\n",
"\n",
" # Compute definitive update\n",
" aggregated_weights, aggregated_biases = aggregate(n_layers, \n",
" agg_participants, \n",
" np.mean, \n",
" network_weight_updates, \n",
" network_bias_updates)\n",
" if clear_nans:\n",
" aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)\n",
"\n",
" # Apply update\n",
" apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)\n",
"\n",
" # Robust aggregation via median\n",
" elif discard_outliers == 'median':\n",
" # Compute the aggregate as the component-wise median of local updates\n",
" aggregated_weights, aggregated_biases = aggregate(n_layers, \n",
" n_participants, \n",
" np.median,\n",
" network_weight_updates, \n",
" network_bias_updates)\n",
"\n",
" if clear_nans:\n",
" aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)\n",
"\n",
" # Apply update \n",
" apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)\n",
"\n",
" # KRUM\n",
" elif discard_outliers == 'krum':\n",
" # First, we build a distance matrix for parameters\n",
" P = list(zip(network_weight_updates, network_bias_updates))\n",
" dist_matrix = [[dist_parameters(wi,bi,wj,bj) for wj,bj in P] for wi,bi in P]\n",
" scores = []\n",
" for index in range(len(P)):\n",
" distances_to_index = np.array(dist_matrix[index])\n",
" closest_to_index = np.argpartition(distances_to_index, n_participants-tolerance-1)[:n_participants-tolerance-1]\n",
" scores.append(np.sum(distances_to_index[closest_to_index]))\n",
" best = np.argmin(scores)\n",
" aggregated_weights = network_weight_updates[best]\n",
" aggregated_biases = network_bias_updates[best]\n",
"\n",
" if clear_nans:\n",
" aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)\n",
"\n",
" # Apply update\n",
" apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)\n",
"\n",
" # Fallback case: no detection of outliers\n",
" else:\n",
" # Proceed as in first case\n",
" aggregated_weights, aggregated_biases = aggregate(n_layers, \n",
" n_participants, \n",
" np.mean, \n",
" network_weight_updates, \n",
" network_bias_updates)\n",
" if clear_nans:\n",
" aggregated_weights, aggregated_biases = nans_to_zero(aggregated_weights, aggregated_biases)\n",
"\n",
" apply_updates(global_model, model_substitution_rate, aggregated_weights, aggregated_biases)\n",
"\n",
" ###################\n",
" # COMPUTE METRICS #\n",
" ###################\n",
"\n",
" # Global model accuracy\n",
" score = global_model.evaluate(X_test, y_test, verbose=0)\n",
" print(f'Global model loss: {score[0]}; global model accuracy: {score[1]}')\n",
" metrics['accuracy'].append(score[1])\n",
"\n",
"\n",
" # Accuracy without the target\n",
" score = global_model.evaluate(X_test, y_test, verbose=0)\n",
" metrics['acc_no_target'].append(score[1])\n",
"\n",
" # Proportion of instances of the target class misclassified (a.k.a success of the attack)\n",
" #mat = confusion_matrix(np.argmax(shard1_traintest[i][\"y_test\"], axis=1), np.argmax(global_model.predict(shard1_traintest[i][\"X_test\"]), axis=1))\n",
" #trans_4_7 = (mat[4,7] - mat[4,4]) / (2 * (mat[4,4]+mat[4,7])) + 0.5\n",
" #metrics['atk_effectivity'].append(trans_4_7)\n",
"\n",
" # Distance of individual updates to the final aggregation\n",
" metrics['update_distances'].append([dist_weights(aggregated_weights, w_i) for w_i in network_weight_updates])\n",
"savecsv(dic,\"random_activity_100.csv\")\n",
" "
]
},
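{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `krum` branch above scores each local update by the summed distance to its `n_participants - tolerance - 1` nearest neighbours and keeps the single lowest-scoring update. A minimal, self-contained sketch of that selection rule on toy 2-D vectors (the helper `krum_select` and the toy data are illustrative only, not part of the pipeline above):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch of the Krum selection rule on toy update vectors.\n",
"import numpy as np\n",
"\n",
"def krum_select(updates, tolerance):\n",
"    # Pairwise Euclidean distances between updates\n",
"    n = len(updates)\n",
"    dist = [[np.linalg.norm(u - v) for v in updates] for u in updates]\n",
"    scores = []\n",
"    for i in range(n):\n",
"        d = np.array(dist[i])\n",
"        # Distances to the n - tolerance - 1 closest updates (self included, at distance 0)\n",
"        closest = np.argpartition(d, n - tolerance - 1)[:n - tolerance - 1]\n",
"        scores.append(d[closest].sum())\n",
"    return int(np.argmin(scores))  # index of the most central update\n",
"\n",
"# Three clustered updates and one outlier: Krum should pick a clustered one.\n",
"toy_updates = [np.array([1.0, 1.0]), np.array([1.1, 0.9]),\n",
"               np.array([0.9, 1.1]), np.array([10.0, -10.0])]\n",
"krum_select(toy_updates, tolerance=1)"
]
},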
{
"cell_type": "code",
"execution_count": 74,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[50]"
]
},
"execution_count": 74,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"peers_selected"
]
},
{
"cell_type": "code",
"execution_count": 75,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{0: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803]),\n",
" 1: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803]),\n",
" 2: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803]),\n",
" 3: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803]),\n",
" 4: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803]),\n",
" 5: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803]),\n",
" 6: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803]),\n",
" 7: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803]),\n",
" 8: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803]),\n",
" 9: array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803])}"
]
},
"execution_count": 75,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"FI_dic1"
]
},
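{
"cell_type": "markdown",
"metadata": {},
"source": [
"Every entry of `FI_dic1` above prints the same importance vector. A quick sanity check (a sketch, assuming `FI_dic1` is still in scope) confirms that the peers' arrays are numerically identical:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"# Compare every peer's feature-importance vector against peer 0's.\n",
"reference = FI_dic1[0]\n",
"all(np.allclose(fi, reference) for fi in FI_dic1.values())"
]
},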
{
"cell_type": "code",
"execution_count": 76,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([0.02392711, 0.02151661, 0.00187879, 0.00976734, 0.02580066,\n",
" 0.00160656, 0.01195348, 0.00133958, 0.00250978, 0.00889068,\n",
" 0.05876696, 0.00843687, 0.00386948, 0.01129838, 0.00221767,\n",
" 0.03796889, 0.0323155 , 0.00240375, 0.03846723, 0.03199342,\n",
" 0.00298984, 0.0034151 , 0.01042293, 0.0127904 , 0.01568604,\n",
" 0.14358194, 0.00452941, 0.07049981, 0.00657889, 0.0022481 ,\n",
" 0.0793962 , 0.00643942, 0.00239138, 0.06767149, 0.03591957,\n",
" 0.14058599, 0.02268127, 0.00775547, 0.02748803])"
]
},
"execution_count": 76,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"FI_dic1[9]"
]
},
{
"cell_type": "code",
"execution_count": 78,
"metadata": {},
"outputs": [],
"source": [
"z=FI_dic1[9].max(axis = 0)"
]
},
{
"cell_type": "code",
"execution_count": 79,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"0.14358194388079848"
]
},
"execution_count": 79,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"z"
]
},
{
"cell_type": "code",
"execution_count": 80,
"metadata": {},
"outputs": [],
"source": [
"names = ['temp_hand','acceleration_16_x_hand',\n",
" 'acceleration_16_y_hand','acceleration_16_z_hand','acceleration_6_x_hand',\n",
" 'acceleration_6_y_hand','acceleration_6_z_hand','gyroscope_x_hand','gyroscope_y_hand',\n",
" 'gyroscope_z_hand','magnetometer_x_hand','magnetometer_y_hand','magnetometer_z_hand',\n",
" 'temp_chest','acceleration_16_x_chest','acceleration_16_y_chest','acceleration_16_z_chest','acceleration_6_x_chest',\n",
" 'acceleration_6_y_chest','acceleration_6_z_chest','gyroscope_x_chest','gyroscope_y_chest','gyroscope_z_chest',\n",
" 'magnetometer_x_chest','magnetometer_y_chest','magnetometer_z_chest','temp_ankle','acceleration_16_x_ankle',\n",
" 'acceleration_16_y_ankle','acceleration_16_z_ankle','acceleration_6_x_ankle','acceleration_6_y_ankle',\n",
" 'acceleration_6_z_ankle','gyroscope_x_ankle','gyroscope_y_ankle','gyroscope_z_ankle','magnetometer_x_ankle',\n",
" 'magnetometer_y_ankle','magnetometer_z_ankle']"
]
},
{
"cell_type": "code",
"execution_count": 81,
"metadata": {},
"outputs": [],
"source": [
"sort_index = np.argsort(FI_dic1[9])"
]
},
{
"cell_type": "code",
"execution_count": 82,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"gyroscope_x_hand , 0.001339578209529386\n",
"acceleration_6_y_hand , 0.001606558887748289\n",
"acceleration_16_y_hand , 0.0018787909132091745\n",
"acceleration_16_x_chest , 0.0022176706422468036\n",
"acceleration_16_z_ankle , 0.0022480971625748777\n",
"acceleration_6_z_ankle , 0.0023913839855701775\n",
"acceleration_6_x_chest , 0.002403746179862327\n",
"gyroscope_y_hand , 0.0025097802088887384\n",
"gyroscope_x_chest , 0.0029898374334842324\n",
"gyroscope_y_chest , 0.0034150962097039238\n",
"magnetometer_z_hand , 0.003869476254612703\n",
"temp_ankle , 0.004529408747298203\n",
"acceleration_6_y_ankle , 0.006439416466931228\n",
"acceleration_16_y_ankle , 0.006578886906287925\n",
"magnetometer_y_ankle , 0.007755468044512387\n",
"magnetometer_y_hand , 0.008436874146099699\n",
"gyroscope_z_hand , 0.00889068295023216\n",
"acceleration_16_z_hand , 0.009767340493272414\n",
"gyroscope_z_chest , 0.010422927880039217\n",
"temp_chest , 0.011298379110955319\n",
"acceleration_6_z_hand , 0.011953481433716266\n",
"magnetometer_x_chest , 0.012790397877654609\n",
"magnetometer_y_chest , 0.015686044856015324\n",
"acceleration_16_x_hand , 0.021516610095884795\n",
"magnetometer_x_ankle , 0.022681269695119605\n",
"temp_hand , 0.02392711201465511\n",
"acceleration_6_x_hand , 0.02580065752957123\n",
"magnetometer_z_ankle , 0.027488030896204812\n",
"acceleration_6_z_chest , 0.03199341773861375\n",
"acceleration_16_z_chest , 0.03231549779754809\n",
"gyroscope_y_ankle , 0.03591957265048234\n",
"acceleration_16_y_chest , 0.03796888825062477\n",
"acceleration_6_y_chest , 0.038467231682065194\n",
"magnetometer_x_hand , 0.05876695927124097\n",
"gyroscope_x_ankle , 0.06767148823051926\n",
"acceleration_16_x_ankle , 0.07049980505959738\n",
"acceleration_6_x_ankle , 0.07939620457238372\n",
"gyroscope_z_ankle , 0.1405859856342449\n",
"magnetometer_z_chest , 0.14358194388079848\n"
]
}
],
"source": [
"for x in sort_index:\n",
" print(names[x], ', ', FI_dic1[9][x])"
]
},
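{
"cell_type": "markdown",
"metadata": {},
"source": [
"The sorted listing is easier to scan as a chart. A minimal sketch with matplotlib (already imported at the top of the notebook), reusing `names`, `sort_index` and `FI_dic1[9]`:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"# Horizontal bar chart of feature importances, sorted ascending as in the print-out above.\n",
"fi = FI_dic1[9]\n",
"plt.figure(figsize=(8, 10))\n",
"plt.barh([names[i] for i in sort_index], fi[sort_index])\n",
"plt.xlabel('feature importance')\n",
"plt.tight_layout()\n",
"plt.show()"
]
},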
{
"cell_type": "code",
"execution_count": 83,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"39"
]
},
"execution_count": 83,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(FI_dic1[9])"
]
},
{
"cell_type": "code",
"execution_count": 84,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"39"
]
},
"execution_count": 84,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(names)"
]
},
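{
"cell_type": "markdown",
"metadata": {},
"source": [
"Both lengths are 39, so the sensor names line up with the importance vector. A one-line guard (a sketch) makes that alignment explicit before any name-based lookup:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Fail fast if the feature-name list ever drifts out of sync with the importance vector.\n",
"assert len(names) == len(FI_dic1[9]), 'names and feature importances are misaligned'"
]
},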
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}