Predict municipality
In [1]:
import pandas
import MySQLdb
import numpy
import json
import re

db = MySQLdb.connect(user='root', passwd='Nmmxhjgt1@', db='stimmenfryslan', charset='utf8')

from matplotlib import pyplot
import folium
from IPython.display import display
from shapely.geometry import Polygon, MultiPolygon, shape, Point
from jupyter_progressbar import ProgressBar
from collections import defaultdict
from ipy_table import make_table
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.cross_validation import train_test_split
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from scipy import stats
from sklearn.model_selection import RandomizedSearchCV
from sklearn.metrics import classification_report
from confusion_matrix import plot_confusion_matrix
from sklearn.metrics import confusion_matrix

%matplotlib notebook
from matplotlib import pyplot

import autosklearn.classification
from tpot import TPOTClassifier
/home/herbert/.virtualenvs/stimmenfryslan/lib/python3.5/site-packages/sklearn/cross_validation.py:41: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.
  "This module will be removed in 0.20.", DeprecationWarning)
In [2]:
# Borders of Frisian municipalities
with open('Friesland_AL8.GeoJson') as f:
    gemeentes = json.load(f)
In [3]:
shapes = [shape(feature['geometry']) for feature in gemeentes['features']]
gemeente_names = [feature['properties']['name'] for feature in gemeentes['features']]

def get_gemeente(point):
    # Return the index of the municipality polygon that contains the point, or -1 if none does.
    for i, s in enumerate(shapes):
        if s.contains(point):
            return i
    return -1
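As a quick sanity check (not part of the original notebook), a single coordinate can be looked up directly; the coordinate below is a made-up illustrative point, not taken from the data:

# Hypothetical example: map one (lng, lat) coordinate to a municipality name.
example_point = Point(5.8, 53.2)  # assumed illustrative coordinate in Fryslân
idx = get_gemeente(example_point)
print(gemeente_names[idx] if idx >= 0 else 'outside all municipalities')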
In [4]:
# Answers to how participants say a word should be pronounced.
answers = pandas.read_sql('''
    SELECT prediction_quiz_id, user_lat, user_lng, question_text, answer_text
    FROM core_surveyresult as survey
    INNER JOIN core_predictionquizresult as result
        ON survey.id = result.survey_result_id
    INNER JOIN core_predictionquizresultquestionanswer as answer
        ON result.id = answer.prediction_quiz_id
''', db)

# Map each unique coordinate to a municipality index; takes approximately 2 minutes.
gemeente_map = {
    (lng, lat): get_gemeente(Point(lng, lat))
    for lng, lat in set(zip(answers['user_lng'], answers['user_lat']))
}
answers['gemeente'] = [
    gemeente_map[(lng, lat)]
    for lat, lng in zip(answers['user_lat'], answers['user_lng'])
]

# The pronunciation is the text between parentheses in the answer,
# the word is the question text with the parenthesized part removed.
answers['pronunciation'] = [
    s[s.find('(')+1:-1]
    for s in answers['answer_text']
]
answers['word'] = [
    re.sub(r'\(.*\)', '', s).replace('"', '').strip()
    for s in answers['question_text']
]
answers['input'] = [
    pronunciation + '_' + word
    for pronunciation, word in zip(answers['pronunciation'], answers['word'])
]
In [5]:
dataset = answers[['prediction_quiz_id', 'gemeente', 'input']].groupby(
    ['prediction_quiz_id', 'gemeente']
).aggregate('+'.join)
dataset.reset_index(inplace=True)
dataset = dataset[dataset['gemeente'] >= 0]
In [6]:
characters = list({c for s in dataset['input'] for c in s})
In [7]:
X_train, X_test, y_train, y_test = train_test_split(dataset['input'], dataset['gemeente'])
In [9]:
tfidf = TfidfVectorizer(analyzer='char')
svc = SVC()
model = Pipeline(steps=[('tfidf', tfidf), ('svm', svc)])

parameters = {
    'tfidf__ngram_range': [(1, 5), (1, 2), (1, 6), (1, 10), (1, 3)],
    'tfidf__max_features': [20],
    'svm__C': stats.uniform(0, 100.),
    'svm__kernel': ['linear', 'rbf'],
    'svm__degree': stats.randint(0, 5),
    'svm__gamma': stats.uniform(0, 10.),
    'svm__coef0': stats.uniform(0, 10.),
    'svm__shrinking': [True],
    'svm__probability': [False],
    'svm__cache_size': [2000],
    'svm__class_weight': [None],
    'svm__verbose': [False],
    'svm__max_iter': [-1],
    'svm__random_state': [None],
}

# Run a randomized search over the pipeline hyper-parameters.
n_iter_search = 20
random_search = RandomizedSearchCV(model, param_distributions=parameters,
                                   n_iter=n_iter_search, n_jobs=8)
random_search.fit(X_train, y_train)
/home/herbert/.virtualenvs/stimmenfryslan/lib/python3.5/site-packages/sklearn/model_selection/_split.py:605: Warning: The least populated class in y has only 1 members, which is too few. The minimum number of members in any class cannot be less than n_splits=3.
  % (min_groups, self.n_splits)), Warning)
Out[9]:
RandomizedSearchCV(cv=None, error_score='raise', estimator=Pipeline(memory=None, steps=[('tfidf', TfidfVectorizer(analyzer='char', binary=False, decode_error='strict', dtype=<class 'numpy.int64'>, encoding='utf-8', input='content', lowercase=True, max_df=1.0, max_features=None, min_df=1, ngram_range=(1, 1), norm='l2', preprocessor=None, smooth_idf=True, ..., max_iter=-1, probability=False, random_state=None, shrinking=True, tol=0.001, verbose=False))]), fit_params=None, iid=True, n_iter=20, n_jobs=8, param_distributions={'svm__cache_size': [2000], 'svm__coef0': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7fef8dfbea90>, 'svm__kernel': ['linear', 'rbf'], 'tfidf__max_features': [20], 'svm__class_weight': [None], 'tfidf__ngram_range': [(1, 5), (1, 2), (1, 6), (1, 10), (1, 3)], 'svm__shr...t 0x7fef8dfbe6d8>, 'svm__C': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7fef8dfd5240>}, pre_dispatch='2*n_jobs', random_state=None, refit=True, return_train_score='warn', scoring=None, verbose=0)
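Once the search has refit on the training split, the winning hyper-parameters and their cross-validated score can be read off the standard RandomizedSearchCV attributes; a minimal sketch:

# Inspect the selected configuration (attributes provided by RandomizedSearchCV)
print(random_search.best_score_)   # mean cross-validated accuracy of the best candidate
print(random_search.best_params_)  # e.g. the chosen ngram_range, C, kernel, gamma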
In [12]:
y_pred = random_search.predict(X_test)
print(classification_report(y_test, y_pred))
             precision    recall  f1-score   support

          0       0.38      0.53      0.44        91
          1       0.50      0.50      0.50         2
          2       0.35      0.22      0.27        54
          3       0.32      0.18      0.23        51
          4       0.38      0.59      0.46        46
          5       0.23      0.14      0.17        22
          6       0.00      0.00      0.00         2
          7       0.33      0.19      0.24        31
          8       1.00      0.03      0.06        35
          9       0.21      0.29      0.25        68
         10       0.00      0.00      0.00         4
         11       0.28      0.14      0.18        37
         12       0.00      0.00      0.00         4
         13       0.23      0.26      0.24        35
         14       0.42      0.72      0.53       134
         16       0.36      0.25      0.30        52
         18       0.48      0.19      0.27        73
         19       0.00      0.00      0.00         1

avg / total       0.38      0.36      0.32       742
/home/herbert/.virtualenvs/stimmenfryslan/lib/python3.5/site-packages/sklearn/metrics/classification.py:1135: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)
In [13]:
(y_pred == y_test).mean()
Out[13]:
0.3557951482479784
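To put the roughly 36% test accuracy in perspective, a majority-class baseline (always predicting the most common municipality) is a useful reference; a minimal sketch, wrapping y_test in a Series so it works whether the split returns a Series or an array:

# Fraction of the test set covered by the single most frequent municipality
baseline = pandas.Series(y_test).value_counts(normalize=True).max()
print(baseline)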
In [16]:
existing_gemeente_names = [
    gemeente_names[i] if i >= 0 else 'Onbekend'
    for i in sorted(set(y_test))  # dataset['gemeente']))
]
In [94]:
y_pred = model.predict(X_test)
In [95]:
(y_pred == y_test).mean()
Out[95]:
0.37870619946091644
In [17]:
pyplot.rcParams['figure.figsize'] = (9.8, 10)
plot_confusion_matrix(confusion_matrix(y_test, y_pred), classes=existing_gemeente_names)
pyplot.tight_layout()
Confusion matrix, without normalization
In [86]:
print(classification_report(y_test, y_pred))
             precision    recall  f1-score   support

          0       0.36      0.37      0.36        84
          1       0.00      0.00      0.00         1
          2       0.24      0.18      0.21        45
          3       0.51      0.55      0.53        51
          4       0.43      0.33      0.38        57
          5       0.19      0.15      0.16        34
          6       0.00      0.00      0.00         2
          7       0.21      0.22      0.22        32
          8       0.05      0.06      0.05        33
          9       0.23      0.24      0.23        72
         10       0.00      0.00      0.00         3
         11       0.20      0.15      0.17        40
         12       0.00      0.00      0.00         3
         13       0.24      0.38      0.29        40
         14       0.53      0.48      0.50       124
         16       0.20      0.19      0.20        47
         18       0.41      0.44      0.43        72
         19       0.00      0.00      0.00         2

avg / total       0.33      0.32      0.32       742
In [19]:
import numpy
from keras.datasets import imdb
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers.embeddings import Embedding
from keras.preprocessing import sequence
from keras.preprocessing import text
from keras.optimizers import Adam

numpy.random.seed(7)
Using TensorFlow backend.
In [20]:
char_to_num = {c: i for i, c in enumerate(characters)}
In [40]:
dataset['input'][0]
Out[40]:
'gɔ:n_gegaan+jun_avond+hø:l_heel+dɑɪ_dag+bɛi_bij+spɾɵts_sprak+eɪx_oog+jɛɾms_armen+tsi:s_kaas+dwɑɾkə_deurtje+sɪəɾə_koken+bwɑst_borst+fisk_vis+snɵən_zaterdag+tɾɑɪn_trein+ɡɪəl_geel+tosk_tand+sɛt_gezet+blɛ:t_blad'
In [34]:
[c.count('_') for c in dataset['input']]
Out[34]:
[19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, ...]
In [21]:
X = sequence.pad_sequences(
    [[char_to_num[c] for c in s] for s in dataset['input']],
    value=len(char_to_num)
)
y = numpy.array(dataset['gemeente'])
X_train, X_test, y_train, y_test = train_test_split(X, y)
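Each input string is encoded character by character and left-padded to the length of the longest sequence, with len(char_to_num) serving as the padding index; a small sketch of how a single string would be encoded under these assumptions (the example string is hypothetical, built from tokens that occur in the data):

# Hypothetical example string in the same pronunciation_word+pronunciation_word format
example = 'fisk_vis+tosk_tand'
encoded = [char_to_num[c] for c in example]
padded = sequence.pad_sequences([encoded], maxlen=X.shape[1], value=len(char_to_num))
print(padded.shape)  # (1, model sequence length)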
In [22]:
# from sklearn.preprocessing import OneHotEncoder
# n_values = max(X.ravel()) + 1
# enc = OneHotEncoder(n_values=n_values)
# X_ = enc.fit_transform(X)
# X_ = numpy.array(X_.todense()).reshape(X.shape + (n_values, ))
In [23]:
X[:, 20:]
Out[23]:
array([[31, 39, 31, ..., 37, 12, 48],
       [36, 15, 27, ...,  5,  5, 31],
       [26, 34,  6, ...,  5, 13, 48],
       ...,
       [ 3, 31, 19, ..., 39, 39, 37],
       [48,  3, 36, ..., 27, 36, 41],
       [ 0, 12, 41, ..., 43, 24, 26]], dtype=int32)
In [28]:
lstm = Sequential()
lstm.add(Embedding(X.max() + 1, 512, input_length=X.shape[1]))  # character embedding, padding index included
lstm.add(LSTM(16))
lstm.add(Dense(max(y) + 1))  # one output per municipality; note that no softmax activation is applied here
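Note that the Dense output layer above has no activation, while the model is compiled below with sparse_categorical_crossentropy; a variant that outputs proper class probabilities would add a softmax. A hedged alternative sketch under that assumption, not what the notebook actually ran:

# Alternative (assumed intent): same architecture with a softmax output layer
lstm_alt = Sequential()
lstm_alt.add(Embedding(X.max() + 1, 512, input_length=X.shape[1]))
lstm_alt.add(LSTM(16))
lstm_alt.add(Dense(max(y) + 1, activation='softmax'))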
In [43]:
optimizer = Adam(lr=0.0001)
lstm.compile(loss='sparse_categorical_crossentropy', metrics=['accuracy'], optimizer=optimizer)
In [44]:
lstm.fit(X_train, y_train, batch_size=512, epochs=100, validation_split=0.1)
# , validation_data=(X_test, y_test), verbose=2)
Train on 2002 samples, validate on 223 samples
Epoch 1/100
2002/2002 [==============================] - 3s 1ms/step - loss: 2.6484 - acc: 0.1518 - val_loss: 2.7888 - val_acc: 0.1659
Epoch 2/100
2002/2002 [==============================] - 2s 1ms/step - loss: 2.6440 - acc: 0.1548 - val_loss: 2.7866 - val_acc: 0.1749
Epoch 3/100
2002/2002 [==============================] - 2s 1ms/step - loss: 2.6419 - acc: 0.1608 - val_loss: 2.7870 - val_acc: 0.1749
...
Epoch 99/100
2002/2002 [==============================] - 2s 1ms/step - loss: 2.5457 - acc: 0.1703 - val_loss: 3.0207 - val_acc: 0.1614
Epoch 100/100
2002/2002 [==============================] - 2s 908us/step - loss: 2.5444 - acc: 0.1718 - val_loss: 3.0648 - val_acc: 0.1614
Out[44]:
<keras.callbacks.History at 0x7feedf76a860>
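Validation accuracy plateaus around 16-20%, below the TF-IDF+SVM baseline. Evaluating the trained network on the held-out split would use the standard Keras evaluate call; a minimal sketch, not run in the original notebook:

# Loss and accuracy on the test split, matching the metrics passed to compile()
test_loss, test_acc = lstm.evaluate(X_test, y_test, verbose=0)
print(test_loss, test_acc)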