Skip to content

Instantly share code, notes, and snippets.

@senkumartup
Created May 14, 2018 09:07
Show Gist options
  • Select an option

  • Save senkumartup/37a878fde14d2df351c2094df7dfcefd to your computer and use it in GitHub Desktop.

Select an option

Save senkumartup/37a878fde14d2df351c2094df7dfcefd to your computer and use it in GitHub Desktop.
### DenseNet Implementation on CIFAR-10
#### l = 12, k = 12, Compression = 1.0
#### TEST ACCURACY: 85.03%
```python
# https://keras.io/
#!pip install -q keras
import keras
```
/opt/anaconda2/lib/python2.7/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.
from ._conv import register_converters as _register_converters
Using TensorFlow backend.
```python
import keras
from keras.datasets import cifar10
from keras.models import Model, Sequential
from keras.layers import Dense, Dropout, Flatten, Input, AveragePooling2D, merge, Activation
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.layers import Concatenate
from keras.optimizers import Adam
```
```python
# This part will prevent TensorFlow from allocating all of the available
# GPU memory up front (TF1-style session configured through the Keras backend).
import tensorflow as tf
from keras import backend as k
# Don't pre-allocate memory; allocate as-needed
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
# Create a session with the above options specified.
k.tensorflow_backend.set_session(tf.Session(config=config))
```
```python
# Hyperparameters
batch_size = 128      # training mini-batch size
num_classes = 10      # CIFAR-10 has 10 classes
epochs = 50           # number of training epochs
l = 40                # layers per dense block — NOTE: reassigned to 12 below before the model is built
num_filter = 12       # growth rate k (filters added per dense-block layer)
compression = 1.0     # DenseNet compression factor (1.0 = no channel reduction)
dropout_rate = 0.25   # NOTE: reassigned to 0.2 below before the model is built
```
```python
# Load CIFAR10 Data
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# infer input dimensions from the training images (32, 32, 3 for CIFAR-10)
img_height, img_width, channel = x_train.shape[1],x_train.shape[2],x_train.shape[3]
# convert integer labels to one-hot encoding
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
```
```python
# Dense Block
def add_denseblock(input, num_filter = 12, dropout_rate = 0.2):
    """Append a DenseNet dense block of ``l`` BN-ReLU-Conv(3x3) layers.

    Each layer's output is concatenated channel-wise with all preceding
    feature maps (dense connectivity), so the channel count grows by
    ``int(num_filter * compression)`` per layer.

    Reads module-level globals:
      - ``l``: number of composite layers in the block
      - ``compression``: channel-compression factor applied to each conv
        (NOTE(review): the DenseNet paper applies compression only in
        transition layers; here it also scales the growth rate — kept
        as-is to preserve the trained model's architecture)

    Args:
        input: 4-D Keras tensor (batch, height, width, channels).
        num_filter: growth rate k — filters added per layer.
        dropout_rate: dropout applied after each conv when > 0.

    Returns:
        Keras tensor with the block's concatenated feature maps.
    """
    temp = input
    for _ in range(l):
        # composite function H_l: BN -> ReLU -> 3x3 conv (no bias: BN absorbs it)
        norm = BatchNormalization()(temp)
        relu = Activation('relu')(norm)
        conv = Conv2D(int(num_filter * compression), (3, 3),
                      use_bias=False, padding='same')(relu)
        if dropout_rate > 0:
            conv = Dropout(dropout_rate)(conv)
        # dense connectivity: concatenate with every preceding feature map
        temp = Concatenate(axis=-1)([temp, conv])
    return temp
```
```python
def add_transition(input, num_filter = 12, dropout_rate = 0.2):
    """Transition layer between dense blocks: BN-ReLU-Conv(1x1), then 2x2 avg-pool.

    The 1x1 convolution outputs ``int(num_filter * compression)`` channels
    (reads the module-level global ``compression``); the average pooling
    halves the spatial resolution.

    Args:
        input: 4-D Keras tensor (batch, height, width, channels).
        num_filter: base number of output filters for the bottleneck conv.
        dropout_rate: dropout applied after the conv when > 0.

    Returns:
        Downsampled Keras tensor.
    """
    norm = BatchNormalization()(input)
    relu = Activation('relu')(norm)
    bottleneck = Conv2D(int(num_filter * compression), (1, 1),
                        use_bias=False, padding='same')(relu)
    if dropout_rate > 0:
        bottleneck = Dropout(dropout_rate)(bottleneck)
    # halve spatial resolution between dense blocks
    return AveragePooling2D(pool_size=(2, 2))(bottleneck)
```
```python
def output_layer(input):
    """Classification head: BN-ReLU, 2x2 avg-pool, flatten, softmax Dense.

    Reads the module-level global ``num_classes`` for the output width.
    (The original declared ``global compression`` here but never used it;
    the dead declaration is removed.)

    Args:
        input: 4-D Keras tensor from the last dense block.

    Returns:
        2-D Keras tensor of per-class softmax probabilities.
    """
    norm = BatchNormalization()(input)
    relu = Activation('relu')(norm)
    pooled = AveragePooling2D(pool_size=(2, 2))(relu)
    flat = Flatten()(pooled)
    return Dense(num_classes, activation='softmax')(flat)
```
```python
# Build the DenseNet graph: initial conv, then four dense blocks with
# downsampling transition layers between them, then the classifier head.
num_filter = 12       # growth rate k
dropout_rate = 0.2
l = 12                # layers per dense block (overrides the earlier l = 40)
input = Input(shape=(img_height, img_width, channel,))
# initial 3x3 convolution before the first dense block
First_Conv2D = Conv2D(num_filter, (3,3), use_bias=False ,padding='same')(input)
First_Block = add_denseblock(First_Conv2D, num_filter, dropout_rate)
First_Transition = add_transition(First_Block, num_filter, dropout_rate)
Second_Block = add_denseblock(First_Transition, num_filter, dropout_rate)
Second_Transition = add_transition(Second_Block, num_filter, dropout_rate)
Third_Block = add_denseblock(Second_Transition, num_filter, dropout_rate)
Third_Transition = add_transition(Third_Block, num_filter, dropout_rate)
# last dense block has no transition after it; output_layer pools and classifies
Last_Block = add_denseblock(Third_Transition, num_filter, dropout_rate)
output = output_layer(Last_Block)
```
```python
# Assemble the functional-API model from the symbolic input/output tensors
# and print its layer-by-layer summary.
model = Model(inputs=[input], outputs=[output])
model.summary()
```
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) (None, 32, 32, 3) 0
__________________________________________________________________________________________________
conv2d_1 (Conv2D) (None, 32, 32, 12) 324 input_1[0][0]
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 32, 32, 12) 48 conv2d_1[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, 32, 32, 12) 0 batch_normalization_1[0][0]
__________________________________________________________________________________________________
conv2d_2 (Conv2D) (None, 32, 32, 12) 1296 activation_1[0][0]
__________________________________________________________________________________________________
dropout_1 (Dropout) (None, 32, 32, 12) 0 conv2d_2[0][0]
__________________________________________________________________________________________________
concatenate_1 (Concatenate) (None, 32, 32, 24) 0 conv2d_1[0][0]
dropout_1[0][0]
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 32, 32, 24) 96 concatenate_1[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, 32, 32, 24) 0 batch_normalization_2[0][0]
__________________________________________________________________________________________________
conv2d_3 (Conv2D) (None, 32, 32, 12) 2592 activation_2[0][0]
__________________________________________________________________________________________________
dropout_2 (Dropout) (None, 32, 32, 12) 0 conv2d_3[0][0]
__________________________________________________________________________________________________
concatenate_2 (Concatenate) (None, 32, 32, 36) 0 concatenate_1[0][0]
dropout_2[0][0]
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 32, 32, 36) 144 concatenate_2[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, 32, 32, 36) 0 batch_normalization_3[0][0]
__________________________________________________________________________________________________
conv2d_4 (Conv2D) (None, 32, 32, 12) 3888 activation_3[0][0]
__________________________________________________________________________________________________
dropout_3 (Dropout) (None, 32, 32, 12) 0 conv2d_4[0][0]
__________________________________________________________________________________________________
concatenate_3 (Concatenate) (None, 32, 32, 48) 0 concatenate_2[0][0]
dropout_3[0][0]
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 32, 32, 48) 192 concatenate_3[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, 32, 32, 48) 0 batch_normalization_4[0][0]
__________________________________________________________________________________________________
conv2d_5 (Conv2D) (None, 32, 32, 12) 5184 activation_4[0][0]
__________________________________________________________________________________________________
dropout_4 (Dropout) (None, 32, 32, 12) 0 conv2d_5[0][0]
__________________________________________________________________________________________________
concatenate_4 (Concatenate) (None, 32, 32, 60) 0 concatenate_3[0][0]
dropout_4[0][0]
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 32, 32, 60) 240 concatenate_4[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, 32, 32, 60) 0 batch_normalization_5[0][0]
__________________________________________________________________________________________________
conv2d_6 (Conv2D) (None, 32, 32, 12) 6480 activation_5[0][0]
__________________________________________________________________________________________________
dropout_5 (Dropout) (None, 32, 32, 12) 0 conv2d_6[0][0]
__________________________________________________________________________________________________
concatenate_5 (Concatenate) (None, 32, 32, 72) 0 concatenate_4[0][0]
dropout_5[0][0]
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 32, 32, 72) 288 concatenate_5[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, 32, 32, 72) 0 batch_normalization_6[0][0]
__________________________________________________________________________________________________
conv2d_7 (Conv2D) (None, 32, 32, 12) 7776 activation_6[0][0]
__________________________________________________________________________________________________
dropout_6 (Dropout) (None, 32, 32, 12) 0 conv2d_7[0][0]
__________________________________________________________________________________________________
concatenate_6 (Concatenate) (None, 32, 32, 84) 0 concatenate_5[0][0]
dropout_6[0][0]
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 32, 32, 84) 336 concatenate_6[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, 32, 32, 84) 0 batch_normalization_7[0][0]
__________________________________________________________________________________________________
conv2d_8 (Conv2D) (None, 32, 32, 12) 9072 activation_7[0][0]
__________________________________________________________________________________________________
dropout_7 (Dropout) (None, 32, 32, 12) 0 conv2d_8[0][0]
__________________________________________________________________________________________________
concatenate_7 (Concatenate) (None, 32, 32, 96) 0 concatenate_6[0][0]
dropout_7[0][0]
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 32, 32, 96) 384 concatenate_7[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, 32, 32, 96) 0 batch_normalization_8[0][0]
__________________________________________________________________________________________________
conv2d_9 (Conv2D) (None, 32, 32, 12) 10368 activation_8[0][0]
__________________________________________________________________________________________________
dropout_8 (Dropout) (None, 32, 32, 12) 0 conv2d_9[0][0]
__________________________________________________________________________________________________
concatenate_8 (Concatenate) (None, 32, 32, 108) 0 concatenate_7[0][0]
dropout_8[0][0]
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 32, 32, 108) 432 concatenate_8[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, 32, 32, 108) 0 batch_normalization_9[0][0]
__________________________________________________________________________________________________
conv2d_10 (Conv2D) (None, 32, 32, 12) 11664 activation_9[0][0]
__________________________________________________________________________________________________
dropout_9 (Dropout) (None, 32, 32, 12) 0 conv2d_10[0][0]
__________________________________________________________________________________________________
concatenate_9 (Concatenate) (None, 32, 32, 120) 0 concatenate_8[0][0]
dropout_9[0][0]
__________________________________________________________________________________________________
batch_normalization_10 (BatchNo (None, 32, 32, 120) 480 concatenate_9[0][0]
__________________________________________________________________________________________________
activation_10 (Activation) (None, 32, 32, 120) 0 batch_normalization_10[0][0]
__________________________________________________________________________________________________
conv2d_11 (Conv2D) (None, 32, 32, 12) 12960 activation_10[0][0]
__________________________________________________________________________________________________
dropout_10 (Dropout) (None, 32, 32, 12) 0 conv2d_11[0][0]
__________________________________________________________________________________________________
concatenate_10 (Concatenate) (None, 32, 32, 132) 0 concatenate_9[0][0]
dropout_10[0][0]
__________________________________________________________________________________________________
batch_normalization_11 (BatchNo (None, 32, 32, 132) 528 concatenate_10[0][0]
__________________________________________________________________________________________________
activation_11 (Activation) (None, 32, 32, 132) 0 batch_normalization_11[0][0]
__________________________________________________________________________________________________
conv2d_12 (Conv2D) (None, 32, 32, 12) 14256 activation_11[0][0]
__________________________________________________________________________________________________
dropout_11 (Dropout) (None, 32, 32, 12) 0 conv2d_12[0][0]
__________________________________________________________________________________________________
concatenate_11 (Concatenate) (None, 32, 32, 144) 0 concatenate_10[0][0]
dropout_11[0][0]
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, 32, 32, 144) 576 concatenate_11[0][0]
__________________________________________________________________________________________________
activation_12 (Activation) (None, 32, 32, 144) 0 batch_normalization_12[0][0]
__________________________________________________________________________________________________
conv2d_13 (Conv2D) (None, 32, 32, 12) 15552 activation_12[0][0]
__________________________________________________________________________________________________
dropout_12 (Dropout) (None, 32, 32, 12) 0 conv2d_13[0][0]
__________________________________________________________________________________________________
concatenate_12 (Concatenate) (None, 32, 32, 156) 0 concatenate_11[0][0]
dropout_12[0][0]
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, 32, 32, 156) 624 concatenate_12[0][0]
__________________________________________________________________________________________________
activation_13 (Activation) (None, 32, 32, 156) 0 batch_normalization_13[0][0]
__________________________________________________________________________________________________
conv2d_14 (Conv2D) (None, 32, 32, 12) 1872 activation_13[0][0]
__________________________________________________________________________________________________
dropout_13 (Dropout) (None, 32, 32, 12) 0 conv2d_14[0][0]
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 16, 16, 12) 0 dropout_13[0][0]
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, 16, 16, 12) 48 average_pooling2d_1[0][0]
__________________________________________________________________________________________________
activation_14 (Activation) (None, 16, 16, 12) 0 batch_normalization_14[0][0]
__________________________________________________________________________________________________
conv2d_15 (Conv2D) (None, 16, 16, 12) 1296 activation_14[0][0]
__________________________________________________________________________________________________
dropout_14 (Dropout) (None, 16, 16, 12) 0 conv2d_15[0][0]
__________________________________________________________________________________________________
concatenate_13 (Concatenate) (None, 16, 16, 24) 0 average_pooling2d_1[0][0]
dropout_14[0][0]
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, 16, 16, 24) 96 concatenate_13[0][0]
__________________________________________________________________________________________________
activation_15 (Activation) (None, 16, 16, 24) 0 batch_normalization_15[0][0]
__________________________________________________________________________________________________
conv2d_16 (Conv2D) (None, 16, 16, 12) 2592 activation_15[0][0]
__________________________________________________________________________________________________
dropout_15 (Dropout) (None, 16, 16, 12) 0 conv2d_16[0][0]
__________________________________________________________________________________________________
concatenate_14 (Concatenate) (None, 16, 16, 36) 0 concatenate_13[0][0]
dropout_15[0][0]
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, 16, 16, 36) 144 concatenate_14[0][0]
__________________________________________________________________________________________________
activation_16 (Activation) (None, 16, 16, 36) 0 batch_normalization_16[0][0]
__________________________________________________________________________________________________
conv2d_17 (Conv2D) (None, 16, 16, 12) 3888 activation_16[0][0]
__________________________________________________________________________________________________
dropout_16 (Dropout) (None, 16, 16, 12) 0 conv2d_17[0][0]
__________________________________________________________________________________________________
concatenate_15 (Concatenate) (None, 16, 16, 48) 0 concatenate_14[0][0]
dropout_16[0][0]
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, 16, 16, 48) 192 concatenate_15[0][0]
__________________________________________________________________________________________________
activation_17 (Activation) (None, 16, 16, 48) 0 batch_normalization_17[0][0]
__________________________________________________________________________________________________
conv2d_18 (Conv2D) (None, 16, 16, 12) 5184 activation_17[0][0]
__________________________________________________________________________________________________
dropout_17 (Dropout) (None, 16, 16, 12) 0 conv2d_18[0][0]
__________________________________________________________________________________________________
concatenate_16 (Concatenate) (None, 16, 16, 60) 0 concatenate_15[0][0]
dropout_17[0][0]
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, 16, 16, 60) 240 concatenate_16[0][0]
__________________________________________________________________________________________________
activation_18 (Activation) (None, 16, 16, 60) 0 batch_normalization_18[0][0]
__________________________________________________________________________________________________
conv2d_19 (Conv2D) (None, 16, 16, 12) 6480 activation_18[0][0]
__________________________________________________________________________________________________
dropout_18 (Dropout) (None, 16, 16, 12) 0 conv2d_19[0][0]
__________________________________________________________________________________________________
concatenate_17 (Concatenate) (None, 16, 16, 72) 0 concatenate_16[0][0]
dropout_18[0][0]
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, 16, 16, 72) 288 concatenate_17[0][0]
__________________________________________________________________________________________________
activation_19 (Activation) (None, 16, 16, 72) 0 batch_normalization_19[0][0]
__________________________________________________________________________________________________
conv2d_20 (Conv2D) (None, 16, 16, 12) 7776 activation_19[0][0]
__________________________________________________________________________________________________
dropout_19 (Dropout) (None, 16, 16, 12) 0 conv2d_20[0][0]
__________________________________________________________________________________________________
concatenate_18 (Concatenate) (None, 16, 16, 84) 0 concatenate_17[0][0]
dropout_19[0][0]
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, 16, 16, 84) 336 concatenate_18[0][0]
__________________________________________________________________________________________________
activation_20 (Activation) (None, 16, 16, 84) 0 batch_normalization_20[0][0]
__________________________________________________________________________________________________
conv2d_21 (Conv2D) (None, 16, 16, 12) 9072 activation_20[0][0]
__________________________________________________________________________________________________
dropout_20 (Dropout) (None, 16, 16, 12) 0 conv2d_21[0][0]
__________________________________________________________________________________________________
concatenate_19 (Concatenate) (None, 16, 16, 96) 0 concatenate_18[0][0]
dropout_20[0][0]
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, 16, 16, 96) 384 concatenate_19[0][0]
__________________________________________________________________________________________________
activation_21 (Activation) (None, 16, 16, 96) 0 batch_normalization_21[0][0]
__________________________________________________________________________________________________
conv2d_22 (Conv2D) (None, 16, 16, 12) 10368 activation_21[0][0]
__________________________________________________________________________________________________
dropout_21 (Dropout) (None, 16, 16, 12) 0 conv2d_22[0][0]
__________________________________________________________________________________________________
concatenate_20 (Concatenate) (None, 16, 16, 108) 0 concatenate_19[0][0]
dropout_21[0][0]
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, 16, 16, 108) 432 concatenate_20[0][0]
__________________________________________________________________________________________________
activation_22 (Activation) (None, 16, 16, 108) 0 batch_normalization_22[0][0]
__________________________________________________________________________________________________
conv2d_23 (Conv2D) (None, 16, 16, 12) 11664 activation_22[0][0]
__________________________________________________________________________________________________
dropout_22 (Dropout) (None, 16, 16, 12) 0 conv2d_23[0][0]
__________________________________________________________________________________________________
concatenate_21 (Concatenate) (None, 16, 16, 120) 0 concatenate_20[0][0]
dropout_22[0][0]
__________________________________________________________________________________________________
batch_normalization_23 (BatchNo (None, 16, 16, 120) 480 concatenate_21[0][0]
__________________________________________________________________________________________________
activation_23 (Activation) (None, 16, 16, 120) 0 batch_normalization_23[0][0]
__________________________________________________________________________________________________
conv2d_24 (Conv2D) (None, 16, 16, 12) 12960 activation_23[0][0]
__________________________________________________________________________________________________
dropout_23 (Dropout) (None, 16, 16, 12) 0 conv2d_24[0][0]
__________________________________________________________________________________________________
concatenate_22 (Concatenate) (None, 16, 16, 132) 0 concatenate_21[0][0]
dropout_23[0][0]
__________________________________________________________________________________________________
batch_normalization_24 (BatchNo (None, 16, 16, 132) 528 concatenate_22[0][0]
__________________________________________________________________________________________________
activation_24 (Activation) (None, 16, 16, 132) 0 batch_normalization_24[0][0]
__________________________________________________________________________________________________
conv2d_25 (Conv2D) (None, 16, 16, 12) 14256 activation_24[0][0]
__________________________________________________________________________________________________
dropout_24 (Dropout) (None, 16, 16, 12) 0 conv2d_25[0][0]
__________________________________________________________________________________________________
concatenate_23 (Concatenate) (None, 16, 16, 144) 0 concatenate_22[0][0]
dropout_24[0][0]
__________________________________________________________________________________________________
batch_normalization_25 (BatchNo (None, 16, 16, 144) 576 concatenate_23[0][0]
__________________________________________________________________________________________________
activation_25 (Activation) (None, 16, 16, 144) 0 batch_normalization_25[0][0]
__________________________________________________________________________________________________
conv2d_26 (Conv2D) (None, 16, 16, 12) 15552 activation_25[0][0]
__________________________________________________________________________________________________
dropout_25 (Dropout) (None, 16, 16, 12) 0 conv2d_26[0][0]
__________________________________________________________________________________________________
concatenate_24 (Concatenate) (None, 16, 16, 156) 0 concatenate_23[0][0]
dropout_25[0][0]
__________________________________________________________________________________________________
batch_normalization_26 (BatchNo (None, 16, 16, 156) 624 concatenate_24[0][0]
__________________________________________________________________________________________________
activation_26 (Activation) (None, 16, 16, 156) 0 batch_normalization_26[0][0]
__________________________________________________________________________________________________
conv2d_27 (Conv2D) (None, 16, 16, 12) 1872 activation_26[0][0]
__________________________________________________________________________________________________
dropout_26 (Dropout) (None, 16, 16, 12) 0 conv2d_27[0][0]
__________________________________________________________________________________________________
average_pooling2d_2 (AveragePoo (None, 8, 8, 12) 0 dropout_26[0][0]
__________________________________________________________________________________________________
batch_normalization_27 (BatchNo (None, 8, 8, 12) 48 average_pooling2d_2[0][0]
__________________________________________________________________________________________________
activation_27 (Activation) (None, 8, 8, 12) 0 batch_normalization_27[0][0]
__________________________________________________________________________________________________
conv2d_28 (Conv2D) (None, 8, 8, 12) 1296 activation_27[0][0]
__________________________________________________________________________________________________
dropout_27 (Dropout) (None, 8, 8, 12) 0 conv2d_28[0][0]
__________________________________________________________________________________________________
concatenate_25 (Concatenate) (None, 8, 8, 24) 0 average_pooling2d_2[0][0]
dropout_27[0][0]
__________________________________________________________________________________________________
batch_normalization_28 (BatchNo (None, 8, 8, 24) 96 concatenate_25[0][0]
__________________________________________________________________________________________________
activation_28 (Activation) (None, 8, 8, 24) 0 batch_normalization_28[0][0]
__________________________________________________________________________________________________
conv2d_29 (Conv2D) (None, 8, 8, 12) 2592 activation_28[0][0]
__________________________________________________________________________________________________
dropout_28 (Dropout) (None, 8, 8, 12) 0 conv2d_29[0][0]
__________________________________________________________________________________________________
concatenate_26 (Concatenate) (None, 8, 8, 36) 0 concatenate_25[0][0]
dropout_28[0][0]
__________________________________________________________________________________________________
batch_normalization_29 (BatchNo (None, 8, 8, 36) 144 concatenate_26[0][0]
__________________________________________________________________________________________________
activation_29 (Activation) (None, 8, 8, 36) 0 batch_normalization_29[0][0]
__________________________________________________________________________________________________
conv2d_30 (Conv2D) (None, 8, 8, 12) 3888 activation_29[0][0]
__________________________________________________________________________________________________
dropout_29 (Dropout) (None, 8, 8, 12) 0 conv2d_30[0][0]
__________________________________________________________________________________________________
concatenate_27 (Concatenate) (None, 8, 8, 48) 0 concatenate_26[0][0]
dropout_29[0][0]
__________________________________________________________________________________________________
batch_normalization_30 (BatchNo (None, 8, 8, 48) 192 concatenate_27[0][0]
__________________________________________________________________________________________________
activation_30 (Activation) (None, 8, 8, 48) 0 batch_normalization_30[0][0]
__________________________________________________________________________________________________
conv2d_31 (Conv2D) (None, 8, 8, 12) 5184 activation_30[0][0]
__________________________________________________________________________________________________
dropout_30 (Dropout) (None, 8, 8, 12) 0 conv2d_31[0][0]
__________________________________________________________________________________________________
concatenate_28 (Concatenate) (None, 8, 8, 60) 0 concatenate_27[0][0]
dropout_30[0][0]
__________________________________________________________________________________________________
batch_normalization_31 (BatchNo (None, 8, 8, 60) 240 concatenate_28[0][0]
__________________________________________________________________________________________________
activation_31 (Activation) (None, 8, 8, 60) 0 batch_normalization_31[0][0]
__________________________________________________________________________________________________
conv2d_32 (Conv2D) (None, 8, 8, 12) 6480 activation_31[0][0]
__________________________________________________________________________________________________
dropout_31 (Dropout) (None, 8, 8, 12) 0 conv2d_32[0][0]
__________________________________________________________________________________________________
concatenate_29 (Concatenate) (None, 8, 8, 72) 0 concatenate_28[0][0]
dropout_31[0][0]
__________________________________________________________________________________________________
batch_normalization_32 (BatchNo (None, 8, 8, 72) 288 concatenate_29[0][0]
__________________________________________________________________________________________________
activation_32 (Activation) (None, 8, 8, 72) 0 batch_normalization_32[0][0]
__________________________________________________________________________________________________
conv2d_33 (Conv2D) (None, 8, 8, 12) 7776 activation_32[0][0]
__________________________________________________________________________________________________
dropout_32 (Dropout) (None, 8, 8, 12) 0 conv2d_33[0][0]
__________________________________________________________________________________________________
concatenate_30 (Concatenate) (None, 8, 8, 84) 0 concatenate_29[0][0]
dropout_32[0][0]
__________________________________________________________________________________________________
batch_normalization_33 (BatchNo (None, 8, 8, 84) 336 concatenate_30[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, 8, 8, 84) 0 batch_normalization_33[0][0]
__________________________________________________________________________________________________
conv2d_34 (Conv2D) (None, 8, 8, 12) 9072 activation_33[0][0]
__________________________________________________________________________________________________
dropout_33 (Dropout) (None, 8, 8, 12) 0 conv2d_34[0][0]
__________________________________________________________________________________________________
concatenate_31 (Concatenate) (None, 8, 8, 96) 0 concatenate_30[0][0]
dropout_33[0][0]
__________________________________________________________________________________________________
batch_normalization_34 (BatchNo (None, 8, 8, 96) 384 concatenate_31[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, 8, 8, 96) 0 batch_normalization_34[0][0]
__________________________________________________________________________________________________
conv2d_35 (Conv2D) (None, 8, 8, 12) 10368 activation_34[0][0]
__________________________________________________________________________________________________
dropout_34 (Dropout) (None, 8, 8, 12) 0 conv2d_35[0][0]
__________________________________________________________________________________________________
concatenate_32 (Concatenate) (None, 8, 8, 108) 0 concatenate_31[0][0]
dropout_34[0][0]
__________________________________________________________________________________________________
batch_normalization_35 (BatchNo (None, 8, 8, 108) 432 concatenate_32[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, 8, 8, 108) 0 batch_normalization_35[0][0]
__________________________________________________________________________________________________
conv2d_36 (Conv2D) (None, 8, 8, 12) 11664 activation_35[0][0]
__________________________________________________________________________________________________
dropout_35 (Dropout) (None, 8, 8, 12) 0 conv2d_36[0][0]
__________________________________________________________________________________________________
concatenate_33 (Concatenate) (None, 8, 8, 120) 0 concatenate_32[0][0]
dropout_35[0][0]
__________________________________________________________________________________________________
batch_normalization_36 (BatchNo (None, 8, 8, 120) 480 concatenate_33[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, 8, 8, 120) 0 batch_normalization_36[0][0]
__________________________________________________________________________________________________
conv2d_37 (Conv2D) (None, 8, 8, 12) 12960 activation_36[0][0]
__________________________________________________________________________________________________
dropout_36 (Dropout) (None, 8, 8, 12) 0 conv2d_37[0][0]
__________________________________________________________________________________________________
concatenate_34 (Concatenate) (None, 8, 8, 132) 0 concatenate_33[0][0]
dropout_36[0][0]
__________________________________________________________________________________________________
batch_normalization_37 (BatchNo (None, 8, 8, 132) 528 concatenate_34[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, 8, 8, 132) 0 batch_normalization_37[0][0]
__________________________________________________________________________________________________
conv2d_38 (Conv2D) (None, 8, 8, 12) 14256 activation_37[0][0]
__________________________________________________________________________________________________
dropout_37 (Dropout) (None, 8, 8, 12) 0 conv2d_38[0][0]
__________________________________________________________________________________________________
concatenate_35 (Concatenate) (None, 8, 8, 144) 0 concatenate_34[0][0]
dropout_37[0][0]
__________________________________________________________________________________________________
batch_normalization_38 (BatchNo (None, 8, 8, 144) 576 concatenate_35[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, 8, 8, 144) 0 batch_normalization_38[0][0]
__________________________________________________________________________________________________
conv2d_39 (Conv2D) (None, 8, 8, 12) 15552 activation_38[0][0]
__________________________________________________________________________________________________
dropout_38 (Dropout) (None, 8, 8, 12) 0 conv2d_39[0][0]
__________________________________________________________________________________________________
concatenate_36 (Concatenate) (None, 8, 8, 156) 0 concatenate_35[0][0]
dropout_38[0][0]
__________________________________________________________________________________________________
batch_normalization_39 (BatchNo (None, 8, 8, 156) 624 concatenate_36[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, 8, 8, 156) 0 batch_normalization_39[0][0]
__________________________________________________________________________________________________
conv2d_40 (Conv2D) (None, 8, 8, 12) 1872 activation_39[0][0]
__________________________________________________________________________________________________
dropout_39 (Dropout) (None, 8, 8, 12) 0 conv2d_40[0][0]
__________________________________________________________________________________________________
average_pooling2d_3 (AveragePoo (None, 4, 4, 12) 0 dropout_39[0][0]
__________________________________________________________________________________________________
batch_normalization_40 (BatchNo (None, 4, 4, 12) 48 average_pooling2d_3[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, 4, 4, 12) 0 batch_normalization_40[0][0]
__________________________________________________________________________________________________
conv2d_41 (Conv2D) (None, 4, 4, 12) 1296 activation_40[0][0]
__________________________________________________________________________________________________
dropout_40 (Dropout) (None, 4, 4, 12) 0 conv2d_41[0][0]
__________________________________________________________________________________________________
concatenate_37 (Concatenate) (None, 4, 4, 24) 0 average_pooling2d_3[0][0]
dropout_40[0][0]
__________________________________________________________________________________________________
batch_normalization_41 (BatchNo (None, 4, 4, 24) 96 concatenate_37[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, 4, 4, 24) 0 batch_normalization_41[0][0]
__________________________________________________________________________________________________
conv2d_42 (Conv2D) (None, 4, 4, 12) 2592 activation_41[0][0]
__________________________________________________________________________________________________
dropout_41 (Dropout) (None, 4, 4, 12) 0 conv2d_42[0][0]
__________________________________________________________________________________________________
concatenate_38 (Concatenate) (None, 4, 4, 36) 0 concatenate_37[0][0]
dropout_41[0][0]
__________________________________________________________________________________________________
batch_normalization_42 (BatchNo (None, 4, 4, 36) 144 concatenate_38[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, 4, 4, 36) 0 batch_normalization_42[0][0]
__________________________________________________________________________________________________
conv2d_43 (Conv2D) (None, 4, 4, 12) 3888 activation_42[0][0]
__________________________________________________________________________________________________
dropout_42 (Dropout) (None, 4, 4, 12) 0 conv2d_43[0][0]
__________________________________________________________________________________________________
concatenate_39 (Concatenate) (None, 4, 4, 48) 0 concatenate_38[0][0]
dropout_42[0][0]
__________________________________________________________________________________________________
batch_normalization_43 (BatchNo (None, 4, 4, 48) 192 concatenate_39[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, 4, 4, 48) 0 batch_normalization_43[0][0]
__________________________________________________________________________________________________
conv2d_44 (Conv2D) (None, 4, 4, 12) 5184 activation_43[0][0]
__________________________________________________________________________________________________
dropout_43 (Dropout) (None, 4, 4, 12) 0 conv2d_44[0][0]
__________________________________________________________________________________________________
concatenate_40 (Concatenate) (None, 4, 4, 60) 0 concatenate_39[0][0]
dropout_43[0][0]
__________________________________________________________________________________________________
batch_normalization_44 (BatchNo (None, 4, 4, 60) 240 concatenate_40[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, 4, 4, 60) 0 batch_normalization_44[0][0]
__________________________________________________________________________________________________
conv2d_45 (Conv2D) (None, 4, 4, 12) 6480 activation_44[0][0]
__________________________________________________________________________________________________
dropout_44 (Dropout) (None, 4, 4, 12) 0 conv2d_45[0][0]
__________________________________________________________________________________________________
concatenate_41 (Concatenate) (None, 4, 4, 72) 0 concatenate_40[0][0]
dropout_44[0][0]
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, 4, 4, 72) 288 concatenate_41[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, 4, 4, 72) 0 batch_normalization_45[0][0]
__________________________________________________________________________________________________
conv2d_46 (Conv2D) (None, 4, 4, 12) 7776 activation_45[0][0]
__________________________________________________________________________________________________
dropout_45 (Dropout) (None, 4, 4, 12) 0 conv2d_46[0][0]
__________________________________________________________________________________________________
concatenate_42 (Concatenate) (None, 4, 4, 84) 0 concatenate_41[0][0]
dropout_45[0][0]
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, 4, 4, 84) 336 concatenate_42[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, 4, 4, 84) 0 batch_normalization_46[0][0]
__________________________________________________________________________________________________
conv2d_47 (Conv2D) (None, 4, 4, 12) 9072 activation_46[0][0]
__________________________________________________________________________________________________
dropout_46 (Dropout) (None, 4, 4, 12) 0 conv2d_47[0][0]
__________________________________________________________________________________________________
concatenate_43 (Concatenate) (None, 4, 4, 96) 0 concatenate_42[0][0]
dropout_46[0][0]
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, 4, 4, 96) 384 concatenate_43[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, 4, 4, 96) 0 batch_normalization_47[0][0]
__________________________________________________________________________________________________
conv2d_48 (Conv2D) (None, 4, 4, 12) 10368 activation_47[0][0]
__________________________________________________________________________________________________
dropout_47 (Dropout) (None, 4, 4, 12) 0 conv2d_48[0][0]
__________________________________________________________________________________________________
concatenate_44 (Concatenate) (None, 4, 4, 108) 0 concatenate_43[0][0]
dropout_47[0][0]
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, 4, 4, 108) 432 concatenate_44[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, 4, 4, 108) 0 batch_normalization_48[0][0]
__________________________________________________________________________________________________
conv2d_49 (Conv2D) (None, 4, 4, 12) 11664 activation_48[0][0]
__________________________________________________________________________________________________
dropout_48 (Dropout) (None, 4, 4, 12) 0 conv2d_49[0][0]
__________________________________________________________________________________________________
concatenate_45 (Concatenate) (None, 4, 4, 120) 0 concatenate_44[0][0]
dropout_48[0][0]
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, 4, 4, 120) 480 concatenate_45[0][0]
__________________________________________________________________________________________________
activation_49 (Activation) (None, 4, 4, 120) 0 batch_normalization_49[0][0]
__________________________________________________________________________________________________
conv2d_50 (Conv2D) (None, 4, 4, 12) 12960 activation_49[0][0]
__________________________________________________________________________________________________
dropout_49 (Dropout) (None, 4, 4, 12) 0 conv2d_50[0][0]
__________________________________________________________________________________________________
concatenate_46 (Concatenate) (None, 4, 4, 132) 0 concatenate_45[0][0]
dropout_49[0][0]
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, 4, 4, 132) 528 concatenate_46[0][0]
__________________________________________________________________________________________________
activation_50 (Activation) (None, 4, 4, 132) 0 batch_normalization_50[0][0]
__________________________________________________________________________________________________
conv2d_51 (Conv2D) (None, 4, 4, 12) 14256 activation_50[0][0]
__________________________________________________________________________________________________
dropout_50 (Dropout) (None, 4, 4, 12) 0 conv2d_51[0][0]
__________________________________________________________________________________________________
concatenate_47 (Concatenate) (None, 4, 4, 144) 0 concatenate_46[0][0]
dropout_50[0][0]
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, 4, 4, 144) 576 concatenate_47[0][0]
__________________________________________________________________________________________________
activation_51 (Activation) (None, 4, 4, 144) 0 batch_normalization_51[0][0]
__________________________________________________________________________________________________
conv2d_52 (Conv2D) (None, 4, 4, 12) 15552 activation_51[0][0]
__________________________________________________________________________________________________
dropout_51 (Dropout) (None, 4, 4, 12) 0 conv2d_52[0][0]
__________________________________________________________________________________________________
concatenate_48 (Concatenate) (None, 4, 4, 156) 0 concatenate_47[0][0]
dropout_51[0][0]
__________________________________________________________________________________________________
batch_normalization_52 (BatchNo (None, 4, 4, 156) 624 concatenate_48[0][0]
__________________________________________________________________________________________________
activation_52 (Activation) (None, 4, 4, 156) 0 batch_normalization_52[0][0]
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, 2, 2, 156) 0 activation_52[0][0]
__________________________________________________________________________________________________
flatten_1 (Flatten) (None, 624) 0 average_pooling2d_4[0][0]
__________________________________________________________________________________________________
dense_1 (Dense) (None, 10) 6250 flatten_1[0][0]
==================================================================================================
Total params: 434,014
Trainable params: 425,278
Non-trainable params: 8,736
__________________________________________________________________________________________________
```python
# Configure training: categorical cross-entropy loss (one-hot labels),
# Adam with its default settings, and accuracy as the reported metric.
optimizer = Adam()
model.compile(
    loss='categorical_crossentropy',
    optimizer=optimizer,
    metrics=['accuracy'],
)
```
```python
# Train the network for `epochs` passes over the training split,
# printing a progress bar (verbose=1) and evaluating on the test
# split at the end of every epoch.
model.fit(
    x_train,
    y_train,
    batch_size=batch_size,
    epochs=epochs,
    verbose=1,
    validation_data=(x_test, y_test),
)
```
Train on 50000 samples, validate on 10000 samples
Epoch 1/50
50000/50000 [==============================] - 115s 2ms/step - loss: 1.5987 - acc: 0.4085 - val_loss: 2.1517 - val_acc: 0.3493
Epoch 2/50
50000/50000 [==============================] - 103s 2ms/step - loss: 1.2189 - acc: 0.5583 - val_loss: 1.2469 - val_acc: 0.5709
Epoch 3/50
50000/50000 [==============================] - 104s 2ms/step - loss: 1.0307 - acc: 0.6304 - val_loss: 1.4701 - val_acc: 0.5562
Epoch 4/50
50000/50000 [==============================] - 105s 2ms/step - loss: 0.9178 - acc: 0.6740 - val_loss: 1.0500 - val_acc: 0.6428
Epoch 5/50
50000/50000 [==============================] - 106s 2ms/step - loss: 0.8330 - acc: 0.7038 - val_loss: 1.4071 - val_acc: 0.6010
Epoch 6/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.7679 - acc: 0.7262 - val_loss: 1.0303 - val_acc: 0.6821
Epoch 7/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.7148 - acc: 0.7478 - val_loss: 0.7924 - val_acc: 0.7425
Epoch 8/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.6712 - acc: 0.7658 - val_loss: 0.9088 - val_acc: 0.7200
Epoch 9/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.6359 - acc: 0.7767 - val_loss: 1.2848 - val_acc: 0.6585
Epoch 10/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.6014 - acc: 0.7902 - val_loss: 0.9815 - val_acc: 0.7150
Epoch 11/50
50000/50000 [==============================] - 108s 2ms/step - loss: 0.5757 - acc: 0.7982 - val_loss: 0.6950 - val_acc: 0.7825
Epoch 12/50
50000/50000 [==============================] - 109s 2ms/step - loss: 0.5491 - acc: 0.8063 - val_loss: 0.9691 - val_acc: 0.7294
Epoch 13/50
50000/50000 [==============================] - 109s 2ms/step - loss: 0.5281 - acc: 0.8145 - val_loss: 0.8880 - val_acc: 0.7390
Epoch 14/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.5087 - acc: 0.8212 - val_loss: 0.8069 - val_acc: 0.7642
Epoch 15/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.4919 - acc: 0.8276 - val_loss: 0.9532 - val_acc: 0.7312
Epoch 16/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.4746 - acc: 0.8343 - val_loss: 0.8637 - val_acc: 0.7397
Epoch 17/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.4587 - acc: 0.8409 - val_loss: 0.9358 - val_acc: 0.7566
Epoch 18/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.4455 - acc: 0.8437 - val_loss: 0.9758 - val_acc: 0.7461
Epoch 19/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.4295 - acc: 0.8498 - val_loss: 0.8895 - val_acc: 0.7506
Epoch 20/50
50000/50000 [==============================] - 111s 2ms/step - loss: 0.4214 - acc: 0.8523 - val_loss: 1.0407 - val_acc: 0.7216
Epoch 21/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.4101 - acc: 0.8554 - val_loss: 0.6335 - val_acc: 0.8108
Epoch 22/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.3959 - acc: 0.8610 - val_loss: 1.6771 - val_acc: 0.6306
Epoch 23/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.3816 - acc: 0.8636 - val_loss: 0.9494 - val_acc: 0.7503
Epoch 24/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.3751 - acc: 0.8657 - val_loss: 1.1680 - val_acc: 0.7254
Epoch 25/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.3661 - acc: 0.8712 - val_loss: 1.0375 - val_acc: 0.7378
Epoch 26/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.3588 - acc: 0.8749 - val_loss: 1.0749 - val_acc: 0.7364
Epoch 27/50
50000/50000 [==============================] - 111s 2ms/step - loss: 0.3510 - acc: 0.8767 - val_loss: 0.8123 - val_acc: 0.7824
Epoch 28/50
50000/50000 [==============================] - 111s 2ms/step - loss: 0.3471 - acc: 0.8785 - val_loss: 0.5629 - val_acc: 0.8402
Epoch 29/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.3322 - acc: 0.8838 - val_loss: 0.6815 - val_acc: 0.8099
Epoch 30/50
50000/50000 [==============================] - 111s 2ms/step - loss: 0.3295 - acc: 0.8842 - val_loss: 1.1040 - val_acc: 0.7504
Epoch 31/50
50000/50000 [==============================] - 111s 2ms/step - loss: 0.3206 - acc: 0.8871 - val_loss: 1.0198 - val_acc: 0.7633
Epoch 32/50
50000/50000 [==============================] - 111s 2ms/step - loss: 0.3141 - acc: 0.8897 - val_loss: 0.6132 - val_acc: 0.8310
Epoch 33/50
50000/50000 [==============================] - 111s 2ms/step - loss: 0.3103 - acc: 0.8892 - val_loss: 0.7206 - val_acc: 0.8124
Epoch 34/50
50000/50000 [==============================] - 111s 2ms/step - loss: 0.3048 - acc: 0.8929 - val_loss: 0.6906 - val_acc: 0.8110
Epoch 35/50
50000/50000 [==============================] - 110s 2ms/step - loss: 0.2982 - acc: 0.8950 - val_loss: 0.7035 - val_acc: 0.8040
Epoch 36/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2865 - acc: 0.8974 - val_loss: 1.1865 - val_acc: 0.7293
Epoch 37/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2805 - acc: 0.9001 - val_loss: 0.7301 - val_acc: 0.8129
Epoch 38/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2840 - acc: 0.8998 - val_loss: 0.8888 - val_acc: 0.7807
Epoch 39/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2738 - acc: 0.9033 - val_loss: 0.7588 - val_acc: 0.8064
Epoch 40/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2703 - acc: 0.9050 - val_loss: 0.6568 - val_acc: 0.8305
Epoch 41/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2681 - acc: 0.9060 - val_loss: 0.6591 - val_acc: 0.8259
Epoch 42/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2574 - acc: 0.9079 - val_loss: 0.7939 - val_acc: 0.8031
Epoch 43/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2540 - acc: 0.9103 - val_loss: 0.8753 - val_acc: 0.7890
Epoch 44/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2496 - acc: 0.9111 - val_loss: 0.6358 - val_acc: 0.8350
Epoch 45/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2482 - acc: 0.9120 - val_loss: 0.7516 - val_acc: 0.8176
Epoch 46/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2426 - acc: 0.9131 - val_loss: 0.8351 - val_acc: 0.7985
Epoch 47/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2438 - acc: 0.9138 - val_loss: 0.8294 - val_acc: 0.8047
Epoch 48/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2412 - acc: 0.9131 - val_loss: 0.6378 - val_acc: 0.8420
Epoch 49/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2333 - acc: 0.9162 - val_loss: 0.6079 - val_acc: 0.8443
Epoch 50/50
50000/50000 [==============================] - 107s 2ms/step - loss: 0.2275 - acc: 0.9185 - val_loss: 0.7489 - val_acc: 0.8255
<keras.callbacks.History at 0x7f5a1d856c10>
```python
# Evaluate the trained model on the held-out test set.
# `score` is [loss, accuracy] per the compiled metrics.
score = model.evaluate(x_test, y_test, verbose=1)
# Use str.format so the label and value print as one plain string on
# both Python 2 and 3 — the bare print('label:', value) form prints a
# tuple under Python 2, as the logged output in this notebook shows.
print('Test loss: {0}'.format(score[0]))
print('Test accuracy: {0}'.format(score[1]))
```
10000/10000 [==============================] - 9s 911us/step
('Test loss:', 0.7488608881473541)
('Test accuracy:', 0.8255)
```python
# Persist the learned weights to an HDF5 (.h5) file for later reloading.
weights_path = "DNST_model.h5"
model.save_weights(weights_path)
print("Saved model to disk")
```
Saved model to disk
```python
```
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment