3.24. Working with Keras

Conx allows you to easily move back and forth between conx and Keras.

3.24.1. Moving to Keras

Consider this conx network:

In [1]:
from conx import Network, Layer, SGD

# Build a two-branch XOR network: two input banks feed separate hidden
# layers that merge into one shared hidden layer driving two output banks.
net = Network("XOR2")

net.add(Layer("input1", 2))
net.add(Layer("input2", 2))
for layer_name in ("hidden1", "hidden2", "shared-hidden", "output1", "output2"):
    net.add(Layer(layer_name, 2, activation="sigmoid"))

# Wire both branches through the shared hidden layer out to both outputs.
for src, dst in [("input1", "hidden1"),
                 ("input2", "hidden2"),
                 ("hidden1", "shared-hidden"),
                 ("hidden2", "shared-hidden"),
                 ("shared-hidden", "output1"),
                 ("shared-hidden", "output2")]:
    net.connect(src, dst)

# XOR truth table, duplicated across both input banks and both output banks.
ds = [
    ([[0, 0], [0, 0]], [[0, 0], [0, 0]]),
    ([[0, 0], [1, 1]], [[1, 1], [1, 1]]),
    ([[1, 1], [0, 0]], [[1, 1], [1, 1]]),
    ([[1, 1], [1, 1]], [[0, 0], [0, 0]]),
]
net.dataset.load(ds)
net.compile(error="mse", optimizer="sgd")
conx, version 3.4.3
Using Theano backend.
In [2]:
net.dashboard()

3.24.2. Conx model is a Keras Model

Requires:

  • pip install pydot-ng
In [3]:
from keras.utils.vis_utils import model_to_dot
from IPython.display import HTML
In [4]:
dot = model_to_dot(net.model, rankdir="BT")
In [5]:
HTML(dot.create_svg().decode())
Out[5]:
G 139903032760808 input1: InputLayer 139902991401424 hidden1: Dense 139903032760808->139902991401424 139904067381456 input2: InputLayer 139903032760416 hidden2: Dense 139904067381456->139903032760416 139902984363312 concatenate_1: Concatenate 139902991401424->139902984363312 139903032760416->139902984363312 139902991401928 shared-hidden: Dense 139902984363312->139902991401928 139902984302264 output1: Dense 139902991401928->139902984302264 139902984413256 output2: Dense 139902991401928->139902984413256

3.24.3. Generate a Keras Script

You can easily regenerate a Keras model by using net.model.get_config():

In [6]:
from keras import Model
# net.model is a standard Keras Model, so its configuration round-trips
# through the plain Keras API with no conx involvement.
config = net.model.get_config()
model = Model.from_config(config)
# NOTE(review): the following cells use a variable `code` that is never
# defined in any visible cell -- a script-generating step (presumably a conx
# call that emits the "## Autogenerated by conx" text shown below) appears
# to be missing here; confirm against the conx documentation.

In [7]:
print(code)
## Autogenerated by conx

import keras

kfunc = {} # dictionary to keep track of k's by layer

kfunc['input2'] = keras.layers.Input((2,), *(), **{'name': 'input2'})
k = kfunc['input2']
for f in [keras.layers.Dense(2, **{'activation': 'sigmoid', 'name': 'hidden2'})]:
    k = f(k)
kfunc['hidden2'] = k
kfunc['input1'] = keras.layers.Input((2,), *(), **{'name': 'input1'})
k = kfunc['input1']
for f in [keras.layers.Dense(2, **{'activation': 'sigmoid', 'name': 'hidden1'})]:
    k = f(k)
kfunc['hidden1'] = k
k = keras.layers.Concatenate()([kfunc[layer] for layer in ['hidden1', 'hidden2']])
for f in [keras.layers.Dense(2, **{'activation': 'sigmoid', 'name': 'shared-hidden'})]:
    k = f(k)
kfunc['shared-hidden'] = k
k = kfunc['shared-hidden']
for f in [keras.layers.Dense(2, **{'activation': 'sigmoid', 'name': 'output2'})]:
    k = f(k)
kfunc['output2'] = k
k = kfunc['shared-hidden']
for f in [keras.layers.Dense(2, **{'activation': 'sigmoid', 'name': 'output1'})]:
    k = f(k)
kfunc['output1'] = k
model = keras.models.Model(inputs=[kfunc['input1'],kfunc['input2']], outputs=[kfunc['output1'],kfunc['output2']])

You can execute the code completely independently of conx:

In [8]:
exec(code)
In [9]:
model.compile(loss="mse", optimizer="adam")

But still use the dataset compiled by conx:

In [10]:
model.fit(net.dataset._inputs, net.dataset._targets)
Epoch 1/1
4/4 [==============================] - 0s 173us/step - loss: 0.5214 - output1_loss: 0.2628 - output2_loss: 0.2585
Out[10]:
<keras.callbacks.History at 0x7f3da3662780>
In [11]:
model.predict(net.dataset._inputs)
Out[11]:
[array([[ 0.35226727,  0.48041439],
        [ 0.34383395,  0.4885115 ],
        [ 0.34163588,  0.46447068],
        [ 0.33335805,  0.47244915]], dtype=float32),
 array([[ 0.53624219,  0.61730933],
        [ 0.5364688 ,  0.62940359],
        [ 0.54212278,  0.61782312],
        [ 0.5423615 ,  0.62982911]], dtype=float32)]

3.24.4. Moving from Keras

In [12]:
# Training hyperparameters and MNIST image geometry.
batch_size = 128             # samples per gradient update
num_classes = 10             # digits 0-9
epochs = 12                  # full passes over the training set
img_rows, img_cols = 28, 28  # MNIST images are 28x28 pixels
In [13]:
import keras.backend as K

# Backends differ on where the channel axis lives: Theano-style backends put
# it first, TensorFlow-style backends put it last.
channels_first = K.image_data_format() == 'channels_first'
input_shape = (1, img_rows, img_cols) if channels_first else (img_rows, img_cols, 1)

3.24.5. Keras Imperative Interface

In [14]:
import keras
from keras.models import Sequential
from keras.layers import Conv2D, MaxPool2D, Dropout, Flatten, Dense
In [15]:
# The same CNN, expressed by handing the full layer list to the Sequential
# constructor instead of repeated .add() calls.
model = Sequential([
    Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape),
    Conv2D(64, (3, 3), activation='relu'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.25),    # regularize the pooled conv features
    Flatten(),
    Dense(128, activation='relu'),
    Dropout(0.5),     # heavier dropout before the classifier head
    Dense(num_classes, activation='softmax'),
])

model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])

model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
conv2d_1 (Conv2D)            (None, 26, 26, 32)        320
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 24, 24, 64)        18496
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 12, 12, 64)        0
_________________________________________________________________
dropout_1 (Dropout)          (None, 12, 12, 64)        0
_________________________________________________________________
flatten_1 (Flatten)          (None, 9216)              0
_________________________________________________________________
dense_1 (Dense)              (None, 128)               1179776
_________________________________________________________________
dropout_2 (Dropout)          (None, 128)               0
_________________________________________________________________
dense_2 (Dense)              (None, 10)                1290
=================================================================
Total params: 1,199,882
Trainable params: 1,199,882
Non-trainable params: 0
_________________________________________________________________

3.24.6. Keras Functional Interface

In [16]:
import keras
from keras.models import Model
# Dropout was listed twice in the original import; once is enough.
from keras.layers import Input, Conv2D, MaxPool2D, Dropout, Flatten, Dense
In [17]:
# Functional API: thread a tensor through an explicit pipeline of layers.
inputs = Input(input_shape, name="input")
pipeline = [
    Conv2D(32, kernel_size=(3, 3), activation='relu'),
    Conv2D(64, (3, 3), activation='relu'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.25),
    Flatten(),
    Dense(128, activation='relu'),
    Dropout(0.5),
    Dense(num_classes, activation='softmax'),
]
k = inputs
for layer in pipeline:
    k = layer(k)
model = Model(inputs=inputs, outputs=k)

model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])
model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
input (InputLayer)           (None, 28, 28, 1)         0
_________________________________________________________________
conv2d_3 (Conv2D)            (None, 26, 26, 32)        320
_________________________________________________________________
conv2d_4 (Conv2D)            (None, 24, 24, 64)        18496
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 12, 12, 64)        0
_________________________________________________________________
dropout_3 (Dropout)          (None, 12, 12, 64)        0
_________________________________________________________________
flatten_2 (Flatten)          (None, 9216)              0
_________________________________________________________________
dense_3 (Dense)              (None, 128)               1179776
_________________________________________________________________
dropout_4 (Dropout)          (None, 128)               0
_________________________________________________________________
dense_4 (Dense)              (None, 10)                1290
=================================================================
Total params: 1,199,882
Trainable params: 1,199,882
Non-trainable params: 0
_________________________________________________________________

3.24.7. Conx Interface

In [18]:
from conx import Layer, Network, Conv2DLayer, MaxPool2DLayer, FlattenLayer
In [19]:
# The equivalent CNN in conx: named layers added in forward order, then
# connected pairwise down the chain.
network = Network("MNIST-CNN")

for layer in (Layer("input", input_shape),
              Conv2DLayer("conv1", 32, (3, 3), activation='relu'),
              Conv2DLayer("conv2", 64, (3, 3), activation='relu'),
              MaxPool2DLayer("maxpool", (2, 2), dropout=0.25),
              FlattenLayer("flatten"),
              Layer("hidden", 128, activation='relu', dropout=0.5),
              Layer("output", num_classes, activation='softmax')):
    network.add(layer)

chain = ["input", "conv1", "conv2", "maxpool", "flatten", "hidden", "output"]
for src, dst in zip(chain, chain[1:]):
    network.connect(src, dst)

# or, because this is sequential and the layers were added in order:
# network.connect()

network.compile(loss="categorical_crossentropy",
                optimizer="adadelta")
network.model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
input (InputLayer)           (None, 28, 28, 1)         0
_________________________________________________________________
conv1 (Conv2D)               (None, 26, 26, 32)        320
_________________________________________________________________
conv2 (Conv2D)               (None, 24, 24, 64)        18496
_________________________________________________________________
maxpool (MaxPooling2D)       (None, 12, 12, 64)        0
_________________________________________________________________
dropout_5 (Dropout)          (None, 12, 12, 64)        0
_________________________________________________________________
flatten (Flatten)            (None, 9216)              0
_________________________________________________________________
hidden (Dense)               (None, 128)               1179776
_________________________________________________________________
dropout_6 (Dropout)          (None, 128)               0
_________________________________________________________________
output (Dense)               (None, 10)                1290
=================================================================
Total params: 1,199,882
Trainable params: 1,199,882
Non-trainable params: 0
_________________________________________________________________
In [20]:
from conx import import_keras_model
In [21]:
network2 = import_keras_model(model, "Network from Keras Model")
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-21-3fb4ea217620> in <module>()
----> 1 network2 = import_keras_model(model, "Network from Keras Model")

/usr/local/lib/python3.6/dist-packages/conx/utils.py in import_keras_model(model, network_name)
    295             clayer.keras_layer = clayer.k
    296         else:
--> 297             clayer = clayer_class(**layer.get_config())
    298             clayer.k = layer
    299             clayer.keras_layer = layer

TypeError: __init__() missing 1 required positional argument: 'shape'