I set the convolutional layer and the pooling layer, then deconvolute and de-pool, the input shape of 256 * 256 * 3 images, but finally there is a shape error:
def build_auto_encode_model(shape=(256,256,3)):
input_img = Input(shape=shape)
x = Convolution2D(16, (3, 3), activation='relu', padding='same')(input_img)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Convolution2D(8, (3, 3), activation='relu', padding='same')(x)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Convolution2D(8, (3, 3), activation='relu', padding='same')(x)
encoded = MaxPooling2D((2, 2), padding='same')(x)
x = Convolution2D(8, (3, 3), activation='relu', padding='same')(encoded)
x = UpSampling2D((2, 2))(x)
x = Convolution2D(8, (3, 3), activation='relu', padding='same')(x)
x = UpSampling2D((2, 2))(x)
x = Convolution2D(16, (3, 3), activation='relu')(x)
x = UpSampling2D((2, 2))(x)
decoded = Convolution2D(3, (3, 3), activation='sigmoid', padding='same')(x)
encoder = Model(inputs=input_img, outputs=encoded)
autoencoder = Model(inputs=input_img, outputs=decoded)
autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')
return encoder, autoencoder
def train_auto_encode_model(encoder_model_path="./data/encoder.h5"):
X = np.load("data/train.npy")
X_train = X[int(round(X.shape[0] * 0.2)):, :]
X_test = X[0:int(round(X.shape[0] * 0.2)), :]
encoder, autoencoder = build_auto_encode_model()
autoencoder.fit(X_train, X_train, epochs=10, batch_size=64, shuffle=True, validation_data=(X_test, X_test))
encoder.save(encoder_model_path)
Here is the error I get:
Error when checking target: expected conv2d_7 to have shape (252, 252, 3) but got array with shape (256, 256, 3)
Error trace-back:
By using autoencoder.summary()
you would see that the output shape of the last Conv2D layer is (None, 252, 252, 3)
; so the labels of shape (256,256,3)
are not compatible. The cause of this problem is that you have forgotten to set the padding
argument of previous Conv2D layer. By setting it to 'same'
this problem would resolve:
x = Convolution2D(16, (3, 3), activation='relu', padding='same')(x)