Search code examples
python · tensorflow · keras · deep-learning · google-colaboratory

CNN-LSTM error: initializing TimeDistributed layer


I am trying to build a CNN-LSTM model to predict prices, but I get an error message when trying to build the model. The error message was:

Please initialize 'TimeDistributed layer' with a 'tf.keras.layers.Layer' instance. You passed: <keras.layers.core.Flatten object at 0x7fc70c70f250>

Here is my CNN-LSTM model

# CNN-LSTM: a per-timestep CNN feature extractor (wrapped in TimeDistributed)
# followed by an LSTM over the extracted features, ending in a scalar output.
# NOTE(review): the error reported above is raised by TimeDistributed(Flatten())
# when Flatten is imported from the standalone `keras` package while the other
# layers come from `tensorflow.keras` — the import lines are not shown here.
model = Sequential()
# input_shape=(None, n_steps, n_features): variable number of sub-sequences,
# each of n_steps timesteps with n_features features.
model.add(TimeDistributed(Conv1D(filters=64, kernel_size=2, activation="relu"), input_shape=(None, n_steps, n_features)))
model.add(TimeDistributed(Conv1D(filters=128, kernel_size=2)))
model.add(TimeDistributed(MaxPooling1D(pool_size=2)))
# Flatten each sub-sequence's conv features into a vector for the LSTM.
model.add(TimeDistributed(Flatten()))
model.add(LSTM(100))
model.add(Dense(32))
model.add(Dense(1, activation="relu"))
model.compile(optimizer='Adam', loss='mean_squared_error')

# Stop training when val_loss has not improved for 30 epochs.
earlyStop = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=30)

I have searched for a solution here but I couldn't really understand it; it would be great if you could help me. Thank you!


Solution

  • I was able to replicate your issue as shown below

    # Deliberate reproduction of the asker's error: every layer except Flatten
    # is imported from `tensorflow.keras`, but Flatten comes from the
    # standalone `keras` package. TimeDistributed (from tensorflow.keras)
    # rejects the standalone-keras Flatten instance, producing the ValueError
    # shown in the traceback below.
    from tensorflow import keras
    from tensorflow.keras.models import Sequential
    from tensorflow.keras.layers import Dense, Conv1D , MaxPooling1D , LSTM, TimeDistributed
    from keras.layers import Flatten
    from tensorflow.keras.optimizers import Adam
    
    model = Sequential()
    model.add(TimeDistributed(Conv1D(filters=64, kernel_size=2, activation="relu"), input_shape=(None, n_steps, n_features)))
    model.add(TimeDistributed(Conv1D(filters=128, kernel_size=2)))
    model.add(TimeDistributed(MaxPooling1D(pool_size=2)))
    # This line raises: Flatten() is a keras.layers object, not a
    # tf.keras.layers.Layer, so TimeDistributed's isinstance check fails.
    model.add(TimeDistributed(Flatten()))
    model.add(LSTM(100))
    model.add(Dense(32))
    model.add(Dense(1, activation="relu"))
    model.compile(optimizer='Adam', loss='mean_squared_error')
    
    

    Output:

    ---------------------------------------------------------------------------
    ValueError                                Traceback (most recent call last)
    <ipython-input-9-11436877029b> in <module>()
         11 model.add(TimeDistributed(Conv1D(filters=128, kernel_size=2)))
         12 model.add(TimeDistributed(MaxPooling1D(pool_size=2)))
    ---> 13 model.add(TimeDistributed(Flatten()))
         14 model.add(LSTM(100))
         15 model.add(Dense(32))
    
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/layers/wrappers.py in __init__(self, layer, **kwargs)
        125           'Please initialize `TimeDistributed` layer with a '
        126           '`tf.keras.layers.Layer` instance. You passed: {input}'.format(
    --> 127               input=layer))
        128     super(TimeDistributed, self).__init__(layer, **kwargs)
        129     self.supports_masking = True
    
    ValueError: Please initialize `TimeDistributed` layer with a `tf.keras.layers.Layer` instance. You passed: <keras.layers.core.Flatten object at 0x7fe9c42d5a50>
    

    Fixed code:

    Using from tensorflow.keras.layers import Flatten instead of from keras.layers import Flatten will resolve your issue.

    # Fixed version: Flatten is now imported from tensorflow.keras.layers
    # alongside the other layers, so TimeDistributed accepts it.
    from tensorflow import keras
    from tensorflow.keras.models import Sequential
    from tensorflow.keras.layers import Dense, Conv1D , MaxPooling1D , LSTM, TimeDistributed, Flatten
    #from keras.layers import Flatten
    from tensorflow.keras.optimizers import Adam
    
    # Same CNN-LSTM architecture as before; builds without error now that all
    # layers come from the same (tensorflow.keras) package.
    model = Sequential()
    model.add(TimeDistributed(Conv1D(filters=64, kernel_size=2, activation="relu"), input_shape=(None, n_steps, n_features)))
    model.add(TimeDistributed(Conv1D(filters=128, kernel_size=2)))
    model.add(TimeDistributed(MaxPooling1D(pool_size=2)))
    model.add(TimeDistributed(Flatten()))
    model.add(LSTM(100))
    model.add(Dense(32))
    model.add(Dense(1, activation="relu"))
    model.compile(optimizer='Adam', loss='mean_squared_error')