I am trying to train a VAE model for music generation. When I run the following program, it shows an error:
All layers added to a Sequential model should have unique names. Name "" is already the name of a layer in this model. Update the
name
argument to pass a unique name.
tensorflow version is 2.3
class Resnet1DBlock(tf.keras.Model):
def __init__(self, kernel_size, filters, type = 'encode', prefix = ''):
super(Resnet1DBlock, self).__init__(name = '')
if type == 'encode':
self.conv1a = layers.Conv1D(filters, kernel_size, 2, padding = "same", \
name = prefix + 'conv1a')
self.conv1b = layers.Conv1D(filters, kernel_size, 1, padding = "same", \
name = prefix + 'conv1b')
self.norm1a = tfa.layers.InstanceNormalization(name = prefix + 'norm1a')
self.norm1b = tfa.layers.InstanceNormalization(name = prefix + 'norm1b')
elif type == 'decode':
self.conv1a = layers.Conv1DTranspose(filters, kernel_size, 1, padding = "same", \
name = prefix + 'conv1a')
self.conv1b = layers.Conv1DTranspose(filters, kernel_size, 1, padding = "same", \
name = prefix + 'conv1b')
self.norm1a = tf.keras.layers.BatchNormalization(name = prefix + 'norm1a')
self.norm1b = tf.keras.layers.BatchNormalization(name = prefix + 'norm1b')
else:
return None
def call(self, input_tensor):
x = tf.nn.relu(input_tensor)
x = self.conv1a(x)
x = self.norm1a(x)
x = layers.LeakyReLU(0.4)(x)
x = self.conv1b(x)
x = self.norm1b(x)
x = layers.LeakyReLU(0.4)(x)
x += input_tensor
return tf.nn.relu(x)
class CVAE(tf.keras.Model):
def __init__(self, latent_dim):
super(CVAE, self).__init__()
self.latent_dim = latent_dim
self.encoder = tf.keras.Sequential(
[
tf.keras.layers.InputLayer(input_shape = (1, 90001), name = 'input_encoder'),
layers.Conv1D(64, 1, 2, name = 'conv1_layer1'),
Resnet1DBlock(64, 1, 'encode', prefix = 'res1_'),
layers.Conv1D(128, 1, 2, name = 'conv1_layer2'),
Resnet1DBlock(128, 1, 'encode', prefix = 'res2_'),
layers.Conv1D(128, 1, 2, name = 'conv1_layer3'),
Resnet1DBlock(128, 1, 'encode', prefix = 'res3_'),
layers.Conv1D(256, 1, 2, name = 'conv1_layer4'),
Resnet1DBlock(256, 1, 'encode', prefix = 'res4_'),
layers.Flatten(name = 'flatten'),
layers.Dense(latent_dim + latent_dim, name = 'dense'),
]
)
self.decoder = tf.keras.Sequential(
[
tf.keras.layers.InputLayer(input_shape = (latent_dim,), name = 'input_decoder'),
layers.Reshape(target_shape = (1, latent_dim)),
Resnet1DBlock(512, 1, 'decode', prefix = 'res1_'),
layers.Conv1DTranspose(512, 1, 1, name = 'Conv1Trans_Layer1'),
Resnet1DBlock(256, 1, 'decode', prefix = 'res2_'),
layers.Conv1DTranspose(256, 1, 1, name = 'Conv1Trans_Layer2'),
Resnet1DBlock(128, 1, 'decode', prefix = 'res3_'),
layers.Conv1DTranspose(128, 1, 1, name = 'Conv1Trans_Layer3'),
Resnet1DBlock(64, 1, 'decode', prefix = 'res4_'),
layers.Conv1DTranspose(64, 1, 1, name = 'Conv1Trans_Layer4'),
layers.Conv1DTranspose(90001, 1, 1, name = 'Conv1Trans_Layer5')
]
)
optimizer = tf.keras.optimizers.Adam(0.0003, beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-08)
random_vector_for_generation = tf.random.normal(shape = [num_examples_to_generate, latent_dim])
model = CVAE(latent_dim)
I'm so confused, I have clearly named all the network layers. (I'm a novice)
you should not send a default name to initialize the superclass, so if you replace "super(Resnet1DBlock, self).init(name = '')" with "super(Resnet1DBlock, self).init()" it would be run successfully.
good luck