在训练一个用于音乐生成的VAE模型时,遇到报错:
ValueError Traceback (most recent call last)
/tmp/ipykernel_27/1449102967.py in <module>
98
99 random_vector_for_generation = tf.random.normal(shape = [num_examples_to_generate, latent_dim])
--> 100 model = CVAE(latent_dim)
/tmp/ipykernel_27/1449102967.py in __init__(self, latent_dim)
52
53 layers.Flatten(name = 'flatten'),
---> 54 layers.Dense(latent_dim + latent_dim, name = 'dense'),
55 ]
56 )
...
报错:
All layers added to a Sequential model should have unique names. Name "" is already the name of a layer in this model. Update the name argument to pass a unique name.
python version:3.7
tensorflow version:2.3
class Resnet1DBlock(tf.keras.Model):
    """A 1-D residual block used by the CVAE encoder/decoder.

    'encode' mode: strided Conv1D (downsampling) + InstanceNormalization.
    'decode' mode: Conv1DTranspose + BatchNormalization.

    Args:
        kernel_size: convolution kernel size passed to the Conv layers.
        filters: number of output filters of the Conv layers.
        type: 'encode' or 'decode'; selects the layer variants above.
        prefix: unique string prepended to the inner layers' names AND used
            to name this block itself, so Sequential never sees duplicates.

    Raises:
        ValueError: if ``type`` is neither 'encode' nor 'decode'.
    """
    def __init__(self, kernel_size, filters, type = 'encode', prefix = ''):
        # BUG FIX for "All layers added to a Sequential model should have
        # unique names. Name \"\" is already the name of a layer...":
        # the original code called super().__init__(name = ''), so EVERY
        # Resnet1DBlock instance carried the same empty-string name, and
        # tf.keras.Sequential rejects duplicate names among its direct
        # children (naming the inner sub-layers does not help).
        # A non-empty prefix yields a deterministic unique name; otherwise
        # name=None lets Keras auto-generate a unique one.
        super(Resnet1DBlock, self).__init__(
            name = (prefix + 'resnet_block') if prefix else None)
        if type == 'encode':
            # Downsampling path: first conv has stride 2.
            self.conv1a = layers.Conv1D(filters, kernel_size, 2, padding = "same",
                                        name = prefix + 'conv1a')
            self.conv1b = layers.Conv1D(filters, kernel_size, 1, padding = "same",
                                        name = prefix + 'conv1b')
            self.norm1a = tfa.layers.InstanceNormalization(name = prefix + 'norm1a')
            self.norm1b = tfa.layers.InstanceNormalization(name = prefix + 'norm1b')
        elif type == 'decode':
            # Upsampling path: transposed convs (stride 1 here) + batch norm.
            self.conv1a = layers.Conv1DTranspose(filters, kernel_size, 1, padding = "same",
                                                 name = prefix + 'conv1a')
            self.conv1b = layers.Conv1DTranspose(filters, kernel_size, 1, padding = "same",
                                                 name = prefix + 'conv1b')
            self.norm1a = tf.keras.layers.BatchNormalization(name = prefix + 'norm1a')
            self.norm1b = tf.keras.layers.BatchNormalization(name = prefix + 'norm1b')
        else:
            # Fail fast: the original silently `return None`-ed here, which
            # left conv1a/norm1a undefined and deferred the crash (as an
            # AttributeError) to the first call().
            raise ValueError("type must be 'encode' or 'decode', got %r" % (type,))

    def call(self, input_tensor):
        """conv -> norm -> LeakyReLU twice, plus a residual connection."""
        x = tf.nn.relu(input_tensor)
        x = self.conv1a(x)
        x = self.norm1a(x)
        x = layers.LeakyReLU(0.4)(x)
        x = self.conv1b(x)
        x = self.norm1b(x)
        x = layers.LeakyReLU(0.4)(x)
        # Residual add. NOTE(review): if `filters` differs from the input's
        # channel count this relies on broadcasting — confirm that is intended.
        x += input_tensor
        return tf.nn.relu(x)
class CVAE(tf.keras.Model):
    """Convolutional VAE for music generation.

    The encoder maps a (1, 90001) input to ``2 * latent_dim`` units
    (presumably mean and log-variance concatenated — confirm against the
    omitted ``encode``/``reparameterize`` methods); the decoder maps a
    ``latent_dim`` vector back to a (1, 90001)-channel output.
    """
    def __init__(self, latent_dim):
        super(CVAE, self).__init__()
        # Dimensionality of the latent code z.
        self.latent_dim = latent_dim
        # Encoder: alternating strided Conv1D downsampling and residual blocks.
        # Each top-level entry must carry a unique name — Sequential rejects
        # duplicates among its direct children.
        self.encoder = tf.keras.Sequential(
            [
                tf.keras.layers.InputLayer(input_shape = (1, 90001), name = 'input_encoder'),
                layers.Conv1D(64, 1, 2, name = 'conv1_layer1'),
                # NOTE(review): Resnet1DBlock's signature is
                # (kernel_size, filters), so this passes kernel_size=64,
                # filters=1 — the mirror image of the surrounding
                # Conv1D(filters, kernel_size, stride) calls. Looks like the
                # two arguments may be swapped; confirm the intended order.
                Resnet1DBlock(64, 1, 'encode', prefix = 'res1_'),
                layers.Conv1D(128, 1, 2, name = 'conv1_layer2'),
                Resnet1DBlock(128, 1, 'encode', prefix = 'res2_'),
                layers.Conv1D(128, 1, 2, name = 'conv1_layer3'),
                Resnet1DBlock(128, 1, 'encode', prefix = 'res3_'),
                layers.Conv1D(256, 1, 2, name = 'conv1_layer4'),
                Resnet1DBlock(256, 1, 'encode', prefix = 'res4_'),
                layers.Flatten(name = 'flatten'),
                # Twice latent_dim: presumably mean + log-variance.
                layers.Dense(latent_dim + latent_dim, name = 'dense'),
            ]
        )
        # Decoder: residual blocks and Conv1DTranspose layers mirroring the
        # encoder, ending with 90001 output channels.
        self.decoder = tf.keras.Sequential(
            [
                tf.keras.layers.InputLayer(input_shape = (latent_dim,), name = 'input_decoder'),
                # Reshape the flat latent vector into a length-1 sequence.
                layers.Reshape(target_shape = (1, latent_dim)),
                Resnet1DBlock(512, 1, 'decode', prefix = 'res1_'),
                layers.Conv1DTranspose(512, 1, 1, name = 'Conv1Trans_Layer1'),
                Resnet1DBlock(256, 1, 'decode', prefix = 'res2_'),
                layers.Conv1DTranspose(256, 1, 1, name = 'Conv1Trans_Layer2'),
                Resnet1DBlock(128, 1, 'decode', prefix = 'res3_'),
                layers.Conv1DTranspose(128, 1, 1, name = 'Conv1Trans_Layer3'),
                Resnet1DBlock(64, 1, 'decode', prefix = 'res4_'),
                layers.Conv1DTranspose(64, 1, 1, name = 'Conv1Trans_Layer4'),
                layers.Conv1DTranspose(90001, 1, 1, name = 'Conv1Trans_Layer5')
            ]
        )
    # Functions unrelated to the reported error were omitted by the poster.
# Adam with a small epsilon; beta values are the Keras defaults spelled out.
optimizer = tf.keras.optimizers.Adam(0.0003, beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-08)
# Fixed latent samples reused across epochs so generated examples are
# comparable over training. NOTE(review): num_examples_to_generate and
# latent_dim are defined outside this excerpt.
random_vector_for_generation = tf.random.normal(shape = [num_examples_to_generate, latent_dim])
# Constructing the model triggers the Sequential name check — this is the
# line (via CVAE.__init__) where the reported ValueError is raised.
model = CVAE(latent_dim)
我非常疑惑,明明已经给所有网络层命名了。(原因:Sequential 检查的是每个直接子层自身的 name,而不是其内部子层的名字;Resnet1DBlock 在 super().__init__(name = '') 中把自己的名字设成了空字符串,于是所有残差块都叫 "",从而触发重名报错。把这里改成传入唯一名字或 name = None 即可。)