Encountering an Error in Python for a Variational Autoencoder

I encountered an error while implementing a Variational Autoencoder. I actually got the code from a Coursera course, but it doesn't run. The whole thing runs smoothly until I get to the last part (model.fit).

from tensorflow import keras
from keras.layers import Lambda, Input, Dense
from keras.models import Model
from keras.datasets import mnist
from keras.losses import mse, binary_crossentropy
    
#from keras.utils import plot_model
from keras.utils.vis_utils import plot_model
from keras import backend as K
    
import numpy as np
import matplotlib.pyplot as plt
import os
import tensorflow as tf
    
from tensorflow.keras.datasets import mnist
import numpy as np
np.set_printoptions(precision=2)
import matplotlib.pyplot as plt
%matplotlib inline
(x_train, y_train), (x_test, y_test) = mnist.load_data();

x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.

x_train_flat = x_train.reshape((len(x_train), np.prod(x_train.shape[1:])))
x_test_flat = x_test.reshape((len(x_test), np.prod(x_test.shape[1:])))
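# Each 28x28 image is now flattened into a 784-dimensional vector (28 * 28 = 784).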
print(x_train_flat.shape)
print(x_test_flat.shape)

def sampling(args):
    """
    Transforms parameters defining the latent space into a normal distribution.
    """
    # Need to unpack arguments like this because of the way the Keras "Lambda" layer works.
    mu, log_sigma = args
    # by default, random_normal has mean=0 and std=1.0
    epsilon = K.random_normal(shape=tf.shape(mu))
    # reparameterization trick: z = mu + sigma * epsilon, where sigma = exp(0.5 * log_sigma)
    return mu + K.exp(0.5 * log_sigma) * epsilon

hidden_dim = 256
batch_size = 128
latent_dim = 2 
# this is the dimension of each of the vectors representing the two parameters
# that will get transformed into a normal distribution
epochs = 1


# VAE model = encoder + decoder
# build encoder model
inputs = Input(shape=(784, ), name='encoder_input')
x = Dense(hidden_dim, activation='relu')(inputs)


z_mean = Dense(latent_dim, name='z_mean')(x)
z_log_var = Dense(latent_dim, name='z_log_var')(x)
# NOTE: output of encoder model is *2* n-dimensional vectors:

z = Lambda(sampling, name='z')([z_mean, z_log_var])
# z is now one n dimensional vector representing the inputs 
encoder_model = Model(inputs, [z_mean, z_log_var, z], name='encoder')

encoder_model.summary()

# build decoder model
latent_inputs = Input(shape=(latent_dim,),)
x = Dense(hidden_dim, activation='relu')(latent_inputs)
outputs = Dense(784, activation='sigmoid')(x)
decoder_model = Model(latent_inputs, outputs, name='decoder')


# instantiate VAE model
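# encoder_model(inputs) returns [z_mean, z_log_var, z]; index [2] feeds only
# the sampled z into the decoder.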
outputs = decoder_model(encoder_model(inputs)[2])
vae_model = Model(inputs, outputs, name='vae_mlp')

for i, layer in enumerate(vae_model.layers):
    print("Layer", i+1)
    print("Name", layer.name)
    print("Input shape", layer.input_shape)
    print("Output shape", layer.output_shape)
    if not layer.weights:
        print("No weights for this layer")
        continue
    for j, weight in enumerate(layer.weights):
        print("Weights", j+1)
        print("Name", weight.name)
        print("Weights shape:", weight.shape.as_list())

reconstruction_loss = binary_crossentropy(inputs, outputs)
reconstruction_loss *= 784

kl_loss = 0.5 * (K.exp(z_log_var) - (1 + z_log_var) + K.square(z_mean))
kl_loss = K.sum(kl_loss, axis=-1)
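# Closed-form KL divergence between N(mu, sigma^2) and the standard normal N(0, 1):
#   KL = 0.5 * sum_i (exp(log_var_i) + mu_i^2 - 1 - log_var_i)
# which is exactly the expression computed and summed over the latent dimensions above.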
total_vae_loss = K.mean(reconstruction_loss + kl_loss)

    
vae_model.add_loss(total_vae_loss)

vae_model.compile(optimizer='rmsprop',
                  metrics=['accuracy'])
    
vae_model.summary()
    
vae_model.fit(x_train_flat,
        epochs=epochs,
        batch_size=batch_size)

The error is:

AttributeError                            Traceback (most recent call last)
/var/folders/x2/q28v76ps4yg9_n8ynm068gv00000gn/T/ipykernel_1994/1800164265.py in <module>
----> 1 vae_model.fit(x_train_flat,
      2         epochs=epochs,
      3         batch_size=batch_size)
/Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/keras/utils/traceback_utils.py in error_handler(*args, **kwargs)
     65     except Exception as e:  # pylint: disable=broad-except
     66       filtered_tb = _process_traceback_frames(e.__traceback__)
---> 67       raise e.with_traceback(filtered_tb) from None
     68     finally:
     69       del filtered_tb
/Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/tensorflow/python/framework/func_graph.py in autograph_handler(*args, **kwargs)
   1145           except Exception as e:  # pylint:disable=broad-except
   1146             if hasattr(e, "ag_error_metadata"):
-> 1147               raise e.ag_error_metadata.to_exception(e)
   1148             else:
   1149               raise
AttributeError: in user code:

I tried my best to solve it, but I couldn't figure it out, as I am new to this field as well as to Python.
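
Not a definitive answer, but one thing worth checking first: the imports mix the standalone keras package (from keras.layers import ..., from keras import backend as K) with tensorflow.keras (from tensorflow.keras.datasets import mnist). Under TensorFlow 2.x these are two separate packages, and mixing objects from both is a well-known source of opaque AttributeErrors that only surface inside fit. Below is a minimal sketch of the import block rewritten to use tensorflow.keras throughout (assuming TensorFlow 2.x; the rest of the code can stay as it is):

import numpy as np
import tensorflow as tf

# Pull every Keras symbol from tensorflow.keras so that layers, models,
# losses, and backend functions all come from the same package.
from tensorflow.keras.layers import Lambda, Input, Dense
from tensorflow.keras.models import Model
from tensorflow.keras.datasets import mnist
from tensorflow.keras.losses import binary_crossentropy
from tensorflow.keras import backend as K

If the error still appears after unifying the imports, the add_loss call on a tensor built outside the model's own layers is another common failure point in recent TensorFlow versions; the usual workaround is to compute the reconstruction and KL terms inside a custom layer or a subclassed Model.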



Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow
