Write a fully-connected neural network for MNIST with TensorFlow 2.x. The MNIST image vectors are the inputs and the digit labels are the outputs. Use tf.GradientTape instead of fit, and show the 28x28 image together with its input vector.
PLEASE WRITE THE CODE AND DO NOT SEND ME LINKS TO DIFFERENT PARTS ON A WEBSITE.
To be clear, this is a homework assignment.
# load libraries
import tensorflow as tf
from tensorflow.keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from tensorflow.keras.layers import Dropout, Flatten, Input, Dense
# load the MNIST dataset (the question asks for MNIST, not Fashion-MNIST)
mnist = tf.keras.datasets.mnist
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
#Normalization of dataset
train_images = train_images / 255.0
test_images = test_images / 255.0
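# (sketch, not part of the original answer) the question asks to show the
# 28x28 image together with its input vector; assuming matplotlib is
# available, one way to do that for the first training example is:
import matplotlib.pyplot as plt
plt.imshow(train_images[0], cmap='gray')
plt.title(f'label: {train_labels[0]}')
plt.show()
print(train_images[0].reshape(-1))  # the flattened 784-value input vector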
# create the model
def create_model():
    def add_conv_block(model, num_filters):
        # two convolutions followed by pooling and dropout
        model.add(Conv2D(num_filters, 3, activation='relu', padding='same'))
        model.add(BatchNormalization())
        model.add(Conv2D(num_filters, 3, activation='relu', padding='valid'))
        model.add(MaxPooling2D(pool_size=2))
        model.add(Dropout(0.2))
        return model

    model = tf.keras.models.Sequential()
    # MNIST images are 28x28 grayscale, so the input has a single channel
    model.add(Input(shape=(28, 28, 1)))
    model = add_conv_block(model, 32)
    model = add_conv_block(model, 64)
    model = add_conv_block(model, 128)
    # flatten and classify into the 10 digit classes
    model.add(Flatten())
    model.add(Dense(10, activation='softmax'))
    # labels are integers, so use sparse categorical crossentropy
    model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
model = create_model()
model.summary()
# Conv2D expects an explicit channel dimension, so add one before fitting
train_images = train_images[..., tf.newaxis]
test_images = test_images[..., tf.newaxis]
# fit on the training dataset
model.fit(train_images, train_labels, epochs=10)
test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=2)
print('\nTest accuracy:', test_acc)
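The question also asks for a fully-connected network trained with tf.GradientTape instead of fit. Below is a minimal sketch of that variant, reusing the normalized train_images / train_labels arrays loaded above (flattened to 784-value vectors); the layer width, batch size, and epoch count are illustrative choices, not requirements from the question.

import tensorflow as tf

# flatten each 28x28 image into a 784-value input vector for the dense layers
flat_train = train_images.reshape(-1, 28 * 28).astype('float32')
flat_test = test_images.reshape(-1, 28 * 28).astype('float32')

# fully-connected (dense-only) model
fc_model = tf.keras.Sequential([
    tf.keras.Input(shape=(784,)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax'),
])

loss_fn = tf.keras.losses.SparseCategoricalCrossentropy()
optimizer = tf.keras.optimizers.Adam()
train_ds = tf.data.Dataset.from_tensor_slices((flat_train, train_labels)).shuffle(60000).batch(64)

# custom training loop with tf.GradientTape instead of model.fit
for epoch in range(5):
    for batch_images, batch_labels in train_ds:
        with tf.GradientTape() as tape:
            predictions = fc_model(batch_images, training=True)
            loss = loss_fn(batch_labels, predictions)
        grads = tape.gradient(loss, fc_model.trainable_variables)
        optimizer.apply_gradients(zip(grads, fc_model.trainable_variables))
    print(f'epoch {epoch + 1}: last batch loss = {loss.numpy():.4f}')

# evaluate the fully-connected model on the test set
test_predictions = fc_model(flat_test, training=False)
test_acc_fc = tf.reduce_mean(tf.keras.metrics.sparse_categorical_accuracy(test_labels, test_predictions))
print('fully-connected test accuracy:', float(test_acc_fc))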