import tensorflow as tf
from tensorflow import keras
# Load CIFAR-100; the test split is discarded since this script only trains.
cifar100 = keras.datasets.cifar100
(train_images, train_labels), _ = cifar100.load_data()
# Slice to 5000 samples FIRST, then normalize: the original normalized all
# 50,000 images before discarding 90% of them, wasting memory and compute.
train_images = train_images[:5000] / 255.0  # uint8 [0,255] -> float [0,1]
# AlexNet expects 224x224 inputs; CIFAR-100 images are 32x32, so upsample.
train_images = tf.image.resize(train_images, (224, 224))
train_labels = train_labels[:5000]  # integer class ids, shape (5000, 1)
# AlexNet (Krizhevsky et al., 2012) adapted for CIFAR-100 (100 classes).
# BatchNormalization is used in place of the original's local response norm.
alexNet = keras.Sequential(layers=[
    # Explicit input spec so the model is built eagerly (enables .summary()
    # before training) and input shape mismatches fail fast.
    keras.Input(shape=(224, 224, 3)),
    keras.layers.Conv2D(96, 11, strides=(4, 4), padding='valid', activation='relu'),
    keras.layers.BatchNormalization(),
    keras.layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2), padding='valid'),
    keras.layers.Conv2D(256, 5, strides=(1, 1), padding='same', activation='relu'),
    keras.layers.BatchNormalization(),
    keras.layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2), padding='valid'),
    # BUG FIX: was strides=(1*1) — the arithmetic expression evaluates to the
    # int 1 (which Keras happens to accept) but was clearly a typo for (1, 1).
    keras.layers.Conv2D(384, 3, strides=(1, 1), padding='same', activation='relu'),
    keras.layers.Conv2D(384, 3, strides=(1, 1), padding='same', activation='relu'),
    keras.layers.Conv2D(256, 3, strides=(1, 1), padding='same', activation='relu'),
    keras.layers.MaxPool2D(pool_size=(3, 3), strides=(2, 2), padding='valid'),
    keras.layers.Flatten(),
    keras.layers.Dense(4096, activation='relu'),
    keras.layers.Dropout(rate=0.5),  # heavy dropout as in the original paper
    keras.layers.Dense(4096, activation='relu'),
    keras.layers.Dropout(rate=0.5),
    # No softmax: raw logits, paired with from_logits=True in the loss.
    keras.layers.Dense(100),
])
# Configure training: Adam optimizer with sparse (integer-label) cross-entropy.
# from_logits=True matches the final Dense layer, which has no softmax.
loss_fn = keras.losses.SparseCategoricalCrossentropy(from_logits=True)
alexNet.compile(
    optimizer='adam',
    loss=loss_fn,
    metrics=['accuracy'],
)

# Train on the 5000-sample subset prepared above.
alexNet.fit(x=train_images, y=train_labels, batch_size=16, epochs=10)
