DL 5 Executed
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense

# Load the dataset (assumed CIFAR-10, matching the 10-class softmax output) and one-hot encode the labels
(train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.cifar10.load_data()
train_labels = tf.keras.utils.to_categorical(train_labels)
test_labels = tf.keras.utils.to_categorical(test_labels)

# Build a small CNN; the original listing was missing the convolutional layer before pooling (32 filters assumed)
model = Sequential([
    Conv2D(32, (3, 3), activation='relu', input_shape=(32, 32, 3)),
    MaxPooling2D((2, 2)),
    Flatten(),
    Dense(64, activation='relu'),
    Dense(10, activation='softmax')
])
# Display the first 25 training images with their labels
plt.figure(figsize=(10, 10))
for i in range(25):
    plt.subplot(5, 5, i+1)
    plt.imshow(train_images[i])
    plt.title(f"Label: {np.argmax(train_labels[i])}")
    plt.axis('off')
plt.show()
model.summary()
print("\nTraining Dataset:")
print("\nTesting Dataset:")
def plot_history(history):
    plt.plot(history.history['accuracy'], label='Training Accuracy')
    plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.legend()
    plt.show()
# Compile and train the model, then plot the training history
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
history = model.fit(train_images, train_labels, epochs=5, batch_size=64, validation_data=(test_images, test_labels))
plot_history(history)
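Once trained, the model can be sanity-checked on a single example. A minimal sketch, assuming the model and test arrays defined above:

# Predict the class index of the first test image and compare with the true label
pred = model.predict(test_images[:1])
print("Predicted label:", np.argmax(pred[0]), "| True label:", np.argmax(test_labels[0]))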
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt

# Load CIFAR-10 and filter out only the images of cats (label 3) and dogs (label 5)
(train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.cifar10.load_data()
train_indices = np.where((train_labels.flatten() == 3) | (train_labels.flatten() == 5))[0]
test_indices = np.where((test_labels.flatten() == 3) | (test_labels.flatten() == 5))[0]
train_images = train_images[train_indices]
train_labels = train_labels[train_indices]
test_images = test_images[test_indices]
test_labels = test_labels[test_indices]
class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
def plot_images(images, labels):
    plt.figure(figsize=(10, 10))
    for i in range(25):
        plt.subplot(5, 5, i + 1)
        plt.xticks([])
        plt.yticks([])
        plt.grid(False)
        plt.imshow(images[i])
        plt.xlabel(class_names[int(labels[i])])
    plt.show()

plot_images(test_images, test_labels)
# Load the pre-trained VGG16 convolutional base (ImageNet weights, no top classifier)
from tensorflow.keras.applications import VGG16
vgg_base = VGG16(weights='imagenet', include_top=False, input_shape=(32, 32, 3))
vgg_base.trainable = False  # freeze the pre-trained weights (assumed; the base definition was missing from the listing)

# Create a new model with the pre-trained VGG16 base and additional layers
model = Sequential([
    vgg_base,
    Flatten(),
    Dense(256, activation='relu'),
    Dense(10, activation='softmax')
])
# Display the first 25 test images with their class names
plt.figure(figsize=(10, 10))
for i in range(25):
    plt.subplot(5, 5, i + 1)
    plt.xticks([])
    plt.yticks([])
    plt.grid(False)
    plt.imshow(test_images[i])
    plt.xlabel(class_names[int(test_labels[i][0])])
plt.show()
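Before training on top of VGG16, the inputs should be normalized the way the ImageNet weights expect. The listing shows no normalization step, so the following is a sketch of one reasonable choice using Keras' own preprocess_input:

from tensorflow.keras.applications.vgg16 import preprocess_input

# Convert images to float and apply VGG16's ImageNet channel normalization (assumed step)
train_images = preprocess_input(train_images.astype('float32'))
test_images = preprocess_input(test_images.astype('float32'))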
# Compile and train the model (integer labels, hence sparse categorical cross-entropy)
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
history = model.fit(train_images, train_labels, epochs=5, batch_size=64,
                    validation_data=(test_images, test_labels))
Epoch 1/5
782/782 ━━━━━━━━━━━━━━━━━━━━ 16s 15ms/step - accuracy: 0.1848 - loss: 2.2483 - val_accuracy: 0.3618 - val_loss: 1.9465
Epoch 2/5
782/782 ━━━━━━━━━━━━━━━━━━━━ 8s 11ms/step - accuracy: 0.3839 - loss: 1.8907 - val_accuracy: 0.4210 - val_loss: 1.7493
Epoch 3/5
782/782 ━━━━━━━━━━━━━━━━━━━━ 9s 12ms/step - accuracy: 0.4352 - loss: 1.7138 - val_accuracy: 0.4516 - val_loss: 1.6360
Epoch 4/5
782/782 ━━━━━━━━━━━━━━━━━━━━ 9s 10ms/step - accuracy: 0.4634 - loss: 1.6136 - val_accuracy: 0.4702 - val_loss: 1.5637
Epoch 5/5
782/782 ━━━━━━━━━━━━━━━━━━━━ 10s 11ms/step - accuracy: 0.4865 - loss: 1.5350 - val_accuracy: 0.4882 - val_loss: 1.5121
# Plot accuracy graph
plt.plot(history.history['accuracy'], label='Training Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
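A single held-out evaluation complements the accuracy curve. A minimal sketch, assuming the model and test arrays above:

# Evaluate the trained model on the test set
test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=0)
print(f"Test accuracy: {test_acc:.4f}")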
1. Aim: Implement One-Hot Encoding of Words or Characters
Source code:
import numpy as np
import tensorflow as tf
from sklearn.model_selection import train_test_split
# Sample data
texts = ['hello', 'world', 'deep', 'learning', 'is', 'awesome'] # Input texts
labels = [0, 1, 1, 0, 1, 0]  # Corresponding labels (dummy binary labels for classification)
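In character-level one-hot encoding, each character becomes a vector of length vocab_size with a single 1 at that character's index; for example, with a vocabulary ['d', 'e', 'p'], the character 'e' encodes as [0, 1, 0]. The code below builds that encoding for the sample texts.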
print("\nTesting Data:")
for text, label in zip(X_test, y_test):
decoded_text = ''.join([vocab[np.argmax(char)] for char in text])
print("Text:", decoded_text, "| Label:", label)
# Creating vocabulary
vocab = sorted(set(''.join(texts))) # Extract unique characters and sort
vocab_size = len(vocab)
char_indices = {char: i for i, char in enumerate(vocab)} # Map each character to an
index
max_len = max(map(len, texts)) # Find the length of the longest tex
Training Data:
Text: awesomea | Label: 0
Text: deepaaaa | Label: 1
Text: isaaaaaa | Label: 1
Text: learning | Label: 0
Testing Data:
Text: helloaaa | Label: 0
Text: worldaaa | Label: 1
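The model definition did not survive in this listing. The logs below (4 steps per epoch over 4 training samples, a binary loss starting near 0.70) are consistent with a small dense classifier trained with batch_size=1; a minimal sketch with assumed layer sizes:

# Assumed architecture: flatten the (max_len, vocab_size) one-hot matrix into a small dense binary classifier
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(max_len, vocab_size)),
    tf.keras.layers.Dense(16, activation='relu'),
    tf.keras.layers.Dense(1, activation='sigmoid')
])
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.fit(X_train, y_train, epochs=10, batch_size=1)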
Epoch 1/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 1s 6ms/step - accuracy: 0.4333 - loss: 0.7054
Epoch 2/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.4333 - loss: 0.6585
Epoch 3/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.7333 - loss: 0.6138
Epoch 4/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 1.0000 - loss: 0.6415
Epoch 5/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 1.0000 - loss: 0.6105
Epoch 6/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 1.0000 - loss: 0.5872
Epoch 7/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 1.0000 - loss: 0.5679
Epoch 8/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 1.0000 - loss: 0.5204
Epoch 9/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 1.0000 - loss: 0.5298
Epoch 10/10
4/4 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 1.0000 - loss: 0.5212
<keras.src.callbacks.history.History at 0x78b838bbc6d0>