-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: main.py
More file actions
60 lines (41 loc) · 1.45 KB
/
main.py
File metadata and controls
60 lines (41 loc) · 1.45 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import random
import numpy as np
from data import mnist_dataloader # Import the dataloader class
from nn import *
import matplotlib.pyplot as plt
def _prepare_images(images):
    """Scale pixels to [0, 1], flatten each image, and put samples in columns.

    Returns an array of shape (n_pixels, n_samples) — e.g. (784, n) for MNIST.
    """
    scaled = np.array(images) / 255.0
    return scaled.reshape(scaled.shape[0], -1).T

# Load the MNIST splits and preprocess the image data.
(x_train, y_train), (x_test, y_test) = mnist_dataloader.load_data()
x_train = _prepare_images(x_train)
x_test = _prepare_images(x_test)
# Convert integer class labels to one-hot encoding.
def one_hot_encode(labels, num_classes):
    """Return a (num_classes, n_labels) one-hot matrix for integer labels."""
    identity = np.eye(num_classes)
    encoded_rows = identity[labels]  # one one-hot row per label
    return encoded_rows.T            # transpose so samples are columns
# One-hot targets for both splits, each of shape (10, n_samples).
y_train, y_test = (one_hot_encode(labels, 10) for labels in (y_train, y_test))
# Network dimensions: 784 input pixels -> 30 hidden units -> 10 output classes.
input_neurons = x_train.shape[0]  # 784
hidden_neurons = 30
output_neurons = 10
# BUGFIX: the original assigned `dimensions` twice on consecutive lines;
# the duplicate assignment is removed.
dimensions = (input_neurons, hidden_neurons, output_neurons)

# Initialize all parameters as one flat vector: for each of the two layers,
# one weight per input plus one bias per neuron ((784+1)*30 + (30+1)*10),
# scaled by 0.01 so initial activations start small.
total_params = (dimensions[0] + 1) * dimensions[1] + (dimensions[1] + 1) * dimensions[2]
params = np.random.randn(total_params) * 0.01

# Hyperparameters.
learning_rate = 1
epochs = 3000
batch_size = 10  # NOTE(review): defined but never passed to sgd() below — confirm intended
rate_decay = 0.1  # NOTE(review): defined but never passed to sgd() below — confirm intended
# Train the neural network and persist the learned parameters to disk.
trained_params = sgd(x_train, y_train, params, dimensions, learning_rate, epochs)
output_path = 'trained_params_with_mini_batch16.npy'
np.save(output_path, trained_params)
print(f"Parameters saved to '{output_path}'!")
# Plot the training-loss curve.
# BUGFIX: the original called plt.plot(x, y, ...) with names `x` and `y`
# that are never defined anywhere in this script, so the plot raised a
# NameError at runtime. `sgd` (as called above) returns only the trained
# parameters, so no per-epoch loss history is available here to plot.
# The plot is guarded so the script completes cleanly.
# TODO(review): have sgd() also return per-epoch losses and plot them here.
loss_history = []  # per-epoch losses, once sgd exposes them
if loss_history:
    plt.plot(range(1, len(loss_history) + 1), loss_history, marker='o')
    plt.xlabel("epochs")
    plt.ylabel("loss")
    plt.title("Graph of Loss")
    # Show the graph
    plt.show()