# Studying materials for the "Introduction to Deep Learning" course
# author: Dmitrii Bakhitov
# PACE University 2023
import numpy as np


# mean squared error and its derivative w.r.t. the prediction
def mse(y_true, y_pred):
    return np.mean(np.power(y_true - y_pred, 2))


def mse_prime(y_true, y_pred):
    return 2 * (y_pred - y_true) / y_true.size


# binary cross-entropy and its derivative w.r.t. the prediction;
# predictions are clipped away from 0 and 1 so np.log never sees 0
def binary_cross_entropy(y_true, y_pred):
    epsilon = 1e-15
    y_pred = np.clip(y_pred, epsilon, 1 - epsilon)
    return np.mean(-y_true * np.log(y_pred) - (1 - y_true) * np.log(1 - y_pred))


def binary_cross_entropy_prime(y_true, y_pred):
    epsilon = 1e-15
    y_pred = np.clip(y_pred, epsilon, 1 - epsilon)
    return ((1 - y_true) / (1 - y_pred) - y_true / y_pred) / np.size(y_true)
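

# Quick sanity check (added, not part of the original file): for
# y_true = [1, 0] and y_pred = [0.9, 0.1], both squared errors are 0.1 ** 2,
# so mse(np.array([1.0, 0.0]), np.array([0.9, 0.1])) returns roughly 0.01;
# binary_cross_entropy on the same arrays averages -log(0.9) twice and
# returns roughly 0.105.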
# the model class: a plain sequential network trained with per-sample
# gradient descent
class Network:
    def __init__(self, loss='mse'):
        self.layers = []
        self.loss = mse if loss == 'mse' else binary_cross_entropy
        self.loss_prime = mse_prime if loss == 'mse' else binary_cross_entropy_prime

    # add layer to network
    def add(self, layer):
        self.layers.append(layer)

    # set loss to use
    # def use(self, loss, loss_prime):
    #     self.loss = loss
    #     self.loss_prime = loss_prime

    # predict output for given input
    def predict(self, input_data):
        # sample dimension first
        samples = len(input_data)
        result = []
        # run network over all samples
        for i in range(samples):
            # forward propagation
            output = input_data[i]
            for layer in self.layers:
                output = layer.forward_propagation(output)
            result.append(output)
        return result

    def evaluation(self, x, y):
        """
        x - test data
        y - test labels (integer class indices)
        returns the classification accuracy
        """
        prediction = self.predict(x)
        acc = 0
        for i in range(len(y)):
            pred = np.argmax(prediction[i][0])
            if pred == y[i]:
                acc += 1
        return acc / len(y)
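
    # Note (added): evaluation() compares the argmax of the network output
    # against integer class labels, so one-hot targets must be converted
    # first, as fit() does below with np.argmax(y_eval, axis=1). For a
    # hypothetical one-hot array y of shape (n, k):
    #
    #   labels = np.argmax(y, axis=1)    # integer classes, shape (n,)
    #   acc = net.evaluation(x, labels)  # 'net' is a trained Network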
    # train the network
    def fit(self, x_train, y_train, epochs, learning_rate, evaluation=0):
        """
        x_train - training dataset
        y_train - training labels
        epochs - number of epochs to train
        learning_rate - learning rate, e.g. learning_rate = 0.001
        evaluation - portion of the data to hold out for evaluation,
                     e.g. 0.25 - 25% of the data is used for validation
        """
        # create variables to store training results
        self.training = {}
        epochs_list = []
        err_list = []
        eval_err_list = []
        eval_acc_list = []
        # sample dimension first
        samples = len(x_train)
        eval_samples = 0
        # hold out the tail of the training data when evaluation > 0
        if evaluation != 0:
            eval_samples = int(evaluation * samples)
            samples = samples - eval_samples
            self.eval_samples = eval_samples
            self.samples = samples
            x_eval = x_train[samples:]
            x_train = x_train[:samples]
            y_eval = y_train[samples:]
            y_train = y_train[:samples]
        # training loop
        for i in range(epochs):
            err = 0
            eval_err = 0
            eval_acc = 0
            for j in range(samples):
                # forward propagation
                output = x_train[j]
                for layer in self.layers:
                    output = layer.forward_propagation(output)
                # accumulate loss (for display purposes only)
                err += self.loss(y_train[j], output)
                # backward propagation
                error = self.loss_prime(y_train[j], output)
                for layer in reversed(self.layers):
                    error = layer.backward_propagation(error, learning_rate)
            err /= samples
            epochs_list.append(i + 1)
            err_list.append(err)
            # evaluation step
            if evaluation != 0:
                for j in range(eval_samples):
                    output = x_eval[j]
                    for layer in self.layers:
                        output = layer.forward_propagation(output)
                    # accumulate loss (for display purposes only)
                    eval_err += self.loss(y_eval[j], output)
                eval_acc = self.evaluation(x_eval, np.argmax(y_eval, axis=1))
                eval_err /= eval_samples
                eval_err_list.append(eval_err)
                eval_acc_list.append(eval_acc)
            print('epoch %d/%d | training_loss=%f | eval_loss=%f | eval_accuracy=%f'
                  % (i + 1, epochs, err, eval_err, eval_acc))
        self.training = {'epoch': epochs_list, 'training_loss': err_list,
                         'eval_loss': eval_err_list, 'eval_accuracy': eval_acc_list}

    def total_params(self):
        total_params = 0
        for layer in self.layers:
            total_params += layer.number_parameters()
        return total_params
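

# ---------------------------------------------------------------------------
# Minimal usage sketch (added, not part of the original file). Network only
# assumes that each layer exposes forward_propagation(input),
# backward_propagation(output_error, learning_rate) and number_parameters();
# the course repository presumably ships its own layer classes, so the two
# toy layers below are illustrative placeholders written against that
# interface, not the course's actual implementation.
# ---------------------------------------------------------------------------
class DenseLayer:
    """Fully connected layer: output = input @ W + b."""
    def __init__(self, input_size, output_size):
        self.weights = np.random.rand(input_size, output_size) - 0.5
        self.bias = np.random.rand(1, output_size) - 0.5

    def forward_propagation(self, input_data):
        self.input = input_data
        return np.dot(self.input, self.weights) + self.bias

    def backward_propagation(self, output_error, learning_rate):
        input_error = np.dot(output_error, self.weights.T)
        weights_error = np.dot(self.input.T, output_error)
        # plain gradient-descent update
        self.weights -= learning_rate * weights_error
        self.bias -= learning_rate * output_error
        return input_error

    def number_parameters(self):
        return self.weights.size + self.bias.size


class TanhLayer:
    """Elementwise tanh activation; no trainable parameters."""
    def forward_propagation(self, input_data):
        self.input = input_data
        return np.tanh(self.input)

    def backward_propagation(self, output_error, learning_rate):
        return (1 - np.tanh(self.input) ** 2) * output_error

    def number_parameters(self):
        return 0


if __name__ == '__main__':
    # XOR with one-hot targets so evaluation()'s argmax is meaningful;
    # each sample has shape (1, features), matching predict()'s indexing
    x_train = np.array([[[0, 0]], [[0, 1]], [[1, 0]], [[1, 1]]], dtype=float)
    y_train = np.array([[[1, 0]], [[0, 1]], [[0, 1]], [[1, 0]]], dtype=float)

    net = Network(loss='mse')
    net.add(DenseLayer(2, 8))
    net.add(TanhLayer())
    net.add(DenseLayer(8, 2))
    net.add(TanhLayer())

    net.fit(x_train, y_train, epochs=500, learning_rate=0.1)
    print('parameters:', net.total_params())
    print('accuracy:', net.evaluation(x_train, np.array([0, 1, 1, 0])))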