perceptron.js
// Perceptron
module.exports = class Perceptron {
  constructor (x_train, y_train, epochs = 1000, learn_rate = 0.1) {
    // running totals used to compute the accuracy score
    this.accuracy = 0
    this.samples = 0
    this.x_train = x_train
    this.y_train = y_train
    this.epochs = epochs
    this.learn_rate = learn_rate
    this.bias = 0
    this.weights = new Array(x_train[0].length)
    // initialize random weights, one per input feature
    for (let n = 0; n < x_train[0].length; n++) {
      this.weights[n] = this.random()
    }
  }
  // running accuracy score: (correct - incorrect) / samples seen so far
  current_accuracy () {
    return this.accuracy / this.samples
  }
  // generate a random float between -1 and 1 (for initializing weights)
  random () {
    return Math.random() * 2 - 1
  }
  // step activation function
  activation (n) {
    return n < 0 ? 0 : 1
  }
  // y-hat output (0 or 1) for a given input vector
  predict (input) {
    let total = this.bias
    this.weights.forEach((w, index) => { total += input[index] * w }) // weighted sum of inputs
    return this.activation(total)
  }
  // train the perceptron on the data
  fit () {
    // epochs loop
    for (let e = 0; e < this.epochs; e++) {
      // for each training sample
      for (let i = 0; i < this.x_train.length; i++) {
        // get prediction
        let prediction = this.predict(this.x_train[i])
        console.log('Expected: ' + this.y_train[i] + ' Model Output: ' + prediction)
        // update accuracy measures
        this.accuracy += this.y_train[i] === prediction ? 1 : -1
        this.samples++
        // calculate error (target - prediction)
        let loss = this.y_train[i] - prediction
        // update all weights: w += error * input * learning rate
        for (let w = 0; w < this.weights.length; w++) {
          this.weights[w] += loss * this.x_train[i][w] * this.learn_rate
        }
        // update bias
        this.bias += loss * this.learn_rate
      }
      // accuracy score after each epoch
      console.log(this.current_accuracy())
    }
  }
}
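
A minimal usage sketch, not part of the original file: it assumes the class above is saved as perceptron.js in the same directory, and trains on the AND truth table, which is linearly separable and therefore learnable by a single perceptron.

// usage-example.js (hypothetical): train the perceptron on the AND gate
const Perceptron = require('./perceptron')

// four training samples of two features each, with AND-gate labels
const x_train = [[0, 0], [0, 1], [1, 0], [1, 1]]
const y_train = [0, 0, 0, 1]

// 100 epochs is more than enough for four samples; learn_rate 0.1 matches the default
const model = new Perceptron(x_train, y_train, 100, 0.1)
model.fit()

console.log(model.predict([1, 1])) // expected: 1
console.log(model.predict([0, 1])) // expected: 0

Because the data is linearly separable, the weight and bias updates in fit() should converge to a separating line regardless of the random initialization.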