-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathNeuron.cpp
52 lines (42 loc) · 1.27 KB
/
Neuron.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
#include "Neuron.h"
#include "Layer.h"
#include "HyperParameters.h"

#include <cmath>
#include <cstdlib>
#include <random>
/// Constructs a neuron with one connection per neuron in the next layer.
/// @param numOutputs number of downstream neurons this neuron feeds;
///        weights start random, momentum deltas start at zero.
Neuron::Neuron(int numOutputs)
{
	// Guard against a negative count so the vector size conversions are safe.
	const int count = numOutputs > 0 ? numOutputs : 0;

	// Reserve up front: one allocation instead of repeated regrowth.
	m_outputWeights.reserve(count);
	for (int c = 0; c < count; ++c) {
		m_outputWeights.push_back(randomWeight());
	}
	// resize value-initializes the new doubles, i.e. all deltas start at 0.0.
	m_outputWeightDeltas.resize(count, 0.0);
}
/// Computes this neuron's output from the weighted input sum.
/// @param input the already-summed weighted inputs from the previous layer.
void Neuron::feedForward(double input)
{
	// Squash the input through the activation function and store the result.
	const double activated = Neuron::activationFunction(input);
	setOutputVal(activated);
}
/// Back-propagation step: stores this neuron's error gradient.
/// @param delta the error term propagated from the next layer (or from the
///        target value, for an output neuron).
/// NOTE: activationFunctionDerivative is expressed in terms of the neuron's
/// *activated output* (m_outputVal), not the raw input — see its definition.
void Neuron::updateGradient(double delta)
{
m_gradient = delta * Neuron::activationFunctionDerivative(m_outputVal);
}
/// Applies one gradient-descent-with-momentum step to the weight feeding
/// the downstream neuron at `index`.
///   newDelta = LEARNING_RATE * thisOutput * otherGradient
///            + MOMENTUM_FACTOR * previousDelta
/// @param index         position of the downstream neuron in the next layer.
/// @param otherGradient the gradient of that downstream neuron.
/// @throws std::out_of_range if `index` is invalid — the original mixed a
///         checked read (.at) with unchecked writes ([]); both accesses are
///         now checked so a bad index fails loudly instead of invoking UB.
void Neuron::updateOutputWeight(int index, double otherGradient)
{
	const double gradientStep =
		HyperParameters::LEARNING_RATE * getOutputVal() * otherGradient;
	const double momentumTerm =
		HyperParameters::MOMENTUM_FACTOR * m_outputWeightDeltas.at(index);
	const double newDeltaWeight = gradientStep + momentumTerm;

	m_outputWeightDeltas.at(index) = newDeltaWeight;
	m_outputWeights.at(index) += newDeltaWeight;
}
/// Logistic sigmoid activation: maps any real input into (0, 1).
/// tanh(x) would be a slightly faster alternative with a wider output
/// range, per the original author's note.
double Neuron::activationFunction(double x)
{
	return 1.0 / (1.0 + std::exp(-x));
}
/// Derivative of the logistic sigmoid, expressed in terms of the sigmoid's
/// OUTPUT: if s = sigmoid(z), then ds/dz = s * (1 - s).
/// @param x the neuron's activated output value (NOT the pre-activation
///          input) — callers pass m_outputVal; see updateGradient.
double Neuron::activationFunctionDerivative(double x)
{
// derivative
return x * (1 - x);
}
/// Returns a uniformly distributed random initial weight in [0, 1).
/// Replaces rand()/RAND_MAX: rand() has weak statistical quality and a
/// possibly small RAND_MAX, and was never seeded here, so every run produced
/// the same weights. <random> gives a well-defined distribution; the engine
/// is seeded once from std::random_device on first use.
double Neuron::randomWeight(void)
{
	static std::mt19937 engine{std::random_device{}()};
	static std::uniform_real_distribution<double> dist(0.0, 1.0);
	return dist(engine);
}