Skip to content

Commit ce7d031

Browse files
refactor(neuron)!: rename value to activation and introduce z to separate pre- and post-activation states
1 parent 2197beb commit ce7d031

2 files changed

Lines changed: 13 additions & 11 deletions

File tree

try1 (OOP Approach)/cpp/include/NeuralNet/classes.h

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,8 @@
44
namespace NeuralNet {
55
class Neuron {
66
public:
7-
float value;
7+
float activation;
8+
float z;
89
float bias;
910
float *weights;
1011
int prevLayerNeurons_count;

try1 (OOP Approach)/cpp/src/classes.cpp

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,8 @@ namespace NeuralNet {
2828
Neuron::Neuron(int prevLayerNeurons_count) {
2929
this->prevLayerNeurons_count = prevLayerNeurons_count;
3030
float randRange = 1;
31-
this->value = 0;
31+
this->activation = 0;
32+
this->z = 0;
3233
this->bias = decimalRounder(getRandom(randRange));
3334
this->weights = new float[prevLayerNeurons_count];
3435

@@ -40,7 +41,7 @@ Neuron::Neuron(int prevLayerNeurons_count) {
4041
Neuron::~Neuron() {
4142
delete[] this->weights;
4243
this->weights = nullptr;
43-
this->value = 0;
44+
this->activation = 0;
4445
this->bias = 0;
4546
}
4647

@@ -64,7 +65,7 @@ Layer::~Layer() {
6465
void Layer::showNeurons() {
6566
for (int i = 0; i < this->size; i++) {
6667
std::cout << "Neuron - " << i << std::endl;
67-
std::cout << "value : " << this->neurons[i]->value << std::endl;
68+
std::cout << "value : " << this->neurons[i]->activation << std::endl;
6869
std::cout << "bias :- " << this->neurons[i]->bias << std::endl;
6970
std::cout << "weights: ";
7071

@@ -137,7 +138,7 @@ void MLP::resetNeuronsActivations() {
137138
for (int i = 0; i < this->hidOutLayerCount; i++) {
138139
// for Traversing Each Neuron of a Layer
139140
for (int i2 = 0; i2 < this->hidOutLayerSizes[i]; i2++) {
140-
this->HidOutlayers[i]->neurons[i2]->value = 0;
141+
this->HidOutlayers[i]->neurons[i2]->activation = 0;
141142
}
142143
}
143144
}
@@ -150,7 +151,7 @@ void MLP::feedForward(float *inputArr, int inputSize) {
150151
if (inputSize != this->inputLayerSize)
151152
throw runtime_error("Expected Input was Not Received");
152153
for (int i = 0; i < this->inputLayerSize; i++) {
153-
tempInputLayer->neurons[i]->value = inputArr[i];
154+
tempInputLayer->neurons[i]->activation = inputArr[i];
154155
}
155156

156157
// for Traversing Each Layer
@@ -161,9 +162,9 @@ void MLP::feedForward(float *inputArr, int inputSize) {
161162
float weightedSum = 0;
162163
// For traversing each Weight of current Neuron
163164
for (int i3 = 0; i3 < cNeuron->prevLayerNeurons_count; i3++) {
164-
weightedSum += prevLayer->neurons[i3]->value * cNeuron->weights[i3];
165+
cNeuron->z += prevLayer->neurons[i3]->activation * cNeuron->weights[i3];
165166
}
166-
cNeuron->value += sigmoid(weightedSum) + cNeuron->bias;
167+
cNeuron->activation += sigmoid(cNeuron->z) + cNeuron->bias;
167168
// TO DIplay Each Neuron's Final Activation in a Formatted way
168169
// std::cout<<"Neuron ["<<i<<"]"<<"["<<i2<<"] : "<<cNeuron->value<<endl;
169170
}
@@ -173,7 +174,7 @@ void MLP::feedForward(float *inputArr, int inputSize) {
173174
// for Returning output
174175
const int outputSize = this->hidOutLayerSizes[this->hidOutLayerCount - 1];
175176
for (int i = 0; i < outputSize; i++) {
176-
this->predictions[i] = prevLayer->neurons[i]->value;
177+
this->predictions[i] = prevLayer->neurons[i]->activation;
177178
}
178179
delete tempInputLayer;
179180
}
@@ -232,7 +233,7 @@ float MLP::cost(float *targetArr, int targetArr_size) {
232233

233234
float cost = 0;
234235
for (int i = 0; i < this->outputLayerSize; i++) {
235-
cost += pow(outLayer->neurons[i]->value - targetArr[i], 2);
236+
cost += pow(outLayer->neurons[i]->activation - targetArr[i], 2);
236237
}
237238
return cost;
238239
}
@@ -263,7 +264,7 @@ void MLP::backPropogate(float *inputArr, int inputSize, float *targetArr,
263264
float *a_prev = new float[last_hidden_layer_size]();
264265
for (int i = 0; i < last_hidden_layer_size; i++) {
265266
a_prev[i] =
266-
this->HidOutlayers[this->hidOutLayerCount - 2]->neurons[i]->value;
267+
this->HidOutlayers[this->hidOutLayerCount - 2]->neurons[i]->activation;
267268
}
268269

269270
float *output_layer_deltas = new float[this->outputLayerSize]();

0 commit comments

Comments (0)