Skip to content

Commit 800af4b

Browse files
feat(mlp)!: internalize predictions and change feedForward to void to support stateful outputs
1 parent f912578 commit 800af4b

1 file changed

Lines changed: 49 additions & 47 deletions

File tree

  • try1 (OOP Approach)/cpp/include/NeuralNet
Lines changed: 49 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -1,55 +1,57 @@
11
#ifndef CLASSES_H
22
#define CLASSES_H
33

4-
namespace NeuralNet
5-
{
6-
class Neuron
7-
{
8-
public:
9-
float value;
10-
float bias;
11-
float *weights;
12-
int prevLayerNeurons_count;
13-
Neuron(int prevLayerNeurons_count);
14-
~Neuron();
15-
};
4+
namespace NeuralNet {
5+
class Neuron {
6+
public:
7+
float value;
8+
float bias;
9+
float *weights;
10+
int prevLayerNeurons_count;
11+
Neuron(int prevLayerNeurons_count);
12+
~Neuron();
13+
};
1614

17-
class Layer
18-
{
19-
private:
20-
int size;
15+
class Layer {
16+
private:
17+
int size;
2118

22-
public:
23-
Neuron **neurons;
24-
Layer(int size, int prevLayerSize);
25-
~Layer();
26-
void showNeurons();
27-
};
19+
public:
20+
Neuron **neurons;
21+
Layer(int size, int prevLayerSize);
22+
~Layer();
23+
void showNeurons();
24+
};
2825

29-
class MLP
30-
{
31-
private:
32-
Layer **HidOutlayers;
33-
int hidOutLayerCount;
34-
int *hidOutLayerSizes;
35-
int inputLayerSize;
36-
int outputLayerSize;
37-
float lRate;
26+
class MLP {
27+
private:
28+
Layer **HidOutlayers;
29+
int hidOutLayerCount;
30+
int *hidOutLayerSizes;
31+
int inputLayerSize;
32+
int outputLayerSize;
33+
float lRate;
34+
float *predictions;
3835

39-
public:
40-
MLP(int inputLayerSize, int hidOutLayerCount, int *hidOutLayerSizes, int outputLayerSize, float lRate);
41-
~MLP();
42-
void describe();
43-
void resetNeuronsActivations();
44-
float *feedForward(float *inputs, int inputSize);
45-
void predict(float **inputs, int inputSize, float **target, int targetSize, int samplesCount);
46-
float cost(float *target, int targetArr_size);
47-
float getParamTCostDerivative(float &param, float *inputArr, int inputSize, float *targetArr, int targetArr_size);
48-
void backPropogate(float *inputArr, int inputSize, float *target, int targetArr_size);
49-
void train(float **inputArr_2d, int input_elem_size, float **targetArr_2d, int target_elem_size, int items_count, int epochs);
50-
void printParamsCount();
51-
void constructLayer(int i);
52-
};
53-
}
36+
public:
37+
MLP(int inputLayerSize, int hidOutLayerCount, int *hidOutLayerSizes,
38+
int outputLayerSize, float lRate);
39+
~MLP();
40+
void describe();
41+
void resetNeuronsActivations();
42+
void feedForward(float *inputs, int inputSize);
43+
void predict(float **inputs, int inputSize, float **target, int targetSize,
44+
int samplesCount);
45+
float cost(float *target, int targetArr_size);
46+
float getParamTCostDerivative(float &param, float *inputArr, int inputSize,
47+
float *targetArr, int targetArr_size);
48+
void backPropogate(float *inputArr, int inputSize, float *target,
49+
int targetArr_size);
50+
void train(float **inputArr_2d, int input_elem_size, float **targetArr_2d,
51+
int target_elem_size, int items_count, int epochs);
52+
void printParamsCount();
53+
void constructLayer(int i);
54+
};
55+
} // namespace NeuralNet
5456

55-
#endif
57+
#endif

0 commit comments

Comments (0)