Skip to content

Commit 2d170f1

Browse files
feat: add another try to this
1 parent ce7d031 commit 2d170f1

8 files changed

Lines changed: 449 additions & 0 deletions

File tree

try4 (OOP Approach)/cpp/README.md

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
## Setup
2+
- Clone Repo with this Command (run in bash, zsh, etc)
3+
```bash
4+
git clone https://github.com/Keshav-writes-code/NeuralNetInCppTry.git
5+
```
6+
7+
## Usage
8+
- Create a `main.cpp` file in the project's root directory if it doesn't already exist
9+
- In your `main.cpp` file, include the `classes.h` header
10+
11+
```cpp
12+
#include "classes.h"
13+
```
14+
- Then create a NeuralNet object with this format (note: verify against the current `classes.h` API — the header declares `NeuralNet::MLP` with a 4-parameter constructor, not a `NeuralNet` class)
15+
16+
```cpp
17+
/*
18+
1st Param : Input Layer Size (Int)
19+
2nd Param : Hidden Layer Count (Int)
20+
3rd Param : Hidden Layer Sizes (Array of Ints)
21+
4th Param : Output Layer Size (Int)
22+
5th Param : Learning Rate (float)
23+
*/
24+
NeuralNet* NN = new NeuralNet(2, 5, {3, 4, 6, 7, 4 }, 5, 0.03);
25+
```
26+
27+
## Demo
28+
![image](https://github.com/user-attachments/assets/0583a0df-c0f6-45f6-91b5-4e3c248d9281)
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
#ifndef CLASSES_H
#define CLASSES_H

// Activation functions a Layer can apply to its pre-activations.
enum class activations { relu, sigmoid, softmax };
namespace NeuralNet {
// Write Code here

// One fully-connected layer: owns its neurons' weights, biases,
// pre-activation sums (z) and activations.
class Layer {
private:
  int size;                        // number of neurons in this layer
  float *activation;               // activation_function(z[i]); length `size`
  float *z;                        // pre-activation sums; length `size`
  float *bias;                     // one bias per neuron; length `size`
  float **weights;                 // weights[i][j]: weight from previous-layer neuron j to neuron i
  int prev_layer_size;             // fan-in (row length of `weights`)
  activations activation_function; // non-linearity applied by forward_pass
  void sigmoid();
  void relu();
  void softmax();
  friend class MLP; // MLP chains layers by reading `activation` directly

public:
  Layer(int size, int prev_layer_size, activations activation_function);
  ~Layer();
  // Computes z = W*inputs + b, then applies the activation function.
  // `inputs` must have `prev_layer_size` elements.
  void forward_pass(const float *inputs);
  void show_neurons();
};

// Multi-layer perceptron: hidden Layers followed by one output Layer.
class MLP {
private:
  Layer **layers;          // hidden layers [0..count-1]; output layer at [count]
  int input_layer_size;
  int hidden_layers_count;
  int *hidden_layer_sizes; // borrowed pointer; caller keeps it alive — TODO confirm ownership
  int output_layer_size;
  const float *predictions; // points at the output layer's activations after feed_forward

public:
  MLP(int input_layer_size, int hidden_layers_count, int *hidden_layer_sizes,
      int output_layer_size);
  ~MLP();
  // Prints the network topology to stdout.
  void describe();
  // Prints the number of weights and biases to stdout.
  void print_parameters_count();
  // Runs one sample through every layer; result is cached in `predictions`.
  void feed_forward(float *inputs);
  // Runs each sample, prints per-class outputs vs targets, and an accuracy %.
  void predict(float **feature_samples, float **target_samples,
               int samples_count);
};

} // namespace NeuralNet

#endif
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
#ifndef PROGRESSBAR_H
#define PROGRESSBAR_H

#include <iostream>
namespace Console {
// Renders a console progress bar for step `currentStep` of `totalSteps`.
// NOTE(review): declaration only — implementation not visible here; confirm
// whether it redraws in place (e.g. via '\r') before relying on that.
void showProgressBar(int totalSteps, int currentStep);
}

#endif

try4 (OOP Approach)/cpp/makefile

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
CXX = g++
CXXFLAGS =
TARGET = my_program
SRCDIR = src
SRCS = $(wildcard $(SRCDIR)/*.cpp)
OBJDIR = build
OBJS = $(SRCS:$(SRCDIR)/%.cpp=$(OBJDIR)/%.o)

# `all` and `clean` never produce files of those names; mark them phony so a
# stray file called "all" or "clean" cannot silently mask the targets.
.PHONY: all clean

all: $(TARGET)

# Link, then run the freshly built binary (the build doubles as "make run").
$(TARGET): $(OBJS)
	$(CXX) $(CXXFLAGS) -o $(TARGET) $(OBJS)
	./$(TARGET)

# Compile each source into build/; the order-only prerequisite ensures the
# directory exists without forcing rebuilds when its timestamp changes.
$(OBJDIR)/%.o: $(SRCDIR)/%.cpp | $(OBJDIR)
	$(CXX) $(CXXFLAGS) -c $< -o $@

$(OBJDIR):
	mkdir -p $(OBJDIR)

clean:
	rm -rf $(OBJDIR) $(TARGET)

try4 (OOP Approach)/cpp/my_program

35.9 KB
Binary file not shown.
Lines changed: 220 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,220 @@
1+
#include "../include/NeuralNet/classes.h"
2+
#include "../include/console/progressbar.h"
3+
#include <chrono> // For duration types
4+
#include <cmath> // for exp and round
5+
#include <cstdlib> // for rand() and srand()
6+
#include <iomanip> // for setw
7+
#include <iostream>
8+
#include <thread> // For threads
9+
10+
// Round `x` to two decimal places (for display).
float decimal_rounder(float x) {
  const double scaled = x * 100.0; // double math, matching round()'s domain
  return round(scaled) / 100.0;
}
11+
12+
// Uniform pseudo-random float in [-range, range], driven by rand().
// (Operation order kept identical so seeded runs reproduce exactly.)
float get_random(float range) {
  const float unit = (float)rand() / RAND_MAX; // [0, 1]
  return unit * range * 2 - range;
}
15+
16+
// Write `val` to stdout wrapped in an ANSI color escape.
// `colorCode` is the SGR number as text, e.g. "31" = red, "32" = green.
template <typename T>
void printInColor(const T &val, const std::string &colorCode) {
  const std::string prefix = "\033[" + colorCode + "m";
  std::cout << prefix << val << "\033[0m";
}
20+
21+
namespace NeuralNet {
22+
23+
// Builds a layer of `size` neurons fed by `prev_layer_size` inputs.
// Activations and z start zeroed; weights and biases are drawn from a small
// symmetric range so training starts near zero.
Layer::Layer(int size, int prev_layer_size, activations activation_function) {
  const float init_range = 0.01;

  this->size = size;
  this->prev_layer_size = prev_layer_size;
  this->activation_function = activation_function;

  // Value-initialised () buffers zero activation and z.
  activation = new float[size]();
  z = new float[size]();
  bias = new float[size];
  weights = new float *[size];

  // NOTE: rand() consumption order (bias[i], then row i's weights) is kept
  // exactly as before so seeded runs stay reproducible.
  for (int neuron = 0; neuron < size; neuron++) {
    bias[neuron] = get_random(init_range);
    weights[neuron] = new float[prev_layer_size];
    for (int in = 0; in < prev_layer_size; in++) {
      weights[neuron][in] = get_random(init_range);
    }
  }
}
42+
// Frees every buffer the constructor allocated: each per-neuron weight row,
// the row-pointer table, and the three flat arrays.
Layer::~Layer() {
  for (int neuron = 0; neuron < size; neuron++) {
    delete[] weights[neuron];
  }
  delete[] weights;
  delete[] activation;
  delete[] z;
  delete[] bias;
}
51+
52+
void Layer::sigmoid() {
53+
for (int i = 0; i < this->size; i++) {
54+
this->activation[i] = 1 / (1 + exp(-this->z[i]));
55+
}
56+
}
57+
void Layer::relu() {
58+
for (int i = 0; i < this->size; i++) {
59+
this->activation[i] = this->z[i] > 0 ? this->z[i] : 0;
60+
}
61+
}
62+
void Layer::softmax() {
63+
float sum = 0;
64+
for (int i = 0; i < this->size; i++) {
65+
sum += exp(this->z[i]);
66+
}
67+
for (int i = 0; i < size; i++) {
68+
this->activation[i] = exp(this->z[i]) / sum;
69+
}
70+
}
71+
72+
void Layer::forward_pass(const float *inputs) {
73+
for (int i = 0; i < this->size; i++) {
74+
for (int j = 0; j < this->prev_layer_size; j++) {
75+
this->z[i] += this->weights[i][j] * inputs[j];
76+
}
77+
this->z[i] += this->bias[i];
78+
79+
switch (this->activation_function) {
80+
case activations::relu:
81+
this->relu();
82+
break;
83+
case activations::sigmoid:
84+
this->sigmoid();
85+
break;
86+
case activations::softmax:
87+
this->softmax();
88+
break;
89+
}
90+
}
91+
}
92+
93+
// Builds the network: `hidden_layers_count` sigmoid hidden layers followed
// by one softmax output layer (stored at index hidden_layers_count).
// `hidden_layer_sizes` is borrowed, not copied — the caller must keep it
// alive for the MLP's lifetime.
MLP::MLP(int input_layer_size, int hidden_layers_count, int *hidden_layer_sizes,
         int output_layer_size) {
  this->input_layer_size = input_layer_size;
  this->hidden_layers_count = hidden_layers_count;
  this->hidden_layer_sizes = hidden_layer_sizes;
  this->output_layer_size = output_layer_size;
  this->predictions = nullptr;
  this->layers = new Layer *[hidden_layers_count + 1]; // +1 for output layer

  // Hidden layers: layer i is fed by layer i-1, or by the raw inputs for i==0.
  for (int i = 0; i < hidden_layers_count; i++) {
    int fan_in = (i == 0) ? input_layer_size : hidden_layer_sizes[i - 1];
    this->layers[i] = new Layer(hidden_layer_sizes[i], fan_in, activations::sigmoid);
  }
  // Output layer. Guard hidden_layers_count == 0: the old code read
  // hidden_layer_sizes[-1]; with no hidden layers the output layer is fed
  // directly by the inputs.
  int output_fan_in = (hidden_layers_count > 0)
                          ? hidden_layer_sizes[hidden_layers_count - 1]
                          : input_layer_size;
  this->layers[hidden_layers_count] =
      new Layer(output_layer_size, output_fan_in, activations::softmax);
}
114+
// Frees every layer and the layer-pointer table.
MLP::~MLP() {
  // Each entry was allocated with scalar `new Layer(...)`, so scalar delete
  // is required (the old `delete[]` was undefined behaviour). The loop is
  // inclusive of hidden_layers_count so the output layer — previously
  // leaked — is freed too.
  for (int i = 0; i <= this->hidden_layers_count; i++) {
    delete this->layers[i];
  }
  delete[] this->layers;
}
120+
121+
// Prints a colored summary of the network topology to stdout:
// a banner, the total layer count, and each layer's size separated by " | "
// with a line break every 6 entries.
// NOTE(review): the banner's interior spacing may have been collapsed by
// formatting tools — confirm against the intended console output.
void MLP::describe() {
  // Using ANSI escape codes to make the terminal output colorful
  printInColor("\n+-----------------------------------------+\n",
               "32"); // Green
  printInColor("| Neural Network |\n", "32");
  printInColor("+-----------------------------------------+\n", "32");

  // +1 accounts for the output layer stored after the hidden layers.
  std::cout << "Layer Count : " << this->hidden_layers_count + 1 << std::endl;
  printInColor("Layer Sizes: \n", "36"); // Cyan

  // Input size first, then each hidden layer, then the output size.
  std::cout << std::setw(4) << this->input_layer_size << " | ";
  for (int i = 0; i < this->hidden_layers_count; i++) {
    std::cout << std::setw(4)
              << this->hidden_layer_sizes[i]; // Formatting for better spacing
    std::cout << " | ";
    // Breaking line for better readability every 10 layers
    if ((i + 1) % 6 == 0) {
      std::cout << std::endl;
    }
  }
  std::cout << std::setw(4)
            << this->output_layer_size; // Formatting for better spacing
  std::cout << " | ";
  // Breaking line for better readability every 10 layers
  std::cout << std::endl << std::endl;
}
147+
// Prints the number of trainable parameters (weights and biases) to stdout.
// Weights start at 0 — the input layer has none (the old code seeded the
// count with input_layer_size) — and the output layer's weights and biases
// are now included (previously omitted).
void MLP::print_parameters_count() {
  int weightsCount = 0;
  int prevLayerNeuronsCount = this->input_layer_size;
  int biasesCount = 0;

  for (int i = 0; i < this->hidden_layers_count; i++) {
    weightsCount += this->hidden_layer_sizes[i] * prevLayerNeuronsCount;
    prevLayerNeuronsCount = this->hidden_layer_sizes[i];
    biasesCount += this->hidden_layer_sizes[i];
  }
  // Output layer parameters: fed by the last hidden layer (or the inputs).
  weightsCount += this->output_layer_size * prevLayerNeuronsCount;
  biasesCount += this->output_layer_size;

  std::cout << "Weights Count : " << weightsCount << std::endl;
  std::cout << "Biases Count : " << biasesCount << std::endl;
}
160+
161+
void MLP::feed_forward(float *inputs) {
162+
this->layers[0]->forward_pass(inputs);
163+
const float *intermediate_activations = this->layers[0]->activation;
164+
for (int i = 1; i <= this->hidden_layers_count;
165+
i++) { // "<=" to account for the output layer
166+
Layer *c_layer = this->layers[i];
167+
c_layer->forward_pass(intermediate_activations);
168+
intermediate_activations = c_layer->activation;
169+
}
170+
this->predictions = intermediate_activations;
171+
}
172+
// Runs every sample through the network and prints, per sample, the rounded
// inputs, each output class colored by correctness, and finally an overall
// accuracy percentage. A prediction is the argmax of the output activations;
// it is "correct" when the target for that index equals 1 (assumes one-hot
// targets — TODO confirm with callers).
void MLP::predict(float **feature_samples, float **target_samples,
                  int samples_count) {
  float accuracy = 0;
  for (int j = 0; j < samples_count; j++) {
    // Echo the sample's inputs, comma-separated, rounded to 2 decimals.
    std::cout << "Inputs : ";
    for (int i = 0; i < this->input_layer_size; i++) {
      if (i != 0) {
        std::cout << ", ";
      }
      const float result = decimal_rounder(feature_samples[j][i]);
      std::cout << result;
      if (i == this->input_layer_size - 1) {
        std::cout << std::endl;
      }
    }
    this->feed_forward(feature_samples[j]);
    const int outputSize = this->output_layer_size;
    // Get highest Output
    float max = this->predictions[0];
    int maxIndex = 0;
    for (int i = 1; i < outputSize; i++) {
      if (this->predictions[i] > max) {
        max = this->predictions[i];
        maxIndex = i;
      }
    }
    // Print each class: red = not predicted, yellow = predicted but wrong,
    // green = predicted and target is 1 (also scores toward accuracy).
    std::cout << "Outputs : \n";
    for (int i = 0; i < outputSize; i++) {
      const float result = decimal_rounder(this->predictions[i]);
      const char *color = "32"; // Green
      if (i != maxIndex) {
        color = "31";
      } // Red
      else if (maxIndex == i && target_samples[j][i] != 1) {
        color = "33";
      } // Yellow
      else if (maxIndex == i && target_samples[j][i] == 1) {
        // Each correct sample contributes an equal share of 100%.
        accuracy += 100.0 / samples_count;
      }
      std::cout << "\033[" << color << "m" << "[" << i << "] : " << result
                << " => " << target_samples[j][i] << "\033[0m";
      std::cout << std::endl;
    }
  }
  std::cout << "Accuracy : " << accuracy << "%" << std::endl;
  ; // (stray semicolon in the original, left as-is)
}
219+
220+
} // namespace NeuralNet

0 commit comments

Comments
 (0)