Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions models/knn.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
from sklearn.neighbors import KNeighborsClassifier, KNeighborsRegressor
from sklearn.metrics import accuracy_score, mean_squared_error

class KNNClassifier:
    """Thin wrapper around sklearn's KNeighborsClassifier exposing a
    train / predict / evaluate API used by the Streamlit pages."""

    def __init__(self, n_neighbors=3):
        # Keep k around for display/debugging; the sklearn estimator does the work.
        self.n_neighbors = n_neighbors
        self.model = KNeighborsClassifier(n_neighbors=self.n_neighbors)

    def train(self, X, y):
        """Fit the underlying classifier on features X and labels y."""
        self.model.fit(X, y)

    def predict(self, X):
        """Return predicted class labels for X."""
        return self.model.predict(X)

    def evaluate(self, X, y):
        """Return classification accuracy of the fitted model on (X, y)."""
        return accuracy_score(y, self.predict(X))


class KNNRegressor:
    """Thin wrapper around sklearn's KNeighborsRegressor exposing a
    train / predict / evaluate API used by the Streamlit pages."""

    def __init__(self, n_neighbors=3):
        self.n_neighbors = n_neighbors
        self.model = KNeighborsRegressor(n_neighbors=self.n_neighbors)

    def train(self, X, y):
        """Fit the underlying regressor on features X and targets y."""
        self.model.fit(X, y)

    def predict(self, X):
        """Return predicted target values for X."""
        return self.model.predict(X)

    def evaluate(self, X, y):
        """Return the RMSE of the fitted model on (X, y).

        Computed as sqrt(MSE) rather than ``mean_squared_error(..., squared=False)``:
        the ``squared`` keyword was deprecated in scikit-learn 1.4 and removed
        in 1.6, so the original call breaks on current releases.
        """
        y_pred = self.predict(X)
        return mean_squared_error(y, y_pred) ** 0.5  # RMSE
5 changes: 5 additions & 0 deletions models/svm.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
from utils.plot_helpers import plot_confusion_matrix
import streamlit as st

def show_confusion_matrix(y_true, y_pred, labels=("Class 0", "Class 1"), cmap="Purples"):
    """Render a confusion matrix for the given predictions in the Streamlit app.

    The original module-level snippet referenced ``y_true`` / ``y_pred`` that
    were never defined, so importing this module raised NameError. Wrapping the
    example in a function keeps the same call, but makes it usable with real data.

    Parameters:
        y_true: ground-truth labels.
        y_pred: predicted labels.
        labels: class names shown on the matrix axes.
        cmap: matplotlib colormap name for the heatmap.
    """
    fig = plot_confusion_matrix(y_true, y_pred, labels=list(labels), cmap=cmap)
    st.pyplot(fig)
66 changes: 66 additions & 0 deletions pages/KNN.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
import streamlit as st
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from models.knn import KNNClassifier, KNNRegressor
from utils.plot_helpers import plot_confusion_matrix

st.title("🧩 K-Nearest Neighbors (KNN) Simulator")

mode = st.radio("Choose Mode", ["Classification", "Regression"])

if mode == "Classification":
    st.subheader("KNN Classifier Visualization")

    # Synthetic 2-D dataset so the decision boundary can be drawn directly.
    X, y = datasets.make_classification(
        n_samples=150, n_features=2, n_informative=2, n_redundant=0,
        n_clusters_per_class=1, random_state=42
    )

    k = st.slider("Number of Neighbors (k)", 1, 15, 3)
    model = KNNClassifier(n_neighbors=k)
    model.train(X, y)
    y_pred = model.predict(X)

    # Decision boundary: classify every point of a dense grid over the data range.
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, 300),
                         np.linspace(y_min, y_max, 300))
    Z = model.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Use an explicit Figure: st.pyplot(plt) relies on pyplot's implicit
    # global figure, which Streamlit has deprecated.
    fig, ax = plt.subplots(figsize=(8, 6))
    ax.contourf(xx, yy, Z, cmap="coolwarm", alpha=0.5)
    ax.scatter(X[:, 0], X[:, 1], c=y, cmap="coolwarm", edgecolors="k")
    ax.set_title(f"KNN Classifier (k={k})")
    st.pyplot(fig)

    st.write(f"**Accuracy:** {model.evaluate(X, y):.2f}")

    # Confusion matrix
    fig = plot_confusion_matrix(y, y_pred, labels=["Class 0", "Class 1"], cmap="Purples")
    st.pyplot(fig)

else:
    st.subheader("KNN Regressor Visualization")

    # Noisy sine wave. NOTE(review): no random seed is set, so the noise
    # (and the reported RMSE) changes on every Streamlit rerun — confirm
    # whether that is intended before pinning a seed.
    X = np.linspace(0, 10, 100).reshape(-1, 1)
    y = np.sin(X).ravel() + np.random.randn(100) * 0.1

    k = st.slider("Number of Neighbors (k)", 1, 15, 3)
    model = KNNRegressor(n_neighbors=k)
    model.train(X, y)
    y_pred = model.predict(X)

    # Plot regression fit on an explicit Figure (see note above on st.pyplot).
    fig, ax = plt.subplots(figsize=(8, 6))
    ax.scatter(X, y, color="blue", label="Data")
    ax.plot(X, y_pred, color="red", label=f"KNN Prediction (k={k})")
    ax.set_title("KNN Regression")
    ax.legend()
    st.pyplot(fig)

    st.write(f"**RMSE:** {model.evaluate(X, y):.3f}")
6 changes: 6 additions & 0 deletions pages/LogisticRegression.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
from utils.plot_helpers import plot_roc_curve
import streamlit as st

# Example usage, wrapped in a function: the original module-level call
# referenced `y_true` / `y_pred_proba` that were never defined, so Streamlit
# raised NameError as soon as it imported this page.
def show_roc_curve(y_true, y_pred_proba):
    """Render a ROC curve for the given true labels and predicted probabilities.

    Parameters:
        y_true: ground-truth binary labels.
        y_pred_proba: predicted scores/probabilities for the positive class.
    """
    fig = plot_roc_curve(y_true, y_pred_proba)
    st.pyplot(fig)
62 changes: 62 additions & 0 deletions pages/svm.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
import streamlit as st
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from models.svm import SVMClassifier, SVMRegressor

st.title("🔷 Support Vector Machine (SVM) Simulator")

option = st.radio("Choose Mode", ["Classification", "Regression"])

if option == "Classification":
    st.subheader("SVM Classifier Visualization")

    # Two well-separated blobs keep the decision boundary easy to read.
    X, y = datasets.make_blobs(n_samples=100, centers=2, random_state=6, cluster_std=1.2)

    kernel = st.selectbox("Kernel", ["linear", "poly", "rbf", "sigmoid"])
    C = st.slider("Regularization (C)", 0.01, 10.0, 1.0)

    model = SVMClassifier(kernel=kernel, C=C)
    model.train(X, y)

    # Decision boundary: classify every point of a dense grid over the data range.
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, 300),
                         np.linspace(y_min, y_max, 300))
    Z = model.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Use an explicit Figure: st.pyplot(plt) relies on pyplot's implicit
    # global figure, which Streamlit has deprecated.
    fig, ax = plt.subplots(figsize=(8, 6))
    ax.contourf(xx, yy, Z, cmap='coolwarm', alpha=0.6)
    ax.scatter(X[:, 0], X[:, 1], c=y, cmap='coolwarm', edgecolors='k')
    support_vectors = model.get_support_vectors()  # fetch once instead of twice
    ax.scatter(support_vectors[:, 0], support_vectors[:, 1],
               s=100, facecolors='none', edgecolors='yellow', label='Support Vectors')
    ax.legend()
    st.pyplot(fig)
    st.write(f"**Accuracy:** {model.evaluate(X, y):.2f}")

else:
    st.subheader("SVM Regressor Visualization")

    # Noisy sine data. NOTE(review): no random seed is set, so the data
    # (and the reported RMSE) changes on every Streamlit rerun.
    X = np.sort(5 * np.random.rand(100, 1), axis=0)
    y = np.sin(X).ravel() + np.random.randn(100) * 0.1

    kernel = st.selectbox("Kernel", ["linear", "poly", "rbf", "sigmoid"])
    C = st.slider("Regularization (C)", 0.1, 10.0, 1.0)
    epsilon = st.slider("Epsilon", 0.01, 1.0, 0.1)

    model = SVMRegressor(kernel=kernel, C=C, epsilon=epsilon)
    model.train(X, y)
    y_pred = model.predict(X)

    # Plot regression fit on an explicit Figure (see note above on st.pyplot).
    fig, ax = plt.subplots(figsize=(8, 6))
    ax.scatter(X, y, color="blue", label="Data")
    ax.plot(X, y_pred, color="red", label="SVM Prediction")
    ax.set_title("SVM Regression")
    ax.legend()
    st.pyplot(fig)
    st.write(f"**RMSE:** {model.evaluate(X, y):.3f}")
57 changes: 35 additions & 22 deletions utils/plot_helpers.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,41 @@
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import auc, confusion_matrix, roc_curve

def plot_regression_line(X, y, model):
    """Scatter the data points and overlay the model's predicted line.

    Returns the pyplot module so callers can pass it to st.pyplot / savefig.
    """
    plt.figure()
    plt.scatter(X, y, color="blue", label="Data")
    plt.plot(X, model.predict(X), color="red", label="Prediction")
    plt.legend()
    return plt
def plot_confusion_matrix(y_true, y_pred, labels=None, annotate=True, cmap="Blues"):
    """
    Plots a customizable confusion matrix.

    Parameters:
        y_true (array-like): Ground truth labels.
        y_pred (array-like): Predicted labels.
        labels (list, optional): Class names to display on axes.
        annotate (bool): Whether to show cell values.
        cmap (str): Colormap for heatmap (e.g. 'Blues', 'Greens', 'Oranges').

    Returns:
        fig (matplotlib.figure.Figure): The confusion matrix figure.
    """
    cm = confusion_matrix(y_true, y_pred)

    fig, ax = plt.subplots(figsize=(6, 5))
    sns.set_style("whitegrid")

    sns.heatmap(
        cm,
        annot=annotate,
        fmt="d" if annotate else "",
        cmap=cmap,
        cbar=False,
        xticklabels=labels if labels is not None else "auto",
        yticklabels=labels if labels is not None else "auto",
        linewidths=0.5,
        ax=ax,
    )

    ax.set_xlabel("Predicted Labels", fontsize=11)
    ax.set_ylabel("True Labels", fontsize=11)
    ax.set_title("Confusion Matrix", fontsize=13, pad=12)

    plt.tight_layout()
    return fig


def plot_roc_curve(y_true, y_scores):
    """
    Plots a ROC curve with its AUC, plus the chance diagonal.

    Restored: pages/LogisticRegression.py imports this helper, so removing it
    (as this change did) would make that page fail with ImportError.

    Parameters:
        y_true (array-like): Ground truth binary labels.
        y_scores (array-like): Predicted scores/probabilities for the positive class.

    Returns:
        fig (matplotlib.figure.Figure): The ROC curve figure.
    """
    fpr, tpr, _ = roc_curve(y_true, y_scores)
    roc_auc = auc(fpr, tpr)

    fig, ax = plt.subplots()
    ax.plot(fpr, tpr, label=f"AUC = {roc_auc:.2f}")
    ax.plot([0, 1], [0, 1], linestyle="--")
    ax.set_xlabel("False Positive Rate")
    ax.set_ylabel("True Positive Rate")
    ax.legend()

    plt.tight_layout()
    return fig