"""pyplots.ai
contour-decision-boundary: Decision Boundary Classifier Visualization
Library: letsplot 4.8.2 | Python 3.13.11
Quality: 91/100 | Created: 2026-01-01
"""

import numpy as np
import pandas as pd
from lets_plot import (
    LetsPlot,
    aes,
    element_text,
    geom_point,
    geom_tile,
    ggplot,
    ggsave,
    ggsize,
    labs,
    scale_color_manual,
    scale_fill_manual,
    scale_shape_manual,
    theme,
    theme_minimal,
)
from sklearn.datasets import make_moons
from sklearn.neighbors import KNeighborsClassifier


LetsPlot.setup_html()

# Data: synthetic two-class "moons" dataset (fixed seeds for reproducibility)
np.random.seed(42)
X, y = make_moons(n_samples=200, noise=0.25, random_state=42)

# Fit a 5-nearest-neighbours classifier on the raw features
knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(X, y)

# Dense evaluation grid spanning the feature space with a 0.5 margin
step = 0.02  # grid resolution
x_lo, x_hi = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
y_lo, y_hi = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
gx, gy = np.meshgrid(np.arange(x_lo, x_hi, step), np.arange(y_lo, y_hi, step))

# Classify every grid cell; these predictions paint the decision regions
region = knn.predict(np.c_[gx.ravel(), gy.ravel()])

# Long-format frame for the background tiles (class as string for a
# discrete fill scale)
mesh_df = pd.DataFrame(
    {"X1": gx.ravel(), "X2": gy.ravel(), "Predicted": region.astype(str)}
)

# Training points, labelled by true class and by whether KNN got them right
train_df = pd.DataFrame({"X1": X[:, 0], "X2": X[:, 1], "Class": y.astype(str)})
fitted = knn.predict(X)
train_df["Correct"] = np.where(fitted == y, "Correct", "Incorrect")

# Plot: shaded decision regions underneath the training scatter
plot = (
    ggplot()
    + geom_tile(aes(x="X1", y="X2", fill="Predicted"), data=mesh_df, alpha=0.4)
    + geom_point(
        aes(x="X1", y="X2", color="Class", shape="Correct"),
        data=train_df,
        size=5,
        stroke=1.5,
    )
    # Matching palettes so region fill and point colour agree per class
    + scale_fill_manual(values=["#306998", "#FFD43B"], name="Predicted Class")
    + scale_color_manual(values=["#306998", "#FFD43B"], name="True Class")
    # Shape 16 = filled circle (correct), 4 = X (misclassified)
    + scale_shape_manual(values=[16, 4], name="Classification")
    + labs(
        title="contour-decision-boundary · letsplot · pyplots.ai",
        x="Feature X1",
        y="Feature X2",
    )
    + theme_minimal()
    + theme(
        plot_title=element_text(size=24),
        axis_title=element_text(size=20),
        axis_text=element_text(size=16),
        legend_title=element_text(size=18),
        legend_text=element_text(size=14),
        legend_position="right",
    )
    + ggsize(1600, 900)
)

# Static export (scale=3 for a high-resolution raster) plus an
# interactive HTML copy, both in the working directory
ggsave(plot, filename="plot.png", path=".", scale=3)
ggsave(plot, filename="plot.html", path=".")