Commit ae04285

[DOC] add python examples via 'literalinclude' and a testing CI
1 parent 02dafef commit ae04285

18 files changed

Lines changed: 550 additions & 161 deletions

.github/workflows/docs.yml

Lines changed: 90 additions & 0 deletions
name: Documentation

on:
  push:
    branches:
      - main
      - dev
    paths:
      - 'docs/**'
      - 'src/**'
      - '.github/workflows/docs.yml'
  pull_request:
    branches:
      - main
      - dev
    paths:
      - 'docs/**'
      - 'src/**'
      - '.github/workflows/docs.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test-snippets:
    name: test-doc-snippets
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10', '3.11', '3.12']
      fail-fast: false

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -e ".[all_extras,test]"
          python -m pip install -r docs/requirements.txt

      - name: Show dependencies
        run: python -m pip list

      - name: Test documentation snippets
        run: |
          python -m pytest docs/tests/test_doc_snippets.py -v --tb=short

  build-docs:
    name: build-docs
    runs-on: ubuntu-latest
    needs: test-snippets

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -e ".[all_extras]"
          python -m pip install -r docs/requirements.txt

      - name: Show dependencies
        run: python -m pip list

      - name: Build documentation
        run: |
          cd docs && sphinx-build -b html source build/html -W --keep-going

      - name: Run doctest
        run: |
          cd docs && sphinx-build -b doctest source build/doctest || true

      - name: Upload documentation artifact
        uses: actions/upload-artifact@v4
        with:
          name: docs-html
          path: docs/build/html/
          retention-days: 7

docs/source/_snippets/__init__.py

Lines changed: 12 additions & 0 deletions
"""Documentation code snippets.

This package contains testable Python code snippets that are included in the
documentation using Sphinx's ``literalinclude`` directive. Each snippet file
can be executed directly to verify it works correctly.

The snippets are organized by documentation section:

- getting_started/: Quick start examples
- installation/: Installation verification examples
- user_guide/: In-depth tutorial examples
- examples/: Gallery examples
"""

docs/source/_snippets/conftest.py

Lines changed: 46 additions & 0 deletions
"""Pytest configuration for documentation snippets.

This conftest provides shared fixtures that snippet files can use for testing.
The fixtures ensure consistent behavior across all snippet tests.
"""

import numpy as np
import pytest


@pytest.fixture
def simple_search_space():
    """Simple search space for basic examples."""
    return {
        "x": np.arange(-5, 5, 0.1),
        "y": np.arange(-5, 5, 0.1),
    }


@pytest.fixture
def simple_objective():
    """Simple objective function for basic examples."""

    def objective(params):
        x = params["x"]
        y = params["y"]
        return -(x**2 + y**2)

    return objective


@pytest.fixture
def sklearn_data():
    """Load the iris dataset for sklearn examples."""
    from sklearn.datasets import load_iris

    return load_iris(return_X_y=True)


@pytest.fixture
def sklearn_train_test_split(sklearn_data):
    """Split sklearn data into train and test sets."""
    from sklearn.model_selection import train_test_split

    X, y = sklearn_data
    return train_test_split(X, y, test_size=0.2, random_state=42)
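The CI test job above runs docs/tests/test_doc_snippets.py, which is not among the files shown in this excerpt. A plausible minimal sketch of such a runner, assuming it simply executes every snippet as a script so that each file's __main__ assertions fire (the file layout and helper names below are hypothetical):

"""Hypothetical sketch of docs/tests/test_doc_snippets.py (not shown in this diff)."""
import runpy
from pathlib import Path

import pytest

# Assumed layout: this file lives in docs/tests/, snippets in docs/source/_snippets/.
SNIPPET_DIR = Path(__file__).parents[1] / "source" / "_snippets"

# Collect every snippet module, skipping package markers and pytest plumbing.
SNIPPETS = sorted(
    p for p in SNIPPET_DIR.rglob("*.py")
    if p.name not in {"__init__.py", "conftest.py"}
)


@pytest.mark.parametrize("snippet", SNIPPETS, ids=lambda p: p.stem)
def test_snippet_executes(snippet):
    """Run the snippet as a script so its __main__ block and assertions execute."""
    runpy.run_path(str(snippet), run_name="__main__")

The fixtures in the conftest.py above would then serve any pytest tests collected under the _snippets directory itself.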
Lines changed: 1 addition & 0 deletions
"""Example gallery code snippets for documentation."""
Lines changed: 1 addition & 0 deletions
"""Getting started code snippets for documentation."""
Lines changed: 41 additions & 0 deletions
"""Bayesian Optimizer example for documentation.

This snippet demonstrates the usage of BayesianOptimizer for
optimization problems. It is included in get_started.rst.
"""

# [start:full_example]
from hyperactive.opt.gfo import BayesianOptimizer

# [end:full_example]

# Need to define experiment and search_space for standalone execution
import numpy as np


def experiment(params):
    """Simple objective function."""
    x = params["x"]
    y = params["y"]
    return -(x**2 + y**2)


search_space = {
    "x": np.arange(-5, 5, 0.1),
    "y": np.arange(-5, 5, 0.1),
}

# [start:optimizer_usage]
optimizer = BayesianOptimizer(
    search_space=search_space,
    n_iter=30,
    experiment=experiment,
)
best_params = optimizer.solve()
# [end:optimizer_usage]

if __name__ == "__main__":
    print(f"Best parameters: {best_params}")
    # Verify the optimization found parameters close to (0, 0)
    assert abs(best_params["x"]) < 2.0, f"Expected x near 0, got {best_params['x']}"
    assert abs(best_params["y"]) < 2.0, f"Expected y near 0, got {best_params['y']}"
    print("Bayesian optimizer example passed!")
Lines changed: 36 additions & 0 deletions
"""Bayesian optimization example for index page.

This snippet demonstrates Bayesian optimization with a more complex
objective function shown on the landing page. It is included in index.rst.
"""

# [start:full_example]
import numpy as np
from hyperactive.opt.gfo import BayesianOptimizer


def complex_objective(params):
    x = params["x"]
    y = params["y"]
    return -((x - 2) ** 2 + (y + 1) ** 2) + np.sin(x * y)


search_space = {
    "x": np.linspace(-5, 5, 100),
    "y": np.linspace(-5, 5, 100),
}

optimizer = BayesianOptimizer(
    search_space=search_space,
    n_iter=50,
    experiment=complex_objective,
)
best_params = optimizer.solve()
# [end:full_example]

if __name__ == "__main__":
    print(f"Best parameters: {best_params}")
    # Verify we got valid parameters
    assert "x" in best_params
    assert "y" in best_params
    print("Index Bayesian example passed!")
Lines changed: 38 additions & 0 deletions
"""Custom function example for index page.

This snippet demonstrates the basic custom function optimization
shown on the landing page. It is included in index.rst.
"""

# [start:full_example]
import numpy as np
from hyperactive.opt.gfo import HillClimbing


# Define your objective function
def objective(params):
    x, y = params["x"], params["y"]
    return -(x**2 + y**2)  # Maximize (minimize the negative)


# Define the search space
search_space = {
    "x": np.arange(-5, 5, 0.1),
    "y": np.arange(-5, 5, 0.1),
}

# Create optimizer and solve
optimizer = HillClimbing(
    search_space=search_space,
    n_iter=100,
    experiment=objective,
)
best_params = optimizer.solve()
print(f"Best parameters: {best_params}")
# [end:full_example]

if __name__ == "__main__":
    # Verify the optimization found parameters close to (0, 0)
    assert abs(best_params["x"]) < 1.0, f"Expected x near 0, got {best_params['x']}"
    assert abs(best_params["y"]) < 1.0, f"Expected y near 0, got {best_params['y']}"
    print("Index custom function example passed!")
Lines changed: 34 additions & 0 deletions
"""Scikit-learn tuning example for index page.

This snippet demonstrates sklearn integration using OptCV
shown on the landing page. It is included in index.rst.
"""

# [start:full_example]
from sklearn.svm import SVC
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from hyperactive.integrations.sklearn import OptCV
from hyperactive.opt.gfo import HillClimbing

# Load data
X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y)

# Define optimizer with search space
search_space = {"kernel": ["linear", "rbf"], "C": [0.1, 1, 10]}
optimizer = HillClimbing(search_space=search_space, n_iter=20)

# Create tuned estimator and fit
tuned_svc = OptCV(SVC(), optimizer)
tuned_svc.fit(X_train, y_train)

print(f"Best params: {tuned_svc.best_params_}")
# [end:full_example]

if __name__ == "__main__":
    # Verify we got valid results
    assert hasattr(tuned_svc, "best_params_")
    assert "kernel" in tuned_svc.best_params_
    assert "C" in tuned_svc.best_params_
    print("Index sklearn tuning example passed!")
Lines changed: 40 additions & 0 deletions
"""Quick start example for documentation.

This snippet demonstrates the basic usage of Hyperactive for optimizing
a custom objective function. It is included in get_started.rst.
"""

# [start:full_example]
import numpy as np
from hyperactive.opt.gfo import HillClimbing


# 1. Define your objective function
def objective(params):
    x = params["x"]
    y = params["y"]
    return -(x**2 + y**2)  # Hyperactive maximizes by default


# 2. Define the search space
search_space = {
    "x": np.arange(-5, 5, 0.1),
    "y": np.arange(-5, 5, 0.1),
}

# 3. Create an optimizer and solve
optimizer = HillClimbing(
    search_space=search_space,
    n_iter=100,
    experiment=objective,
)
best_params = optimizer.solve()

print(f"Best parameters: {best_params}")
# [end:full_example]

if __name__ == "__main__":
    # Verify the optimization found parameters close to (0, 0)
    assert abs(best_params["x"]) < 1.0, f"Expected x near 0, got {best_params['x']}"
    assert abs(best_params["y"]) < 1.0, f"Expected y near 0, got {best_params['y']}"
    print("Quick start example passed!")
