Skip to content

Commit 1f60c43

Browse files
Add anisotropic meshing to LearnerND (#141)
* make loss function based on simplex volume in higher dimensional space taking into account the output dimension of the function * make point_in_simplex and circumcircle global methods this way you can import them into other files * change choose_point_in_simplex such that it sometimes takes the center * pep8 * update docstring * remove n from _ask signature and split off _ask_bound_point * split _ask_point_without_known_simplices into separate function * use SortedList instead of heap * lose indentation * make learnerND.ask O(log N) * improve triangulation performance in 3d * make learner pass the tests * use heaps instead of sortedlist * add tell_pending to _ask_point_without_known_simplices * make bowyer_watson faster by looking at fewer circumcircles it prunes some circumcircles faster; this results in: ~20% faster in 2d ~40% faster in 3d * move simplex_volume function to triangulation file * merge lists with zip * tetrahedron -> simplex * add quick volume computation for dim == 2 * try to add anisotropy; however, this fails... * make anisotropy work * comment out the maximum shape factor of two * remove dependence on RTree * enable plotting of custom triangulations * small style change * change inside bounds check to have eps precision * add checks for volume consistency * add .pytest_cache to gitignore * fix: Python 3.10+ compatibility (collections.abc, math.factorial) * fix: use uniform scale for Bowyer-Watson insertion, not anisotropic transform The per-simplex anisotropic transform (based on local gradient) varies across the mesh, which can produce disconnected or non-star-shaped cavities in the Bowyer-Watson algorithm, breaking its volume conservation invariant. Fix: use the uniform bounds-scaling matrix for triangulation maintenance (Bowyer-Watson insertion). The anisotropic transform is still used for point selection (choose_point_in_simplex in _ask_best_point), so anisotropic sampling behavior is preserved. Closes #74.
* [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Make anisotropic keyword-only in LearnerND --------- Co-authored-by: Jorn Hoofwijk <jornhoofwijk@gmail.com>
1 parent 156778d commit 1f60c43

6 files changed

Lines changed: 159 additions & 22 deletions

File tree

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ nosetests.xml
4444
coverage.xml
4545
*,cover
4646
.hypothesis/
47+
.pytest_cache/
4748

4849
# Translations
4950
*.mo

adaptive/learner/learnerND.py

Lines changed: 72 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -308,7 +308,7 @@ class LearnerND(BaseLearner):
308308
children based on volume.
309309
"""
310310

311-
def __init__(self, func, bounds, loss_per_simplex=None):
311+
def __init__(self, func, bounds, loss_per_simplex=None, *, anisotropic=False):
312312
self._vdim = None
313313
self.loss_per_simplex = loss_per_simplex or default_loss
314314

@@ -363,6 +363,7 @@ def __init__(self, func, bounds, loss_per_simplex=None):
363363

364364
# create a private random number generator with fixed seed
365365
self._random = random.Random(1)
366+
self.anisotropic = anisotropic
366367

367368
# all real triangles that have not been subdivided and the pending
368369
# triangles heap of tuples (-loss, real simplex, sub_simplex or None)
@@ -379,7 +380,12 @@ def __init__(self, func, bounds, loss_per_simplex=None):
379380

380381
def new(self) -> LearnerND:
381382
"""Create a new learner with the same function and bounds."""
382-
return LearnerND(self.function, self.bounds, self.loss_per_simplex)
383+
return LearnerND(
384+
self.function,
385+
self.bounds,
386+
self.loss_per_simplex,
387+
anisotropic=self.anisotropic,
388+
)
383389

384390
@property
385391
def npoints(self):
@@ -529,7 +535,6 @@ def points(self):
529535

530536
def tell(self, point, value):
531537
point = tuple(point)
532-
533538
if point in self.data:
534539
return # we already know about the point
535540

@@ -548,9 +553,54 @@ def tell(self, point, value):
548553
simplex = self._pending_to_simplex.get(point)
549554
if simplex is not None and not self._simplex_exists(simplex):
550555
simplex = None
556+
# Keep Bowyer-Watson in the globally scaled coordinates. The
557+
# anisotropic per-simplex transform is only used for point
558+
# selection, not for cavity construction.
551559
to_delete, to_add = tri.add_point(point, simplex, transform=self._transform)
552560
self._update_losses(to_delete, to_add)
553561

562+
def get_local_transform_matrix(self, simplex):
563+
scale = self._transform
564+
565+
if simplex is None or self.tri is None or not self.anisotropic:
566+
return scale
567+
568+
neighbors = set.union(*[self.tri.vertex_to_simplices[i] for i in simplex])
569+
indices = set.union(set(), *neighbors)
570+
571+
points = np.array([self.points[i] for i in indices])
572+
values = [self.data[tuple(p)] for p in points]
573+
574+
if isinstance(values[0], Iterable):
575+
raise ValueError("Anisotropic learner only works with 1D output")
576+
577+
# Do a linear least square fit
578+
# A x = B, find x
579+
points = points @ scale
580+
ones = np.ones((len(points), 1))
581+
A = np.hstack((points, ones))
582+
B = np.array(values, dtype=float)
583+
fit = np.linalg.lstsq(A, B, rcond=None)[0]
584+
*gradient, _constant = fit
585+
# we do not need the constant, only the gradient
586+
587+
# gradient is a vector of the amount of the slope in each direction
588+
magnitude = np.linalg.norm(gradient)
589+
590+
if np.isclose(magnitude, 0):
591+
return scale # there is no noticable gradient
592+
593+
# Make it a 2d numpy array and normalise it.
594+
gradient = np.array([gradient], dtype=float) / magnitude
595+
projection_matrix = gradient.T @ gradient
596+
identity = np.eye(self.ndim)
597+
598+
factor = math.sqrt(magnitude**2 + 1) - 1
599+
600+
scale_along_gradient = projection_matrix * factor + identity
601+
m = np.dot(scale_along_gradient, scale)
602+
return m.T
603+
554604
def _simplex_exists(self, simplex):
555605
simplex = tuple(sorted(simplex))
556606
return simplex in self.tri.simplices
@@ -679,22 +729,29 @@ def _pop_highest_existing_simplex(self):
679729
def _ask_best_point(self):
680730
assert self.tri is not None
681731

682-
loss, simplex, subsimplex = self._pop_highest_existing_simplex()
732+
while True:
733+
loss, simplex, subsimplex = self._pop_highest_existing_simplex()
683734

684-
if subsimplex is None:
685-
# We found a real simplex and want to subdivide it
686-
points = self.tri.get_vertices(simplex)
687-
else:
688-
# We found a pending simplex and want to subdivide it
689-
subtri = self._subtriangulations[simplex]
690-
points = subtri.get_vertices(subsimplex)
735+
if subsimplex is None:
736+
# We found a real simplex and want to subdivide it
737+
points = self.tri.get_vertices(simplex)
738+
else:
739+
# We found a pending simplex and want to subdivide it
740+
subtri = self._subtriangulations[simplex]
741+
points = subtri.get_vertices(subsimplex)
691742

692-
point_new = tuple(choose_point_in_simplex(points, transform=self._transform))
743+
transform = self.get_local_transform_matrix(simplex)
744+
point_new = tuple(choose_point_in_simplex(points, transform=transform))
745+
746+
# The priority queue may still contain stale simplices from before a
747+
# pending split. Skip those rather than returning duplicate work.
748+
if point_new in self.data or point_new in self.pending_points:
749+
continue
693750

694-
self._pending_to_simplex[point_new] = simplex
695-
self.tell_pending(point_new, simplex=simplex) # O(??)
751+
self._pending_to_simplex[point_new] = simplex
752+
self.tell_pending(point_new, simplex=simplex) # O(??)
696753

697-
return point_new, loss
754+
return point_new, loss
698755

699756
@property
700757
def _bounds_available(self):

adaptive/learner/triangulation.py

Lines changed: 53 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
from itertools import chain, combinations
44
from math import factorial, sqrt
55

6+
import numpy as np
67
import scipy.spatial
78
from numpy import abs as np_abs
89
from numpy import (
@@ -252,6 +253,8 @@ def simplex_volume_in_embedding(vertices) -> float:
252253
ValueError
253254
if the vertices do not form a simplex (for example,
254255
because they are coplanar, colinear or coincident).
256+
257+
Warning: this algorithm has not been tested for numerical stability.
255258
"""
256259
# Implements http://mathworld.wolfram.com/Cayley-MengerDeterminant.html
257260
# Modified from https://codereview.stackexchange.com/questions/77593/calculating-the-volume-of-a-tetrahedron
@@ -566,7 +569,14 @@ def bowyer_watson(self, pt_index, containing_simplex=None, transform=None):
566569
self.add_simplex(simplex)
567570

568571
new_triangles = self.vertex_to_simplices[pt_index]
569-
return bad_triangles - new_triangles, new_triangles - bad_triangles
572+
deleted_simplices = bad_triangles - new_triangles
573+
new_simplices = new_triangles - bad_triangles
574+
575+
old_vol = sum([self.volume(simplex) for simplex in deleted_simplices])
576+
new_vol = sum([self.volume(simplex) for simplex in new_simplices])
577+
assert np.isclose(old_vol, new_vol), f"{old_vol} !== {new_vol}"
578+
579+
return deleted_simplices, new_simplices
570580

571581
def _simplex_is_almost_flat(self, simplex):
572582
return self._relative_volume(simplex) < 1e-8
@@ -713,3 +723,45 @@ def hull(self):
713723
def convex_invariant(self, vertex):
714724
"""Hull is convex."""
715725
raise NotImplementedError
726+
727+
728+
class FakeDelaunay(scipy.spatial.Delaunay):
729+
def __init__(self, vertices, simplices):
730+
"""
731+
Throw in a Triangulation object, and receive a brandnew
732+
scipy.spatial.Delaunay
733+
734+
Parameters
735+
----------
736+
vertices : 2d arraylike of floats
737+
738+
simplices : 2d arraylike of integers
739+
"""
740+
741+
# just create some Delaunay triangulation object, any would do.
742+
super().__init__([[0, 1], [1, 0], [0, 0]])
743+
744+
self.ndim = len(vertices[0])
745+
self._points = np.array(vertices, dtype="float64")
746+
self.simplices = np.array(list(simplices), dtype="int32")
747+
# self.neighbors = np.zero # () todo
748+
self.nsimplex = len(simplices)
749+
self.npoints = len(vertices)
750+
self.min_bound = self._points.min(axis=0)
751+
self.max_bound = self._points.max(axis=0)
752+
self._transform = None # Set it to None to recompute it when needed
753+
754+
# now the hard part: finding the neighbours
755+
self.neighbors = -np.ones_like(self.simplices) # set all to -1
756+
757+
faces_dict = {}
758+
for n, simplex in enumerate(self.simplices):
759+
simplex = tuple(simplex)
760+
for i, _point in enumerate(simplex):
761+
face = tuple(sorted(simplex[:i] + simplex[i + 1 :]))
762+
if face in faces_dict:
763+
other_simpl_index, other_point_index = faces_dict[face]
764+
self.neighbors[other_simpl_index, other_point_index] = n
765+
self.neighbors[n, i] = other_simpl_index
766+
else:
767+
faces_dict[face] = (n, i)

adaptive/tests/test_learnernd.py

Lines changed: 26 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,18 @@
22
import scipy.spatial
33

44
from adaptive.learner import LearnerND
5-
from adaptive.runner import replay_log, simple
5+
from adaptive.runner import BlockingRunner, replay_log, simple
66

77
from .test_learners import generate_random_parametrization, ring_of_fire
88

99

10-
def test_faiure_case_LearnerND():
10+
def sphere(xyz):
11+
x, y, z = xyz
12+
a = 0.4
13+
return x + z**2 + np.exp(-((x**2 + y**2 + z**2 - 0.75**2) ** 2) / a**4)
14+
15+
16+
def test_failure_case_LearnerND():
1117
log = [
1218
("ask", 4),
1319
("tell", (-1, -1, -1), 1.607873907219222e-101),
@@ -26,6 +32,24 @@ def test_faiure_case_LearnerND():
2632
replay_log(learner, log)
2733

2834

35+
def test_anisotropic_3d():
36+
# There was a bug where the total simplex volume would exceed the bounding
37+
# box volume for the anisotropic 3D learner.
38+
# volume for the anisotropic 3d learnerND
39+
# learner = adaptive.LearnerND(ring, bounds=[(-1, 1), (-1, 1)], anisotropic=True)
40+
learner = LearnerND(sphere, bounds=[(-1, 1), (-1, 1), (-1, 1)], anisotropic=True)
41+
42+
def goal(learner):
43+
if learner.tri:
44+
sum_of_simplex_volumes = np.sum(learner.tri.volumes())
45+
assert sum_of_simplex_volumes < 8.00000000001
46+
return learner.npoints >= 1000
47+
48+
BlockingRunner(learner, goal, ntasks=1)
49+
50+
assert learner.npoints >= 1000
51+
52+
2953
def test_interior_vs_bbox_gives_same_result():
3054
f = generate_random_parametrization(ring_of_fire)
3155

adaptive/tests/unit/test_learnernd.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,11 @@ def test_learnerND_accepts_ConvexHull_as_input():
3434
assert np.allclose(learner._bbox, [(0, 2), (0, 1)])
3535

3636

37+
def test_learnerND_anisotropic_is_keyword_only():
38+
with pytest.raises(TypeError):
39+
LearnerND(ring_of_fire, [(-1, 1), (-1, 1)], None, True)
40+
41+
3742
def test_learnerND_raises_if_too_many_neigbors():
3843
@uses_nth_neighbors(2)
3944
def loss(*args):

docs/source/logo.md

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,11 @@ jupytext:
44
extension: .md
55
format_name: myst
66
format_version: 0.13
7-
jupytext_version: 1.14.5
7+
jupytext_version: 1.19.1
88
kernelspec:
99
display_name: Python 3 (ipykernel)
1010
language: python
1111
name: python3
12-
execution:
13-
timeout: 300
1412
---
1513

1614
```{code-cell} ipython3
@@ -168,7 +166,7 @@ def animate_png(folder=None, nseconds=15):
168166
169167
170168
def save_webp(fname_webp, ims):
171-
(im, *_ims) = ims
169+
im, *_ims = ims
172170
im.save(
173171
fname_webp,
174172
save_all=True,

0 commit comments

Comments (0)