Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions adaptive/learner/balancing_learner.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,8 +261,13 @@ def ask(
return [], []

if not tell_pending:
with restore(*self.learners):
return self._ask_and_tell(n)
try:
with restore(*self.learners):
return self._ask_and_tell(n)
finally:
self._ask_cache.clear()
self._loss.clear()
self._pending_loss.clear()
else:
return self._ask_and_tell(n)

Expand Down
23 changes: 16 additions & 7 deletions adaptive/learner/data_saver.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,14 @@
with_pandas = False


def _to_key(x):
return tuple(x.values) if x.values.size > 1 else x.item()
def _mapping_uses_tuple_keys(mapping):
return bool(mapping) and isinstance(next(iter(mapping)), tuple)


def _row_to_key(row, force_tuple=False):
if row.values.size > 1 or force_tuple:
return tuple(row.values)
return row.item()


class DataSaver(BaseLearner):
Expand Down Expand Up @@ -109,8 +115,10 @@ def to_dataframe( # type: ignore[override]
**kwargs,
)

force_tuple = _mapping_uses_tuple_keys(self.extra_data)
df[extra_data_name] = [
self.extra_data[_to_key(x)] for _, x in df[df.attrs["inputs"]].iterrows()
self.extra_data[_row_to_key(x, force_tuple=force_tuple)]
for _, x in df[df.attrs["inputs"]].iterrows()
]
return df

Expand Down Expand Up @@ -146,10 +154,11 @@ def load_dataframe( # type: ignore[override]
function_prefix=function_prefix,
**kwargs,
)
keys = df.attrs.get("inputs", list(input_names))
for _, x in df[keys + [extra_data_name]].iterrows():
key = _to_key(x[:-1])
self.extra_data[key] = x[-1]
input_columns = df.attrs.get("inputs", list(input_names))
force_tuple = _mapping_uses_tuple_keys(self.learner.data)
for _, row in df[input_columns + [extra_data_name]].iterrows():
key = _row_to_key(row.iloc[:-1], force_tuple=force_tuple)
self.extra_data[key] = row.iloc[-1]

def _get_data(self) -> tuple[Any, OrderedDict[Any, Any]]:
return self.learner._get_data(), self.extra_data
Expand Down
52 changes: 43 additions & 9 deletions adaptive/learner/learnerND.py
Original file line number Diff line number Diff line change
Expand Up @@ -493,11 +493,39 @@ def load_dataframe( # type: ignore[override]
def bounds_are_done(self):
return all(p in self.data for p in self._bounds_points)

def _sorted_line_data(self):
coordinates = self.points[:, 0]
sorted_indices = np.argsort(coordinates)
return coordinates[sorted_indices], self.values[sorted_indices]

def _ip(self):
"""A `scipy.interpolate.LinearNDInterpolator` instance
containing the learner's data."""
"""A SciPy interpolator containing the learner's data."""
# XXX: take our own triangulation into account when generating the _ip
return interpolate.LinearNDInterpolator(self.points, self.values)
if self.ndim != 1:
return interpolate.LinearNDInterpolator(self.points, self.values)

coordinates, values = self._sorted_line_data()
return interpolate.interp1d(
coordinates,
values,
axis=0,
bounds_error=False,
fill_value=np.nan,
)

def _plot_1d(self, n=None):
hv = ensure_holoviews()
if len(self.data) < 2:
return hv.Path([]) * hv.Scatter([]).opts(size=5)

(x_bounds,) = self._bbox
n = n or 201
xs = np.linspace(*x_bounds, n)
ys = self._ip()(xs)
scatter_points = [
(point[0], value) for point, value in sorted(self.data.items())
]
return hv.Path((xs, ys)) * hv.Scatter(scatter_points).opts(size=5)

@property
def tri(self):
Expand Down Expand Up @@ -834,7 +862,7 @@ def _update_range(self, new_output):
# this is the first point, nothing to do, just set the range
self._min_value = np.min(new_output)
self._max_value = np.max(new_output)
self._old_scale = self._scale or 1
self._old_scale = self._scale
return False

# if range in one or more directions is doubled, then update all losses
Expand All @@ -857,7 +885,10 @@ def _update_range(self, new_output):

self._output_multiplier = scale_multiplier

scale_factor = self._scale / self._old_scale
if self._old_scale == 0:
scale_factor = math.inf if self._scale > 0 else 1
else:
scale_factor = self._scale / self._old_scale
if scale_factor > self._recompute_losses_factor:
self._old_scale = self._scale
self._recompute_all_losses()
Expand All @@ -881,23 +912,26 @@ def remove_unfinished(self):
##########################

def plot(self, n=None, tri_alpha=0):
"""Plot the function we want to learn, only works in 2D.
"""Plot the function we want to learn in 1D or 2D.

Parameters
----------
n : int
the number of boxes in the interpolation grid along each axis
The number of interpolation points per axis.
tri_alpha : float (0 to 1)
Opacity of triangulation lines
"""
hv = ensure_holoviews()
if self.vdim > 1:
raise NotImplementedError(
"holoviews currently does not support", "3D surface plots in bokeh."
)
if self.ndim == 1:
return self._plot_1d(n)

hv = ensure_holoviews()
if self.ndim != 2:
raise NotImplementedError(
"Only 2D plots are implemented: You can "
"Only 1D and 2D plots are implemented: You can "
"plot a 2D slice with 'plot_slice'."
)
x, y = self._bbox
Expand Down
37 changes: 25 additions & 12 deletions adaptive/learner/triangulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,14 @@ def is_iterable_and_sized(obj):
return isinstance(obj, Iterable) and isinstance(obj, Sized)


def _flat_simplices(coords):
"""Yield the intervals of a 1D triangulation in sorted coordinate order."""
sorted_indices = sorted(range(len(coords)), key=coords.__getitem__)
for left, right in zip(sorted_indices, sorted_indices[1:]):
if coords[left] != coords[right]:
yield left, right


def simplex_volume_in_embedding(vertices) -> float:
"""Calculate the volume of a simplex in a higher dimensional embedding.
That is: dim > len(vertices) - 1. For example if you would like to know the
Expand All @@ -257,8 +265,13 @@ def simplex_volume_in_embedding(vertices) -> float:
# Modified from https://codereview.stackexchange.com/questions/77593/calculating-the-volume-of-a-tetrahedron

vertices = asarray(vertices, dtype=float)
dim = len(vertices[0])
if dim == 2:
num_vertices = len(vertices)
if num_vertices == 2:
# A 1-simplex is just a line segment.
return float(norm(vertices[1] - vertices[0]))

embedding_dim = len(vertices[0])
if embedding_dim == 2:
# Heron's formula
a, b, c = scipy.spatial.distance.pdist(vertices, metric="euclidean")
s = 0.5 * (a + b + c)
Expand All @@ -268,13 +281,12 @@ def simplex_volume_in_embedding(vertices) -> float:
sq_dists = scipy.spatial.distance.pdist(vertices, metric="sqeuclidean")

# Add border while compressed
num_verts = scipy.spatial.distance.num_obs_y(sq_dists)
bordered = concatenate((ones(num_verts), sq_dists))
bordered = concatenate((ones(num_vertices), sq_dists))

# Make matrix and find volume
sq_dists_mat = scipy.spatial.distance.squareform(bordered)

coeff = -((-2) ** (num_verts - 1)) * factorial(num_verts - 1) ** 2
coeff = -((-2) ** (num_vertices - 1)) * factorial(num_vertices - 1) ** 2
vol_square = fast_det(sq_dists_mat) / coeff

if vol_square < 0:
Expand Down Expand Up @@ -326,9 +338,6 @@ def __init__(self, coords):
if any(len(coord) != dim for coord in coords):
raise ValueError("Coordinates dimension mismatch")

if dim == 1:
raise ValueError("Triangulation class only supports dim >= 2")

if len(coords) < dim + 1:
raise ValueError("Please provide at least one simplex")

Expand All @@ -344,10 +353,14 @@ def __init__(self, coords):
# initialise empty set for each vertex
self.vertex_to_simplices = [set() for _ in coords]

# find a Delaunay triangulation to start with, then we will throw it
# away and continue with our own algorithm
initial_tri = scipy.spatial.Delaunay(coords)
for simplex in initial_tri.simplices:
if dim == 1:
simplices = _flat_simplices(coords)
else:
# Find a Delaunay triangulation to start with, then we will throw it
# away and continue with our own algorithm.
simplices = scipy.spatial.Delaunay(coords).simplices

for simplex in simplices:
self.add_simplex(simplex)

def delete_simplex(self, simplex):
Expand Down
44 changes: 43 additions & 1 deletion adaptive/tests/test_balancing_learner.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,24 @@
from __future__ import annotations

import functools as ft
import math
import random

import numpy as np
import pytest

from adaptive.learner import BalancingLearner, Learner1D
from adaptive.learner import BalancingLearner, Learner1D, Learner2D
from adaptive.runner import simple

strategies = ["loss", "loss_improvements", "npoints", "cycle"]


def ring_of_fire(xy, d):
    """A sharp Gaussian ridge along the circle of radius *d*, plus a linear ramp in x."""
    width = 0.2
    x, y = xy
    ring = math.exp(-((x**2 + y**2 - d**2) ** 2) / width**4)
    return x + ring


def test_balancing_learner_loss_cache():
learner = Learner1D(lambda x: x, bounds=(-1, 1))
learner.tell(-1, -1)
Expand Down Expand Up @@ -64,3 +75,34 @@ def test_strategies(strategy, goal_type, goal):
learners = [Learner1D(lambda x: x, bounds=(-1, 1)) for i in range(10)]
learner = BalancingLearner(learners, strategy=strategy)
simple(learner, **{goal_type: goal})


def test_loss_improvements_strategy_with_tell_pending_false_reserves_child_points():
    # Fixed seeds make the random shuffles below reproducible.
    random.seed(3104322362)
    np.random.seed(3104322362 % 2**32)

    # Four 2D learners over ring functions with random radii, balanced by
    # the "loss_improvements" strategy.
    learners = [
        Learner2D(
            ft.partial(ring_of_fire, d=random.uniform(0.2, 1)),
            bounds=((-1, 1), (-1, 1)),
        )
        for _ in range(4)
    ]
    learner = BalancingLearner(learners, strategy="loss_improvements")

    stash = []
    for n, m in [(1, 1), (4, 4), (2, 0), (4, 4), (8, 6)]:
        # Ask for n points without registering them as pending, then hold
        # back m of them in the stash.
        xs, _ = learner.ask(n, tell_pending=False)
        random.shuffle(xs)
        for _ in range(m):
            stash.append(xs.pop())

        for x in xs:
            learner.tell(x, learner.function(x))

        # Feed the held-back points in a scrambled order.
        random.shuffle(stash)
        for _ in range(m):
            x = stash.pop()
            learner.tell(x, learner.function(x))

    # With tell_pending=False, no child learner should retain pending points.
    assert all(not child.pending_points for child in learners)
21 changes: 21 additions & 0 deletions adaptive/tests/test_learnernd.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import numpy as np
import pytest
import scipy.spatial

from adaptive.learner import LearnerND
Expand Down Expand Up @@ -48,3 +49,23 @@ def test_vector_return_with_a_flat_layer():
for function in [h1, h2, h3]:
learner = LearnerND(function, bounds=[(-1, 1), (-1, 1)])
simple(learner, loss_goal=0.1)


@pytest.mark.parametrize(
    ("run_kwargs", "expected_npoints"),
    [
        ({"npoints_goal": 10}, 10),
        ({"loss_goal": 0.1}, None),
    ],
    ids=["npoints-goal", "loss-goal"],
)
def test_learnerND_1d(run_kwargs, expected_npoints):
    """LearnerND should accept 1D bounds and reach the requested goal."""
    learner = LearnerND(lambda x: x[0] ** 2, bounds=[(-1, 1)])
    simple(learner, **run_kwargs)

    if expected_npoints is not None:
        assert learner.npoints == expected_npoints
    final_loss = learner.loss()
    assert final_loss < float("inf")
    if "loss_goal" in run_kwargs:
        assert final_loss <= run_kwargs["loss_goal"]
7 changes: 7 additions & 0 deletions adaptive/tests/test_learners.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,13 @@ def linear_with_peak(x, d: uniform(-1, 1)): # type: ignore[valid-type]
return x + a**2 / (a**2 + (x - d) ** 2)


@learn_with(LearnerND, bounds=((-1, 1),))
def peak_1d(x, d: uniform(-0.5, 0.5)):  # type: ignore[valid-type]
    """Linear ramp plus a narrow Lorentzian peak centered at *d* (1D LearnerND input)."""
    width = 0.01
    (value,) = x
    return value + width**2 / (width**2 + (value - d) ** 2)


@learn_with(LearnerND, bounds=((-1, 1), (-1, 1)))
@learn_with(Learner2D, bounds=((-1, 1), (-1, 1)))
@learn_with(SequenceLearner, sequence=np.random.rand(1000, 2))
Expand Down
Loading
Loading