
Commit d31f57a

Update logging of y-values and parameters in Spectra class
1 parent dbefe89 commit d31f57a

File tree

3 files changed: +202 -9 lines changed

pyduino/optimization/nelder_mead.py

+128-2
@@ -1,17 +1,23 @@
 import numpy as np
-from typing import Callable, List
+import warnings
+from typing import Union, List, Iterable
 from . import Optimizer, linmap
 
 def sigmoid(x):
     return 1 / (1 + np.exp(-x))
 def logit(x, inf=100):
     return np.where(x==0, -inf, np.where(x==1, inf, np.log(x) - np.log(1-x)))
+def softmax(x):
+    assert x.ndim == 2, "Softmax only implemented for 2D arrays"
+    return np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)
 
-class NelderMead(Optimizer):
+class NelderMeadBounded(Optimizer):
     def __init__(self, population_size: int, ranges: List[float], rng_seed: int = 0, hypercube_radius = 100):
         """
         This Nelder-Mead algorithm assumes that the optimization function 'f' is to be minimized and time independent.
 
+        The space is assumed to be a hypercube, and the algorithm will map the hypercube to the unit hypercube, bounded by a sigmoid function.
+
         ranges list of pairs:
             Maxima and minima that each parameter can cover.
             Example: [(0, 10), (0, 10)] for two parameters ranging from 0 to 10.
@@ -59,6 +65,126 @@ def ask_oracle(self, X: np.ndarray) -> np.ndarray:
     def init_oracle(self):
         return self.ask_oracle(self.view_g())
 
+    def step(self):
+        """
+        This function performs a single step of the Nelder-Mead algorithm.
+        """
+
+        # Sort the population by the value of the oracle
+        y = self.ask_oracle(self.view_g())
+        idx = np.argsort(y)
+        self.population = self.population[idx]
+
+        # Compute the centroid of the population
+        centroid = self.population[:-1].mean(axis=0)
+
+        # Reflect the worst point through the centroid
+        reflected = centroid + (centroid - self.population[-1])
+
+        # Evaluate the reflected point
+        y_reflected = self.ask_oracle(self.view(reflected.reshape(1,-1)))
+
+        # If the reflected point is better than the second worst, but not better than the best, then expand
+        if y_reflected < y[-2] and y_reflected > y[0]:
+            expanded = centroid + (reflected - centroid)
+            y_expanded = self.ask_oracle(self.view(expanded.reshape(1,-1)))
+            if y_expanded < y_reflected:
+                self.population[-1] = expanded
+            else:
+                self.population[-1] = reflected
+        # If the reflected point is better than the best, then expand
+        elif y_reflected < y[0]:
+            expanded = centroid + 2 * (reflected - centroid)
+            y_expanded = self.ask_oracle(self.view(expanded.reshape(1,-1)))
+            if y_expanded < y_reflected:
+                self.population[-1] = expanded
+            else:
+                self.population[-1] = reflected
+        # If the reflected point is worse than the second worst, then contract
+        elif y_reflected > y[-2]:
+            contracted = centroid + 0.5 * (self.population[-1] - centroid)
+            y_contracted = self.ask_oracle(self.view(contracted.reshape(1,-1)))
+            if y_contracted < y[-1]:
+                self.population[-1] = contracted
+            else:
+                for i in range(1, len(self.population)):
+                    self.population[i] = 0.5 * (self.population[i] + self.population[0])
+        # If the reflected point is worse than the worst, then shrink
+        elif y_reflected > y[-1]:
+            for i in range(1, len(self.population)):
+                self.population[i] = 0.5 * (self.population[i] + self.population[0])
+
+        self.y = y.copy()
+        return self.view_g()
+
+class NelderMeadConstant(Optimizer):
+    def __init__(self, population_size: int, ranges: Union[int, Iterable], rng_seed: int = 0, energy=10):
+        """
+        This Nelder-Mead algorithm assumes that the optimization function 'f' is to be minimized and time independent.
+
+        The parameters are constrained so that their sum always adds up to `energy` through a softmax function.
+
+        Parameters:
+        - population_size (int): The size of the population.
+        - ranges (int or list): The number of parameters or a list of ranges for each parameter.
+        - rng_seed (int): The seed for the random number generator.
+        - energy (float): The energy parameter used in the optimization.
+
+        Note: The ranges parameter is merely a placeholder. The ranges are set to (0, energy) for all parameters.
+        """
+        self.population_size = population_size
+        if isinstance(ranges, Iterable):
+            self.ranges = np.array([(0, energy) for _ in range(len(ranges))])
+            warnings.warn("While using NelderMeadConstant, the ranges are set to (0, energy) for all parameters. The parameter `ranges` is merely a placeholder.")
+        elif isinstance(ranges, int):
+            self.ranges = np.array([(0, energy) for _ in range(ranges)])
+        self.rng_seed = np.random.default_rng(rng_seed)
+
+        # Initialize the population (random position and initial momentum)
+        self.population = self.rng_seed.random((self.population_size, len(self.ranges)))
+
+        # Derived attributes
+        self.energy = energy
+        self.e_summation = 1
+        # Initialize y as a vector of NaNs
+        self.y = np.full(self.population_size, np.nan)
+
+    def forward(self, x):
+        self.e_summation = np.sum(np.exp(x), axis=1, keepdims=True)
+        return self.energy * softmax(x)
+
+    def backward(self, x):
+        """
+        Softmax is not injective. This is a pseudo-inverse.
+        """
+        return np.log(self.e_summation * x/self.energy)
+
+    def view(self, x):
+        """
+        Maps the input from the domain to the codomain.
+        """
+        return self.forward(x)
+
+    def view_g(self):
+        """
+        Maps the input from the domain to the codomain.
+        """
+        return self.forward(self.population)
+
+    def inverse_view(self, x):
+        """
+        Maps the input from the codomain to the domain.
+        """
+        return self.backward(x)
+
+    def ask_oracle(self, X: np.ndarray) -> np.ndarray:
+        return super().ask_oracle()
+
+    def init_oracle(self):
+        return self.ask_oracle(self.view_g())
+
     def step(self):
         """
         This function performs a single step of the Nelder-Mead algorithm.
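A quick way to see what the new NelderMeadConstant mapping does is to run its forward/backward pair in isolation. The snippet below is a minimal standalone sketch (the variable names are local to the example, not part of the repo): scaling a row-wise softmax by `energy` makes every row sum to `energy`, and the logarithmic pseudo-inverse recovers the original point as long as the cached exponential sum comes from the same input.

import numpy as np

def softmax(x):
    # row-wise softmax, as added in nelder_mead.py
    return np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)

energy = 10.0
x = np.random.default_rng(0).random((3, 4))      # toy "population" of 3 points in 4 dimensions
e_summation = np.sum(np.exp(x), axis=1, keepdims=True)

y = energy * softmax(x)                          # forward: each row now sums to `energy`
x_back = np.log(e_summation * y / energy)        # backward: pseudo-inverse of the scaled softmax

print(np.allclose(y.sum(axis=1), energy))        # True -> the energy constraint holds
print(np.allclose(x_back, x))                    # True -> the round-trip recovers x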

pyduino/spectra.py

+3-3
@@ -1,4 +1,4 @@
-from pyduino.optimization.nelder_mead import NelderMead
+from pyduino.optimization.nelder_mead import NelderMeadBounded
 from pyduino.pyduino2 import ReactorManager, chunks, PATHS
 import numpy as np
 import json
@@ -75,7 +75,7 @@ def parse_dados(X,param):
     """
     return np.array(list(map(seval,map(lambda x: x[1].get(param,0),sorted(X.items(),key=lambda x: x[0])))))
 
-class Spectra(RangeParser,ReactorManager,NelderMead):
+class Spectra(RangeParser,ReactorManager,NelderMeadBounded):
     def __init__(self,ranges,density_param,maximize=True,log_name=None,reset_density=False,**kwargs):
         """
         Args:
@@ -99,7 +99,7 @@ def __init__(self,ranges,density_param,maximize=True,log_name=None,reset_density
         self.irradiance = PATHS.SYSTEM_PARAMETERS['irradiance']
 
         ReactorManager.__init__(self)
-        NelderMead.__init__(
+        NelderMeadBounded.__init__(
             self,
             population_size=len(self.parameters),
             ranges=self.ranges_as_list(),

tests/test_optim.py

+71-4
@@ -8,10 +8,24 @@
 dotenv.load_dotenv(dotenv_path=".env.local")
 
 sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
-from pyduino.optimization.nelder_mead import NelderMead
+from pyduino.optimization.nelder_mead import NelderMeadBounded, NelderMeadConstant, softmax
 
-class TestOptim:
-    optimizer = NelderMead(population_size=10, ranges=[(-10, 10)] * 4)
+def test_softmax():
+    # Test case 1: 2D array
+    x = np.array([[1, 2, 3], [4, 5, 6]])
+    expected_output = np.array([[0.09003057, 0.24472847, 0.66524096], [0.09003057, 0.24472847, 0.66524096]])
+    assert np.allclose(softmax(x), expected_output)
+
+    # Test case 2: 1D array (should raise an assertion error)
+    x = np.array([1, 2, 3])
+    try:
+        softmax(x)
+        assert False, "Expected an assertion error"
+    except AssertionError:
+        pass
+
+class TestOptimNelderMeadBounded:
+    optimizer = NelderMeadBounded(population_size=10, ranges=[(-10, 10)] * 4)
 
     def test_logic(self):
         x = (2*np.random.random((10, 4))-1)*100
@@ -58,4 +72,57 @@ def ask(self, X):
         assert self.optimizer.view(self.optimizer.population).shape == (10, 4)
         assert self.optimizer.view_g().shape == (10, 4)
         assert self.optimizer.inverse_view(self.optimizer.view(self.optimizer.population)).shape == (10, 4)
-        assert np.isclose(self.optimizer.y.min(), 0, atol=1e-4), f"Oracle: {self.optimizer.y.min()}"
+        assert np.isclose(self.optimizer.y.min(), 0, atol=1e-4), f"Oracle: {self.optimizer.y.min()}"
+
+class TestOptimNelderMeadConstant:
+    energy = 10
+    optimizer = NelderMeadConstant(population_size=10, ranges=[(0, 10)] * 4, energy=energy)
+
+    def test_logic(self):
+        x = softmax((2*np.random.random((10, 4))-1)*100)
+        assert np.allclose(x.sum(axis=1), 1), f"Sum of x: {x.sum(axis=1)}"
+
+        print("X")
+        print(x)
+        assert np.allclose(self.energy, self.optimizer.forward(x).sum(axis=1)), f"Sum of forward: {self.optimizer.forward(x).sum(axis=1)}"
+        print("Forward -> Backward")
+        print(self.optimizer.backward(self.optimizer.forward(x)))
+
+        assert np.allclose(x, self.optimizer.backward(self.optimizer.forward(x)))
+        assert np.allclose(x, self.optimizer.inverse_view(self.optimizer.view(x)))
+
+        assert np.all(self.optimizer.backward(np.zeros((10,4))) < 1)
+
+    def test_nelder_mead_parabola(self):
+        class Oracle:
+            def ask(self, X):
+                return (X**2).sum(axis=1)
+        oracle = Oracle()
+        self.optimizer.ask_oracle = oracle.ask
+
+        # Set initial population to oracle
+        self.optimizer.init_oracle()
+
+        for i in range(100):
+            self.optimizer.step()
+        assert self.optimizer.view(self.optimizer.population).shape == (10, 4)
+        assert self.optimizer.view_g().shape == (10, 4)
+        assert self.optimizer.inverse_view(self.optimizer.view(self.optimizer.population)).shape == (10, 4)
+        assert np.allclose(self.energy, self.optimizer.view_g().sum(axis=1), atol=1e-4), f"Energy wasn't conserved: {self.optimizer.view_g().sum(axis=1)}"
+
+    def test_nelder_mead_rosenbrock(self):
+        class Oracle:
+            def ask(self, X):
+                return ((1 - X[:, :-1])**2).sum(axis=1) + 100 * ((X[:, 1:] - X[:, :-1]**2)**2).sum(axis=1)
+        oracle = Oracle()
+        self.optimizer.ask_oracle = oracle.ask
+
+        # Set initial population to oracle
+        self.optimizer.init_oracle()
+
+        for i in range(1000):
+            self.optimizer.step()
+        assert self.optimizer.view(self.optimizer.population).shape == (10, 4)
+        assert self.optimizer.view_g().shape == (10, 4)
+        assert self.optimizer.inverse_view(self.optimizer.view(self.optimizer.population)).shape == (10, 4)
+        assert np.allclose(self.energy, self.optimizer.view_g().sum(axis=1), atol=1e-4), f"Energy wasn't conserved: {self.optimizer.view_g().sum(axis=1)}"
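As a sanity check on the oracle used in test_nelder_mead_rosenbrock, the standalone sketch below (independent of the test class above) shows that the vectorized Rosenbrock objective evaluates to zero at the classical unconstrained minimum x = (1, 1, 1, 1). Note that this point sums to 4, so it is not reachable when the coordinates must sum to energy = 10, which is presumably why the test asserts energy conservation rather than convergence to a target value.

import numpy as np

def rosenbrock(X):
    # same vectorized objective as the test's Oracle.ask
    return ((1 - X[:, :-1])**2).sum(axis=1) + 100 * ((X[:, 1:] - X[:, :-1]**2)**2).sum(axis=1)

X = np.ones((1, 4))       # the classical minimum of the unconstrained Rosenbrock function
print(rosenbrock(X))      # [0.]
print(X.sum())            # 4.0 -- infeasible under the sum-to-10 constraint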

0 commit comments
