fix base_model.py
Xbc-gressor committed Sep 26, 2024
1 parent 70a3232 commit 17e8124
Showing 3 changed files with 227 additions and 152 deletions.
148 changes: 88 additions & 60 deletions openbox/core/ea/differential_ea_advisor.py
@@ -1,7 +1,12 @@
import random
from typing import Optional, Callable, List, Union, Tuple

from ConfigSpace import Configuration, ConfigurationSpace, CategoricalHyperparameter, OrdinalHyperparameter
from typing import Callable, List, Optional, Tuple, Union

from ConfigSpace import (
CategoricalHyperparameter,
Configuration,
ConfigurationSpace,
OrdinalHyperparameter,
)
from ConfigSpace.hyperparameters import NumericalHyperparameter

from openbox.core.ea.base_ea_advisor import Individual, pareto_best, pareto_sort
@@ -11,52 +16,58 @@

class DifferentialEAAdvisor(ModularEAAdvisor):

@deprecate_kwarg('num_objs', 'num_objectives', 'a future version')
def __init__(self,

config_space: ConfigurationSpace,
num_objectives=1,
num_constraints=0,
population_size=30,
optimization_strategy='ea',
batch_size=1,
output_dir='logs',
task_id='OpenBox',
random_state=None,

required_evaluation_count: Optional[int] = None,
auto_step=True,
strict_auto_step=True,
skip_gen_population=False,
filter_gen_population: Optional[Callable[[List[Configuration]], List[Configuration]]] = None,
keep_unexpected_population=True,
save_cached_configuration=True,

constraint_strategy='discard',

f: Union[Tuple[float, float], float] = 0.5,
cr: Union[Tuple[float, float], float] = 0.9,
):
@deprecate_kwarg("num_objs", "num_objectives", "a future version")
def __init__(
self,
config_space: ConfigurationSpace,
num_objectives=1,
num_constraints=0,
population_size=30,
optimization_strategy="ea",
batch_size=1,
output_dir="logs",
task_id="OpenBox",
random_state=None,
required_evaluation_count: Optional[int] = None,
auto_step=True,
strict_auto_step=True,
skip_gen_population=False,
filter_gen_population: Optional[
Callable[[List[Configuration]], List[Configuration]]
] = None,
keep_unexpected_population=True,
save_cached_configuration=True,
constraint_strategy="discard",
f: Union[Tuple[float, float], float] = 0.5,
cr: Union[Tuple[float, float], float] = 0.9,
):
"""
f is the hyperparameter for DEA that X = A + (B - C) * f
cr is the cross rate
f and cr may be a tuple of two floats, such as (0.1,0.9)
If so, these two values are adjusted automatically within this range.
"""
super().__init__(config_space=config_space, num_objectives=num_objectives, num_constraints=num_constraints,
population_size=population_size, optimization_strategy=optimization_strategy,
batch_size=batch_size, output_dir=output_dir, task_id=task_id,
random_state=random_state,

required_evaluation_count=required_evaluation_count, auto_step=auto_step,
strict_auto_step=strict_auto_step, skip_gen_population=skip_gen_population,
filter_gen_population=filter_gen_population,
keep_unexpected_population=keep_unexpected_population,
save_cached_configuration=save_cached_configuration
)
super().__init__(
config_space=config_space,
num_objectives=num_objectives,
num_constraints=num_constraints,
population_size=population_size,
optimization_strategy=optimization_strategy,
batch_size=batch_size,
output_dir=output_dir,
task_id=task_id,
random_state=random_state,
required_evaluation_count=required_evaluation_count,
auto_step=auto_step,
strict_auto_step=strict_auto_step,
skip_gen_population=skip_gen_population,
filter_gen_population=filter_gen_population,
keep_unexpected_population=keep_unexpected_population,
save_cached_configuration=save_cached_configuration,
)

self.constraint_strategy = constraint_strategy
assert self.constraint_strategy in {'discard'}
assert self.constraint_strategy in {"discard"}

self.f = f
self.cr = cr
@@ -76,25 +87,32 @@ def _gen(self, count=1) -> List[Configuration]:
next_config = self.sample_random_config(excluded_configs=self.all_configs)
nid = -1
else:
xi = self.population[self.cur]['config']
xi_score = self.population[self.cur]['perf']
xi = self.population[self.cur]["config"]
xi_score = self.population[self.cur]["perf"]

# Randomly sample 3 other values: x1, x2, x3
lst = list(range(self.population_size))
lst.remove(self.cur)
random.shuffle(lst)
lst = lst[:3]

if self.dynamic_f:
lst.sort(key=lambda a: self.population[a]['perf'])
lst.sort(key=lambda a: self.population[a]["perf"][0])

i1, i2, i3 = lst[0], lst[1], lst[2]
x1, x2, x3 = self.population[i1]['config'], self.population[i2]['config'], self.population[i3]['config']
x1, x2, x3 = (
self.population[i1]["config"],
self.population[i2]["config"],
self.population[i3]["config"],
)

# Mutation: xt = x1 + (x2 - x3) * f
if self.dynamic_f:
# Dynamic f
f1, f2, f3 = self.population[i1]['perf'], self.population[i2]['perf'], self.population[i3]['perf']
f1, f2, f3 = (
self.population[i1]["perf"][0],
self.population[i2]["perf"][0],
self.population[i3]["perf"][0],
)
if f1 == f3:
f = self.f[0]
else:
@@ -108,14 +126,14 @@ def _gen(self, count=1) -> List[Configuration]:
# Cross over between xi and xt, get xn
if self.dynamic_cr:
# Dynamic cr
scores = [a['perf'] for a in self.population]
scores = [a["perf"][0] for a in self.population]
scores_avg = sum(scores) / len(scores)

if xi_score < scores_avg:
if xi_score[0] < scores_avg:
scores_mx = max(scores)
scores_mn = min(scores)
cr = self.cr[0] + (self.cr[1] - self.cr[0]) * (scores_mx - xi_score) / max(
scores_mx - scores_mn, 1e-10)
cr = self.cr[0] + (self.cr[1] - self.cr[0]) * (
scores_mx - xi_score[0]
) / max(scores_mx - scores_mn, 1e-10)
else:
cr = self.cr[0]
else:
@@ -132,12 +150,11 @@ def _gen(self, count=1) -> List[Configuration]:
next_config = xn
nid = self.cur
self.cur = (self.cur + 1) % self.population_size

self.nid_map[next_config] = nid
return [next_config]

def _sel(self, parent: List[Individual], sub: List[Individual]) -> List[Individual]:
if self.constraint_strategy == 'discard' and self.num_constraints > 0:
if self.constraint_strategy == "discard" and self.num_constraints > 0:
sub = [x for x in sub if x.constraints_satisfied]

for conf in sub:
@@ -153,26 +170,37 @@ def _sel(self, parent: List[Individual], sub: List[Individual]) -> List[Individual]:
for conf in sub:
if conf not in self.nid_map or self.nid_map[conf] == -1:
parent.append(conf)

parent = pareto_sort(parent)
parent = parent[:self.population_size]
parent = parent[: self.population_size]
random.shuffle(parent)

return parent

def mutate(self, config_a: Configuration, config_b: Configuration, config_c: Configuration, f: float):
def mutate(
self,
config_a: Configuration,
config_b: Configuration,
config_c: Configuration,
f: float,
):
"""
Compute A + (B - C) * f. Basically element-wise.
For ranged int/float values, the result will be clamped into [lower, upper].
For categorical/ordinal values, the values are converted to ints and the result is (mod SIZE).
e. g. in ["A", "B", "C", "D"], "D" + "B" - "A" => 3 + 1 - 0 => 4 => 0 (mod 4) => "A"
"""
new_array = config_a.get_array() + (config_b.get_array() - config_c.get_array()) * f
new_array = (
config_a.get_array() + (config_b.get_array() - config_c.get_array()) * f
)

for i, key in enumerate(self.config_space.keys()):
hp_type = self.config_space.get_hyperparameter(key)
if isinstance(hp_type, CategoricalHyperparameter) or isinstance(hp_type, OrdinalHyperparameter):
v = (round(new_array[i]) % hp_type.get_size() + hp_type.get_size()) % hp_type.get_size()
if isinstance(hp_type, CategoricalHyperparameter) or isinstance(
hp_type, OrdinalHyperparameter
):
v = (
round(new_array[i]) % hp_type.get_size() + hp_type.get_size()
) % hp_type.get_size()
new_array[i] = v
elif isinstance(hp_type, NumericalHyperparameter):
# new_array[i] = max(0, min(new_array[i], 1))
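Note: the `mutate` docstring above describes an element-wise X = A + (B - C) * f update, with numeric dimensions clamped to [lower, upper] and categorical/ordinal dimensions wrapped modulo the number of categories. Below is a minimal standalone sketch of that idea on plain vector encodings; the `categorical_sizes` argument is a hypothetical helper introduced only for this illustration and is not part of the commit.

import numpy as np

def de_mutate(a, b, c, f, categorical_sizes):
    # Element-wise a + (b - c) * f over vector encodings of configurations.
    # categorical_sizes maps dimension index -> number of categories; those
    # dimensions wrap around modulo the size, all other dimensions are
    # treated as numeric in [0, 1] and clamped.
    new = a + (b - c) * f
    for i in range(len(new)):
        if i in categorical_sizes:
            size = categorical_sizes[i]
            # e.g. with 4 categories: 3 + 1 - 0 = 4 -> 0 (mod 4)
            new[i] = (round(new[i]) % size + size) % size
        else:
            new[i] = min(max(new[i], 0.0), 1.0)
    return new

def de_crossover(target, mutant, cr, rng=None):
    # Binomial crossover: take each component from the mutant with probability cr.
    rng = rng or np.random.default_rng()
    mask = rng.random(len(target)) < cr
    return np.where(mask, mutant, target)

a = np.array([0.2, 0.8, 3.0])
b = np.array([0.5, 0.1, 1.0])
c = np.array([0.9, 0.4, 0.0])
mutant = de_mutate(a, b, c, f=0.5, categorical_sizes={2: 4})
trial = de_crossover(a, mutant, cr=0.9)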
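`_gen` also adapts the crossover rate when `cr` is given as a range (dynamic cr): individuals scoring better than the population average are pushed toward the upper end of the range. A self-contained restatement of that interpolation, assuming minimisation and a (cr_min, cr_max) range; the function name is introduced only for illustration.

def dynamic_cr(score, population_scores, cr_range=(0.1, 0.9)):
    # Better-than-average individuals (lower score) get a larger crossover
    # rate; the best score in the population is mapped to cr_max.
    cr_min, cr_max = cr_range
    avg = sum(population_scores) / len(population_scores)
    if score < avg:
        mx, mn = max(population_scores), min(population_scores)
        return cr_min + (cr_max - cr_min) * (mx - score) / max(mx - mn, 1e-10)
    return cr_min

print(dynamic_cr(0.2, [0.2, 0.5, 0.9]))  # 0.9: the best score gets cr_max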

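`_sel` truncates the population with `pareto_sort`, imported from base_ea_advisor and not shown in this diff. As a rough idea of the Pareto-dominance ordering it presumably performs, here is an illustrative sketch; the helpers below are assumptions, not the actual OpenBox implementation.

def dominates(perf_a, perf_b):
    # perf_a dominates perf_b if it is no worse in every objective and
    # strictly better in at least one (minimisation).
    return all(a <= b for a, b in zip(perf_a, perf_b)) and any(
        a < b for a, b in zip(perf_a, perf_b)
    )

def pareto_sort_sketch(individuals):
    # Order individuals so that less-dominated ones come first.
    ranks = [
        sum(dominates(other["perf"], ind["perf"]) for other in individuals)
        for ind in individuals
    ]
    return [ind for _, ind in sorted(zip(ranks, individuals), key=lambda t: t[0])]

population = [{"perf": [0.3, 0.7]}, {"perf": [0.2, 0.9]}, {"perf": [0.4, 0.8]}]
print(pareto_sort_sketch(population)[0])  # a non-dominated individual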