
Commit 16a7a3e

uniqueness node and some fixes
1 parent 94cc8e7 commit 16a7a3e

5 files changed: +132 -48 lines changed
Lines changed: 27 additions & 36 deletions
@@ -1,11 +1,15 @@
 from abc import abstractmethod
 from collections import deque
 from dataclasses import dataclass
+from multiprocessing.managers import DictProxy
 from typing import Optional, Sequence, Union, Any, Dict, List, Callable
+from multiprocessing import Manager
 
 from golem.core.dag.graph import Graph
+from golem.core.optimisers.common_optimizer.common_optimizer_params import CommonOptimizerParameters
 from golem.core.optimisers.common_optimizer.node import Node
 from golem.core.optimisers.common_optimizer.old_config import default_stages
+from golem.core.optimisers.common_optimizer.runner import ParallelRunner
 from golem.core.optimisers.common_optimizer.scheme import Scheme
 from golem.core.optimisers.common_optimizer.stage import Stage
 from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum
@@ -20,24 +24,6 @@
 from golem.core.optimisers.timer import OptimisationTimer
 
 
-@dataclass
-class CommonOptimizerParameters:
-    """ This class is for storing a state of all CommonOptimizer parameters """
-    _run: bool
-    generations: List[PopulationT]
-    population: PopulationT
-    new_population: PopulationT
-    evaluator: Any
-
-    objective: Objective
-    initial_graphs: Sequence[Union[Graph, Any]]
-    requirements: OptimizationParameters
-    graph_generation_params: GraphGenerationParams
-    graph_optimizer_params: AlgorithmParameters
-    stages: List[Stage]
-    history: OptHistory
-
-
 class CommonOptimizer(PopulationalOptimizer):
     """
     This class implements a common optimizer.
@@ -60,40 +46,41 @@ def __init__(self,
                  initial_graphs: Optional[Sequence[Union[Graph, Any]]] = None,
                  requirements: Optional[OptimizationParameters] = None,
                  graph_generation_params: Optional[GraphGenerationParams] = None,
-                 graph_optimizer_params: Optional[AlgorithmParameters] = None,
-                 stages: Optional[List[Stage]] = None):
+                 graph_optimizer_params: Optional[AlgorithmParameters] = None):
 
         super().__init__(objective=objective,
                          initial_graphs=initial_graphs,
                          requirements=requirements,
                          graph_generation_params=graph_generation_params,
                          graph_optimizer_params=graph_optimizer_params)
 
-        self.stages = default_stages
-        self._run = True
-
+        self.graph_optimizer_params.stages = self.graph_optimizer_params.stages or default_stages
         self.requirements.max_depth = 100  # TODO fix
-        self.graph_optimizer_params.pop_size = graph_optimizer_params.pop_size
         self.initial_individuals = [Individual(graph, metadata=requirements.static_individual_metadata)
                                     for graph in self.initial_graphs]
 
+        # create a dictionary keyed by the descriptive_id of every known graph;
+        # if any stage runs in parallel, create it via Manager so processes share it
+        stages = self.graph_optimizer_params.stages
+        if any(stage.runner is ParallelRunner for stage in stages):
+            self.graph_optimizer_params.repo = Manager().dict({'empty': True})
+        else:
+            self.graph_optimizer_params.repo = dict()
+
     @property
     def parameters(self):
-        return CommonOptimizerParameters(**{attr: getattr(self, attr) for attr in self.__parameters_attrs})
+        return CommonOptimizerParameters(**{attr: getattr(self, attr)
+                                            for attr in self.__parameters_attrs
+                                            if hasattr(self, attr)})
 
     @parameters.setter
     def parameters(self, parameters: CommonOptimizerParameters):
         if not isinstance(parameters, CommonOptimizerParameters):
             raise TypeError(f"parameters should be `CommonOptimizerParameters`, got {type(parameters)} instead")
         for attr in self.__parameters_allowed_to_change:
-            if hasattr(parameters, attr):
+            if hasattr(parameters, attr) and hasattr(self, attr):
                 setattr(self, attr, getattr(parameters, attr))
 
-    # def optimise(self, objective: ObjectiveFunction):
-    #     while self._run:
-    #         for i_stage in range(len(self.stages)):
-    #             self.parameters = self.stages[i_stage].run(self.parameters)
-
     def _initial_population(self, evaluator: EvaluationOperator):
         """ Initializes the initial population """
         self._update_population(evaluator(self.initial_individuals), 'initial_assumptions')
@@ -102,9 +89,13 @@ def _evolve_population(self, evaluator: EvaluationOperator) -> PopulationT:
         """ Method realizing full evolution cycle """
         # TODO add iterations limit
 
-        self.evaluator = evaluator
-        self.new_population = None
+        parameters = self.parameters
+        parameters.evaluator = evaluator
+
+        for i_stage in range(len(self.graph_optimizer_params.stages)):
+            parameters = self.graph_optimizer_params.stages[i_stage].run(parameters)
+
+        # TODO define: do we need this?
+        self.parameters = parameters
 
-        for i_stage in range(len(self.stages)):
-            self.parameters = self.stages[i_stage].run(self.parameters)
-        return self.new_population
+        return parameters.new_population
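Note on the repo selection above: a plain dict mutated inside a worker process is invisible to the parent and to sibling workers, which is why the constructor switches to `Manager().dict()` whenever a stage uses `ParallelRunner`. A minimal self-contained sketch of the pattern (the `make_repo` and `mark_seen` helpers are illustrative, not part of the commit):

from multiprocessing import Manager, Pool

def make_repo(parallel: bool):
    # Manager().dict() returns a DictProxy living in a manager process, so
    # writes from workers are visible everywhere; a plain dict would be
    # copied into each worker and its updates silently lost
    return Manager().dict() if parallel else dict()

def mark_seen(args):
    repo, key = args
    repo[key] = True  # for a DictProxy this write goes to the manager process

if __name__ == '__main__':
    repo = make_repo(parallel=True)
    with Pool(2) as pool:
        pool.map(mark_seen, [(repo, f'graph_{i}') for i in range(4)])
    print(sorted(repo.keys()))  # all four keys survive the worker processes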
Lines changed: 39 additions & 0 deletions
@@ -0,0 +1,39 @@
+from abc import abstractmethod
+from collections import deque
+from dataclasses import dataclass
+from typing import Optional, Sequence, Union, Any, Dict, List, Callable
+
+from golem.core.dag.graph import Graph
+from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.common_optimizer.old_config import default_stages
+from golem.core.optimisers.common_optimizer.scheme import Scheme
+from golem.core.optimisers.common_optimizer.stage import Stage
+from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum
+from golem.core.optimisers.genetic.operators.operator import PopulationT, EvaluationOperator
+from golem.core.optimisers.graph import OptGraph
+from golem.core.optimisers.objective import Objective, ObjectiveFunction
+from golem.core.optimisers.opt_history_objects.individual import Individual
+from golem.core.optimisers.opt_history_objects.opt_history import OptHistory
+from golem.core.optimisers.optimization_parameters import OptimizationParameters
+from golem.core.optimisers.optimizer import GraphOptimizer, GraphGenerationParams, AlgorithmParameters
+from golem.core.optimisers.populational_optimizer import PopulationalOptimizer
+from golem.core.optimisers.timer import OptimisationTimer
+
+
+@dataclass
+class CommonOptimizerParameters(AlgorithmParameters):
+    """ Stores the state of all CommonOptimizer parameters """
+    generations: Optional[List[PopulationT]] = None
+    population: Optional[PopulationT] = None
+
+    objective: Optional[Objective] = None
+    initial_graphs: Optional[Sequence[Union[Graph, Any]]] = None
+    requirements: Optional[OptimizationParameters] = None
+    graph_generation_params: Optional[GraphGenerationParams] = None
+    graph_optimizer_params: Optional[AlgorithmParameters] = None
+    history: Optional[OptHistory] = None
+
+    repo: Optional[Dict[str, bool]] = None
+    new_population: Optional[PopulationT] = None
+    evaluator: Optional[Any] = None
+

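Making every field optional with a None default is what allows the filtered `**{...}` construction in the `parameters` getter above: only the attributes the optimizer actually carries are passed in. A standalone sketch of that round-trip (class and attribute names here are illustrative, not the commit's):

from dataclasses import dataclass
from typing import Any, Dict, List, Optional

@dataclass
class Params:
    # every field defaults to None, so any subset of kwargs is valid
    population: Optional[List[Any]] = None
    history: Optional[Dict[str, Any]] = None

class Owner:
    _attrs = ('population', 'history', 'evaluator')  # 'evaluator' never set

    def __init__(self):
        self.population = [1, 2, 3]
        self.history = {}

    @property
    def parameters(self) -> Params:
        # mirrors the hasattr filter in CommonOptimizer.parameters
        return Params(**{a: getattr(self, a) for a in self._attrs if hasattr(self, a)})

print(Owner().parameters)  # Params(population=[1, 2, 3], history={})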
golem/core/optimisers/common_optimizer/nodes/mutation.py

Lines changed: 17 additions & 11 deletions
@@ -1,35 +1,41 @@
+from copy import deepcopy
 from random import choice
+
+from typing import Optional, Dict, Callable
+
 from golem.core.optimisers.common_optimizer.node import Node
 from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum, TaskMixin
-from golem.core.optimisers.genetic.operators.base_mutations import base_mutations_repo
-from golem.core.optimisers.genetic.operators.mutation import Mutation as OldMutation
+from golem.core.optimisers.genetic.operators.base_mutations import base_mutations_repo, MutationTypesEnum
+from golem.core.optimisers.graph import OptGraph
 from golem.core.optimisers.opt_history_objects.individual import Individual
 from golem.core.optimisers.opt_history_objects.parent_operator import ParentOperator
 
 
 class MutationTask(TaskMixin):
     def __init__(self, parameters: 'CommonOptimizerParameters'):
         super().__init__(parameters)
-        self.generation = parameters.generations[-1]
-        self.mutation_tries = parameters.graph_optimizer_params.mutation_tries
-        self.verifier = parameters.graph_generation_params.verifier
-        self.static_individual_metadata = parameters.requirements.static_individual_metadata
+        self.graph_optimizer_params = parameters.graph_optimizer_params
+        self.requirements = parameters.requirements
+        self.graph_generation_params = parameters.graph_generation_params
+        self.generation = parameters.population
 
     def update_parameters(self, parameters: 'CommonOptimizerParameters'):
         return super().update_parameters(parameters)
 
 
 class Mutation(Node):
-    def __init__(self, name: str):
+    def __init__(self,
+                 name: str = 'mutation',
+                 mutations_repo: Optional[Dict[str, Callable[[OptGraph], OptGraph]]] = None):
         self.name = name
-        self._mutations_repo = base_mutations_repo
+        self._mutations_repo = mutations_repo or base_mutations_repo
 
     def __call__(self, task: MutationTask):
         individual = task.generation[0]
         mutation_type, mutation = choice(list(self._mutations_repo.items()))
-        for _ in range(task.mutation_tries):
-            new_graph = mutation(individual.graph.copy())
-            if task.verifier(new_graph):
+        for _ in range(2):
+            new_graph = mutation(deepcopy(individual.graph))
+            if task.graph_generation_params.verifier(new_graph):
                 parent_operator = ParentOperator(type_='mutation',
                                                  operators=mutation_type,
                                                  parent_individuals=individual)
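For context, the `__call__` above draws one random mutation from the repo and gives it a fixed number of attempts to produce a graph that passes the verifier. A toy version of that retry loop (the list-based "graph", `toy_repo` and `verifier` are stand-ins, not the GOLEM API):

from copy import deepcopy
from random import choice

# stand-ins for base_mutations_repo entries: name -> graph transformer
toy_repo = {
    'append_node': lambda g: g + [len(g)],
    'drop_last': lambda g: g[:-1],
}

def verifier(graph) -> bool:
    return len(graph) > 0  # reject degenerate (empty) graphs

def mutate_with_retries(graph, tries: int = 2):
    # choice() needs a sequence, hence the list() around dict.items()
    mutation_type, mutation = choice(list(toy_repo.items()))
    for _ in range(tries):
        new_graph = mutation(deepcopy(graph))  # never mutate the parent in place
        if verifier(new_graph):
            return mutation_type, new_graph
    return None, graph  # every try failed verification; keep the parent

print(mutate_with_retries([0, 1]))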
Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
+from copy import deepcopy
+from random import choice
+
+from typing import Optional, Dict, Callable
+
+from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum, TaskMixin
+from golem.core.optimisers.genetic.operators.base_mutations import base_mutations_repo, MutationTypesEnum
+from golem.core.optimisers.graph import OptGraph
+from golem.core.optimisers.opt_history_objects.individual import Individual
+from golem.core.optimisers.opt_history_objects.parent_operator import ParentOperator
+
+
+class UniquenessCheckTask(TaskMixin):
+    def __init__(self, parameters: 'CommonOptimizerParameters'):
+        super().__init__(parameters)
+        self.generation = parameters.population
+        self.repo = parameters.graph_optimizer_params.repo
+
+    def update_parameters(self, parameters: 'CommonOptimizerParameters'):
+        return super().update_parameters(parameters)
+
+
+class UniquenessCheck(Node):
+    def __init__(self, name: str = 'uniqueness_check'):
+        self.name = name
+
+    def __call__(self, task: UniquenessCheckTask):
+        to_add = dict()
+        new_inds = []
+        for ind in task.generation:
+            descriptive_id = ind.graph.descriptive_id
+            if descriptive_id not in task.repo and descriptive_id not in to_add:
+                to_add[descriptive_id] = True
+                new_inds.append(ind)
+
+        if to_add:
+            task.repo.update(to_add)
+
+        if new_inds:
+            task.generation = new_inds
+            task.status = TaskStatusEnum.SUCCESS
+            return [task]
+
+        task.status = TaskStatusEnum.FAIL
+        return [task]
+
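The node above keeps only individuals whose graph `descriptive_id` has never been seen, staging new ids locally and writing them back in a single `update()` call, which matters when `repo` is a Manager-backed proxy with per-operation IPC cost. A toy version with plain string ids standing in for individuals:

def filter_unique(generation, repo):
    # in the commit, `generation` holds individuals and the id is
    # ind.graph.descriptive_id; plain strings keep the sketch short
    to_add, new_inds = {}, []
    for descriptive_id in generation:
        if descriptive_id not in repo and descriptive_id not in to_add:
            to_add[descriptive_id] = True
            new_inds.append(descriptive_id)
    if to_add:
        repo.update(to_add)  # one batched write instead of one per id
    return new_inds

repo = {}
print(filter_unique(['a', 'b', 'a'], repo))  # ['a', 'b'] -- duplicate dropped
print(filter_unique(['b', 'c'], repo))       # ['c'] -- 'b' already in repo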

golem/core/optimisers/common_optimizer/temp/adaptive.py

Lines changed: 2 additions & 1 deletion
@@ -25,7 +25,7 @@ def __init__(self, parameters: 'CommonOptimizerParameters'):
         self.graph_generation_params = parameters.graph_generation_params
         self.population = parameters.population
         self.generations = parameters.generations
-        self.stages = parameters.stages
+        self.stages = parameters.graph_optimizer_params.stages
 
     def update_parameters(self, parameters: 'CommonOptimizerParameters') -> 'CommonOptimizerParameters':
         """
@@ -37,6 +37,7 @@ def update_parameters(self, parameters: 'CommonOptimizerParameters') -> 'CommonO
         parameters.population = self.population
         parameters.requirements = self.requirements
         parameters.graph_optimizer_params = self.graph_optimizer_params
+        parameters.graph_optimizer_params.stages = self.stages
         parameters.graph_generation_params = self.graph_generation_params
         return parameters
 