from abc import abstractmethod
from collections import deque
from dataclasses import dataclass
+ from multiprocessing.managers import DictProxy
from typing import Optional, Sequence, Union, Any, Dict, List, Callable
+ from multiprocessing import Manager

from golem.core.dag.graph import Graph
+ from golem.core.optimisers.common_optimizer.common_optimizer_params import CommonOptimizerParameters
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.old_config import default_stages
+ from golem.core.optimisers.common_optimizer.runner import ParallelRunner
from golem.core.optimisers.common_optimizer.scheme import Scheme
from golem.core.optimisers.common_optimizer.stage import Stage
from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum
from golem.core.optimisers.timer import OptimisationTimer


- @dataclass
- class CommonOptimizerParameters:
-     """This class is for storing a state of all CommonOptimizer parameters"""
-     _run: bool
-     generations: List[PopulationT]
-     population: PopulationT
-     new_population: PopulationT
-     evaluator: Any
-
-     objective: Objective
-     initial_graphs: Sequence[Union[Graph, Any]]
-     requirements: OptimizationParameters
-     graph_generation_params: GraphGenerationParams
-     graph_optimizer_params: AlgorithmParameters
-     stages: List[Stage]
-     history: OptHistory
-
-
class CommonOptimizer(PopulationalOptimizer):
    """
    This class implements a common optimizer.
@@ -60,40 +46,41 @@ def __init__(self,
                 initial_graphs: Optional[Sequence[Union[Graph, Any]]] = None,
                 requirements: Optional[OptimizationParameters] = None,
                 graph_generation_params: Optional[GraphGenerationParams] = None,
-                  graph_optimizer_params: Optional[AlgorithmParameters] = None,
-                  stages: Optional[List[Stage]] = None):
+                  graph_optimizer_params: Optional[AlgorithmParameters] = None):

        super().__init__(objective=objective,
                         initial_graphs=initial_graphs,
                         requirements=requirements,
                         graph_generation_params=graph_generation_params,
                         graph_optimizer_params=graph_optimizer_params)

-         self.stages = default_stages
-         self._run = True
-
+         self.graph_optimizer_params.stages = self.graph_optimizer_params.stages or default_stages
        self.requirements.max_depth = 100  # TODO fix
-         self.graph_optimizer_params.pop_size = graph_optimizer_params.pop_size
        self.initial_individuals = [Individual(graph, metadata=requirements.static_individual_metadata)
                                    for graph in self.initial_graphs]

+         # repository keyed by the descriptive_id of every graph created so far;
+         # when any stage uses a parallel runner it is built via multiprocessing.Manager so it can be shared between processes
+         stages = self.graph_optimizer_params.stages
+         if any(stage.runner is ParallelRunner for stage in stages):
+             self.graph_optimizer_params.repo = Manager().dict({'empty': True})
+         else:
+             self.graph_optimizer_params.repo = dict()
+
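Side note on the repository above: Manager().dict() returns a DictProxy that lives in a separate manager process, so it can be handed to worker processes and stays consistent between them, while a plain dict is enough when every stage runs sequentially. A minimal standalone sketch of that idea, assuming a hypothetical register_graph worker and seen_graphs repository (these names are illustrative, not part of this PR):

    from multiprocessing import Manager, Pool

    def register_graph(args):
        # record the descriptive_id of a graph in the shared repository;
        # returns False if some worker has already registered this graph
        descriptive_id, seen_graphs = args
        if descriptive_id in seen_graphs:
            return False
        seen_graphs[descriptive_id] = True  # the proxy forwards this update to the manager process
        return True

    if __name__ == '__main__':
        seen_graphs = Manager().dict()  # DictProxy shared between processes
        ids = ['g1', 'g2', 'g1', 'g3', 'g2']
        with Pool(processes=2) as pool:
            flags = pool.map(register_graph, [(graph_id, seen_graphs) for graph_id in ids])
        print(dict(seen_graphs))  # {'g1': True, 'g2': True, 'g3': True}

The check-then-set above is not atomic, so strict deduplication would also need a lock; the sketch only shows how the proxy dict is shared between workers.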
    @property
    def parameters(self):
-         return CommonOptimizerParameters(**{attr: getattr(self, attr) for attr in self.__parameters_attrs})
+         return CommonOptimizerParameters(**{attr: getattr(self, attr)
+                                             for attr in self.__parameters_attrs
+                                             if hasattr(self, attr)})

    @parameters.setter
    def parameters(self, parameters: CommonOptimizerParameters):
        if not isinstance(parameters, CommonOptimizerParameters):
            raise TypeError(f"parameters should be `CommonOptimizerParameters`, got {type(parameters)} instead")
        for attr in self.__parameters_allowed_to_change:
-             if hasattr(parameters, attr):
+             if hasattr(parameters, attr) and hasattr(self, attr):
                setattr(self, attr, getattr(parameters, attr))

-     # def optimise(self, objective: ObjectiveFunction):
-     #     while self._run:
-     #         for i_stage in range(len(self.stages)):
-     #             self.parameters = self.stages[i_stage].run(self.parameters)
-
    def _initial_population(self, evaluator: EvaluationOperator):
        """Initializes the initial population"""
        self._update_population(evaluator(self.initial_individuals), 'initial_assumptions')
@@ -102,9 +89,13 @@ def _evolve_population(self, evaluator: EvaluationOperator) -> PopulationT:
        """Method realizing full evolution cycle"""
        # TODO add iterations limit

-         self.evaluator = evaluator
-         self.new_population = None
+         parameters = self.parameters
+         parameters.evaluator = evaluator
+
+         for i_stage in range(len(self.graph_optimizer_params.stages)):
+             parameters = self.graph_optimizer_params.stages[i_stage].run(parameters)
+
+         # TODO define: do we need this?
+         self.parameters = parameters

-         for i_stage in range(len(self.stages)):
-             self.parameters = self.stages[i_stage].run(self.parameters)
-         return self.new_population
+         return parameters.new_population
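Taken together, the reshaped _evolve_population is a pipeline: it snapshots the optimizer state into a parameters object, lets every stage transform that object in turn, writes the result back, and returns the new population. A toy sketch of that control flow with simplified stand-in classes (ToyParameters and ToyStage are illustrative, not the real golem API):

    from dataclasses import dataclass, field
    from typing import Callable, List, Optional

    @dataclass
    class ToyParameters:
        # simplified stand-in for CommonOptimizerParameters: one mutable state object
        population: List[int] = field(default_factory=list)
        new_population: List[int] = field(default_factory=list)
        evaluator: Optional[Callable[[int], int]] = None

    class ToyStage:
        # simplified stand-in for Stage: consumes parameters, returns updated parameters
        def __init__(self, operation: Callable[[ToyParameters], ToyParameters]):
            self.operation = operation

        def run(self, parameters: ToyParameters) -> ToyParameters:
            return self.operation(parameters)

    def mutate(params: ToyParameters) -> ToyParameters:
        params.new_population = [x + 1 for x in params.population]
        return params

    def evaluate(params: ToyParameters) -> ToyParameters:
        params.new_population = [params.evaluator(x) for x in params.new_population]
        return params

    stages = [ToyStage(mutate), ToyStage(evaluate)]
    parameters = ToyParameters(population=[1, 2, 3], evaluator=lambda x: x * 10)

    # same control flow as the new _evolve_population: each stage transforms the state in order
    for i_stage in range(len(stages)):
        parameters = stages[i_stage].run(parameters)

    print(parameters.new_population)  # [20, 30, 40]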