uniqueness node and some fixes
kasyanovse committed Dec 14, 2023
1 parent 94cc8e7 commit 16a7a3e
Showing 5 changed files with 132 additions and 48 deletions.
63 changes: 27 additions & 36 deletions golem/core/optimisers/common_optimizer/common_optimizer.py
@@ -1,11 +1,15 @@
 from abc import abstractmethod
 from collections import deque
 from dataclasses import dataclass
+from multiprocessing.managers import DictProxy
 from typing import Optional, Sequence, Union, Any, Dict, List, Callable
+from multiprocessing import Manager

 from golem.core.dag.graph import Graph
+from golem.core.optimisers.common_optimizer.common_optimizer_params import CommonOptimizerParameters
 from golem.core.optimisers.common_optimizer.node import Node
 from golem.core.optimisers.common_optimizer.old_config import default_stages
+from golem.core.optimisers.common_optimizer.runner import ParallelRunner
 from golem.core.optimisers.common_optimizer.scheme import Scheme
 from golem.core.optimisers.common_optimizer.stage import Stage
 from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum
@@ -20,24 +24,6 @@
from golem.core.optimisers.timer import OptimisationTimer


-@dataclass
-class CommonOptimizerParameters:
-    """ This class is for storing a state of all CommonOptimizer parameters """
-    _run: bool
-    generations: List[PopulationT]
-    population: PopulationT
-    new_population: PopulationT
-    evaluator: Any
-
-    objective: Objective
-    initial_graphs: Sequence[Union[Graph, Any]]
-    requirements: OptimizationParameters
-    graph_generation_params: GraphGenerationParams
-    graph_optimizer_params: AlgorithmParameters
-    stages: List[Stage]
-    history: OptHistory
-

class CommonOptimizer(PopulationalOptimizer):
"""
This class implements a common optimizer.
@@ -60,40 +46,41 @@ def __init__(self,
initial_graphs: Optional[Sequence[Union[Graph, Any]]] = None,
requirements: Optional[OptimizationParameters] = None,
graph_generation_params: Optional[GraphGenerationParams] = None,
-                 graph_optimizer_params: Optional[AlgorithmParameters] = None,
-                 stages: Optional[List[Stage]] = None):
+                 graph_optimizer_params: Optional[AlgorithmParameters] = None):

super().__init__(objective=objective,
initial_graphs=initial_graphs,
requirements=requirements,
graph_generation_params=graph_generation_params,
graph_optimizer_params=graph_optimizer_params)

-        self.stages = default_stages
-        self._run = True

+        self.graph_optimizer_params.stages = self.graph_optimizer_params.stages or default_stages
self.requirements.max_depth = 100 # TODO fix
self.graph_optimizer_params.pop_size = graph_optimizer_params.pop_size
self.initial_individuals = [Individual(graph, metadata=requirements.static_individual_metadata)
for graph in self.initial_graphs]

+        # registry of the descriptive_id of every graph seen so far; when any
+        # stage runs in parallel, build it via Manager so that processes share it
+        stages = self.graph_optimizer_params.stages
+        if any(stage.runner is ParallelRunner for stage in stages):
+            self.graph_optimizer_params.repo = Manager().dict({'empty': True})
+        else:
+            self.graph_optimizer_params.repo = dict()

@property
def parameters(self):
-        return CommonOptimizerParameters(**{attr: getattr(self, attr) for attr in self.__parameters_attrs})
+        return CommonOptimizerParameters(**{attr: getattr(self, attr)
+                                            for attr in self.__parameters_attrs
+                                            if hasattr(self, attr)})

@parameters.setter
def parameters(self, parameters: CommonOptimizerParameters):
if not isinstance(parameters, CommonOptimizerParameters):
raise TypeError(f"parameters should be `CommonOptimizerParameters`, got {type(parameters)} instead")
for attr in self.__parameters_allowed_to_change:
-            if hasattr(parameters, attr):
+            if hasattr(parameters, attr) and hasattr(self, attr):
setattr(self, attr, getattr(parameters, attr))

-    # def optimise(self, objective: ObjectiveFunction):
-    #     while self._run:
-    #         for i_stage in range(len(self.stages)):
-    #             self.parameters = self.stages[i_stage].run(self.parameters)
-
def _initial_population(self, evaluator: EvaluationOperator):
""" Initializes the initial population """
self._update_population(evaluator(self.initial_individuals), 'initial_assumptions')
@@ -102,9 +89,13 @@ def _evolve_population(self, evaluator: EvaluationOperator) -> PopulationT:
""" Method realizing full evolution cycle """
# TODO add iterations limit

-        self.evaluator = evaluator
-        self.new_population = None
+        parameters = self.parameters
+        parameters.evaluator = evaluator
+
+        for i_stage in range(len(self.graph_optimizer_params.stages)):
+            parameters = self.graph_optimizer_params.stages[i_stage].run(parameters)
+
+        # TODO define: do we need this?
+        self.parameters = parameters

-        for i_stage in range(len(self.stages)):
-            self.parameters = self.stages[i_stage].run(self.parameters)
-        return self.new_population
+        return parameters.new_population
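
For context, a minimal sketch of why the repo dictionary is built through multiprocessing.Manager when any stage uses ParallelRunner (the worker function here is a stand-in, not part of this commit). A Manager-backed DictProxy is shared between processes, whereas a plain dict would be copied into each worker and its updates would never reach the other side:

from multiprocessing import Manager, Process

def worker(repo, descriptive_id):
    # writes go through the DictProxy and are visible to every
    # process holding the same proxy
    repo[descriptive_id] = True

if __name__ == '__main__':
    repo = Manager().dict({'empty': True})
    p = Process(target=worker, args=(repo, 'graph_1'))
    p.start()
    p.join()
    print('graph_1' in repo)  # True: the child's write crossed the process boundary
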
39 changes: 39 additions & 0 deletions golem/core/optimisers/common_optimizer/common_optimizer_params.py
@@ -0,0 +1,39 @@
from abc import abstractmethod
from collections import deque
from dataclasses import dataclass
from typing import Optional, Sequence, Union, Any, Dict, List, Callable

from golem.core.dag.graph import Graph
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.old_config import default_stages
from golem.core.optimisers.common_optimizer.scheme import Scheme
from golem.core.optimisers.common_optimizer.stage import Stage
from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum
from golem.core.optimisers.genetic.operators.operator import PopulationT, EvaluationOperator
from golem.core.optimisers.graph import OptGraph
from golem.core.optimisers.objective import Objective, ObjectiveFunction
from golem.core.optimisers.opt_history_objects.individual import Individual
from golem.core.optimisers.opt_history_objects.opt_history import OptHistory
from golem.core.optimisers.optimization_parameters import OptimizationParameters
from golem.core.optimisers.optimizer import GraphOptimizer, GraphGenerationParams, AlgorithmParameters
from golem.core.optimisers.populational_optimizer import PopulationalOptimizer
from golem.core.optimisers.timer import OptimisationTimer


@dataclass
class CommonOptimizerParameters(AlgorithmParameters):
""" This class is for storing a state of all CommonOptimizer parameters """
generations: Optional[List[PopulationT]] = None
population: Optional[PopulationT] = None

objective: Optional[Objective] = None
initial_graphs: Optional[Sequence[Union[Graph, Any]]] = None
requirements: Optional[OptimizationParameters] = None
graph_generation_params: Optional[GraphGenerationParams] = None
graph_optimizer_params: Optional[AlgorithmParameters] = None
history: Optional[OptHistory] = None

repo: Optional[Dict[str, bool]] = None
new_population: Optional[PopulationT] = None
evaluator: Optional[Any] = None
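
Every field above defaults to None, which is what lets CommonOptimizer's parameters property snapshot only the attributes it actually has (the hasattr guards in common_optimizer.py). A toy sketch of that snapshot/restore round trip, using made-up field names rather than the real GOLEM classes:

from dataclasses import dataclass, fields
from typing import List, Optional

@dataclass
class Params:
    pop_size: Optional[int] = None
    new_population: Optional[List] = None

class Optimizer:
    _attrs = [f.name for f in fields(Params)]

    def __init__(self):
        self.pop_size = 10  # new_population is intentionally absent

    @property
    def parameters(self) -> Params:
        # attributes the optimizer lacks fall back to the dataclass defaults
        return Params(**{a: getattr(self, a) for a in self._attrs if hasattr(self, a)})

    @parameters.setter
    def parameters(self, params: Params):
        for a in self._attrs:
            if hasattr(params, a) and hasattr(self, a):
                setattr(self, a, getattr(params, a))

opt = Optimizer()
snapshot = opt.parameters  # Params(pop_size=10, new_population=None)
snapshot.pop_size = 20
opt.parameters = snapshot  # only pop_size is written back
print(opt.pop_size)        # 20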

28 changes: 17 additions & 11 deletions golem/core/optimisers/common_optimizer/nodes/mutation.py
@@ -1,35 +1,41 @@
+from copy import deepcopy
 from random import choice
+
+from typing import Optional, Dict, Callable

 from golem.core.optimisers.common_optimizer.node import Node
 from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum, TaskMixin
-from golem.core.optimisers.genetic.operators.base_mutations import base_mutations_repo
-from golem.core.optimisers.genetic.operators.mutation import Mutation as OldMutation
+from golem.core.optimisers.genetic.operators.base_mutations import base_mutations_repo, MutationTypesEnum
+from golem.core.optimisers.graph import OptGraph
from golem.core.optimisers.opt_history_objects.individual import Individual
from golem.core.optimisers.opt_history_objects.parent_operator import ParentOperator


class MutationTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
-        self.generation = parameters.generations[-1]
-        self.mutation_tries = parameters.graph_optimizer_params.mutation_tries
-        self.verifier = parameters.graph_generation_params.verifier
-        self.static_individual_metadata = parameters.requirements.static_individual_metadata
+        self.graph_optimizer_params = parameters.graph_optimizer_params
+        self.requirements = parameters.requirements
+        self.graph_generation_params = parameters.graph_generation_params
+        self.generation = parameters.population

def update_parameters(self, parameters: 'CommonOptimizerParameters'):
return super().update_parameters(parameters)


class Mutation(Node):
-    def __init__(self, name: str):
+    def __init__(self,
+                 name: str = 'mutation',
+                 mutations_repo: Optional[Dict[str, Callable[[OptGraph], OptGraph]]] = None):
         self.name = name
-        self._mutations_repo = base_mutations_repo
+        self._mutations_repo = mutations_repo or base_mutations_repo

def __call__(self, task: MutationTask):
individual = task.generation[0]
        mutation_type, mutation = choice(list(self._mutations_repo.items()))
-        for _ in range(task.mutation_tries):
-            new_graph = mutation(individual.graph.copy())
-            if task.verifier(new_graph):
+        for _ in range(2):
+            new_graph = mutation(deepcopy(individual.graph))
+            if task.graph_generation_params.verifier(new_graph):
parent_operator = ParentOperator(type_='mutation',
operators=mutation_type,
parent_individuals=individual)
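
One detail in __call__ above: random.choice needs a sequence that supports indexing, so the items view of the mutation repository must be materialised with list(...) first (as in the corrected line). A quick standalone check with a stand-in repository:

from random import choice

# stand-in repository: mutation name -> graph transform
mutations_repo = {'add_node': lambda g: g, 'drop_node': lambda g: g}

# choice(mutations_repo.items()) would raise TypeError: dict_items is not indexable
mutation_type, mutation = choice(list(mutations_repo.items()))
print(mutation_type)  # e.g. 'add_node'
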
47 changes: 47 additions & 0 deletions golem/core/optimisers/common_optimizer/nodes/uniqueness_check.py
@@ -0,0 +1,47 @@
from copy import deepcopy
from random import choice

from typing import Optional, Dict, Callable

from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum, TaskMixin
from golem.core.optimisers.genetic.operators.base_mutations import base_mutations_repo, MutationTypesEnum
from golem.core.optimisers.graph import OptGraph
from golem.core.optimisers.opt_history_objects.individual import Individual
from golem.core.optimisers.opt_history_objects.parent_operator import ParentOperator


class UniquenessCheckTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
self.generation = parameters.population
self.repo = parameters.graph_optimizer_params.repo

def update_parameters(self, parameters: 'CommonOptimizerParameters'):
return super().update_parameters(parameters)


class UniquenessCheck(Node):
def __init__(self, name: str = 'uniqueness_check'):
self.name = name

def __call__(self, task: UniquenessCheckTask):
to_add = dict()
new_inds = []
for ind in task.generation:
descriptive_id = ind.graph.descriptive_id
if descriptive_id not in task.repo and descriptive_id not in to_add:
to_add[descriptive_id] = True
new_inds.append(ind)

if to_add:
task.repo.update(to_add)

if new_inds:
task.generation = new_inds
task.status = TaskStatusEnum.SUCCESS
return [task]

task.status = TaskStatusEnum.FAIL
return [task]
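
A toy run of the dedup logic in UniquenessCheck.__call__, with stand-in graph and individual classes (assumed shapes, not the real GOLEM types). Duplicates inside the generation and graphs already recorded in repo are both filtered out, and new ids reach repo in a single batch update, which also keeps traffic to a Manager-backed proxy dict low:

class FakeGraph:
    def __init__(self, descriptive_id):
        self.descriptive_id = descriptive_id

class FakeInd:
    def __init__(self, descriptive_id):
        self.graph = FakeGraph(descriptive_id)

repo = {'a': True}  # 'a' was registered by an earlier generation
generation = [FakeInd('a'), FakeInd('b'), FakeInd('b')]

to_add, new_inds = {}, []
for ind in generation:
    d_id = ind.graph.descriptive_id
    if d_id not in repo and d_id not in to_add:
        to_add[d_id] = True
        new_inds.append(ind)
repo.update(to_add)

print([ind.graph.descriptive_id for ind in new_inds])  # ['b']: one copy survives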

3 changes: 2 additions & 1 deletion golem/core/optimisers/common_optimizer/temp/adaptive.py
@@ -25,7 +25,7 @@ def __init__(self, parameters: 'CommonOptimizerParameters'):
self.graph_generation_params = parameters.graph_generation_params
self.population = parameters.population
self.generations = parameters.generations
-        self.stages = parameters.stages
+        self.stages = parameters.graph_optimizer_params.stages

def update_parameters(self, parameters: 'CommonOptimizerParameters') -> 'CommonOptimizerParameters':
"""
@@ -37,6 +37,7 @@ def update_parameters(self, parameters: 'CommonOptimizerParameters') -> 'CommonOptimizerParameters':
parameters.population = self.population
parameters.requirements = self.requirements
parameters.graph_optimizer_params = self.graph_optimizer_params
+        parameters.graph_optimizer_params.stages = self.stages
parameters.graph_generation_params = self.graph_generation_params
return parameters
