66import os
77import typing as tp
88from nevergrad .functions import gym as nevergrad_gym
9- from nevergrad .functions import ExperimentFunction
109from .xpbase import registry
1110from .xpbase import create_seed_generator
1211from .xpbase import Experiment
@@ -266,8 +265,6 @@ def gym_problem(
266265 seed : tp .Optional [int ] = None ,
267266 specific_problem : str = "LANM" ,
268267 conformant : bool = False ,
269- compiler_gym_pb_index : tp .Optional [int ] = None ,
270- limited_compiler_gym : tp .Optional [bool ] = None ,
271268 big_noise : bool = False ,
272269 multi_scale : bool = False ,
273270 greedy_bias : bool = False ,
@@ -280,53 +277,35 @@ def gym_problem(
280277 name of the problem we are working on
281278 conformant: bool
282279 do we focus on conformant planning
283- compiler_gym_pb_index: integer
284- index of Uris problem we work on.
285- limited_compiler_gym: boolean
286- for compiler-gyn, whether we use a restricted action space
287280 big_noise: bool
288281 do we switch to specific optimizers, dedicated to noise
289282 multi_scale: boolean
290283 do we check multiple scales
291284 greedy_bias: boolean
292285 do we use greedy reward estimates for biasing the decisions.
293286 """
294- if "directcompilergym" in specific_problem :
295- assert compiler_gym_pb_index is not None
296- assert limited_compiler_gym is not None
297- assert compiler_gym_pb_index >= 0
298- assert greedy_bias is False
299- funcs : tp .List [ExperimentFunction ] = [
300- nevergrad_gym .CompilerGym (
301- compiler_gym_pb_index = compiler_gym_pb_index , limited_compiler_gym = limited_compiler_gym
287+ if conformant :
288+ funcs = [
289+ nevergrad_gym .GymMulti (
290+ specific_problem ,
291+ control = "conformant" ,
292+ neural_factor = None ,
302293 )
303294 ]
304295 else :
305- if conformant :
306- funcs = [
307- nevergrad_gym .GymMulti (
308- specific_problem ,
309- control = "conformant" ,
310- limited_compiler_gym = limited_compiler_gym ,
311- compiler_gym_pb_index = compiler_gym_pb_index ,
312- neural_factor = None ,
313- )
314- ]
315- else :
316- funcs = [
317- nevergrad_gym .GymMulti (
318- specific_problem ,
319- control = control ,
320- neural_factor = 1 if control != "linear" else None ,
321- limited_compiler_gym = limited_compiler_gym ,
322- optimization_scale = scale ,
323- greedy_bias = greedy_bias ,
324- )
325- for scale in ([- 6 , - 4 , - 2 , 0 ] if multi_scale else [0 ])
326- for control in (
327- ["deep_neural" , "semideep_neural" , "neural" , "linear" ] if not greedy_bias else ["neural" ]
328- )
329- ]
296+ funcs = [
297+ nevergrad_gym .GymMulti (
298+ specific_problem ,
299+ control = control ,
300+ neural_factor = 1 if control != "linear" else None ,
301+ optimization_scale = scale ,
302+ greedy_bias = greedy_bias ,
303+ )
304+ for scale in ([- 6 , - 4 , - 2 , 0 ] if multi_scale else [0 ])
305+ for control in (
306+ ["deep_neural" , "semideep_neural" , "neural" , "linear" ] if not greedy_bias else ["neural" ]
307+ )
308+ ]
330309 seedg = create_seed_generator (seed )
331310 optims = [
332311 "TwoPointsDE" ,
@@ -346,9 +325,7 @@ def gym_problem(
346325 ]
347326 if "stochastic" in specific_problem :
348327 optims = ["DiagonalCMA" , "TBPSA" ] if big_noise else ["DiagonalCMA" ]
349- if "directcompilergym" in specific_problem or (
350- specific_problem == "EnergySavingsGym-v0" and conformant
351- ): # Do this for all conformant discrete ?
328+ if specific_problem == "EnergySavingsGym-v0" and conformant : # Do this for all conformant discrete ?
352329 optims = [
353330 "DiscreteOnePlusOne" ,
354331 "PortfolioDiscreteOnePlusOne" ,
@@ -380,28 +357,6 @@ def gym_problem(
380357 yield xp
381358
382359
@registry.register
def limited_stochastic_compiler_gym(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
    """CompilerGym, stochastic setting: optimize a net driving compilation, restricted action space."""
    # Delegate to the shared gym_problem factory with the limited action space enabled.
    return gym_problem(seed, specific_problem="stochasticcompilergym", limited_compiler_gym=True)
387-
388-
@registry.register
def multiscale_limited_stochastic_compiler_gym(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
    """CompilerGym, stochastic setting: optimize a net driving compilation, several optimization scales."""
    # Same limited problem as limited_stochastic_compiler_gym, but sweeping multiple scales.
    return gym_problem(
        seed,
        specific_problem="stochasticcompilergym",
        limited_compiler_gym=True,
        multi_scale=True,
    )
395-
396-
@registry.register
def unlimited_hardcore_stochastic_compiler_gym(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
    """CompilerGym, stochastic setting: full action space plus noise-dedicated optimizers."""
    # big_noise=True switches the optimizer list to noise-robust methods.
    return gym_problem(
        seed,
        specific_problem="stochasticcompilergym",
        limited_compiler_gym=False,
        big_noise=True,
    )
403-
404-
405360@registry .register
406361def conformant_planning (seed : tp .Optional [int ] = None ) -> tp .Iterator [Experiment ]:
407362 # You might modify this problem by specifying an environment variable.
@@ -425,53 +380,3 @@ def neuro_planning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
425380 conformant = False ,
426381 big_noise = False ,
427382 )
428-
429-
@registry.register
def limited_hardcore_stochastic_compiler_gym(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
    """CompilerGym, stochastic setting: restricted action space plus noise-dedicated optimizers."""
    # Combination of the limited action space with the big-noise optimizer set.
    return gym_problem(
        seed,
        specific_problem="stochasticcompilergym",
        limited_compiler_gym=True,
        big_noise=True,
    )
436-
437-
@registry.register
def greedy_limited_stochastic_compiler_gym(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
    """CompilerGym, stochastic setting: restricted action space with greedy reward biasing."""
    # greedy_bias=True biases decisions with greedy reward estimates.
    return gym_problem(
        seed,
        specific_problem="stochasticcompilergym",
        limited_compiler_gym=True,
        greedy_bias=True,
    )
444-
445-
@registry.register
def unlimited_stochastic_compiler_gym(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
    """CompilerGym, stochastic setting: optimize a net driving compilation, full action space."""
    # Unrestricted counterpart of limited_stochastic_compiler_gym.
    return gym_problem(seed, specific_problem="stochasticcompilergym", limited_compiler_gym=False)
450-
451-
@registry.register
def unlimited_direct_problems23_compiler_gym(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
    """CompilerGym, direct setting: all 23 Uris problems, full action space."""
    # One direct (non-neural) experiment stream per problem index.
    for pb_index in range(23):
        yield from gym_problem(
            seed,
            specific_problem="directcompilergym" + str(pb_index),
            compiler_gym_pb_index=pb_index,
            limited_compiler_gym=False,
        )
464-
465-
@registry.register
def limited_direct_problems23_compiler_gym(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
    """CompilerGym, direct setting: all 23 Uris problems, restricted action space."""
    # Same sweep as unlimited_direct_problems23_compiler_gym, limited action space.
    for pb_index in range(23):
        yield from gym_problem(
            seed,
            specific_problem="directcompilergym" + str(pb_index),
            compiler_gym_pb_index=pb_index,
            limited_compiler_gym=True,
        )
0 commit comments