
[ERROR] ActiveHyperparameterNotSetError #509

Open
jwpark0921 opened this issue Sep 5, 2024 · 0 comments
jwpark0921 commented Sep 5, 2024

OS: Ubuntu 24.04 (x86_64)
Python version: 3.8.19
Install method: pip install autoPyTorch[forecasting]

Code (In a nutshell example)

from autoPyTorch.api.tabular_classification import TabularClassificationTask

# data and metric imports
import sklearn.model_selection
import sklearn.datasets
import sklearn.metrics
X, y = sklearn.datasets.load_digits(return_X_y=True)
X_train, X_test, y_train, y_test = \
        sklearn.model_selection.train_test_split(X, y, random_state=1)

# initialise Auto-PyTorch api
api = TabularClassificationTask()

# Search for an ensemble of machine learning algorithms
api.search(
    X_train=X_train,
    y_train=y_train,
    X_test=X_test,
    y_test=y_test,
    optimize_metric='accuracy',
    total_walltime_limit=300,
    func_eval_time_limit_secs=50
)

# Calculate test accuracy
y_pred = api.predict(X_test)
score = api.score(y_pred, y_test)
print("Accuracy score", score)

Error message

ActiveHyperparameterNotSetError Traceback (most recent call last)
Cell In[5], line 1
----> 1 api.search(
2 X_train=X_train,
3 y_train=y_train,
4 X_test=X_test,
5 y_test=y_test,
6 dataset_name='digits',
7 optimize_metric='accuracy',
8 total_walltime_limit=300,
9 func_eval_time_limit_secs=50
10 )

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/api/tabular_classification.py:450, in TabularClassificationTask.search(self, optimize_metric, X_train, y_train, X_test, y_test, dataset_name, feat_types, budget_type, min_budget, max_budget, total_walltime_limit, func_eval_time_limit_secs, enable_traditional_pipeline, memory_limit, smac_scenario_args, get_smac_object_callback, all_supported_metrics, precision, disable_file_output, load_models, portfolio_selection, dataset_compression)
437 self._dataset_compression = get_dataset_compression_mapping(memory_limit, dataset_compression)
439 self.dataset, self.input_validator = self._get_dataset_input_validator(
440 X_train=X_train,
441 y_train=y_train,
(...)
447 dataset_compression=self._dataset_compression,
448 feat_types=feat_types)
--> 450 return self._search(
451 dataset=self.dataset,
452 optimize_metric=optimize_metric,
453 budget_type=budget_type,
454 min_budget=min_budget,
455 max_budget=max_budget,
456 total_walltime_limit=total_walltime_limit,
457 func_eval_time_limit_secs=func_eval_time_limit_secs,
458 enable_traditional_pipeline=enable_traditional_pipeline,
459 memory_limit=memory_limit,
460 smac_scenario_args=smac_scenario_args,
461 get_smac_object_callback=get_smac_object_callback,
462 all_supported_metrics=all_supported_metrics,
463 precision=precision,
464 disable_file_output=disable_file_output,
465 load_models=load_models,
466 portfolio_selection=portfolio_selection,
467 )

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/api/base_task.py:1011, in BaseTask._search(self, optimize_metric, dataset, budget_type, min_budget, max_budget, total_walltime_limit, func_eval_time_limit_secs, enable_traditional_pipeline, memory_limit, smac_scenario_args, get_smac_object_callback, tae_func, all_supported_metrics, precision, disable_file_output, load_models, portfolio_selection, dask_client, **kwargs)
1009 # Initialise information needed for the experiment
1010 experiment_task_name: str = 'runSearch'
-> 1011 dataset_requirements = get_dataset_requirements(
1012 info=dataset.get_required_dataset_info(),
1013 include=self.include_components,
1014 exclude=self.exclude_components,
1015 search_space_updates=self.search_space_updates)
1016 self._dataset_requirements = dataset_requirements
1017 dataset_properties = dataset.get_dataset_properties(dataset_requirements)

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/utils/pipeline.py:75, in get_dataset_requirements(info, include, exclude, search_space_updates)
69 return _get_regression_dataset_requirements(info,
70 include if include is not None else {},
71 exclude if exclude is not None else {},
72 search_space_updates=search_space_updates
73 )
74 elif task_type in CLASSIFICATION_TASKS:
---> 75 return _get_classification_dataset_requirements(info,
76 include if include is not None else {},
77 exclude if exclude is not None else {},
78 search_space_updates=search_space_updates
79 )
80 else:
81 if not forecasting_dependencies_installed:

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/utils/pipeline.py:116, in _get_classification_dataset_requirements(info, include, exclude, search_space_updates)
113 task_type = STRING_TO_TASK_TYPES[info['task_type']]
115 if task_type in TABULAR_TASKS:
--> 116 return TabularClassificationPipeline(
117 dataset_properties=info,
118 include=include, exclude=exclude,
119 search_space_updates=search_space_updates
120 ).get_dataset_requirements()
121 elif task_type in IMAGE_TASKS:
122 return ImageClassificationPipeline(
123 dataset_properties=info,
124 include=include, exclude=exclude,
125 search_space_updates=search_space_updates
126 ).get_dataset_requirements()

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/pipeline/tabular_classification.py:128, in TabularClassificationPipeline.__init__(self, config, steps, dataset_properties, include, exclude, random_state, init_params, search_space_updates)
117 def __init__(
118 self,
119 config: Optional[Configuration] = None,
(...)
126 search_space_updates: Optional[HyperparameterSearchSpaceUpdates] = None
127 ):
--> 128 super().__init__(
129 config, steps, dataset_properties, include, exclude,
130 random_state, init_params, search_space_updates)
132 # Because a pipeline is passed to a worker, we need to honor the random seed
133 # in this context. A tabular classification pipeline will implement a torch
134 # model, so we comply with https://pytorch.org/docs/stable/notes/randomness.html
135 torch.manual_seed(self.random_state.get_state()[1][0])

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/pipeline/base_pipeline.py:104, in BasePipeline.__init__(self, config, steps, dataset_properties, include, exclude, random_state, init_params, search_space_updates)
101 else:
102 self.steps = steps
--> 104 self.config_space = self.get_hyperparameter_search_space()
106 if config is None:
107 self.config = self.config_space.get_default_configuration()

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/pipeline/base_pipeline.py:262, in BasePipeline.get_hyperparameter_search_space(self)
256 """Return the configuration space for the CASH problem.
257
258 Returns:
259 ConfigurationSpace: The configuration space describing the Pipeline.
260 """
261 if not hasattr(self, 'config_space') or self.config_space is None:
--> 262 self.config_space = self._get_hyperparameter_search_space(
263 dataset_properties=self.dataset_properties,
264 include=self.include,
265 exclude=self.exclude,
266 )
267 return self.config_space

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/pipeline/tabular_classification.py:260, in TabularClassificationPipeline._get_hyperparameter_search_space(self, dataset_properties, include, exclude)
256 dataset_properties['target_type'] = 'tabular_classification'
257 # get the base search space given this
258 # dataset properties. Then overwrite with custom
259 # classification requirements
--> 260 cs = self._get_base_search_space(
261 cs=cs, dataset_properties=dataset_properties,
262 exclude=exclude, include=include, pipeline=self.steps)
264 # Here we add custom code, that is used to ensure valid configurations, For example
265 # Learned Entity Embedding is only valid when encoder is one hot encoder
266 if 'network_embedding' in self.named_steps.keys() and 'encoder' in self.named_steps.keys():

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/pipeline/base_pipeline.py:376, in BasePipeline._get_base_search_space(self, cs, dataset_properties, include, exclude, pipeline)
367 choices_list = find_active_choices(
368 matches, node, node_idx,
369 dataset_properties,
370 include.get(node_name),
371 exclude.get(node_name)
372 )
374 # ignore type check here as mypy is not able to infer
375 # that isinstance(node, autoPyTorchChooice) = True
--> 376 sub_config_space = node.get_hyperparameter_search_space( # type: ignore[call-arg]
377 dataset_properties, include=choices_list)
378 cs.add_configuration_space(node_name, sub_config_space)
380 # If the node is a choice, we have to figure out which of its
381 # choices are actually legal choices
382 else:

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/autoPyTorch/pipeline/components/preprocessing/tabular_preprocessing/feature_preprocessing/__init__.py:213, in FeatureProprocessorChoice.get_hyperparameter_search_space(self, dataset_properties, default, include, exclude)
210 config_space = available_[name].get_hyperparameter_search_space(dataset_properties, # type:ignore
211 **updates)
212 parent_hyperparameter = {'parent': preprocessor, 'value': name}
--> 213 cs.add_configuration_space(name, config_space,
214 parent_hyperparameter=parent_hyperparameter)
216 self.configuration_space = cs
217 self.dataset_properties = dataset_properties

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/ConfigSpace/configuration_space.py:446, in ConfigurationSpace.add_configuration_space(self, prefix, configuration_space, delimiter, parent_hyperparameter)
443 conditions_to_add.append(EqualsCondition(param, parent, value))
445 if len(conditions_to_add) > 0:
--> 446 self.add(conditions_to_add)
448 return configuration_space

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/ConfigSpace/configuration_space.py:351, in ConfigurationSpace.add(self, *args)
348 self._dag.add_forbidden(forbidden)
350 self._len = len(self._dag.nodes)
--> 351 self._check_default_configuration()

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/ConfigSpace/configuration_space.py:915, in ConfigurationSpace._check_default_configuration(self)
912 else:
913 values[hp_name] = hp.default_value
--> 915 return Configuration(self, values=values)

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/ConfigSpace/configuration.py:126, in Configuration.__init__(self, configuration_space, values, vector, allow_inactive_with_values, origin, config_id)
123 self._values[key] = value
124 self._vector[i] = hp.to_vector(value) # type: ignore
--> 126 self.check_valid_configuration()
128 elif vector is not None:
129 if not isinstance(vector, np.ndarray):

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/ConfigSpace/configuration.py:160, in Configuration.check_valid_configuration(self)
153 """Check if the object is a valid.
154
155 Raises:
156 ValueError: If configuration is not valid.
157 """
158 from ConfigSpace.util import check_configuration
--> 160 check_configuration(
161 self.config_space,
162 self._vector,
163 allow_inactive_with_values=self.allow_inactive_with_values,
164 )

File ~/miniconda3/envs/auto-pytorch/lib/python3.8/site-packages/ConfigSpace/util.py:593, in check_configuration(space, vector, allow_inactive_with_values)
591 hp_name = space.at[idx]
592 hp = space[hp_name]
--> 593 raise ActiveHyperparameterNotSetError(hp)
595 for hp_idx, hp_node in cnode.unique_children.items():
596 # OPTIM: We bypass the larger safety checking of the hp and access
597 # the underlying transformer directly
598 transformer = hp_node.hp._transformer

ActiveHyperparameterNotSetError: Hyperparameter is active but has no value set.
SelectRatesClassification:mode, Type: Categorical, Choices: {fpr, fdr, fwe, percentile}, Default: fpr
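The frames above run through ConfigurationSpace.add(self, *args) and Configuration.check_valid_configuration, which correspond to the ConfigSpace 1.x API; if the installed autoPyTorch release predates that API, a version mismatch is a plausible (though unconfirmed) cause, and pinning an older ConfigSpace before reinstalling, e.g. pip install "ConfigSpace<1.0", may be worth trying. The error itself means a conditional hyperparameter became active while no value was supplied for it. A minimal sketch of that failure mode, with hypothetical names modelled on the SelectRatesClassification:mode parameter from the message, and assuming a ConfigSpace 1.x install:

# Minimal sketch (hypothetical names, assumes ConfigSpace 1.x) of the
# "active but has no value set" failure mode from the traceback above.
from ConfigSpace import (CategoricalHyperparameter, Configuration,
                         ConfigurationSpace, EqualsCondition)

cs = ConfigurationSpace()
preprocessor = CategoricalHyperparameter(
    "feature_preprocessor", ["SelectRatesClassification", "NoPreprocessor"])
mode = CategoricalHyperparameter("mode", ["fpr", "fdr", "fwe", "percentile"])
cs.add(preprocessor, mode)
# "mode" is only active when SelectRatesClassification is selected.
cs.add(EqualsCondition(mode, preprocessor, "SelectRatesClassification"))

# The parent value activates "mode", but no value is supplied for it, so
# ConfigSpace raises ActiveHyperparameterNotSetError, as in the report.
Configuration(cs, values={"feature_preprocessor": "SelectRatesClassification"})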
