Skip to content

Commit 4a47fbd

Browse files
Modify script for optimizing hparams
1 parent 4f6f7b2 commit 4a47fbd

File tree

1 file changed

+11
-8
lines changed

1 file changed

+11
-8
lines changed

steps/optimize_hparams.py

Lines changed: 11 additions & 8 deletions
Original file line number · Diff line number · Diff line change
@@ -1,15 +1,14 @@
11
from clearml import Task
2-
from clearml.automation import (
3-
DiscreteParameterRange,
4-
GridSearch,
5-
HyperParameterOptimizer,
6-
)
2+
from clearml.automation import DiscreteParameterRange, HyperParameterOptimizer
3+
from clearml.automation.optuna import OptimizerOptuna
74
from dotenv import load_dotenv
85

96
load_dotenv()
107

118

129
def main():
10+
task_training = Task.get_task(project_name="MyProject", task_name="Training")
11+
1312
Task.init(
1413
project_name="MyProjectHPO",
1514
task_name="Automatic Hyper-Parameter Optimization",
@@ -18,7 +17,7 @@ def main():
1817
)
1918

2019
optimizer = HyperParameterOptimizer(
21-
base_task_id=Task.get_task(project_name="MyProject", task_name="Training").id,
20+
base_task_id=task_training.id,
2221
hyper_parameters=[
2322
# DiscreteParameterRange("Hydra/model.n_factors", values=[8, 16]),
2423
# DiscreteParameterRange("Hydra/model.n_layers", values=[3, 4]),
@@ -35,10 +34,14 @@ def main():
3534
DiscreteParameterRange("Hydra/trainer.max_epochs", values=[100]),
3635
],
3736
objective_metric_title="val",
38-
objective_metric_series="ndcg",
37+
objective_metric_series="auroc",
3938
objective_metric_sign="max_global",
40-
optimizer_class=GridSearch,
39+
optimizer_class=OptimizerOptuna,
4140
max_number_of_concurrent_tasks=1,
41+
save_top_k_tasks_only=-1,
42+
total_max_jobs=10,
43+
min_iteration_per_job=10 * 857,
44+
max_iteration_per_job=50 * 857,
4245
# execution_queue="default",
4346
spawn_project="MyProjectHPO",
4447
)

0 commit comments

Comments (0)