Fix hyperopt.hp.randint (#250)
* monkey patch hyperopt.hp.randint

* add hp_randint to tuning tests; add test of hyperopt parameter types

* pep8

* Update test/unit/tuning/test_tuning.py

Co-authored-by: Sergey <[email protected]>

---------

Co-authored-by: Sergey <[email protected]>
MorrisNein and kasyanovse authored Dec 12, 2023
1 parent 085ca3d commit bcfad53
Showing 2 changed files with 34 additions and 8 deletions.
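Background (not part of the commit): hyperopt's stock `hp.randint` draws its samples through numpy's random machinery, so tuned parameter values can come back as numpy types rather than plain Python ints; this commit works around that by monkey-patching `hp.randint` inside GOLEM. A minimal sketch of the behaviour, assuming hyperopt is installed; the sampled type noted in the comments is representative, not guaranteed:

```python
# Sketch only: illustrates why hp.randint needed patching; exact sampled types
# can vary across hyperopt/numpy versions.
from hyperopt import hp
from hyperopt.pyll import stochastic

space = hp.randint('k', 1000)   # stock hp.randint
value = stochastic.sample(space)
print(type(value))              # a numpy type, not a plain Python int

# After `import golem.core.tuning.hyperopt_tuner`, hp.randint points to the
# patched hp_randint introduced below, which wraps the draw in scope.int so
# the sampled value is converted to a built-in int.
```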
15 changes: 12 additions & 3 deletions golem/core/tuning/hyperopt_tuner.py
@@ -1,11 +1,12 @@
 from abc import ABC
 from datetime import timedelta
-from typing import Optional, Callable, Dict
+from typing import Callable, Dict, Optional
 
 import numpy as np
-from hyperopt import tpe, hp
+from hyperopt import hp, tpe
 from hyperopt.early_stop import no_progress_loss
-from hyperopt.pyll import Apply
+from hyperopt.pyll import Apply, scope
+from hyperopt.pyll_utils import validate_label
 
 from golem.core.adapter import BaseOptimizationAdapter
 from golem.core.log import default_log
@@ -14,6 +15,14 @@
 from golem.core.tuning.tuner_interface import BaseTuner
 
 
+@validate_label
+def hp_randint(label, *args, **kwargs):
+    return scope.int(scope.hyperopt_param(label, scope.randint(*args, **kwargs)))
+
+
+hp.randint = hp_randint
+
+
 class HyperoptTuner(BaseTuner, ABC):
     """Base class for hyperparameters optimization based on hyperopt library
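A hedged usage sketch (ours, not part of the commit): because the patch is applied when `golem.core.tuning.hyperopt_tuner` is imported, a search-space entry that references `hp.randint` afterwards resolves to the patched `hp_randint`. The fragment mirrors the `'b3'` entry added to the test fixture below; the variable name `discrete_params` is illustrative only.

```python
# Illustrative fragment in the same shape as the 'b3' fixture entry added in
# test_tuning.py; importing hyperopt_tuner first applies the monkey patch.
import golem.core.tuning.hyperopt_tuner  # noqa: F401
from hyperopt import hp

discrete_params = {
    'b3': {
        'hyperopt-dist': hp.randint,   # resolves to the patched hp_randint
        'sampling-scope': [1, 1000],
        'type': 'discrete'
    }
}
```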
27 changes: 22 additions & 5 deletions test/unit/tuning/test_tuning.py
@@ -9,9 +9,9 @@
 from golem.core.tuning.search_space import SearchSpace
 from golem.core.tuning.sequential import SequentialTuner
 from golem.core.tuning.simultaneous import SimultaneousTuner
-from test.unit.mocks.common_mocks import MockAdapter, MockObjectiveEvaluate, mock_graph_with_params, \
-    opt_graph_with_params, MockNode, MockDomainStructure
-from test.unit.utils import ParamsSumMetric, ParamsProductMetric
+from test.unit.mocks.common_mocks import (MockAdapter, MockDomainStructure, MockNode, MockObjectiveEvaluate,
+                                          mock_graph_with_params, opt_graph_with_params)
+from test.unit.utils import ParamsProductMetric, ParamsSumMetric
 
 
 def not_tunable_mock_graph():
@@ -40,8 +40,7 @@ def search_space():
                 'hyperopt-dist': hp.choice,
                 'sampling-scope': [['A', 'B', 'C']],
                 'type': 'categorical'
-            }
-        },
+            }},
         'b': {
             'b1': {
                 'hyperopt-dist': hp.choice,
@@ -53,6 +52,11 @@
                 'sampling-scope': [0.05, 1.0],
                 'type': 'continuous'
             },
+            'b3': {
+                'hyperopt-dist': hp.randint,
+                'sampling-scope': [1, 1000],
+                'type': 'discrete'
+            }
         },
         'e': {
             'e1': {
@@ -133,3 +137,16 @@ def test_multi_objective_tuning(search_space, tuner_cls, init_graph, adapter, ob
     final_metric = obj_eval.evaluate(graph)
     assert final_metric is not None
     assert not init_metric.dominates(final_metric)
+
+
+@pytest.mark.parametrize('tuner_cls', [SequentialTuner, SimultaneousTuner])
+def test_hyperopt_returns_native_types(search_space, tuner_cls):
+    obj_eval = MockObjectiveEvaluate(Objective({'sum_metric': ParamsSumMetric.get_value}))
+    adapter = MockAdapter()
+    graph = opt_graph_with_params()
+    tuner = tuner_cls(obj_eval, search_space, adapter, iterations=20)
+    tuned_graph = tuner.tune(deepcopy(graph))
+    for node in tuned_graph.nodes:
+        for param, val in node.parameters.items():
+            assert val.__class__.__module__ != 'numpy', (f'The parameter "{param}" should not be a numpy type. '
+                                                         f'Got "{type(val)}".')
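The new test's type check relies on the fact that numpy scalars and arrays report `'numpy'` as their class's `__module__`, while built-in `int` and `float` report `'builtins'`. A standalone sketch of the same check; the `is_native` helper is ours, not part of the test:

```python
import numpy as np

def is_native(value):
    # numpy scalars/arrays live in the 'numpy' module; built-ins live in 'builtins'
    return value.__class__.__module__ != 'numpy'

assert is_native(5) and is_native(0.5)   # built-in int/float pass
assert not is_native(np.int64(5))        # numpy scalar is rejected
```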
