diff --git a/golem/core/tuning/hyperopt_tuner.py b/golem/core/tuning/hyperopt_tuner.py
index e954aee4..cf6471a6 100644
--- a/golem/core/tuning/hyperopt_tuner.py
+++ b/golem/core/tuning/hyperopt_tuner.py
@@ -1,11 +1,12 @@
 from abc import ABC
 from datetime import timedelta
-from typing import Optional, Callable, Dict
+from typing import Callable, Dict, Optional

 import numpy as np
-from hyperopt import tpe, hp
+from hyperopt import hp, tpe
 from hyperopt.early_stop import no_progress_loss
-from hyperopt.pyll import Apply
+from hyperopt.pyll import Apply, scope
+from hyperopt.pyll_utils import validate_label

 from golem.core.adapter import BaseOptimizationAdapter
 from golem.core.log import default_log
@@ -14,6 +15,14 @@ from golem.core.tuning.tuner_interface import BaseTuner


+@validate_label
+def hp_randint(label, *args, **kwargs):
+    return scope.int(scope.hyperopt_param(label, scope.randint(*args, **kwargs)))
+
+
+hp.randint = hp_randint
+
+
 class HyperoptTuner(BaseTuner, ABC):
     """Base class for hyperparameters optimization based on hyperopt library
diff --git a/test/unit/tuning/test_tuning.py b/test/unit/tuning/test_tuning.py
index 08addcd4..e8f4bc87 100644
--- a/test/unit/tuning/test_tuning.py
+++ b/test/unit/tuning/test_tuning.py
@@ -9,9 +9,9 @@
 from golem.core.tuning.search_space import SearchSpace
 from golem.core.tuning.sequential import SequentialTuner
 from golem.core.tuning.simultaneous import SimultaneousTuner
-from test.unit.mocks.common_mocks import MockAdapter, MockObjectiveEvaluate, mock_graph_with_params, \
-    opt_graph_with_params, MockNode, MockDomainStructure
-from test.unit.utils import ParamsSumMetric, ParamsProductMetric
+from test.unit.mocks.common_mocks import (MockAdapter, MockDomainStructure, MockNode, MockObjectiveEvaluate,
+                                          mock_graph_with_params, opt_graph_with_params)
+from test.unit.utils import ParamsProductMetric, ParamsSumMetric


 def not_tunable_mock_graph():
@@ -40,8 +40,7 @@ def search_space():
                 'hyperopt-dist': hp.choice,
                 'sampling-scope': [['A', 'B', 'C']],
                 'type': 'categorical'
-            }
-        },
+            }},
         'b': {
             'b1': {
                 'hyperopt-dist': hp.choice,
                 'sampling-scope': [['A', 'B']],
                 'type': 'categorical'
             },
             'b2': {
                 'hyperopt-dist': hp.uniform,
@@ -53,6 +52,11 @@ def search_space():
                 'sampling-scope': [0.05, 1.0],
                 'type': 'continuous'
             },
+            'b3': {
+                'hyperopt-dist': hp.randint,
+                'sampling-scope': [1, 1000],
+                'type': 'discrete'
+            }
         },
         'e': {
             'e1': {
@@ -133,3 +137,16 @@ def test_multi_objective_tuning(search_space, tuner_cls, init_graph, adapter, ob
     final_metric = obj_eval.evaluate(graph)
     assert final_metric is not None
     assert not init_metric.dominates(final_metric)
+
+
+@pytest.mark.parametrize('tuner_cls', [SequentialTuner, SimultaneousTuner])
+def test_hyperopt_returns_native_types(search_space, tuner_cls):
+    obj_eval = MockObjectiveEvaluate(Objective({'sum_metric': ParamsSumMetric.get_value}))
+    adapter = MockAdapter()
+    graph = opt_graph_with_params()
+    tuner = tuner_cls(obj_eval, search_space, adapter, iterations=20)
+    tuned_graph = tuner.tune(deepcopy(graph))
+    for node in tuned_graph.nodes:
+        for param, val in node.parameters.items():
+            assert val.__class__.__module__ != 'numpy', (f'The parameter "{param}" should not be a numpy type. '
+                                                         f'Got "{type(val)}".')
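
Note (not part of the patch itself): the point of overriding `hp.randint` is that the stock hyperopt distribution can hand back numpy integer types, which then leak into graph parameters; the new `test_hyperopt_returns_native_types` test guards against exactly that. Below is a minimal sketch of the intended effect, assuming the patched module has been imported so the `hp.randint` override is active:

```python
import golem.core.tuning.hyperopt_tuner  # noqa: F401  -- importing applies the hp.randint override
from hyperopt import fmin, hp, tpe

# Sample an integer from [1, 1000), mirroring the 'b3' entry in the test search space.
space = {'n': hp.randint('n', 1, 1000)}


def objective(params):
    # With the override, the draw is passed through scope.int, so it arrives here
    # as a plain Python int rather than a numpy integer type.
    assert type(params['n']).__module__ != 'numpy'
    return 0.0


fmin(objective, space, algo=tpe.suggest, max_evals=5)
```

Without the `scope.int` wrapper the sampled value would typically be a numpy integer, which is what the assertion in the new test is meant to catch.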