Merged

Commits (47)
bcea10c  Create ray.tune.suggest.create.create_scheduler (sumanthratna, Aug 31, 2020)
6a1e20a  Update __init__.py (sumanthratna, Aug 31, 2020)
ede4d92  Resolve conflict in __init__.py (sumanthratna, Aug 31, 2020)
03c734f  Merge branch 'master' into shim-instantiation (sumanthratna, Aug 31, 2020)
51dc4cf  Create ray.tune.schedulers.create.create_scheduler (sumanthratna, Aug 31, 2020)
672cb4e  Update __init__.py (sumanthratna, Aug 31, 2020)
f4309a1  Move create_scheduler to tune.schedulers.__init__ (sumanthratna, Sep 3, 2020)
c81917c  Move create_searcher to tune.suggest.__init__ (sumanthratna, Sep 3, 2020)
522d847  Delete tune.suggest.create (sumanthratna, Sep 3, 2020)
ffdeb6f  Delete tune.schedulers.create (sumanthratna, Sep 3, 2020)
4d930c6  Update imports for shim functions in tune.__init__ (sumanthratna, Sep 3, 2020)
bad8f27  Remove shim from tune.suggest.__init__.__all__ (sumanthratna, Sep 3, 2020)
6b2187f  Remove shim from tune.schedulers.__init__.__all__ (sumanthratna, Sep 3, 2020)
d640e76  Add ShimCreationTest (sumanthratna, Sep 3, 2020)
4b3d8fd  Move ShimCreationTest to test_api (sumanthratna, Sep 3, 2020)
ce981ce  Delete test_shim.py (sumanthratna, Sep 3, 2020)
98f2f40  Add docstring for ray.tune.create_scheduler (sumanthratna, Sep 3, 2020)
4d7804b  Add docstring to ray.tune.create_searcher (sumanthratna, Sep 3, 2020)
5406526  Fix typo in ray.tune.create_scheduler docstring (sumanthratna, Sep 3, 2020)
e104320  Fix lint errors in tune.schedulers.__init__ (sumanthratna, Sep 3, 2020)
1a51377  Fix lint errors in tune.suggest.__init__ (sumanthratna, Sep 3, 2020)
f633f28  Fix lint errors in tune.suggest.__init__ (sumanthratna, Sep 3, 2020)
1dab52e  Fix lint errors in tune.schedulers.__init__ (sumanthratna, Sep 3, 2020)
8e8c2cd  Fix imports in test_api (sumanthratna, Sep 3, 2020)
e3c6587  Fix lint errors in test_api (sumanthratna, Sep 3, 2020)
cc34803  Fix kwargs in create_searcher (sumanthratna, Sep 3, 2020)
a293ca0  Fix kwargs in create_scheduler (sumanthratna, Sep 3, 2020)
ef2d2c5  Merge branch 'master' into shim-instantiation (sumanthratna, Sep 3, 2020)
e060672  Update use-case in docs in tune.create_scheduler (sumanthratna, Sep 3, 2020)
4713981  Update use-case in docs in tune.create_searcher (sumanthratna, Sep 3, 2020)
40a2226  Merge branch 'master' into shim-instantiation (sumanthratna, Sep 3, 2020)
87cc7af  Remove duplicate pytest run from test_api (sumanthratna, Sep 3, 2020)
4a459ca  Add check to create_searcher (sumanthratna, Sep 3, 2020)
2510fc5  Add check to create_scheduler (sumanthratna, Sep 3, 2020)
6e59daf  lint (richardliaw, Sep 3, 2020)
368fb0b  Compare types of instances in test_api (sumanthratna, Sep 4, 2020)
6dcf1f0  Add tune.create_searcher to docs (sumanthratna, Sep 4, 2020)
2fcd854  Fix doc build (sumanthratna, Sep 4, 2020)
ca29c83  Fix tests (sumanthratna, Sep 4, 2020)
c0c530a  Add tune.create_scheduler to docs (sumanthratna, Sep 4, 2020)
c71fb6e  Fix tests (sumanthratna, Sep 4, 2020)
158b9e0  Fix lint errors (sumanthratna, Sep 4, 2020)
d891dda  Update Ax search for master (sumanthratna, Sep 4, 2020)
fdaa371  Fix metric kwarg for Ax in test_api (sumanthratna, Sep 5, 2020)
7cb3ff0  Fix doc build (sumanthratna, Sep 5, 2020)
667f064  Fix HyperOptSearch import in test_api (sumanthratna, Sep 5, 2020)
ae29d2e  Fix HyperOptSearch import in create_searcher (sumanthratna, Sep 5, 2020)
6 changes: 6 additions & 0 deletions doc/source/tune/api_docs/schedulers.rst
@@ -196,3 +196,9 @@ TrialScheduler

.. autoclass:: ray.tune.schedulers.TrialScheduler
    :members:

Shim Instantiation (tune.create_scheduler)
------------------------------------------
There is also a shim function that constructs a scheduler from the provided string. This is useful when the scheduler you want to use changes often (e.g., when the scheduler is specified via a CLI option or a config file).

.. automethod:: ray.tune.create_scheduler
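
For a concrete picture of the CLI use case this section mentions, here is a minimal sketch; the --scheduler flag, the toy train_fn, and the "loss" metric are invented for illustration:

import argparse
from ray import tune


def train_fn(config):
    # Toy trainable: report a fake loss so the scheduler has a metric to act on.
    tune.report(loss=config["x"] ** 2)


parser = argparse.ArgumentParser()
parser.add_argument("--scheduler", default="async_hyperband")
args = parser.parse_args()

# Build whichever scheduler the flag names; scheduler-specific keyword
# arguments would also be forwarded through create_scheduler.
scheduler = tune.create_scheduler(args.scheduler, metric="loss", mode="min")
tune.run(train_fn, config={"x": tune.uniform(-1, 1)}, scheduler=scheduler)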
9 changes: 9 additions & 0 deletions doc/source/tune/api_docs/suggestion.rst
@@ -75,6 +75,7 @@ Tune also provides helpful utilities to use with Search Algorithms:

* :ref:`repeater`: Support for running each *sampled hyperparameter* with multiple random seeds.
* :ref:`limiter`: Limits the number of concurrent trials when running optimization.
* :ref:`shim`: Allows creation of the search algorithm object given a string.

Saving and Restoring
--------------------
@@ -264,3 +265,11 @@ If you are interested in implementing or contributing a new Search Algorithm, pr
    :members:
    :private-members:
    :show-inheritance:

.. _shim:

Shim Instantiation (tune.create_searcher)
-----------------------------------------
There is also a shim function that constructs a search algorithm from the provided string. This is useful when the search algorithm you want to use changes often (e.g., when it is specified via a CLI option or a config file).

.. automethod:: ray.tune.create_searcher
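
As a rough sketch of the config-file use case, the tuning.yaml file and its search_alg key below are invented for illustration; PyYAML and the selected searcher's library are assumed to be installed:

import yaml
from ray import tune

# e.g. tuning.yaml contains a single line:  search_alg: hyperopt
with open("tuning.yaml") as f:
    cfg = yaml.safe_load(f)

# The string is resolved to a class lazily inside create_searcher, so only
# the library backing the selected searcher (here hyperopt) must be installed.
searcher = tune.create_searcher(cfg["search_alg"], metric="loss", mode="min")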
4 changes: 3 additions & 1 deletion python/ray/tune/__init__.py
@@ -15,6 +15,8 @@
from ray.tune.sample import (function, sample_from, uniform, quniform, choice,
                             randint, qrandint, randn, qrandn, loguniform,
                             qloguniform)
from ray.tune.suggest import create_searcher
from ray.tune.schedulers import create_scheduler

__all__ = [
"Trainable", "DurableTrainable", "TuneError", "grid_search",
@@ -24,5 +26,5 @@
"loguniform", "qloguniform", "ExperimentAnalysis", "Analysis",
"CLIReporter", "JupyterNotebookReporter", "ProgressReporter", "report",
"get_trial_dir", "get_trial_name", "get_trial_id", "make_checkpoint_dir",
"save_checkpoint", "checkpoint_dir"
"save_checkpoint", "checkpoint_dir", "create_searcher", "create_scheduler"
]
63 changes: 63 additions & 0 deletions python/ray/tune/schedulers/__init__.py
@@ -7,6 +7,69 @@
from ray.tune.schedulers.pbt import (PopulationBasedTraining,
                                     PopulationBasedTrainingReplay)


def create_scheduler(
        scheduler,
        metric="episode_reward_mean",
        mode="max",
        **kwargs,
):
    """Instantiate a scheduler based on the given string.

    This is useful for swapping between different schedulers.

    Args:
        scheduler (str): The scheduler to use.
        metric (str): The training result objective value attribute. Stopping
            procedures will use this attribute.
        mode (str): One of {min, max}. Determines whether objective is
            minimizing or maximizing the metric attribute.
        **kwargs: Additional parameters.
            These keyword arguments will be passed to the initialization
            function of the chosen class.

    Returns:
        ray.tune.schedulers.trial_scheduler.TrialScheduler: The scheduler.

    Example:
        >>> scheduler = tune.create_scheduler('pbt')
    """

    def _import_async_hyperband_scheduler():
        from ray.tune.schedulers import AsyncHyperBandScheduler
        return AsyncHyperBandScheduler

    def _import_median_stopping_rule_scheduler():
        from ray.tune.schedulers import MedianStoppingRule
        return MedianStoppingRule

    def _import_hyperband_scheduler():
        from ray.tune.schedulers import HyperBandScheduler
        return HyperBandScheduler

    def _import_hb_bohb_scheduler():
        from ray.tune.schedulers import HyperBandForBOHB
        return HyperBandForBOHB

    def _import_pbt_scheduler():
        from ray.tune.schedulers import PopulationBasedTraining
        return PopulationBasedTraining

    SCHEDULER_IMPORT = {
        "async_hyperband": _import_async_hyperband_scheduler,
        "median_stopping_rule": _import_median_stopping_rule_scheduler,
        "hyperband": _import_hyperband_scheduler,
        "hb_bohb": _import_hb_bohb_scheduler,
        "pbt": _import_pbt_scheduler,
    }
    scheduler = scheduler.lower()
    if scheduler not in SCHEDULER_IMPORT:
        raise ValueError(
            f"Scheduler must be one of {list(SCHEDULER_IMPORT)}. "
            f"Got: {scheduler}")

    SchedulerClass = SCHEDULER_IMPORT[scheduler]()
    return SchedulerClass(metric=metric, mode=mode, **kwargs)


__all__ = [
"TrialScheduler", "HyperBandScheduler", "AsyncHyperBandScheduler",
"ASHAScheduler", "MedianStoppingRule", "FIFOScheduler",
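
Note that extra keyword arguments flow straight through the shim to the chosen class. A small usage sketch follows; max_t is an AsyncHyperBandScheduler constructor argument, and the values are illustrative:

from ray import tune

# Equivalent to AsyncHyperBandScheduler(metric="loss", mode="min", max_t=100);
# the kwarg max_t is simply forwarded to the scheduler's constructor.
scheduler = tune.create_scheduler(
    "async_hyperband", metric="loss", mode="min", max_t=100)

# Unknown strings fail fast with a ValueError listing the valid choices.
tune.create_scheduler("no_such_scheduler")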
88 changes: 88 additions & 0 deletions python/ray/tune/suggest/__init__.py
@@ -5,6 +5,94 @@
from ray.tune.suggest.variant_generator import grid_search
from ray.tune.suggest.repeater import Repeater


def create_searcher(
        search_alg,
        metric="episode_reward_mean",
        mode="max",
        **kwargs,
):
    """Instantiate a search algorithm based on the given string.

    This is useful for swapping between different search algorithms.

    Args:
        search_alg (str): The search algorithm to use.
        metric (str): The training result objective value attribute. Stopping
            procedures will use this attribute.
        mode (str): One of {min, max}. Determines whether objective is
            minimizing or maximizing the metric attribute.
        **kwargs: Additional parameters.
            These keyword arguments will be passed to the initialization
            function of the chosen class.

    Returns:
        ray.tune.suggest.Searcher: The search algorithm.

    Example:
        >>> search_alg = tune.create_searcher('ax')
    """

    def _import_ax_search():
        from ray.tune.suggest.ax import AxSearch
        return AxSearch

    def _import_dragonfly_search():
        from ray.tune.suggest.dragonfly import DragonflySearch
        return DragonflySearch

    def _import_skopt_search():
        from ray.tune.suggest.skopt import SkOptSearch
        return SkOptSearch

    def _import_hyperopt_search():
        from ray.tune.suggest.hyperopt import HyperOptSearch
        return HyperOptSearch

    def _import_bayesopt_search():
        from ray.tune.suggest.bayesopt import BayesOptSearch
        return BayesOptSearch

    def _import_bohb_search():
        from ray.tune.suggest.bohb import TuneBOHB
        return TuneBOHB

    def _import_nevergrad_search():
        from ray.tune.suggest.nevergrad import NevergradSearch
        return NevergradSearch

    def _import_optuna_search():
        from ray.tune.suggest.optuna import OptunaSearch
        return OptunaSearch

    def _import_zoopt_search():
        from ray.tune.suggest.zoopt import ZOOptSearch
        return ZOOptSearch

    def _import_sigopt_search():
        from ray.tune.suggest.sigopt import SigOptSearch
        return SigOptSearch

    SEARCH_ALG_IMPORT = {
        "ax": _import_ax_search,
        "dragonfly": _import_dragonfly_search,
        "skopt": _import_skopt_search,
        "hyperopt": _import_hyperopt_search,
        "bayesopt": _import_bayesopt_search,
        "bohb": _import_bohb_search,
        "nevergrad": _import_nevergrad_search,
        "optuna": _import_optuna_search,
        "zoopt": _import_zoopt_search,
        "sigopt": _import_sigopt_search,
    }
    search_alg = search_alg.lower()
    if search_alg not in SEARCH_ALG_IMPORT:
        raise ValueError(
            f"Search alg must be one of {list(SEARCH_ALG_IMPORT)}. "
            f"Got: {search_alg}")

    SearcherClass = SEARCH_ALG_IMPORT[search_alg]()
    return SearcherClass(metric=metric, mode=mode, **kwargs)


__all__ = [
"SearchAlgorithm", "Searcher", "BasicVariantGenerator", "SearchGenerator",
"grid_search", "Repeater", "ConcurrencyLimiter"
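
Because each searcher is imported inside its _import_* helper, only the library backing the requested string has to be installed. A short sketch of how the returned object composes with existing utilities; n_initial_points is assumed here to be a HyperOptSearch constructor argument, and hyperopt must be installed:

from ray import tune
from ray.tune.suggest import ConcurrencyLimiter

# Forwarded kwargs reach HyperOptSearch's constructor.
searcher = tune.create_searcher(
    "hyperopt", metric="loss", mode="min", n_initial_points=10)

# The shim returns an ordinary Searcher, so the usual wrappers still apply.
searcher = ConcurrencyLimiter(searcher, max_concurrent=4)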
29 changes: 28 additions & 1 deletion python/ray/tune/tests/test_api.py
@@ -14,7 +14,8 @@
from ray.tune import (DurableTrainable, Trainable, TuneError, Stopper,
                      EarlyStopping)
from ray.tune import register_env, register_trainable, run_experiments
from ray.tune.schedulers import TrialScheduler, FIFOScheduler
from ray.tune.schedulers import (TrialScheduler, FIFOScheduler,
                                 AsyncHyperBandScheduler)
from ray.tune.trial import Trial
from ray.tune.result import (TIMESTEPS_TOTAL, DONE, HOSTNAME, NODE_IP, PID,
                             EPISODES_TOTAL, TRAINING_ITERATION,
@@ -24,6 +25,8 @@
from ray.tune.experiment import Experiment
from ray.tune.resources import Resources
from ray.tune.suggest import grid_search
from ray.tune.suggest.hyperopt import HyperOptSearch
from ray.tune.suggest.ax import AxSearch
from ray.tune.suggest._mock import _MockSuggestionAlgorithm
from ray.tune.utils import (flatten_dict, get_pinned_object,
                            pin_in_object_store)
@@ -1105,6 +1108,30 @@ def train(config, reporter):
        self.assertIn("LOG_STDERR", content)


class ShimCreationTest(unittest.TestCase):
    def testCreateScheduler(self):
        kwargs = {"metric": "metric_foo", "mode": "min"}

        scheduler = "async_hyperband"
        shim_scheduler = tune.create_scheduler(scheduler, **kwargs)
        real_scheduler = AsyncHyperBandScheduler(**kwargs)
        assert type(shim_scheduler) is type(real_scheduler)

    def testCreateSearcher(self):
        kwargs = {"metric": "metric_foo", "mode": "min"}

        searcher_ax = "ax"
        shim_searcher_ax = tune.create_searcher(searcher_ax, **kwargs)
        real_searcher_ax = AxSearch(space=[], **kwargs)
        assert type(shim_searcher_ax) is type(real_searcher_ax)

        searcher_hyperopt = "hyperopt"
        shim_searcher_hyperopt = tune.create_searcher(searcher_hyperopt,
                                                      **kwargs)
        real_searcher_hyperopt = HyperOptSearch({}, **kwargs)
        assert type(shim_searcher_hyperopt) is type(real_searcher_hyperopt)


if __name__ == "__main__":
    import pytest
    import sys
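
To exercise just the new tests locally (assuming a dev install of Ray with ax-platform and hyperopt available, since both searchers get instantiated):

pytest python/ray/tune/tests/test_api.py::ShimCreationTest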