mirror of
https://github.com/vale981/ray
synced 2025-03-06 02:21:39 -05:00
[minor][tune] update pbt docs (#5420)
parent b1e010feec
commit cff72d1a54
2 changed files with 3 additions and 3 deletions
@@ -97,7 +97,7 @@ class RayTrialExecutor(TrialExecutor):
             trial.runner = existing_runner
             if not self.reset_trial(trial, trial.config, trial.experiment_tag):
                 raise AbortTrialExecution(
-                    "Trial runner reuse requires reset_trial() to be "
+                    "Trainable runner reuse requires reset_config() to be "
                     "implemented and return True.")
             return existing_runner
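The corrected message points at the Trainable API: actor reuse (enabled with `tune.run(..., reuse_actors=True)`) only works when the Trainable overrides `reset_config()` and returns True. Below is a minimal sketch of such a Trainable, using the `_setup`/`_train` method names of that era; the class name, hyperparameters, and toy objective are illustrative, not from the patch:

import random

from ray import tune


class MyTrainable(tune.Trainable):
    """Illustrative Trainable that supports actor reuse."""

    def _setup(self, config):
        self.lr = config["lr"]
        self.timestep = 0

    def _train(self):
        self.timestep += 1
        # Toy objective so the sketch is runnable end to end.
        return {"mean_loss": 1.0 / (self.timestep * self.lr)}

    def reset_config(self, new_config):
        # Reapply new hyperparameters in place instead of restarting
        # the actor. Returning True signals success; the base class
        # returns False, which triggers the AbortTrialExecution above.
        self.lr = new_config["lr"]
        self.config = new_config
        return True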
@@ -109,7 +109,7 @@ class PopulationBasedTraining(FIFOScheduler):
     This Tune PBT implementation considers all trials added as part of the
     PBT population. If the number of trials exceeds the cluster capacity,
     they will be time-multiplexed as to balance training progress across the
-    population.
+    population. To run multiple trials, use `tune.run(num_samples=<int>)`.
 
     Args:
         time_attr (str): The training result attr to use for comparing time.
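For context, the scheduler referenced by this docstring is constructed roughly as follows. A hedged sketch that reuses the `factor_2` mutation from the docstring's example; the keyword names assume the current API (older releases used `reward_attr` in place of `metric`/`mode`):

import random

from ray.tune.schedulers import PopulationBasedTraining

# Every 4 training iterations, underperforming trials clone the state
# of better-performing ones and mutate these hyperparameters.
pbt = PopulationBasedTraining(
    time_attr="training_iteration",
    metric="mean_loss",
    mode="min",
    perturbation_interval=4,
    hyperparam_mutations={
        # Resampling draws from the callable or list at random.
        "lr": lambda: 10 ** -random.uniform(1, 4),
        "factor_2": [1, 10, 100, 1000, 10000],
    },
)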
@@ -162,7 +162,7 @@ class PopulationBasedTraining(FIFOScheduler):
     >>> # 10 -> 1 or 10 -> 100. Resampling will choose at random.
     >>> "factor_2": [1, 10, 100, 1000, 10000],
     >>> })
-    >>> run_experiments({...}, scheduler=pbt)
+    >>> tune.run({...}, num_samples=8, scheduler=pbt)
     """
 
     def __init__(self,
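Spelled out, the updated doctest line corresponds to a call like the one below, with `num_samples=8` setting the population size; `MyTrainable`, `pbt`, and the config values are the illustrative names from the sketches above:

from ray import tune

# Eight trials form one PBT population; if the cluster holds fewer,
# Tune time-multiplexes them to balance training progress.
tune.run(
    MyTrainable,
    num_samples=8,
    scheduler=pbt,
    reuse_actors=True,  # requires reset_config(), per the first hunk
    config={"lr": 1e-3, "factor_2": 100},
)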