
This resolves the `TODO(ekl): add custom resources here once tune supports them` item. It is also related to the discussion [here](https://discuss.ray.io/t/reserve-workers-on-gpu-node-for-trainer-workers-only/5972/5).

Co-authored-by: Kai Fricke <kai@anyscale.com>
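For context, custom resources in Ray are declared when the cluster is started (here via `ray.init(resources=...)`) and can then be requested by any task or actor. Below is a minimal sketch of that underlying mechanism, separate from the test file that follows; the task name is illustrative, and it assumes no Ray instance is already running:

```python
import ray

# Start a local cluster that advertises one unit of a custom resource.
ray.init(resources={"custom_resource": 1}, include_dashboard=False)


# A task can request a fraction of that resource; Ray only schedules it
# on a node that has enough "custom_resource" available.
@ray.remote(resources={"custom_resource": 0.01})
def uses_custom_resource():  # illustrative task, not part of the test file
    return "ran on a node with the custom resource"


print(ray.get(uses_custom_resource.remote()))
ray.shutdown()
```

The `custom_resources_per_worker` entry in the test file below exercises the same mechanism through RLlib and Tune, attaching a small request for the custom resource to each rollout worker.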
import pytest

import ray
from ray import tune


@pytest.mark.parametrize("algorithm", ["PPO", "APEX", "IMPALA"])
def test_custom_resource(algorithm):
    # Start from a clean slate so the custom resource is actually registered.
    if ray.is_initialized():
        ray.shutdown()

    # Local cluster advertising one unit of the custom resource.
    ray.init(
        resources={"custom_resource": 1},
        include_dashboard=False,
    )

    config = {
        "env": "CartPole-v0",
        "num_workers": 1,
        "num_gpus": 0,
        "framework": "torch",
        # Each rollout worker requests a small share of the custom resource.
        "custom_resources_per_worker": {"custom_resource": 0.01},
    }

    stop = {"training_iteration": 1}

    tune.run(
        algorithm,
        config=config,
        stop=stop,
        num_samples=1,
        verbose=0,
    )


if __name__ == "__main__":
    import sys

    sys.exit(pytest.main(["-v", __file__]))