ray/rllib/tests/test_custom_resource.py
Vince Jankovics 68444cd390
[tune] Custom resources per worker added to default_resource_request (#24463)
This resolves the `TODO(ekl): add custom resources here once tune supports them` item.
It is also related to the discussion [here](https://discuss.ray.io/t/reserve-workers-on-gpu-node-for-trainer-workers-only/5972/5).

Co-authored-by: Kai Fricke <kai@anyscale.com>
2022-06-06 22:41:02 +01:00
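For context, this is roughly what the change enables: RLlib's `default_resource_request()` can now return a placement group request whose per-worker bundles carry the entries from `custom_resources_per_worker`. The sketch below is illustrative only, not RLlib's actual implementation; the helper name `resource_request_sketch` and the config defaults are assumptions.

from ray.tune import PlacementGroupFactory


def resource_request_sketch(config):
    # Hypothetical helper: one bundle for the trainer (driver), plus one
    # bundle per rollout worker with that worker's custom resources merged in.
    worker_bundle = {
        "CPU": config.get("num_cpus_per_worker", 1),
        "GPU": config.get("num_gpus_per_worker", 0),
        # The point of this commit: custom resources become part of the
        # per-worker bundle, so Tune reserves them when placing the trial.
        **config.get("custom_resources_per_worker", {}),
    }
    return PlacementGroupFactory(
        bundles=[{"CPU": 1, "GPU": config.get("num_gpus", 0)}]
        + [dict(worker_bundle) for _ in range(config.get("num_workers", 0))],
        strategy="PACK",
    )

With the config used in the test below (one worker, `{"custom_resource": 0.01}`), the worker bundle would be `{"CPU": 1, "GPU": 0, "custom_resource": 0.01}`, so the trial can only be scheduled where `custom_resource` is actually available.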


import pytest

import ray
from ray import tune


@pytest.mark.parametrize("algorithm", ["PPO", "APEX", "IMPALA"])
def test_custom_resource(algorithm):
    if ray.is_initialized():
        ray.shutdown()
    ray.init(
        resources={"custom_resource": 1},
        include_dashboard=False,
    )
    config = {
        "env": "CartPole-v0",
        "num_workers": 1,
        "num_gpus": 0,
        "framework": "torch",
        "custom_resources_per_worker": {"custom_resource": 0.01},
    }
    stop = {"training_iteration": 1}

    tune.run(
        algorithm,
        config=config,
        stop=stop,
        num_samples=1,
        verbose=0,
    )


if __name__ == "__main__":
    import sys

    sys.exit(pytest.main(["-v", __file__]))
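Running the file directly (`python test_custom_resource.py`) hands it to pytest via the `__main__` block. Note that `ray.init` registers a single unit of `custom_resource` while each rollout worker requests only 0.01 of it; the small fractional request is enough to exercise custom-resource placement for each of the three parametrized algorithms without demanding a full unit per worker.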