# Mirror of https://github.com/vale981/ray, synced 2025-03-07 02:51:39 -05:00.
# Upstream commit notes: removed all __future__ imports from RLlib and the
# other ray files (including blocks containing `unicode_literals`); removed
# `(object)` from tf_run_builder.py::TFRunBuilder; fixed two lint warnings;
# reverted the appo_tf_policy.py rename (belongs to another PR); added two
# blank lines before the Schedule class; restored __future__ imports in
# determine_tests_to_run.py, which otherwise fails on a py2 print error.
# File stats: 47 lines, 1.3 KiB, Python.
# This workload tests running IMPALA with remote envs

import ray
from ray.tune import run_experiments
from ray.cluster_utils import Cluster

# Sizing knobs for the simulated single-machine cluster.
num_redis_shards = 5
redis_max_memory = 10**8
object_store_memory = 10**8
num_nodes = 1

message = ("Make sure there is enough memory on this machine to run this "
           "workload. We divide the system memory by 2 to provide a buffer.")
# Refuse to start if the requested stores would not fit in half of system
# memory (the other half is left as a buffer for the workload itself).
required_memory = (num_nodes * object_store_memory +
                   num_redis_shards * redis_max_memory)
assert required_memory < ray.utils.get_system_memory() / 2, message

# Simulate a cluster on one machine: only the head node (index 0) hosts
# redis; every node contributes CPUs and a custom resource keyed by index.
cluster = Cluster()
for node_index in range(num_nodes):
    is_head = node_index == 0
    cluster.add_node(
        redis_port=6379 if is_head else None,
        num_redis_shards=num_redis_shards if is_head else None,
        num_cpus=10,
        num_gpus=0,
        resources={str(node_index): 2},
        object_store_memory=object_store_memory,
        redis_max_memory=redis_max_memory)
ray.init(address=cluster.address)

# Run the workload: IMPALA on CartPole with remote worker envs and an
# effectively infinite batch-wait to exercise the remote-env code path.
run_experiments({
    "impala": {
        "run": "IMPALA",
        "env": "CartPole-v0",
        "config": {
            "num_workers": 8,
            "num_gpus": 0,
            "num_envs_per_worker": 5,
            "remote_worker_envs": True,
            "remote_env_batch_wait_ms": 99999999,
            "sample_batch_size": 50,
            "train_batch_size": 100,
        },
    },
})