"""Small cluster training

This training run will start 4 workers on 4 nodes (including head node).

Test owner: krfricke

Acceptance criteria: Should run through and report final results.
"""
import json
import os
import time

import ray


if __name__ == "__main__":
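    # RXGB_PLACEMENT_GROUP_TIMEOUT_S is xgboost_ray's placement group startup
    # timeout in seconds; 1200 gives all 4 nodes up to 20 minutes to come up.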
    os.environ["RXGB_PLACEMENT_GROUP_TIMEOUT_S"] = "1200"
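
    # RAY_ADDRESS and RAY_JOB_NAME are typically set by the release test
    # environment; fall back to a default job name for manual runs.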
    addr = os.environ.get("RAY_ADDRESS")
    job_name = os.environ.get("RAY_JOB_NAME", "train_gpu_connect")

    # Manually set NCCL_SOCKET_IFNAME to "ens3" so NCCL training works on
    # anyscale_default_cloud.
    # See https://github.com/pytorch/pytorch/issues/68893 for more details.
    # Passing in runtime_env to ray.init() will also set it for all the
    # workers.
    runtime_env = {
        "env_vars": {
            "RXGB_PLACEMENT_GROUP_TIMEOUT_S": "1200",
            "NCCL_SOCKET_IFNAME": "ens3",
        },
        "working_dir": os.path.dirname(__file__),
    }
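
    # Connect via Ray Client when an Anyscale address is given; otherwise
    # attach to an already-running local cluster.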
    if addr and addr.startswith("anyscale://"):
        ray.init(address=addr, job_name=job_name, runtime_env=runtime_env)
    else:
        ray.init(address="auto", runtime_env=runtime_env)
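
    # Deferred imports: the Ray connection and runtime_env above should be
    # established before these modules are used.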
    from xgboost_ray import RayParams
    from release_test_util import train_ray, get_parquet_files
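
    # One training actor per node: 4 actors with 4 CPUs and 1 GPU each.
    # Elastic training is disabled; each actor may be restarted up to 2
    # times before the run fails.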
    ray_params = RayParams(
        elastic_training=False,
        max_actor_restarts=2,
        num_actors=4,
        cpus_per_actor=4,
        gpus_per_actor=1,
    )
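
    # Resolve the parquet shards on the cluster: /data is a cluster-side
    # path that may not exist on the machine submitting the job.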
    @ray.remote
    def ray_get_parquet_files():
        return get_parquet_files(
            path="/data/classification.parquet",
            num_files=25,
        )
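
    # Time the full distributed training run; this is the number reported
    # to the test harness below.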
    start = time.time()
    train_ray(
        path=ray.get(ray_get_parquet_files.remote()),
        num_workers=4,
        num_boost_rounds=100,
        regression=False,
        use_gpu=True,
        ray_params=ray_params,
        xgboost_params=None,
    )
    taken = time.time() - start
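
    # Write the elapsed time to the JSON file consumed by the release test
    # harness (TEST_OUTPUT_JSON).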
    result = {
        "time_taken": taken,
    }
    test_output_json = os.environ.get("TEST_OUTPUT_JSON", "/tmp/train_gpu_connect.json")
    with open(test_output_json, "wt") as f:
        json.dump(result, f)

    print("PASSED.")