"""Long-running Ray Serve load test.

Starts a local multi-node Ray cluster, deploys a batched "echo" backend behind
an HTTP endpoint, and hammers it with wrk in an endless loop.
"""

import time
import subprocess
from subprocess import PIPE

import requests

import ray
from ray import serve
from ray.cluster_utils import Cluster
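
# Cluster sizing: a 4-node local cluster (one head node carrying Redis plus
# three workers), each with 8 CPUs and a 100 MB object store.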
num_redis_shards = 1
redis_max_memory = 10**8
object_store_memory = 10**8
num_nodes = 4

cluster = Cluster()
for i in range(num_nodes):
    cluster.add_node(
        redis_port=6379 if i == 0 else None,
        num_redis_shards=num_redis_shards if i == 0 else None,
        num_cpus=8,
        num_gpus=0,
        resources={str(i): 2},
        object_store_memory=object_store_memory,
        redis_max_memory=redis_max_memory,
        dashboard_host="0.0.0.0",
    )
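
# Connect the driver to the cluster and start Serve.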
ray.init(address=cluster.address, dashboard_host="0.0.0.0")
client = serve.start()
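

# Batched backend: sleep briefly to stand in for real work, then return one
# reply per request in the batch.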
@serve.accept_batch
def echo(requests_batch):
    time.sleep(0.01)  # Sleep for 10 ms.
    return ["hi" for _ in range(len(requests_batch))]
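

# Deploy the backend with 7 replicas and a max batch size of 16, and expose it
# over HTTP at /echo.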
config = {"num_replicas": 7, "max_batch_size": 16}
client.create_backend("echo:v1", echo, config=config)
client.create_endpoint("echo", backend="echo:v1", route="/echo")
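
# Warm up the endpoint with a few sequential requests.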
print("Warming up")
for _ in range(5):
    resp = requests.get("http://127.0.0.1:8000/echo").text
    print(resp)
    time.sleep(0.5)
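
# wrk settings: open connections equal to ~75% of the cluster's total batch
# capacity (num_replicas * max_batch_size), on 2 threads, in 60-minute rounds.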
connections = int(config["num_replicas"] * config["max_batch_size"] * 0.75)
num_threads = 2
time_to_run = "60m"
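
# Load-test forever, printing wrk's report after each round.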
while True:
    proc = subprocess.Popen(
        [
            "wrk",
            "-c",
            str(connections),
            "-t",
            str(num_threads),
            "-d",
            time_to_run,
            "http://127.0.0.1:8000/echo",
        ],
        stdout=PIPE,
        stderr=PIPE,
    )
    print("started load testing")
    # communicate() waits for wrk to exit; calling wait() first with PIPE'd
    # output risks a deadlock if wrk fills the pipe buffer.
    out, err = proc.communicate()
    print(out.decode())
    print(err.decode())