2020-07-27 11:34:47 +08:00
|
|
|
import sys
|
2020-08-25 04:24:23 +08:00
|
|
|
import socket
|
|
|
|
import json
|
2020-07-27 11:34:47 +08:00
|
|
|
import asyncio
|
|
|
|
import logging
|
|
|
|
|
|
|
|
import aiohttp
|
2020-08-25 04:24:23 +08:00
|
|
|
import aiohttp.web
|
|
|
|
from aiohttp import hdrs
|
2020-07-27 11:34:47 +08:00
|
|
|
from grpc.experimental import aio as aiogrpc
|
|
|
|
|
2020-09-24 22:46:35 -07:00
|
|
|
import ray._private.services
|
2020-07-27 11:34:47 +08:00
|
|
|
import ray.new_dashboard.consts as dashboard_consts
|
|
|
|
import ray.new_dashboard.utils as dashboard_utils
|
2021-02-24 08:27:48 +08:00
|
|
|
from ray import ray_constants
|
2020-07-27 11:34:47 +08:00
|
|
|
from ray.core.generated import gcs_service_pb2
|
|
|
|
from ray.core.generated import gcs_service_pb2_grpc
|
|
|
|
from ray.new_dashboard.datacenter import DataSource, DataOrganizer
|
|
|
|
|
|
|
|
# Module-level logger for the dashboard head process.
logger = logging.getLogger(__name__)

# Route table shared by all head modules; modules register HTTP handlers on it
# and `DashboardHead.run` binds the collected routes into the aiohttp app.
routes = dashboard_utils.ClassMethodRouteTable

# Initialize the grpc asyncio machinery before any aio channels/servers are
# created (required by grpc.experimental.aio).
aiogrpc.init_grpc_aio()
|
|
|
|
|
|
|
|
|
|
|
|
def gcs_node_info_to_dict(message):
    """Convert a GcsNodeInfo protobuf message into a plain dict.

    Fields listed in *hex_fields* (the binary node id) are rendered as hex
    strings; default-valued fields are included so callers always see a
    complete node record.
    """
    hex_fields = {"nodeId"}
    return dashboard_utils.message_to_dict(
        message, hex_fields, including_default_value_fields=True)
|
|
|
|
|
|
|
|
|
|
|
|
class DashboardHead:
    """Head process of the Ray dashboard.

    Connects to redis and the GCS, loads all DashboardHeadModule subclasses,
    serves their HTTP routes via aiohttp, runs a grpc server for agents to
    report to, and keeps ``DataSource`` updated with cluster node state.
    """

    def __init__(self, http_host, http_port, http_port_retries, redis_address,
                 redis_password, log_dir):
        # NodeInfoGcsService stub; created in `run` once the GCS is reachable.
        self._gcs_node_info_stub = None
        # Consecutive GCS RPC failures; the process exits when this exceeds
        # MAX_COUNT_OF_GCS_RPC_ERROR (see `_update_nodes`).
        self._gcs_rpc_error_counter = 0
        # Public attributes are accessible for all head modules.
        # Workaround for issue: https://github.com/ray-project/ray/issues/7084
        self.http_host = "127.0.0.1" if http_host == "localhost" else http_host
        self.http_port = http_port
        # How many successive ports to try if `http_port` is already taken.
        self.http_port_retries = http_port_retries
        self.redis_address = dashboard_utils.address_tuple(redis_address)
        self.redis_password = redis_password
        self.log_dir = log_dir
        # Shared clients/sessions, populated in `run` for modules to use.
        self.aioredis_client = None
        self.aiogrpc_gcs_channel = None
        self.http_session = None
        self.ip = ray.util.get_node_ip_address()
        # grpc server the head exposes; SO_REUSEPORT disabled so the chosen
        # port cannot be shared with another process.
        self.server = aiogrpc.server(options=(("grpc.so_reuseport", 0), ))
        # Port 0 lets the OS pick a free port; the actual port is returned.
        self.grpc_port = self.server.add_insecure_port("[::]:0")
        logger.info("Dashboard head grpc address: %s:%s", self.ip,
                    self.grpc_port)

    async def _get_nodes(self):
        """Read the client table.

        Returns:
            A dict of information about the nodes in the cluster,
            keyed by hex node id.
        """
        request = gcs_service_pb2.GetAllNodeInfoRequest()
        reply = await self._gcs_node_info_stub.GetAllNodeInfo(
            request, timeout=2)
        # status.code == 0 means the GCS handled the request successfully.
        if reply.status.code == 0:
            result = {}
            for node_info in reply.node_info_list:
                node_info_dict = gcs_node_info_to_dict(node_info)
                result[node_info_dict["nodeId"]] = node_info_dict
            return result
        else:
            # NOTE(review): on failure this logs and implicitly returns None;
            # `_update_nodes` then raises in its try block and is caught there.
            logger.error("Failed to GetAllNodeInfo: %s", reply.status.message)

    async def _update_nodes(self):
        """Poll the GCS node table forever and publish it to ``DataSource``.

        Runs every UPDATE_NODES_INTERVAL_SECONDS. Exits the whole process if
        GCS RPC errors persist beyond MAX_COUNT_OF_GCS_RPC_ERROR.
        """
        while True:
            try:
                nodes = await self._get_nodes()

                alive_node_ids = []
                # NOTE(review): alive_node_infos is populated but not used
                # below — looks vestigial; confirm before removing.
                alive_node_infos = []
                node_id_to_ip = {}
                node_id_to_hostname = {}
                for node in nodes.values():
                    node_id = node["nodeId"]
                    ip = node["nodeManagerAddress"]
                    hostname = node["nodeManagerHostname"]
                    node_id_to_ip[node_id] = ip
                    node_id_to_hostname[node_id] = hostname
                    assert node["state"] in ["ALIVE", "DEAD"]
                    if node["state"] == "ALIVE":
                        alive_node_ids.append(node_id)
                        alive_node_infos.append(node)

                # Refresh agent info for alive nodes from redis, and drop
                # entries for nodes that are no longer alive.
                agents = dict(DataSource.agents)
                for node_id in alive_node_ids:
                    key = f"{dashboard_consts.DASHBOARD_AGENT_PORT_PREFIX}" \
                          f"{node_id}"
                    agent_port = await self.aioredis_client.get(key)
                    if agent_port:
                        agents[node_id] = json.loads(agent_port)
                for node_id in agents.keys() - set(alive_node_ids):
                    agents.pop(node_id, None)

                # Atomically replace the published datasets so observers see
                # a consistent snapshot.
                DataSource.node_id_to_ip.reset(node_id_to_ip)
                DataSource.node_id_to_hostname.reset(node_id_to_hostname)
                DataSource.agents.reset(agents)
                DataSource.nodes.reset(nodes)

                # A successful round clears the consecutive-error counter.
                self._gcs_rpc_error_counter = 0
            except aiogrpc.AioRpcError:
                logger.exception("Got AioRpcError when updating nodes.")
                self._gcs_rpc_error_counter += 1
                if self._gcs_rpc_error_counter > \
                        dashboard_consts.MAX_COUNT_OF_GCS_RPC_ERROR:
                    # The GCS is presumed dead; terminate the dashboard head.
                    logger.error(
                        "Dashboard suicide, the GCS RPC error count %s > %s",
                        self._gcs_rpc_error_counter,
                        dashboard_consts.MAX_COUNT_OF_GCS_RPC_ERROR)
                    sys.exit(-1)
            except Exception:
                # Any other failure is logged but does not stop the loop.
                logger.exception("Error updating nodes.")
            finally:
                await asyncio.sleep(
                    dashboard_consts.UPDATE_NODES_INTERVAL_SECONDS)

    def _load_modules(self):
        """Load dashboard head modules.

        Instantiates every discovered DashboardHeadModule subclass with this
        head as its context and binds the module's HTTP routes.

        Returns:
            The list of instantiated module objects.
        """
        modules = []
        head_cls_list = dashboard_utils.get_all_modules(
            dashboard_utils.DashboardHeadModule)
        for cls in head_cls_list:
            logger.info("Loading %s: %s",
                        dashboard_utils.DashboardHeadModule.__name__, cls)
            c = cls(self)
            # Bind the module instance's decorated handlers into the shared
            # route table.
            dashboard_utils.ClassMethodRouteTable.bind(c)
            modules.append(c)
        logger.info("Loaded %d modules.", len(modules))
        return modules

    async def run(self):
        """Start the dashboard head and block until termination.

        Order matters: redis first, then GCS channel, then the grpc server,
        then modules, and only after modules are loaded the HTTP server —
        routes must all be registered before the app is created.
        """
        # Create an aioredis client for all modules.
        try:
            self.aioredis_client = await dashboard_utils.get_aioredis_client(
                self.redis_address, self.redis_password,
                dashboard_consts.CONNECT_REDIS_INTERNAL_SECONDS,
                dashboard_consts.RETRY_REDIS_CONNECTION_TIMES)
        except (socket.gaierror, ConnectionError):
            # Without redis the head cannot do anything useful; exit.
            logger.error(
                "Dashboard head exiting: "
                "Failed to connect to redis at %s", self.redis_address)
            sys.exit(-1)

        # Create a http session for all modules.
        self.http_session = aiohttp.ClientSession(
            loop=asyncio.get_event_loop())

        # Waiting for GCS is ready: retry until its address appears in redis
        # and a channel can be created.
        while True:
            try:
                gcs_address = await self.aioredis_client.get(
                    dashboard_consts.REDIS_KEY_GCS_SERVER_ADDRESS)
                if not gcs_address:
                    raise Exception("GCS address not found.")
                logger.info("Connect to GCS at %s", gcs_address)
                # Disable the http proxy so grpc connects directly.
                options = (("grpc.enable_http_proxy", 0), )
                channel = aiogrpc.insecure_channel(
                    gcs_address, options=options)
            except Exception as ex:
                logger.error("Connect to GCS failed: %s, retry...", ex)
                await asyncio.sleep(
                    dashboard_consts.CONNECT_GCS_INTERVAL_SECONDS)
            else:
                self.aiogrpc_gcs_channel = channel
                break

        # Create a NodeInfoGcsServiceStub.
        self._gcs_node_info_stub = gcs_service_pb2_grpc.NodeInfoGcsServiceStub(
            self.aiogrpc_gcs_channel)

        # Start a grpc asyncio server.
        await self.server.start()

        async def _async_notify():
            """Notify signals from queue: await queued coroutines forever."""
            while True:
                co = await dashboard_utils.NotifyQueue.get()
                try:
                    await co
                except Exception:
                    logger.exception(f"Error notifying coroutine {co}")

        modules = self._load_modules()

        # Http server should be initialized after all modules loaded.
        app = aiohttp.web.Application()
        app.add_routes(routes=routes.bound_routes())

        runner = aiohttp.web.AppRunner(app)
        await runner.setup()
        last_ex = None
        # Try successive ports; the for/else raises only if every attempt
        # failed (no `break` was hit).
        for i in range(1 + self.http_port_retries):
            try:
                site = aiohttp.web.TCPSite(runner, self.http_host,
                                           self.http_port)
                await site.start()
                break
            except OSError as e:
                last_ex = e
                self.http_port += 1
                logger.warning("Try to use port %s: %s", self.http_port, e)
        else:
            raise Exception(f"Failed to find a valid port for dashboard after "
                            f"{self.http_port_retries} retries: {last_ex}")
        # Ask the bound socket for the actual host/port in use.
        http_host, http_port, *_ = site._server.sockets[0].getsockname()
        logger.info("Dashboard head http address: %s:%s", http_host, http_port)

        # Write the dashboard head port to redis so other processes can
        # discover the HTTP and grpc endpoints.
        await self.aioredis_client.set(ray_constants.REDIS_KEY_DASHBOARD,
                                       f"{http_host}:{http_port}")
        await self.aioredis_client.set(
            dashboard_consts.REDIS_KEY_DASHBOARD_RPC,
            f"{self.ip}:{self.grpc_port}")

        # Dump registered http routes (HEAD routes are auto-added by aiohttp
        # alongside GET, so they are skipped to reduce noise).
        dump_routes = [
            r for r in app.router.routes() if r.method != hdrs.METH_HEAD
        ]
        for r in dump_routes:
            logger.info(r)
        logger.info("Registered %s routes.", len(dump_routes))

        # Freeze signal after all modules loaded.
        dashboard_utils.SignalManager.freeze()
        # Run the background loops and every module concurrently; gather only
        # returns if one of them raises.
        concurrent_tasks = [
            self._update_nodes(),
            _async_notify(),
            DataOrganizer.purge(),
            DataOrganizer.organize(),
        ]
        await asyncio.gather(*concurrent_tasks,
                             *(m.run(self.server) for m in modules))
        await self.server.wait_for_termination()
|