[RLlib] Fix multi agent environment checks for observations that contain only some agents' obs each step. (#25506)

This commit is contained in:
Artur Niederfahrenhorst 2022-06-07 10:33:35 +02:00 committed by GitHub
parent 35bd397181
commit 429d0f0eee
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 22 additions and 8 deletions

View file

@@ -132,7 +132,20 @@ class MultiAgentEnv(gym.Env):
self._check_if_space_maps_agent_id_to_sub_space()
)
if self._spaces_in_preferred_format:
return self.observation_space.contains(x)
for key, agent_obs in x.items():
if not self.observation_space[key].contains(agent_obs):
return False
if not all(k in self.observation_space for k in x):
if log_once("possibly_bad_multi_agent_dict_missing_agent_observations"):
logger.warning(
"Your environment returns observations that are "
"MultiAgentDicts with incomplete information. "
"Meaning that they only contain information on a subset of"
" participating agents. Ignore this warning if this is "
"intended, for example if your environment is a turn-based "
"simulation."
)
return True
logger.warning("observation_space_contains() has not been implemented")
return True

View file

@@ -60,9 +60,10 @@ def check_env(env: EnvType) -> None:
),
):
raise ValueError(
"Env must be one of the supported types: BaseEnv, gym.Env, "
"Env must be of one of the following supported types: BaseEnv, "
"gym.Env, "
"MultiAgentEnv, VectorEnv, RemoteBaseEnv, ExternalMultiAgentEnv, "
f"ExternalEnv, but instead was a {type(env)}"
f"ExternalEnv, but instead is of type {type(env)}."
)
if isinstance(env, MultiAgentEnv):
check_multiagent_environments(env)
@@ -73,8 +74,8 @@ def check_env(env: EnvType) -> None:
else:
logger.warning(
"Env checking isn't implemented for VectorEnvs, RemoteBaseEnvs, "
"ExternalMultiAgentEnv,or ExternalEnvs or Environments that are "
"Ray actors"
"ExternalMultiAgentEnv, ExternalEnvs or environments that are "
"Ray actors."
)
except Exception:
actual_error = traceback.format_exc()
@@ -84,7 +85,7 @@ def check_env(env: EnvType) -> None:
"We've added a module for checking your custom environments. It "
"may cause your experiment to fail if your environment is not set up"
"correctly. You can disable this behavior by setting "
"`disable_env_checking=True` in your config "
"`disable_env_checking=True` in your environment config "
"dictionary. You can run the environment checking module "
"standalone by calling ray.rllib.utils.check_env([env])."
)
@@ -485,7 +486,7 @@ def _check_if_element_multi_agent_dict(env, element, function_string, base_env=F
if not isinstance(element, dict):
if base_env:
error = (
f"The element returned by {function_string} has values "
f"The element returned by {function_string} contains values "
f"that are not MultiAgentDicts. Instead, they are of "
f"type: {type(element)}"
)
@@ -514,7 +515,7 @@ def _check_if_element_multi_agent_dict(env, element, function_string, base_env=F
f" that are not the names of the agents in the env. "
f"\nAgent_ids in this MultiAgentDict: "
f"{list(element.keys())}\nAgent_ids in this env:"
f"{list(env.get_agent_ids())}. You likley need to add the private "
f"{list(env.get_agent_ids())}. You likely need to add the private "
f"attribute `_agent_ids` to your env, which is a set containing the "
f"ids of agents supported by your env."
)