WIP: Update to support the Food Collector environment (#11373)
* Update to support the Food Collector environment

  Recently I have been trying out ML-Agents with Ray and wanted to use the Food Collector environment. Since the observation space and action space for it are not yet defined in unity3d_env.py, I propose these changes to add support for Food Collector. I have tried this env in the [unity3d_env_local example](https://github.com/ray-project/ray/blob/master/rllib/examples/unity3d_env_local.py). Please let me know if this is the proper adjustment. Even though these are just a few lines of code, please let me know how I can make a proper contribution.

* Apply suggestions from code review
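For reference, here is roughly how I am trying it out, modeled on the unity3d_env_local example (a sketch, not part of this diff; the config below is my assumption from that script, and it expects a Unity editor or build running the FoodCollector scene):

```python
# A minimal sketch modeled on rllib/examples/unity3d_env_local.py.
# Assumes a Unity editor running the FoodCollector scene;
# file_name=None makes Unity3DEnv connect to the local editor.
import ray
from ray import tune
from ray.rllib.env.unity3d_env import Unity3DEnv

ray.init()

# With this change, the space lookup can resolve "FoodCollector".
policies, policy_mapping_fn = Unity3DEnv.get_policy_configs_for_game(
    "FoodCollector")

tune.run(
    "PPO",
    config={
        "env": Unity3DEnv,
        "env_config": {"file_name": None, "episode_horizon": 3000},
        "multiagent": {
            "policies": policies,
            "policy_mapping_fn": policy_mapping_fn,
        },
        "num_workers": 0,
    },
    stop={"training_iteration": 10},
)
```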
parent 66605cfcbd
commit 9073e6507c

1 changed file with 7 additions and 0 deletions
rllib/env/unity3d_env.py (vendored): 7 additions, 0 deletions
@@ -223,6 +223,11 @@ class Unity3DEnv(MultiAgentEnv):
             "VisualHallway": Box(float("-inf"), float("inf"), (84, 84, 3)),
             # Walker.
             "Walker": Box(float("-inf"), float("inf"), (212, )),
+            # FoodCollector.
+            "FoodCollector": TupleSpace([
+                Box(float("-inf"), float("inf"), (49, )),
+                Box(float("-inf"), float("inf"), (4, )),
+            ]),
         }
         action_spaces = {
             # 3DBall.
@@ -242,6 +247,8 @@ class Unity3DEnv(MultiAgentEnv):
             "VisualHallway": MultiDiscrete([5]),
             # Walker.
             "Walker": Box(float("-inf"), float("inf"), (39, )),
+            # FoodCollector.
+            "FoodCollector": MultiDiscrete([3, 3, 3, 2]),
         }
 
         # Policies (Unity: "behaviors") and agent-to-policy mapping fns.
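With these entries in place, the behavior's policy spec can be looked up the same way as the other supported games. A quick sanity check (again just a sketch, using the same imports unity3d_env.py already has; the 4-tuple unpack assumes the usual RLlib multiagent policy-spec format):

```python
from gym.spaces import MultiDiscrete, Tuple as TupleSpace

from ray.rllib.env.unity3d_env import Unity3DEnv

policies, policy_mapping_fn = Unity3DEnv.get_policy_configs_for_game(
    "FoodCollector")

# One Unity "behavior" -> one RLlib policy spec:
# (policy_cls, obs_space, action_space, config).
_, obs_space, action_space, _ = policies["FoodCollector"]
assert isinstance(obs_space, TupleSpace)        # (49,) and (4,) Boxes
assert isinstance(action_space, MultiDiscrete)  # 4 discrete branches
print(obs_space, action_space)
```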