# ray/rllib/algorithms/ppo/__init__.py
from ray.rllib.algorithms.ppo.ppo import PPOConfig, PPO, DEFAULT_CONFIG
from ray.rllib.algorithms.ppo.ppo_tf_policy import PPOTF1Policy, PPOTF2Policy
from ray.rllib.algorithms.ppo.ppo_torch_policy import PPOTorchPolicy
# Public API of the PPO algorithm package: the config/algorithm classes and
# the framework-specific policies re-exported above.
__all__ = [
    "PPOConfig",
    "PPOTF1Policy",
    "PPOTF2Policy",
    "PPOTorchPolicy",
    "PPO",
    "DEFAULT_CONFIG",
]