from ray.rllib.utils.framework import try_import_torch

torch, nn = try_import_torch()


class SkipConnection(nn.Module):
    """Skip connection layer.

    Adds the original input to the output (regular residual layer) OR uses
    the input as the hidden-state input to a given `fan_in_layer`.
    """

    def __init__(self, layer, fan_in_layer=None, add_memory=False, **kwargs):
"""Initializes a SkipConnection nn Module object.
|
|
|
|
Args:
|
|
layer (nn.Module): Any layer processing inputs.
|
|
fan_in_layer (Optional[nn.Module]): An optional
|
|
layer taking two inputs: The original input and the output
|
|
of `layer`.
|
|
"""
|
|
super().__init__(**kwargs)
        self._layer = layer
        self._fan_in_layer = fan_in_layer

    def forward(self, inputs, **kwargs):
        # Pass `inputs` (and any extra kwargs) through the wrapped layer.
        outputs = self._layer(inputs, **kwargs)
        if self._fan_in_layer is None:
            # Residual case: just add the original inputs to the outputs.
            # This requires `layer`'s output to be broadcast-compatible
            # with `inputs`.
            outputs = outputs + inputs
        else:
            # Fan-in case, e.g. RNN: call the fan-in layer with `inputs`
            # and `outputs` as a single tuple.
            # NOTE: In the GRU case, `inputs` is the state input.
            outputs = self._fan_in_layer((inputs, outputs))

        return outputs
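

if __name__ == "__main__":
    # A minimal usage sketch, not part of the original file. It assumes torch
    # is actually installed, so `try_import_torch()` returned real modules.
    # `_TupleSum` below is a hypothetical fan-in layer written only for this
    # demo; it is not an RLlib class.

    # Residual case: the wrapped layer preserves the input shape, so the
    # `outputs + inputs` addition in `forward` is well-defined.
    residual = SkipConnection(nn.Linear(8, 8))
    x = torch.randn(4, 8)
    print(residual(x).shape)  # -> torch.Size([4, 8])

    class _TupleSum(nn.Module):
        """Toy fan-in layer: receives `(inputs, outputs)` and sums them."""

        def forward(self, inputs):
            original, transformed = inputs
            return original + transformed

    # Fan-in case: `forward` hands `(inputs, outputs)` to the fan-in layer
    # as one tuple instead of adding them directly.
    fan_in = SkipConnection(nn.Linear(8, 8), fan_in_layer=_TupleSum())
    print(fan_in(x).shape)  # -> torch.Size([4, 8])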