
 Outputs:
  **out**: output tensor with the same shape as `data`.
 """

 def __init__(self, alpha=1.0, **kwargs):
     """Create an ELU activation block.

     Parameters
     ----------
     alpha : float, default 1.0
         Scale for the negative part of the activation; forwarded as the
         ``slope`` argument of the backend ``LeakyReLU`` op in ``'elu'`` mode.
     **kwargs
         Passed through to the parent ``HybridBlock`` constructor.
     """
     # Parent __init__ must run first: Gluon's Block machinery is set up there.
     super(ELU, self).__init__(**kwargs)
     self._alpha = alpha

 def hybrid_forward(self, F, x):
     """Apply the ELU activation to `x`.

     Parameters
     ----------
     F : module
         Backend namespace (`mx.nd` or `mx.sym`), injected by the
         hybridization machinery.
     x : tensor
         Input tensor.

     Returns
     -------
     Tensor of the same shape as `x`, transformed by the backend
     ``LeakyReLU`` op in ``'elu'`` mode.
     """
     negative_scale = self._alpha
     return F.LeakyReLU(x, act_type='elu', slope=negative_scale)


 class SELU(HybridBlock):
 r"""
 Scaled Exponential Linear Unit (SELU)
 "SelfNormalizing Neural Networks", Klambauer et al, 2017
 https://arxiv.org/abs/1706.02515


 Inputs: