
import warnings

from keras.src import activations
from keras.src.api_export import keras_export
from keras.src.layers.layer import Layer


@keras_export("keras.layers.LeakyReLU")
class LeakyReLU(Layer):
    """Leaky version of a Rectified Linear Unit activation layer.

    This layer allows a small gradient when the unit is not active.

    Formula:

    ``` python
    f(x) = alpha * x if x < 0
    f(x) = x if x >= 0
    ```

    Example:

    ``` python
    leaky_relu_layer = LeakyReLU(negative_slope=0.5)
    input = np.array([-10, -5, 0.0, 5, 10])
    result = leaky_relu_layer(input)
    # result = [-5. , -2.5,  0. ,  5. , 10.]
    ```

    Args:
        negative_slope: Float >= 0.0. Negative slope coefficient.
          Defaults to `0.3`.
        **kwargs: Base layer keyword arguments, such as
            `name` and `dtype`.

    """

    def __init__(self, negative_slope=0.3, **kwargs):
        # `alpha` is the legacy name for this argument; accept it but warn.
        if "alpha" in kwargs:
            negative_slope = kwargs.pop("alpha")
            warnings.warn(
                "Argument `alpha` is deprecated. Use `negative_slope` instead."
            )
        super().__init__(**kwargs)
        if negative_slope is None or negative_slope < 0:
            raise ValueError(
                "The negative_slope value of a Leaky ReLU layer cannot be "
                "None or negative value. Expected a float. "
                f"Received: negative_slope={negative_slope}"
            )
        self.negative_slope = negative_slope
        self.supports_masking = True
        self._build_at_init()

    def call(self, inputs):
        return activations.leaky_relu(
            inputs, negative_slope=self.negative_slope
        )

    def get_config(self):
        config = super().get_config()
        config.update({"negative_slope": self.negative_slope})
        return config

    def compute_output_shape(self, input_shape):
        return input_shape
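

# A minimal usage sketch, not part of the upstream module: it mirrors the
# docstring example above, assuming Keras 3 and NumPy are installed and a
# backend is configured. Run this file directly to see the layer in action.
if __name__ == "__main__":
    import numpy as np

    layer = LeakyReLU(negative_slope=0.5)
    x = np.array([-10.0, -5.0, 0.0, 5.0, 10.0])
    # Negative inputs are scaled by 0.5; non-negative inputs pass through.
    print(layer(x))  # expected: [-5.  -2.5  0.   5.  10. ]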