from typing import List, Optional, Union

import numpy as np
import tensorflow as tf

from .utils import logging


logger = logging.get_logger(__name__)

eZeejejf ee dddZdejee ee ejdd	d
ZdddZd ddZejejdddZd!ejeeddddZdd Zdd Zdd ZdS )"    )ListOptionalUnionN   )logging)tensorreturnc                    sT   t | tjrt| jS t|  | jtdkr4 S | j } fddt|D S )z
    Deal with dynamic shape in tensorflow cleanly.

    Args:
        tensor (`tf.Tensor` or `np.ndarray`): The tensor we want the shape of.

    Returns:
        `List[int]`: The shape of the tensor as a list.
    Nc                    s$   g | ]\}}|d kr | n|qS N ).0isZdynamicr
   T/var/www/html/Darija-Ai-API/env/lib/python3.8/site-packages/transformers/tf_utils.py
<listcomp>.   s     zshape_list.<locals>.<listcomp>)	
isinstancenpndarraylistshapetfZTensorShapeas_list	enumerate)r   Zstaticr
   r   r   
shape_list   s    
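# Illustrative usage sketch (not part of the original module): inside a `tf.function`
# traced with a dynamic batch dimension, the static shape contains `None`, so
# `shape_list` substitutes the symbolic `tf.shape` entry for that position while
# keeping the known sizes as plain Python ints.
@tf.function(input_signature=[tf.TensorSpec(shape=[None, 4], dtype=tf.float32)])
def _shape_list_demo(x):
    batch, features = shape_list(x)  # batch: scalar tf.Tensor, features: the int 4
    return tf.zeros((batch, features))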



def stable_softmax(logits: tf.Tensor, axis: Optional[int] = None, name: Optional[str] = None) -> tf.Tensor:
    """
    Stable wrapper that returns the same output as `tf.nn.softmax`, but that works reliably with XLA on CPU. It is
    meant as a workaround for the [following issue](https://github.com/tensorflow/tensorflow/issues/55682), and will be
    removed after it gets fixed. The arguments and outputs are the same as `tf.nn.softmax`, and it relies on the fact
    that `softmax(x) = softmax(x + c)` (see https://ogunlao.github.io/2020/04/26/you_dont_really_know_softmax.html).

    Args:
        logits (`tf.Tensor`):
            Must be one of the following types: half, float32, float64.
        axis (`int`, *optional*):
            The dimension softmax would be performed on. The default is -1 which indicates the last dimension.
        name (`str`, *optional*):
            A name for the operation.

    Returns:
        `tf.Tensor`:
            A Tensor. Has the same type and shape as logits.
    """
    # The small constant nudges the input off the problematic XLA-on-CPU code path without changing the result,
    # since softmax is shift-invariant.
    return tf.nn.softmax(logits=logits + 1e-9, axis=axis, name=name)
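# Minimal sanity sketch (not part of the original module): since softmax(x) == softmax(x + c)
# for any constant c, the tiny shift above changes nothing numerically meaningful, and the
# wrapper agrees with `tf.nn.softmax` to float precision.
def _stable_softmax_demo():
    logits = tf.constant([[1.0, 2.0, 3.0]])
    return tf.reduce_max(tf.abs(stable_softmax(logits) - tf.nn.softmax(logits)))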
def functional_layernorm(inputs, weight, bias, epsilon=1e-5, axis=-1):
    # A simplified functional layernorm, mirroring torch.nn.functional.layer_norm over a single axis
    if weight.shape.rank != 1 or bias.shape.rank != 1 or not isinstance(axis, int):
        raise NotImplementedError("Only 1D weight and bias tensors are supported for now, with only a single axis.")

    # Get mean and variance on the axis to be normalized
    mean, variance = tf.nn.moments(inputs, axes=[axis], keepdims=True)

    if axis != -1:
        # Reshape scale and weight to have the same rank as inputs, with size 1 on every dimension except the
        # normalization axis
        shape = [1] * inputs.shape.rank
        shape[axis] = shape_list(inputs)[axis]
        weight = tf.reshape(weight, shape)
        bias = tf.reshape(bias, shape)

    # Compute layer normalization using the batch_normalization function
    outputs = tf.nn.batch_normalization(
        inputs,
        mean,
        variance,
        offset=bias,
        scale=weight,
        variance_epsilon=epsilon,
    )
    return outputs
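# Equivalence sketch (not part of the original module; assumes eager mode): with unit weight
# and zero bias, normalizing the last axis should match the textbook formula
# (x - mean) / sqrt(var + eps).
def _functional_layernorm_demo():
    x = tf.random.normal((2, 5))
    out = functional_layernorm(x, tf.ones((5,)), tf.zeros((5,)), epsilon=1e-5, axis=-1)
    mean = tf.reduce_mean(x, axis=-1, keepdims=True)
    var = tf.math.reduce_variance(x, axis=-1, keepdims=True)
    return tf.reduce_max(tf.abs(out - (x - mean) / tf.sqrt(var + 1e-5)))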
def flatten(input, start_dim=0, end_dim=-1):
    # Replicates the behavior of torch.flatten in TF

    # If end_dim or start_dim is negative, count them from the end
    if end_dim < 0:
        end_dim += input.shape.rank
    if start_dim < 0:
        start_dim += input.shape.rank

    if start_dim == end_dim:
        return input

    in_shape = tf.shape(input)
    flattened_dim = tf.math.reduce_prod(in_shape[start_dim : end_dim + 1])
    out_shape = tf.concat([in_shape[:start_dim], [flattened_dim], in_shape[end_dim + 1 :]], axis=0)
    return tf.reshape(input, out_shape)
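# Illustrative usage (not part of the original module): like `torch.flatten`, dimensions
# `start_dim` through `end_dim` (inclusive) collapse into a single one.
def _flatten_demo():
    x = tf.zeros((2, 3, 4, 5))
    return shape_list(flatten(x, start_dim=1, end_dim=2))  # -> [2, 12, 5]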
*r1   )encoder_attention_maskr   c                 C   s   t | tjst| } | jjdkr>| dddddddf }| jjdkrb| ddddddf }td| j| |jj }|S )z
    Invert an attention mask (e.g., switches 0. and 1.).

    Args:
        encoder_attention_mask (`torch.Tensor`): An attention mask.

    Returns:
        `tf.Tensor`: The inverted attention mask.
       N   r   )	r   r   TensorZconvert_to_tensorr   r$   castdtypemin)r2   Zencoder_extended_attention_maskr
   r
   r   invert_attention_mask{   s    

r9   	input_ids)r   	embed_dimtensor_namer   c              
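# Illustrative usage (not part of the original module; assumes a 2D padding mask of shape
# [batch_size, seq_len]): kept positions map to 0. and masked positions map to float32 min.
def _invert_attention_mask_demo():
    mask = tf.constant([[1.0, 1.0, 0.0]])
    return invert_attention_mask(mask)  # shape [1, 1, 1, 3]; last entry is ~ -3.4e38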
def check_embeddings_within_bounds(tensor: tf.Tensor, embed_dim: int, tensor_name: str = "input_ids") -> None:
    """
    `tf.gather`, on which TF embedding layers are based, won't check positive out-of-bound indices on GPU, returning
    zeros instead. This function adds a check against that dangerous silent behavior.

    Args:
        tensor (`tf.Tensor`): The tensor of indices to check.
        embed_dim (`int`): The embedding layer's input dimension.
        tensor_name (`str`, *optional*): The name of the tensor to use in the error message.
    """
    tf.debugging.assert_less(
        tensor,
        tf.cast(embed_dim, dtype=tensor.dtype),
        message=(
            f"The maximum value of {tensor_name} ({tf.math.reduce_max(tensor)}) must be smaller than the embedding "
            f"layer's input dimension ({embed_dim}). The likely cause is some problem at tokenization time."
        ),
    )
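# Illustrative usage (not part of the original module; assumes a vocabulary of size 10):
# in-bounds indices pass silently, while an index >= 10 raises an InvalidArgumentError
# instead of letting `tf.gather` silently return zeros on GPU.
def _check_embeddings_demo():
    ids = tf.constant([[1, 5, 9]])
    check_embeddings_within_bounds(ids, embed_dim=10)  # ok; a 10 in `ids` would raise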
def save_attributes_to_hdf5_group(group, name, data):
    """Saves attributes (data) of the specified name into the HDF5 group.

    This method deals with an inherent problem of HDF5 files, which are not able to store data larger than
    HDF5_OBJECT_HEADER_LIMIT bytes.

    Args:
        group: A pointer to a HDF5 group.
        name: A name of the attributes to save.
        data: Attributes data to store.

    Raises:
        RuntimeError: If any single attribute is too large to be saved.

    Copied from Keras to Transformers to avoid versioning issues.
    """
    HDF5_OBJECT_HEADER_LIMIT = 64512
    # Check that no single item in `data` is larger than `HDF5_OBJECT_HEADER_LIMIT`, because in that case even
    # chunking the array would not make saving possible
    bad_attributes = [x for x in data if len(x) > HDF5_OBJECT_HEADER_LIMIT]

    if bad_attributes:
        raise RuntimeError(
            "The following attributes cannot be saved to HDF5 file because they are larger than "
            f"{HDF5_OBJECT_HEADER_LIMIT} bytes: {bad_attributes}"
        )

    data_npy = np.asarray(data)

    num_chunks = 1
    chunked_data = np.array_split(data_npy, num_chunks)

    # This will never loop forever thanks to the test above
    while any(x.nbytes > HDF5_OBJECT_HEADER_LIMIT for x in chunked_data):
        num_chunks += 1
        chunked_data = np.array_split(data_npy, num_chunks)

    if num_chunks > 1:
        for chunk_id, chunk_data in enumerate(chunked_data):
            group.attrs["%s%d" % (name, chunk_id)] = chunk_data
    else:
        group.attrs[name] = data
def load_attributes_from_hdf5_group(group, name):
    """Loads attributes of the specified name from the HDF5 group.

    This method deals with an inherent problem of HDF5 files, which are not able to store data larger than
    HDF5_OBJECT_HEADER_LIMIT bytes.

    Args:
        group: A pointer to a HDF5 group.
        name: A name of the attributes to load.

    Returns:
        data: Attributes data.

    Copied from Keras to Transformers to avoid versioning issues.
    """
    if name in group.attrs:
        data = [n.decode("utf8") if hasattr(n, "decode") else n for n in group.attrs[name]]
    else:
        data = []
        chunk_id = 0
        # Reassemble attributes that were split into "<name>0", "<name>1", ... chunks on save
        while "%s%d" % (name, chunk_id) in group.attrs:
            data.extend(
                [n.decode("utf8") if hasattr(n, "decode") else n for n in group.attrs["%s%d" % (name, chunk_id)]]
            )
            chunk_id += 1
    return data
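# Round-trip sketch (not part of the original module; assumes `h5py` is installed, as it is
# whenever .h5 checkpoints are used): attribute lists that would overflow the HDF5 object
# header are chunked on save and reassembled transparently on load.
def _hdf5_attributes_demo(path):
    import h5py

    with h5py.File(path, "w") as f:
        save_attributes_to_hdf5_group(f.create_group("layer"), "weight_names", ["kernel:0", "bias:0"])
    with h5py.File(path, "r") as f:
        return load_attributes_from_hdf5_group(f["layer"], "weight_names")  # -> ["kernel:0", "bias:0"]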

def expand_1d(data):
    """Expands 1-dimensional `Tensor`s into 2-dimensional `Tensor`s.
    Copied from Keras to here to avoid versioning issues."""

    def _expand_single_1d_tensor(t):
        if isinstance(t, tf.Tensor) and t.shape.rank == 1:
            return tf.expand_dims(t, axis=-1)
        return t

    return tf.nest.map_structure(_expand_single_1d_tensor, data)