""" Dilated Neighborhood Attention Transformer model configuration"""

from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices


logger = logging.get_logger(__name__)

DINAT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "shi-labs/dinat-mini-in1k-224": "https://huggingface.co/shi-labs/dinat-mini-in1k-224/resolve/main/config.json",
}


class DinatConfig(BackboneConfigMixin, PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`DinatModel`]. It is used to instantiate a Dinat
    model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
    defaults will yield a similar configuration to that of the Dinat
    [shi-labs/dinat-mini-in1k-224](https://huggingface.co/shi-labs/dinat-mini-in1k-224) architecture.

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        patch_size (`int`, *optional*, defaults to 4):
            The size (resolution) of each patch. NOTE: Only patch size of 4 is supported at the moment.
        num_channels (`int`, *optional*, defaults to 3):
            The number of input channels.
        embed_dim (`int`, *optional*, defaults to 64):
            Dimensionality of patch embedding.
        depths (`List[int]`, *optional*, defaults to `[3, 4, 6, 5]`):
            Number of layers in each level of the encoder.
        num_heads (`List[int]`, *optional*, defaults to `[2, 4, 8, 16]`):
            Number of attention heads in each layer of the Transformer encoder.
        kernel_size (`int`, *optional*, defaults to 7):
            Neighborhood Attention kernel size.
        dilations (`List[List[int]]`, *optional*, defaults to `[[1, 8, 1], [1, 4, 1, 4], [1, 2, 1, 2, 1, 2], [1, 1, 1, 1, 1]]`):
            Dilation value of each NA layer in the Transformer encoder.
        mlp_ratio (`float`, *optional*, defaults to 3.0):
            Ratio of MLP hidden dimensionality to embedding dimensionality.
        qkv_bias (`bool`, *optional*, defaults to `True`):
            Whether or not a learnable bias should be added to the queries, keys and values.
        hidden_dropout_prob (`float`, *optional*, defaults to 0.0):
            The dropout probability for all fully connected layers in the embeddings and encoder.
        attention_probs_dropout_prob (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the attention probabilities.
        drop_path_rate (`float`, *optional*, defaults to 0.1):
            Stochastic depth rate.
        hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`):
            The non-linear activation function (function or string) in the encoder. If string, `"gelu"`, `"relu"`,
            `"selu"` and `"gelu_new"` are supported.
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        layer_norm_eps (`float`, *optional*, defaults to 1e-05):
            The epsilon used by the layer normalization layers.
        layer_scale_init_value (`float`, *optional*, defaults to 0.0):
            The initial value for the layer scale. Disabled if <=0.
        out_features (`List[str]`, *optional*):
            If used as backbone, list of features to output. Can be any of `"stem"`, `"stage1"`, `"stage2"`, etc.
            (depending on how many stages the model has). If unset and `out_indices` is set, will default to the
            corresponding stages. If unset and `out_indices` is unset, will default to the last stage.
        out_indices (`List[int]`, *optional*):
            If used as backbone, list of indices of features to output. Can be any of 0, 1, 2, etc. (depending on how
            many stages the model has). If unset and `out_features` is set, will default to the corresponding stages.
            If unset and `out_features` is unset, will default to the last stage.

    Example:

    ```python
    >>> from transformers import DinatConfig, DinatModel

    >>> # Initializing a Dinat shi-labs/dinat-mini-in1k-224 style configuration
    >>> configuration = DinatConfig()

    >>> # Initializing a model (with random weights) from the shi-labs/dinat-mini-in1k-224 style configuration
    >>> model = DinatModel(configuration)

    >>> # Accessing the model configuration
    >>> configuration = model.config
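
    >>> # A hedged sketch of a derived attribute: with the defaults above, the channel
    >>> # dimension after the last stage is embed_dim * 2 ** (num_stages - 1) = 64 * 2**3
    >>> configuration.hidden_size
    512

    >>> # A hedged backbone sketch: requested stages are aligned against the stage names
    >>> # ("stem", "stage1", ..., "stage4"), so "stage2" resolves to output index 2
    >>> backbone_config = DinatConfig(out_features=["stage2", "stage4"])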
    ```"""

    model_type = "dinat"

    attribute_map = {
        "num_attention_heads": "num_heads",
        "num_hidden_layers": "num_layers",
    }

    def __init__(
        self,
        patch_size=4,
        num_channels=3,
        embed_dim=64,
        depths=[3, 4, 6, 5],
        num_heads=[2, 4, 8, 16],
        kernel_size=7,
        dilations=[[1, 8, 1], [1, 4, 1, 4], [1, 2, 1, 2, 1, 2], [1, 1, 1, 1, 1]],
        mlp_ratio=3.0,
        qkv_bias=True,
        hidden_dropout_prob=0.0,
        attention_probs_dropout_prob=0.0,
        drop_path_rate=0.1,
        hidden_act="gelu",
        initializer_range=0.02,
        layer_norm_eps=1e-5,
        layer_scale_init_value=0.0,
        out_features=None,
        out_indices=None,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.patch_size = patch_size
        self.num_channels = num_channels
        self.embed_dim = embed_dim
        self.depths = depths
        self.num_layers = len(depths)
        self.num_heads = num_heads
        self.kernel_size = kernel_size
        self.dilations = dilations
        self.mlp_ratio = mlp_ratio
        self.qkv_bias = qkv_bias
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.drop_path_rate = drop_path_rate
        self.hidden_act = hidden_act
        self.layer_norm_eps = layer_norm_eps
        self.initializer_range = initializer_range
        # we set the hidden_size attribute in order to make Dinat work with VisionEncoderDecoderModel
        # this indicates the channel dimension after the last stage of the model
        self.hidden_size = int(embed_dim * 2 ** (len(depths) - 1))
        self.layer_scale_init_value = layer_scale_init_value
        self.stage_names = ["stem"] + [f"stage{idx}" for idx in range(1, len(depths) + 1)]
        self._out_features, self._out_indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=out_indices, stage_names=self.stage_names
        )