import torch
from collections import OrderedDict
import weakref
import warnings
from typing import Any, Tuple

__all__ = ["RemovableHandle", "unserializable_hook", "warn_if_has_hooks", "BackwardHook"]


class RemovableHandle:
    r"""
    A handle which provides the capability to remove a hook.

    Args:
        hooks_dict (dict): A dictionary of hooks, indexed by hook ``id``.
        extra_dict (Union[dict, List[dict]]): An additional dictionary or list of
            dictionaries whose keys will be deleted when the same keys are
            removed from ``hooks_dict``.
    """

    id: int
    next_id: int = 0

    def __init__(self, hooks_dict: Any, *, extra_dict: Any = None) -> None:
        self.hooks_dict_ref = weakref.ref(hooks_dict)
        self.id = RemovableHandle.next_id
        RemovableHandle.next_id += 1

        self.extra_dict_ref: Tuple = ()
        if isinstance(extra_dict, dict):
            self.extra_dict_ref = (weakref.ref(extra_dict),)
        elif isinstance(extra_dict, list):
            self.extra_dict_ref = tuple(weakref.ref(d) for d in extra_dict)

    def remove(self) -> None:
        hooks_dict = self.hooks_dict_ref()
        if hooks_dict is not None and self.id in hooks_dict:
            del hooks_dict[self.id]

        for ref in self.extra_dict_ref:
            extra_dict = ref()
            if extra_dict is not None and self.id in extra_dict:
                del extra_dict[self.id]

    def __getstate__(self):
        if self.extra_dict_ref is None:
            return (self.hooks_dict_ref(), self.id)
        else:
            return (self.hooks_dict_ref(), self.id, tuple(ref() for ref in self.extra_dict_ref))

    def __setstate__(self, state) -> None:
        if state[0] is None:
            # create a dead reference
            self.hooks_dict_ref = weakref.ref(OrderedDict())
        else:
            self.hooks_dict_ref = weakref.ref(state[0])
        self.id = state[1]
        RemovableHandle.next_id = max(RemovableHandle.next_id, self.id + 1)

        if len(state) < 3 or state[2] is None:
            self.extra_dict_ref = ()
        else:
            self.extra_dict_ref = tuple(weakref.ref(d) for d in state[2])

    def __enter__(self) -> "RemovableHandle":
        return self

    def __exit__(self, type: Any, value: Any, tb: Any) -> None:
        self.remove()


def unserializable_hook(f):
    """
    Decorator which marks a function as an unserializable hook.
    This suppresses warnings that would otherwise arise if you attempt
    to serialize a tensor that has a hook.
    """
    f.__torch_unserializable__ = True
    return f


def warn_if_has_hooks(tensor):
    if tensor._backward_hooks:
        for k in tensor._backward_hooks:
            hook = tensor._backward_hooks[k]
            if not hasattr(hook, "__torch_unserializable__"):
                warnings.warn(f"backward hook {repr(hook)} on tensor will not be "
                              "serialized.  If this is expected, you can "
                              "decorate the function with @torch.utils.hooks.unserializable_hook "
                              "to suppress this warning")
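
# Illustrative sketch (the hook name below is an example, not part of the
# module): a tensor hook decorated with @unserializable_hook gains the
# __torch_unserializable__ attribute, so warn_if_has_hooks stays silent for it.
#
# >>> import torch
# >>> from torch.utils.hooks import unserializable_hook, warn_if_has_hooks
# >>> @unserializable_hook
# ... def scale_grad(grad):
# ...     return grad * 2
# >>> t = torch.ones(3, requires_grad=True)
# >>> _ = t.register_hook(scale_grad)
# >>> warn_if_has_hooks(t)  # no warning emitted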


class BackwardHook:
    """
    A wrapper class to implement nn.Module backward hooks.
    It handles:
      - Ignoring non-Tensor inputs and replacing them by None before calling the user hook
      - Generating the proper Node to capture a set of Tensor's gradients
      - Linking the gradients captured for the outputs with the gradients captured for the input
      - Calling the user hook once both output and input gradients are available
    """

    def __init__(self, module, user_hooks, user_pre_hooks):
        self.user_hooks = user_hooks
        self.user_pre_hooks = user_pre_hooks
        self.module = module

        self.grad_outputs = None
        self.n_outputs = -1
        self.output_tensors_index = None
        self.n_inputs = -1
        self.input_tensors_index = None

    def _pack_with_none(self, indices, values, size):
        res = [None] * size
        for idx, val in zip(indices, values):
            res[idx] = val

        return tuple(res)

    def _unpack_none(self, indices, values):
        res = []
        for idx in indices:
            res.append(values[idx])

        return tuple(res)

    def _set_user_hook(self, grad_fn):
        def hook(grad_input, _):
            if self.grad_outputs is None:
                # This can happen when the gradient only flows to the Module's
                # inputs without passing through its outputs, e.g. during
                # double backward.
                return
            res = self._pack_with_none(self.input_tensors_index, grad_input, self.n_inputs)

            for hook in self.user_hooks:
                out = hook(self.module, res, self.grad_outputs)

                if out is None:
                    continue

                if len(out) != len(res):
                    raise RuntimeError("Backward hook returned an invalid number of grad_input, "
                                       f"got {len(out)}, but expected {len(res)}")

                res = out

            self.grad_outputs = None

            return self._unpack_none(self.input_tensors_index, res)

        grad_fn.register_hook(hook)

    def _apply_on_tensors(self, fn, args):
        # Apply the given function to the tensors contained in args.
        # Returns the updated args and the indices of the tensors within them.
        tensors_idx = []
        tensors = []

        requires_grad = False
        for i, arg in enumerate(args):
            if isinstance(arg, torch.Tensor):
                tensors_idx.append(i)
                tensors.append(arg)
                requires_grad |= arg.requires_grad

        if not (requires_grad and torch.is_grad_enabled()):
            return args, None

        new_tensors = torch.nn.modules._functions.BackwardHookFunction.apply(*tensors)
        if len(new_tensors) == 0:
            raise RuntimeError("Cannot set Module backward hook for a Module with no input Tensors.")

        grad_fns = [t.grad_fn for t in new_tensors
                    if t.grad_fn is not None and t.grad_fn.name() == "BackwardHookFunctionBackward"]
        if len(grad_fns) == 0:
            raise RuntimeError("Error while setting up backward hooks. Please open "
                               "an issue with a code sample to reproduce this.")

        fn(grad_fns[0])

        arg_list = list(args)
        for idx, val in zip(tensors_idx, new_tensors):
            arg_list[idx] = val

        return tuple(arg_list), tensors_idx

    def setup_input_hook(self, args):
        def fn(grad_fn):
            self._set_user_hook(grad_fn)

        res, input_idx = self._apply_on_tensors(fn, args)
        self.n_inputs = len(args)
        self.input_tensors_index = input_idx
        return res

    def setup_output_hook(self, args):
        def fn(grad_fn):
            def hook(_, grad_output):
                self.grad_outputs = self._pack_with_none(self.output_tensors_index,
                                                         grad_output,
                                                         self.n_outputs)

                if self.user_pre_hooks:
                    expected_len = len(self.grad_outputs)
                    for user_pre_hook in self.user_pre_hooks:
                        hook_grad_outputs = user_pre_hook(self.module, self.grad_outputs)
                        if hook_grad_outputs is None:
                            continue

                        actual_len = len(hook_grad_outputs)
                        if actual_len != expected_len:
                            raise RuntimeError("Backward pre hook returned an invalid number of grad_output, "
                                               f"got {actual_len}, but expected {expected_len}")
                        self.grad_outputs = hook_grad_outputs

                # Special case: if no input required gradients, the user hooks
                # are called directly from here.
                if self.input_tensors_index is None:
                    grad_inputs = self._pack_with_none([], [], self.n_inputs)
                    for user_hook in self.user_hooks:
                        res = user_hook(self.module, grad_inputs, self.grad_outputs)
                        if res is not None and not (isinstance(res, tuple) and all(el is None for el in res)):
                            raise RuntimeError("Backward hook for Modules where no input requires "
                                               "gradient should always return None or None for all gradients.")
                    self.grad_outputs = None

                if self.grad_outputs is not None:
                    assert self.output_tensors_index is not None
                    return tuple(self.grad_outputs[i] for i in self.output_tensors_index)

            grad_fn.register_hook(hook)

        is_tuple = True
        if not isinstance(args, tuple):
            args = (args,)
            is_tuple = False

        res, output_idx = self._apply_on_tensors(fn, args)
        self.n_outputs = len(args)
        self.output_tensors_index = output_idx

        if not is_tuple:
            res = res[0]
        return res
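
# Illustrative sketch (not part of this module's API): nn.Module's full
# backward hooks (register_full_backward_hook and the corresponding pre-hook
# registration) are built on top of BackwardHook, which rewrites the module's
# inputs and outputs so the gradients flowing through them can be captured
# and handed to the user hook once both sides are available.
#
# >>> import torch
# >>> import torch.nn as nn
# >>> def report(module, grad_input, grad_output):
# ...     print([None if g is None else tuple(g.shape) for g in grad_input])
# >>> m = nn.Linear(3, 2)
# >>> h = m.register_full_backward_hook(report)
# >>> m(torch.randn(4, 3, requires_grad=True)).sum().backward()
# [(4, 3)]
# >>> h.remove()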