# torch/_custom_op/autograd.py
import torch
import torch.utils._pytree as pytree
from collections import namedtuple
import functools


# Indirection layer registered as the Autograd kernel for a CustomOp.
# It dispatches to the user's 'autograd' impl if one exists, errors if only
# half of the backward registration (backward / save_for_backward) is present,
# and otherwise falls back to the autograd-not-implemented kernel.
def autograd_kernel_indirection(custom_op):
    autograd_fallback = autograd_not_implemented(custom_op)

    def inner(*args, **kwargs):
        if custom_op._has_impl('autograd'):
            kernel = custom_op._get_impl('autograd').func
            return kernel(*args, **kwargs)
        if custom_op._has_impl('save_for_backward') or custom_op._has_impl('backward'):
            missing = (
                'save_for_backward' if custom_op._has_impl('backward')
                else 'backward'
            )
            found = 'save_for_backward' if missing == 'backward' else 'backward'
            loc = custom_op._get_impl(found).location
            raise RuntimeError(
                f"We found a '{found}' registration for {custom_op} at "
                f"{loc} but were unable to find a '{missing}' registration. "
                f"To use the CustomOp API to register a backward formula, "
                f"please provide us both a backward function and a "
                f"'save for backward' function via `impl_backward` and "
                f"`impl_save_for_backward` respectively.")
        return autograd_fallback(*args, **kwargs)
    return inner


# Fallback kernel: error if any input requires grad while grad mode is on,
# otherwise redispatch below the Autograd dispatch key.
def autograd_not_implemented(custom_op):
    def kernel(*args, **kwargs):
        if torch.is_grad_enabled() and pytree.tree_any(
            lambda x: isinstance(x, torch.Tensor) and x.requires_grad,
            (args, kwargs),
        ):
            raise RuntimeError("Autograd has not been implemented for operator")
        with torch._C._AutoDispatchBelowAutograd():
            return custom_op(*args, **kwargs)
    return kernel


def mark_non_differentiable(ctx, output, output_differentiability):
    if output_differentiability is not None:
        if not isinstance(output, tuple):
            tuple_output = (output,)
        else:
            tuple_output = output
        assert len(output_differentiability) == len(tuple_output)
        non_differentiable_tensors = []
        for idx, (differentiable, out) in enumerate(
                zip(output_differentiability, tuple_output)):
            if isinstance(out, torch.Tensor):
                if not differentiable:
                    non_differentiable_tensors.append(out)
                continue
            if isinstance(out, list):
                if not differentiable:
                    non_differentiable_tensors.extend(out)
                continue
            if differentiable:
                raise RuntimeError(
                    f"With output_differentiability={output_differentiability}. "
                    f"At idx {idx}, we received an object of type {type(out)} that "
                    f"is not a Tensor, so it cannot have be marked as differentiable in "
                    f"output_differentiability.")
        if non_differentiable_tensors:
            ctx.mark_non_differentiable(*non_differentiable_tensors)


# Builds the 'autograd' kernel for a CustomOp out of the user-provided
# save_for_backward_fn and backward_fn by generating a torch.autograd.Function
# on the fly and calling it on the pytree-flattened args.
def construct_autograd_kernel(
        schema,
        output_differentiability,
        custom_op,
        op_overload,
        save_for_backward_fn,
        backward_fn):

    def apply(*args):
        flat_args, spec = pytree.tree_flatten(args)
        out_spec = None

        def forward(ctx, *flat_args):
            ctx.set_materialize_grads(True)
            args = pytree.tree_unflatten(list(flat_args), spec)
            with torch._C._AutoDispatchBelowAutograd():
                output = op_overload(*args)

            # Record the types of the args so backward can produce good errors.
            args_info = namedtuple_args(
                schema, pytree.tree_map(lambda arg: type(arg), args))

            save_for_backward_fn_inputs = namedtuple_args(schema, args)
            to_save = save_for_backward_fn(save_for_backward_fn_inputs, output)

            save_pytree_for_backward(ctx, (to_save, args_info))
            mark_non_differentiable(ctx, output, output_differentiability)

            nonlocal out_spec
            flat_output, out_spec = pytree.tree_flatten(output)
            return tuple(flat_output)

        def backward(ctx, *flat_grad_output):
            assert out_spec is not None
            grads = pytree.tree_unflatten(list(flat_grad_output), out_spec)
            saved, args_info = unpack_saved(ctx)
            # There is nothing on this ctx object for now; it exists so that
            # additional things can be attached in the future.
            inner_ctx = object()
            if not isinstance(grads, tuple):
                grads = (grads,)
            grad_inputs_dict = backward_fn(inner_ctx, saved, *grads)

            # Massage the grad_inputs_dict into a flat tuple acceptable to
            # autograd.Function.
            validate_grad_inputs_dict(grad_inputs_dict, custom_op, args_info)
            return grad_inputs_dict_to_flat_tuple(grad_inputs_dict, args_info)

        generated_cls = gen_autograd_function(
            custom_op._opname + '_customop', forward, backward)

        flat_output = generated_cls.apply(*flat_args)
        assert out_spec is not None
        return pytree.tree_unflatten(list(flat_output), out_spec)
    return apply


def gen_autograd_function(name, forward, backward):
    generated_cls = type(
        name,
        (torch.autograd.Function,),
        {
            'forward': staticmethod(forward),
            'backward': staticmethod(backward),
        }
    )
    return generated_cls


@functools.lru_cache()
def namedtuple_args_cls(schema):
    attribs = [arg.name for arg in schema.arguments.flat_all]
    name = str(schema.name) + "_args"
    tuple_cls = namedtuple(name, attribs)
    return tuple_cls


def namedtuple_args(schema, args):
    assert isinstance(args, tuple)
    tuple_cls = namedtuple_args_cls(schema)
    return tuple_cls(*args)


def validate_grad_inputs_dict(grad_inputs_dict, forward_op, args_info):
    def error(what):
        backward = forward_op._get_impl('backward')
        raise RuntimeError(
            f"In the backward function defined for {forward_op} at "
            f"{backward.location} using the CustomOp API, {what}")

    if not isinstance(grad_inputs_dict, dict):
        error(f"expected the output of the backward function to be a dict but "
              f"got {type(grad_inputs_dict)}")

    expected_keys = {arg.name for arg in forward_op._schema.arguments.flat_all
                     if arg.type.is_tensor_like()}
    actual_keys = grad_inputs_dict.keys()
    if expected_keys != actual_keys:
        error(f"expected the returned grad_input dict to have keys "
              f"{expected_keys} but got {actual_keys}. The backward "
              f"function must return a gradient (can be None) for each arg "
              f"to the CustomOp that may be a Tensor or Sequence[Tensor]. "
              f"Args declared to be non-Tensor-like types should not appear "
              f"in the grad_input dict")

    for name, grad in grad_inputs_dict.items():
        arg_info = getattr(args_info, name)

        if isinstance(arg_info, list):
            if not isinstance(grad, (tuple, list)):
                error(f"for input '{name}' expected the grad_input dict to "
                      f"hold a list of gradients but got object of type "
                      f"{type(grad)}.")
            if not len(grad) == len(arg_info):
                error(f"for input '{name}' expected the grad_input dict to "
                      f"hold a list of {len(arg_info)} gradients but got "
                      f"{len(grad)}")
            for idx, (g, info) in enumerate(zip(grad, arg_info)):
                if g is None:
                    continue
                if not isinstance(g, torch.Tensor):
                    error(f"for input '{name}' expected the grad_input dict to "
                          f"hold a list of None or Tensor gradients but got "
                          f"object of {type(g)} at index {idx}")
                if not issubclass(info, torch.Tensor):
                    error(f"for input '{name}', got a Tensor as the gradient "
                          f"for the {idx}-th value but expected None because "
                          f"the {idx}-th value was not a Tensor (it was "
                          f"type {arg_info}")
            continue

        if grad is None:
            continue
        if not isinstance(grad, torch.Tensor):
            error(f"got object of type {type(grad)} as the gradient for input "
                  f"'{name}', but expected the gradient to be either None or "
                  f"a Tensor")
        if not issubclass(arg_info, torch.Tensor):
            error(f"got a Tensor as the gradient for input '{name}' but "
                  f"expected None as the gradient because input '{name}' "
                  f"was not a Tensor (it was type {arg_info}).")


def grad_inputs_dict_to_flat_tuple(grad_inputs_dict, args_info):
    result = []
    for name, arg_info in args_info._asdict().items():
        if name not in grad_inputs_dict:
            result.append(pytree.tree_map(lambda x: None, arg_info))
            continue
        result.append(grad_inputs_dict[name])
    return tuple(pytree.tree_flatten(result)[0])


# Saves "stuff" (an arbitrary pytree) onto the ctx object. Tensors go through
# ctx.save_for_backward; everything else is stored directly on ctx, along with
# the indices needed to reassemble the original pytree. Use unpack_saved to
# invert this.
def save_pytree_for_backward(ctx, stuff):
    flat_stuff, spec = pytree.tree_flatten(stuff)
    num_elts = len(flat_stuff)
    tensor_idxs = [idx for idx, thing in enumerate(flat_stuff)
                   if isinstance(thing, torch.Tensor)]
    non_tensor_idxs = [idx for idx, thing in enumerate(flat_stuff)
                       if not isinstance(thing, torch.Tensor)]
    tensors = [thing for thing in flat_stuff if isinstance(thing, torch.Tensor)]
    non_tensors = [thing for thing in flat_stuff
                   if not isinstance(thing, torch.Tensor)]

    ctx.spec = spec
    ctx.num_elts = num_elts
    ctx.save_for_backward(*tensors)
    ctx.tensor_idxs = tensor_idxs
    ctx.saved_non_tensors = non_tensors
    ctx.non_tensor_idxs = non_tensor_idxs


# Inverse operation to save_pytree_for_backward.
def unpack_saved(ctx):
    flat_stuff = [None] * ctx.num_elts
    for tensor, idx in zip(ctx.saved_tensors, ctx.tensor_idxs):
        flat_stuff[idx] = tensor
    for non_tensor, idx in zip(ctx.saved_non_tensors, ctx.non_tensor_idxs):
        flat_stuff[idx] = non_tensor
    stuff = pytree.tree_unflatten(flat_stuff, ctx.spec)
    return stuff