import torch
import torch.fx
import traceback

from torch._dispatch.python import enable_python_dispatcher
from torch.fx.node import Node, map_aggregate
from typing import Any, Tuple, NamedTuple, Optional, Dict
from torch.fx._compatibility import compatibility
from torch._guards import detect_fake_mode

__all__ = ['TensorMetadata', 'ShapeProp']


@compatibility(is_backward_compatible=True)
class TensorMetadata(NamedTuple):
    # TensorMetadata is a structure containing pertinent information
    # about a tensor within a PyTorch program.

    # General tensor metadata
    shape: torch.Size
    dtype: torch.dtype
    requires_grad: bool
    stride: Tuple[int, ...]
    memory_format: Optional[torch.memory_format]

    # Quantization metadata
    is_quantized: bool
    qparams: Dict[str, Any]


def _extract_tensor_metadata(result: torch.Tensor) -> TensorMetadata:
    """
    Extract a TensorMetadata NamedTuple describing `result`.
    """
    shape = result.shape
    dtype = result.dtype
    requires_grad = result.requires_grad
    stride = result.stride()

    memory_formats = {
        torch.contiguous_format,
        torch.channels_last,
        torch.channels_last_3d,
    }

    memory_format = None
    for query_format in memory_formats:
        if result.is_contiguous(memory_format=query_format):
            memory_format = query_format
            break

    is_quantized = result.is_quantized
    qparams: Dict[str, Any] = {}
    if is_quantized:
        qscheme = result.qscheme()
        qparams["qscheme"] = qscheme
        if qscheme in {torch.per_tensor_affine, torch.per_tensor_symmetric}:
            qparams["scale"] = result.q_scale()
            qparams["zero_point"] = result.q_zero_point()
        elif qscheme in {torch.per_channel_affine,
                         torch.per_channel_affine_float_qparams,
                         torch.per_channel_symmetric}:
            # Per-channel scale and zero_point are tensors; store them as
            # plain lists for easier serialization downstream.
            qparams["scale"] = result.q_per_channel_scales().tolist()
            qparams["zero_point"] = result.q_per_channel_zero_points().tolist()
            qparams["axis"] = result.q_per_channel_axis()

    return TensorMetadata(
        shape, dtype, requires_grad, stride, memory_format, is_quantized, qparams)


@compatibility(is_backward_compatible=True)
class ShapeProp(torch.fx.Interpreter):
    """
    Execute an FX graph Node-by-Node and
    record the shape and type of the result
    into the corresponding node.

    Example:
         In this example, we record the shape
         and data type of a module given
         an example input ``torch.randn(50, D_in)``.
         We print the name, shape and dtype of each node.

        class TwoLayerNet(torch.nn.Module):
            def __init__(self, D_in, H, D_out):
                super().__init__()
                self.linear1 = torch.nn.Linear(D_in, H)
                self.linear2 = torch.nn.Linear(H, D_out)
            def forward(self, x):
                h_relu = self.linear1(x).clamp(min=0)
                y_pred = self.linear2(h_relu)
                return y_pred
        N, D_in, H, D_out = 64, 1000, 100, 10
        x = torch.randn(N, D_in)
        y = torch.randn(N, D_out)
        model = TwoLayerNet(D_in, H, D_out)
        gm = torch.fx.symbolic_trace(model)
        sample_input = torch.randn(50, D_in)
        ShapeProp(gm).propagate(sample_input)

        for node in gm.graph.nodes:
            print(node.name, node.meta['tensor_meta'].dtype,
                node.meta['tensor_meta'].shape)

        The output of this code is:

        x torch.float32 torch.Size([50, 1000])
        linear1 torch.float32 torch.Size([50, 100])
        clamp_1 torch.float32 torch.Size([50, 100])
        linear2 torch.float32 torch.Size([50, 10])
        output torch.float32 torch.Size([50, 10])

    Args:
         module (GraphModule): The module to be executed
         fake_mode (FakeTensorMode): A fake mode for copying the gm

    """
    def __init__(self, gm, fake_mode=None):
        super().__init__(gm)
        if fake_mode is None:
            fake_mode = detect_fake_mode()
        if fake_mode is not None:
            from torch._dynamo.utils import deepcopy_to_fake_tensor
            # Execute against a fakified copy of the module so fake inputs
            # work, but keep the real module around: tensor_meta must be
            # recorded on the nodes of the real graph.
            self.fake_module = deepcopy_to_fake_tensor(self.module, fake_mode)
            self.fake_mode = fake_mode
        else:
            self.fake_module = None
            self.fake_mode = None

        self.real_module = self.module

    def run_node(self, n: Node) -> Any:
        try:
            if self.fake_module is not None:
                # Hacky swap: run the node against the fake module, then
                # restore the real module afterwards.
                self.module = self.fake_module
            try:
                if self.fake_mode is not None:
                    with self.fake_mode, enable_python_dispatcher():
                        result = super().run_node(n)
                else:
                    result = super().run_node(n)
            finally:
                self.module = self.real_module
        except Exception as e:
            traceback.print_exc()
            raise RuntimeError(
                f"ShapeProp error for: node={n.format_node()} with "
                f"meta={n.meta}"
            ) from e

        found_tensor = False

        def extract_tensor_meta(obj):
            if isinstance(obj, torch.Tensor):
                nonlocal found_tensor
                found_tensor = True
                return _extract_tensor_metadata(obj)
            else:
                return obj

        meta = map_aggregate(result, extract_tensor_meta)
        if found_tensor:
            n.meta['tensor_meta'] = meta

        n.meta['type'] = type(result)
        return result

    def propagate(self, *args):
        """
        Run `module` via interpretation and return the result and
        record the shape and type of each node.

        Args:
            *args (Tensor): the sample input.

        Returns:
            Any: The value returned from executing the Module
        """
        if self.fake_mode is not None:
            fake_args = [
                self.fake_mode.from_tensor(t) if isinstance(t, torch.Tensor) else t
                for t in args
            ]
        else:
            fake_args = args
        return super().run(*fake_args)