import functools
import importlib
import logging
import os
import tempfile

import torch

from .common import device_from_inputs, fake_tensor_unsupported
from .registry import register_backend

log = logging.getLogger(__name__)


@register_backend
@fake_tensor_unsupported
def tvm(gm, example_inputs, *, scheduler=None, trials=20000):
    import tvm
    from tvm import relay
    from tvm.contrib import graph_executor

    jit_mod = torch.jit.trace(gm, example_inputs)
    device = device_from_inputs(example_inputs)
    shape_list = [(f"inp_{idx}", i.shape) for idx, i in enumerate(example_inputs)]
    example_outputs = gm(*example_inputs)
    if len(example_outputs) == 0:
        log.warning("Explicitly fall back to eager due to zero output")
        return gm.forward
    mod, params = relay.frontend.from_pytorch(jit_mod, shape_list)
    if device.type == "cuda":
        dev = tvm.cuda(device.index)
        target = tvm.target.cuda()
    else:
        dev = tvm.cpu(0)
        target = tvm.target.Target(llvm_target())

    if scheduler is None:
        scheduler = os.environ.get("TVM_SCHEDULER", None)

    if scheduler == "auto_scheduler":
        from tvm import auto_scheduler

        log_file = tempfile.NamedTemporaryFile()

        if not os.path.exists(log_file):
            tasks, task_weights = auto_scheduler.extract_tasks(
                mod["main"], params, target
            )
            for task in tasks:
                print(task.compute_dag)
            else:
                print("No tasks")
            if len(tasks) != 0:
                tuner = auto_scheduler.TaskScheduler(tasks, task_weights)
                if not os.path.exists(log_file):
                    assert trials > 0
                    tune_option = auto_scheduler.TuningOptions(
                        num_measure_trials=trials,
                        measure_callbacks=[auto_scheduler.RecordToFile(log_file)],
                        early_stopping=2000,
                    )
                    try:
                        tuner.tune(tune_option)
                    except Exception:
                        if os.path.exists(log_file):
                            os.unlink(log_file)
                        raise

        with auto_scheduler.ApplyHistoryBest(log_file):
            with tvm.transform.PassContext(
                opt_level=3, config={"relay.backend.use_auto_scheduler": True}
            ):
                lib = relay.build(mod, target=target, params=params)
    elif scheduler == "meta_schedule":
        from tvm import meta_schedule as ms

        with tempfile.TemporaryDirectory() as work_dir:
            if device.type != "cuda":
                # meta_schedule needs the physical core count of the CPU target
                target = tvm.target.Target(
                    f"{llvm_target()} --num-cores {ms.utils.cpu_count(logical=False)}"
                )
            database = ms.relay_integration.tune_relay(
                mod=mod,
                target=target,
                work_dir=work_dir,
                max_trials_global=20000,
                num_trials_per_iter=64,
                params=params,
                strategy="evolutionary",
            )
            lib = ms.relay_integration.compile_relay(
                database=database,
                mod=mod,
                target=target,
                params=params,
            )
    elif scheduler == "default" or not scheduler:
        # no autotuning
        with tvm.transform.PassContext(opt_level=10):
            lib = relay.build(mod, target=target, params=params)
    else:
        raise NotImplementedError(
            "This tuning option is invalid/not implemented for torchdynamo's "
            "TVM-related backend. There are three available options: "
            "default, auto_scheduler and meta_schedule."
        )
    m = graph_executor.GraphModule(lib["default"](dev))

    def to_torch_tensor(nd_tensor):
        """A helper function to transfer a NDArray to torch.tensor."""
        if nd_tensor.dtype == "bool":
            # DLPack does not support boolean tensors; go through numpy instead
            # (at the cost of an extra data copy).
            return torch.from_numpy(nd_tensor.numpy())
        return torch.utils.dlpack.from_dlpack(nd_tensor.to_dlpack())

    def to_tvm_tensor(torch_tensor):
        """A helper function to transfer a torch.tensor to NDArray."""
        if torch_tensor.dtype == torch.bool:
            # same reason as above: boolean tensors take the numpy route
            return tvm.nd.array(torch_tensor.cpu().numpy())
        return tvm.nd.from_dlpack(torch_tensor)

    def exec_tvm(*i_args):
        args = [a.contiguous() for a in i_args]
        shape_info, _ = m.get_input_info()
        active_inputs = {name for name, _ in shape_info.items()}
        for idx, arg in enumerate(args, 0):
            if arg.dim() != 0:
                if arg.requires_grad:
                    arg = arg.detach()
                inp_name = f"inp_{idx}"
                if inp_name not in active_inputs:
                    log.warning(
                        "input %s skipped as not found in tvm's runtime library",
                        inp_name,
                    )
                    continue
                m.set_input(inp_name, to_tvm_tensor(arg))
        m.run()
        return [to_torch_tensor(m.get_output(i)) for i in range(m.get_num_outputs())]

    return exec_tvm


tvm_meta_schedule = functools.partial(tvm, scheduler="meta_schedule")
tvm_auto_scheduler = functools.partial(tvm, scheduler="auto_scheduler")
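
# Scheduler selection: the tvm() backend resolves its tuning mode from the
# `scheduler=` keyword first and, when that is left as None, from the
# TVM_SCHEDULER environment variable; the accepted values are "default",
# "auto_scheduler" and "meta_schedule". The two partials above pre-bind a
# scheduler so they can also be handed to torch.compile as a callable backend,
# e.g. (illustrative only) torch.compile(model, backend=tvm_meta_schedule).
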
def has_tvm():
    try:
        importlib.import_module("tvm")
        return True
    except ImportError:
        return False


@functools.lru_cache(None)
def llvm_target():
    if "avx512" in open("/proc/cpuinfo").read():
        return "llvm -mcpu=skylake-avx512"
    return "llvm -mcpu=core-avx2"
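

# A minimal end-to-end sketch, assuming TVM is installed and that this backend
# is reachable through torch.compile's backend registry under the name "tvm"
# (registered by the register_backend decorator above). The model and input
# shape are arbitrary illustrations; invoke with
# `python -m torch._dynamo.backends.tvm`.
if __name__ == "__main__":
    if has_tvm():
        example_model = torch.nn.Linear(16, 2)
        compiled_model = torch.compile(example_model, backend="tvm")
        print(compiled_model(torch.randn(8, 16)).shape)
    else:
        log.warning("TVM is not installed; skipping the example run")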