"""Base class for mixture models."""

import warnings
from abc import ABCMeta, abstractmethod
from numbers import Integral, Real
from time import time

import numpy as np
from scipy.special import logsumexp

from .. import cluster
from ..base import BaseEstimator, DensityMixin, _fit_context
from ..cluster import kmeans_plusplus
from ..exceptions import ConvergenceWarning
from ..utils import check_random_state
from ..utils._param_validation import Interval, StrOptions
from ..utils.validation import check_is_fitted


def _check_shape(param, param_shape, name):
    """Validate the shape of the input parameter 'param'.

    Parameters
    ----------
    param : array

    param_shape : tuple

    name : str
    """
    param = np.array(param)
    if param.shape != param_shape:
        raise ValueError(
            "The parameter '%s' should have the shape of %s, but got %s"
            % (name, param_shape, param.shape)
        )
r   c                   @   sz  e Zd ZU dZeeddddgeeddddgeeddddgeeddddgeeddddgedd	d
dhgdgdgdgeeddddgd
Ze	e
d< dd Zedd Zdd Zedd Zd?ddZeddd@ddZdd  Zed!d" Zed#d$ Zed%d& Zd'd( ZdAd)d*Zd+d, Zd-d. ZdBd/d0Zd1d2 Zed3d4 Zed5d6 Zd7d8 Zd9d: Z d;d< Z!d=d> Z"dS )CBaseMixturezBase class for mixture models.

    This abstract class specifies an interface for all mixture classes and
    provides basic common methods for mixture models.
       Nleft)closedg        r   kmeansrandomrandom_from_data	k-means++random_statebooleanverbose
n_componentstol	reg_covarmax_itern_initinit_paramsr$   
warm_startr&   verbose_interval_parameter_constraintsc                 C   s@   || _ || _|| _|| _|| _|| _|| _|| _|	| _|
| _	d S Nr'   )selfr(   r)   r*   r+   r,   r-   r$   r.   r&   r/   r   r   r   __init__A   s    zBaseMixture.__init__c                 C   s   dS )zCheck initial parameters of the derived class.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)
        Nr   r2   Xr   r   r   _check_parametersY   s    zBaseMixture._check_parametersc                 C   s$  |j \}}| jdkrRt|| jf}tj| jd|d|j}d|t	||f< n| jdkr|j
|| jfd}||jddddtjf  }n| jdkrt|| jf}|j|| jd	d
}d||t	| jf< nD| jdkrt|| jf}t|| j|d\}}d||t	| jf< | || dS )a?  Initialize the model parameters.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)

        random_state : RandomState
            A random number generator instance that controls the random seed
            used for the method chosen to initialize the parameters.
        r    r   )Z
n_clustersr,   r$   r!   sizeZaxisNr"   F)r8   replacer#   )r$   )r   r-   r   Zzerosr(   r	   ZKMeansfitZlabels_Zarangeuniformsumnewaxischoicer   _initialize)r2   r5   r$   	n_samples_resplabelindicesr   r   r   _initialize_parametersc   s@    

  
 
  
z"BaseMixture._initialize_parametersc                 C   s   dS )zInitialize the model parameters of the derived class.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)

        resp : array-like of shape (n_samples, n_components)
        Nr   )r2   r5   rC   r   r   r   r@      s    
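
    # Every `init_params` strategy above yields a responsibility matrix of shape
    # (n_samples, n_components): one-hot assignments for "kmeans",
    # "random_from_data" and "k-means++", and row-normalized uniform draws for
    # "random". `_initialize` then turns these responsibilities into the
    # subclass-specific initial parameters (e.g., weights, means and covariances
    # for a Gaussian mixture).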
zBaseMixture._initializec                 C   s   |  || | S )a  Estimate model parameters with the EM algorithm.

        The method fits the model ``n_init`` times and sets the parameters with
        which the model has the largest likelihood or lower bound. Within each
        trial, the method iterates between E-step and M-step for ``max_iter``
        times until the change of likelihood or lower bound is less than
        ``tol``, otherwise, a ``ConvergenceWarning`` is raised.
        If ``warm_start`` is ``True``, then ``n_init`` is ignored and a single
        initialization is performed upon the first call. Upon consecutive
        calls, training starts where it left off.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        self : object
            The fitted mixture.
        )fit_predictr2   r5   yr   r   r   r;      s    zBaseMixture.fitT)Zprefer_skip_nested_validationc                 C   s  | j |tjtjgdd}|jd | jk rDtd| j d|jd  | | | jo\t	| d }|rj| j
nd}tj }d| _t| j}|j\}}t|D ]}	| |	 |r| || |rtj n| j}
| jdkr|  }d}qtd| jd D ]\}|
}| |\}}| || | ||}
|
| }| || t|| jk rd	| _ qNq| |
 |
|ksn|tj kr|
}|  }|}q| js| jdkrtd
|	d  t | | || _ || _| |\}}|j!ddS )a  Estimate model parameters using X and predict the labels for X.

        The method fits the model n_init times and sets the parameters with
        which the model has the largest likelihood or lower bound. Within each
        trial, the method iterates between E-step and M-step for `max_iter`
        times until the change of likelihood or lower bound is less than
        `tol`, otherwise, a :class:`~sklearn.exceptions.ConvergenceWarning` is
        raised. After fitting, it predicts the most probable label for the
        input data points.

        .. versionadded:: 0.20

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        labels : array, shape (n_samples,)
            Component labels.
        r   )dtypeZensure_min_samplesr   z:Expected n_samples >= n_components but got n_components = z, n_samples = 
converged_r   FTzzInitialization %d did not converge. Try different init parameters, or increase max_iter, tol or check for degenerate data.r9   )"_validate_datar   Zfloat64Zfloat32r   r(   r   r6   r.   hasattrr,   infrK   r   r$   range_print_verbose_msg_init_begrF   Zlower_bound_r+   _get_parameters_e_step_m_stepZ_compute_lower_bound_print_verbose_msg_iter_endabsr)   _print_verbose_msg_init_endwarningswarnr   _set_parametersZn_iter_argmax)r2   r5   rI   Zdo_initr,   Zmax_lower_boundr$   rA   rB   initlower_boundZbest_paramsZbest_n_itern_iterZprev_lower_boundlog_prob_normlog_respZchanger   r   r   rG      s^    






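
    # The EM loop above only relies on a small set of hooks: `_e_step` below
    # (shared by all mixtures and built on `_estimate_log_prob_resp`), and the
    # abstract `_m_step`, `_get_parameters` and `_set_parameters`, plus a
    # `_compute_lower_bound` method supplied by concrete subclasses such as
    # GaussianMixture.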
zBaseMixture.fit_predictc                 C   s   |  |\}}t||fS )a  E step.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : float
            Mean of the logarithms of the probabilities of each sample in X

        log_responsibility : array, shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
        )_estimate_log_prob_respr   mean)r2   r5   r^   r_   r   r   r   rR     s    zBaseMixture._e_stepc                 C   s   dS )a*  M step.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        log_resp : array-like of shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
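
    # In concrete mixtures the M step converts the responsibilities produced by
    # the E step into updated model parameters (e.g., GaussianMixture
    # re-estimates the mixture weights, means and covariances from
    # `np.exp(log_resp)`).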

    @abstractmethod
    def _get_parameters(self):
        pass

    @abstractmethod
    def _set_parameters(self, params):
        pass

    def score_samples(self, X):
        """Compute the log-likelihood of each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        log_prob : array, shape (n_samples,)
            Log-likelihood of each sample in `X` under the current model.
        """
        check_is_fitted(self)
        X = self._validate_data(X, reset=False)

        return logsumexp(self._estimate_weighted_log_prob(X), axis=1)

    def score(self, X, y=None):
        """Compute the per-sample average log-likelihood of the given data X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_dimensions)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        log_likelihood : float
            Log-likelihood of `X` under the Gaussian mixture model.
        """
        return self.score_samples(X).mean()

    def predict(self, X):
        """Predict the labels for the data samples in X using trained model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        labels : array, shape (n_samples,)
            Component labels.
        """
        check_is_fitted(self)
        X = self._validate_data(X, reset=False)
        return self._estimate_weighted_log_prob(X).argmax(axis=1)

    def predict_proba(self, X):
        """Evaluate the components' density for each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        resp : array, shape (n_samples, n_components)
            Density of each Gaussian component for each sample in X.
        """
        check_is_fitted(self)
        X = self._validate_data(X, reset=False)
        _, log_resp = self._estimate_log_prob_resp(X)
        return np.exp(log_resp)

    def sample(self, n_samples=1):
        """Generate random samples from the fitted Gaussian distribution.

        Parameters
        ----------
        n_samples : int, default=1
            Number of samples to generate.

        Returns
        -------
        X : array, shape (n_samples, n_features)
            Randomly generated sample.

        y : array, shape (n_samples,)
            Component labels.
        """
        check_is_fitted(self)

        if n_samples < 1:
            raise ValueError(
                "Invalid value for 'n_samples': %d . The sampling requires at "
                "least one sample." % (self.n_components)
            )

        _, n_features = self.means_.shape
        rng = check_random_state(self.random_state)
        n_samples_comp = rng.multinomial(n_samples, self.weights_)

        if self.covariance_type == "full":
            X = np.vstack([
                rng.multivariate_normal(mean, covariance, int(sample))
                for (mean, covariance, sample) in zip(
                    self.means_, self.covariances_, n_samples_comp
                )
            ])
        elif self.covariance_type == "tied":
            X = np.vstack([
                rng.multivariate_normal(mean, self.covariances_, int(sample))
                for (mean, sample) in zip(self.means_, n_samples_comp)
            ])
        else:
            X = np.vstack([
                mean
                + rng.standard_normal(size=(sample, n_features))
                * np.sqrt(covariance)
                for (mean, covariance, sample) in zip(
                    self.means_, self.covariances_, n_samples_comp
                )
            ])

        y = np.concatenate([
            np.full(sample, j, dtype=int)
            for j, sample in enumerate(n_samples_comp)
        ])

        return (X, y)
  zBaseMixture.samplec                 C   s   |  ||   S )a  Estimate the weighted log-probabilities, log P(X | Z) + log weights.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        weighted_log_prob : array, shape (n_samples, n_component)
        )_estimate_log_prob_estimate_log_weightsr4   r   r   r   rf     s    z'BaseMixture._estimate_weighted_log_probc                 C   s   dS )zEstimate log-weights in EM algorithm, E[ log pi ] in VB algorithm.

        Returns
        -------
        log_weight : array, shape (n_components, )
        Nr   rb   r   r   r   r{     s    z!BaseMixture._estimate_log_weightsc                 C   s   dS )a9  Estimate the log-probabilities log P(X | Z).

        Compute the log-probabilities per each component for each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob : array, shape (n_samples, n_component)
        Nr   r4   r   r   r   rz     s    zBaseMixture._estimate_log_probc              	   C   sL   |  |}t|dd}tjdd ||ddtjf  }W 5 Q R X ||fS )a@  Estimate log probabilities and responsibilities for each sample.

        Compute the log probabilities, weighted log probabilities per
        component and responsibilities for each sample in X with respect to
        the current state of the model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : array, shape (n_samples,)
            log p(X)

        log_responsibilities : array, shape (n_samples, n_components)
            logarithm of the responsibilities
        r   r9   ignore)ZunderN)rf   r   r   Zerrstater>   )r2   r5   Zweighted_log_probr^   r_   r   r   r   r`     s
    
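
    # `_estimate_log_prob_resp` normalizes in log-space: with
    # w = _estimate_weighted_log_prob(X), it returns
    # log_resp[n, k] = w[n, k] - logsumexp(w[n, :]), so np.exp(log_resp) are the
    # posterior probabilities p(component k | x_n) used by the EM updates.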

    def _print_verbose_msg_init_beg(self, n_init):
        """Print verbose message on initialization."""
        if self.verbose == 1:
            print("Initialization %d" % n_init)
        elif self.verbose >= 2:
            print("Initialization %d" % n_init)
            self._init_prev_time = time()
            self._iter_prev_time = self._init_prev_time

    def _print_verbose_msg_iter_end(self, n_iter, diff_ll):
        """Print verbose message on iteration."""
        if n_iter % self.verbose_interval == 0:
            if self.verbose == 1:
                print("  Iteration %d" % n_iter)
            elif self.verbose >= 2:
                cur_time = time()
                print(
                    "  Iteration %d\t time lapse %.5fs\t ll change %.5f"
                    % (n_iter, cur_time - self._iter_prev_time, diff_ll)
                )
                self._iter_prev_time = cur_time

    def _print_verbose_msg_init_end(self, ll):
        """Print verbose message on the end of iteration."""
        if self.verbose == 1:
            print("Initialization converged: %s" % self.converged_)
        elif self.verbose >= 2:
            print(
                "Initialization converged: %s\t time lapse %.5fs\t ll %.5f"
                % (self.converged_, time() - self._init_prev_time, ll)
            )
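

# ---------------------------------------------------------------------------
# Illustrative usage sketch: how the public API defined by BaseMixture (fit,
# predict, predict_proba, score_samples, score, sample) is typically exercised
# through the concrete ``sklearn.mixture.GaussianMixture`` subclass. The block
# is meant to be read or copied into a standalone script; running this private
# module directly is not supported because of its relative imports.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import numpy as np
    from sklearn.mixture import GaussianMixture

    rng = np.random.RandomState(0)
    # Two well-separated Gaussian blobs in 2-D.
    X_demo = np.vstack([
        rng.normal(loc=0.0, scale=1.0, size=(200, 2)),
        rng.normal(loc=6.0, scale=1.0, size=(200, 2)),
    ])

    gm = GaussianMixture(n_components=2, n_init=3, random_state=0).fit(X_demo)

    labels = gm.predict(X_demo)             # hard component assignments
    resp = gm.predict_proba(X_demo)         # posterior responsibilities (400, 2)
    log_density = gm.score_samples(X_demo)  # per-sample log-likelihood
    X_new, y_new = gm.sample(10)            # draw 10 points from the fitted model

    print("converged:", gm.converged_, "n_iter:", gm.n_iter_)
    print("mean log-likelihood:", gm.score(X_demo))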