
    !g2                         d dl Zd dlmZ d dlmc mZ d dlm	Z	 	 d Z
	 	 	 	 ddZ	 ddZ G d de      Z G d	 d
ej                        Z ej                   ee       y)    N)Results)cache_readonlyc                 H      fd} fd} fd}|||fS )ao  
    Negative penalized log-likelihood functions.

    Returns the negative penalized log-likelihood, its derivative, and
    its Hessian.  The penalty only includes the smooth (L2) term.

    All three functions have argument signature (x, model), where
    ``x`` is a point in the parameter space and ``model`` is an
    arbitrary statsmodels regression model.
    """

    def nploglike(params, model):
        nobs = model.nobs
        pen_llf = alpha[k] * (1 - L1_wt) * np.sum(params**2) / 2
        llf = model.loglike(np.r_[params], **loglike_kwds)
        return - llf / nobs + pen_llf

    def npscore(params, model):
        nobs = model.nobs
        pen_grad = alpha[k] * (1 - L1_wt) * params
        gr = -model.score(np.r_[params], **score_kwds)[0] / nobs
        return gr + pen_grad

    def nphess(params, model):
        nobs = model.nobs
        pen_hess = alpha[k] * (1 - L1_wt)
        h = -model.hessian(np.r_[params], **hess_kwds)[0, 0] / nobs + pen_hess
        return h

    return nploglike, npscore, nphess


def fit_elasticnet(model, method="coord_descent", maxiter=100,
                   alpha=0., L1_wt=1., start_params=None, cnvrg_tol=1e-7,
                   zero_tol=1e-8, refit=False, check_step=True,
                   loglike_kwds=None, score_kwds=None, hess_kwds=None):
    """
    Return an elastic net regularized fit to a regression model.

    Parameters
    ----------
    model : model object
        A statsmodels object implementing ``loglike``, ``score``, and
        ``hessian``.
    method : {'coord_descent'}
        Only the coordinate descent algorithm is implemented.
    maxiter : int
        The maximum number of iteration cycles (an iteration cycle
        involves running coordinate descent on all variables).
    alpha : scalar or array_like
        The penalty weight.  If a scalar, the same penalty weight
        applies to all variables in the model.  If a vector, it
        must have the same length as `params`, and contains a
        penalty weight for each coefficient.
    L1_wt : scalar
        The fraction of the penalty given to the L1 penalty term.
        Must be between 0 and 1 (inclusive).  If 0, the fit is
        a ridge fit, if 1 it is a lasso fit.
    start_params : array_like
        Starting values for `params`.
    cnvrg_tol : scalar
        If `params` changes by less than this amount (in sup-norm)
        in one iteration cycle, the algorithm terminates with
        convergence.
    zero_tol : scalar
        Any estimated coefficient smaller than this value is
        replaced with zero.
    refit : bool
        If True, the model is refit using only the variables that have
        non-zero coefficients in the regularized fit.  The refitted
        model is not regularized.
    check_step : bool
        If True, confirm that the first step is an improvement and search
        further if it is not.
    loglike_kwds : dict-like or None
        Keyword arguments for the log-likelihood function.
    score_kwds : dict-like or None
        Keyword arguments for the score function.
    hess_kwds : dict-like or None
        Keyword arguments for the Hessian function.

    Returns
    -------
    Results
        A results object.

    Notes
    -----
    The ``elastic net`` penalty is a combination of L1 and L2
    penalties.

    The function that is minimized is:

    -loglike/n + alpha*((1-L1_wt)*|params|_2^2/2 + L1_wt*|params|_1)

    where |*|_1 and |*|_2 are the L1 and L2 norms.

    The computational approach used here is to obtain a quadratic
    approximation to the smooth part of the target function:

    -loglike/n + alpha*(1-L1_wt)*|params|_2^2/2

    then repeatedly optimize the L1 penalized version of this function
    along coordinate axes.
    """

    k_exog = model.exog.shape[1]

    loglike_kwds = {} if loglike_kwds is None else loglike_kwds
    score_kwds = {} if score_kwds is None else score_kwds
    hess_kwds = {} if hess_kwds is None else hess_kwds

    if np.isscalar(alpha):
        alpha = alpha * np.ones(k_exog)

    # Define starting params
    if start_params is None:
        params = np.zeros(k_exog)
    else:
        params = start_params.copy()

    btol = 1e-4
    params_zero = np.zeros(len(params), dtype=bool)

    init_args = model._get_init_kwds()
    init_args['hasconst'] = False
    model_offset = init_args.pop('offset', None)
    if 'exposure' in init_args and init_args['exposure'] is not None:
        if model_offset is None:
            model_offset = np.log(init_args.pop('exposure'))
        else:
            model_offset += np.log(init_args.pop('exposure'))

    fgh_list = [
        _gen_npfuncs(k, L1_wt, alpha, loglike_kwds, score_kwds, hess_kwds)
        for k in range(k_exog)]

    converged = False

    for itr in range(maxiter):

        # Sweep through the parameters
        params_save = params.copy()
        for k in range(k_exog):

            # Under the active set method, if a parameter becomes
            # zero we do not try to change it again.
            if params_zero[k]:
                continue

            # Set the offset to account for the variables that are
            # being held fixed in the current coordinate
            # optimization.
            params0 = params.copy()
            params0[k] = 0
            offset = np.dot(model.exog, params0)
            if model_offset is not None:
                offset += model_offset

            # Create a one-variable model for optimization.
            model_1var = model.__class__(
                model.endog, model.exog[:, k], offset=offset, **init_args)

            # Do the one-dimensional optimization.
            func, grad, hess = fgh_list[k]
            params[k] = _opt_1d(
                func, grad, hess, model_1var, params[k], alpha[k]*L1_wt,
                tol=btol, check_step=check_step)

            # Update the active set
            if itr > 0 and np.abs(params[k]) < zero_tol:
                params_zero[k] = True
                params[k] = 0.

        # Check for convergence
        pchange = np.max(np.abs(params - params_save))
        if pchange < cnvrg_tol:
            converged = True
            break

    # Set approximate zero coefficients to be exactly zero
    params[np.abs(params) < zero_tol] = 0

    if not refit:
        results = RegularizedResults(model, params)
        results.converged = converged
        return RegularizedResultsWrapper(results)

    # Fit the reduced model to get standard errors and other
    # post-estimation results.
    ii = np.flatnonzero(params)
    cov = np.zeros((k_exog, k_exog))
    init_args = {k: getattr(model, k, None) for k in model._init_keys}
    if len(ii) > 0:
        model1 = model.__class__(
            model.endog, model.exog[:, ii], **init_args)
        rslt = model1.fit()
        params[ii] = rslt.params
        cov[np.ix_(ii, ii)] = rslt.normalized_cov_params
    else:
        # No variables were selected, but fit still needs to run so
        # that the correct results class is returned; fit a model with
        # one variable and zero iterations.
        model1 = model.__class__(model.endog, model.exog[:, 0], **init_args)
        rslt = model1.fit(maxiter=0)

    # fit may return a results or a results wrapper
    if issubclass(rslt.__class__, wrap.ResultsWrapper):
        klass = rslt._results.__class__
    else:
        klass = rslt.__class__

    # Not all models have a scale
    if hasattr(rslt, 'scale'):
        scale = rslt.scale
    else:
        scale = 1.

    # The degrees of freedom should reflect the number of parameters
    # in the refit model, not the zeros that are displayed to indicate
    # which variables were dropped.
    p, q = model.df_model, model.df_resid
    model.df_model = len(ii)
    model.df_resid = model.nobs - model.df_model

    # Assuming a standard signature for creating results classes.
    refit = klass(model, params, cov, scale=scale)
    refit.regularized = True
    refit.converged = converged
    refit.method = method
    refit.fit_history = {'iteration': itr + 1}

    # Restore the degrees of freedom on the parent model.
    model.df_model, model.df_resid = p, q

    return refit


def _opt_1d(func, grad, hess, model, start, L1_wt, tol,
            check_step=True):
    """
    One-dimensional helper for elastic net.

    Parameters
    ----------
    func : function
        A smooth function of a single variable to be optimized
        with L1 penalty.
    grad : function
        The gradient of `func`.
    hess : function
        The Hessian of `func`.
    model : statsmodels model
        The model being fit.
    start : real
        A starting value for the function argument.
    L1_wt : non-negative real
        The weight for the L1 penalty function.
    tol : non-negative real
        A convergence threshold.
    check_step : bool
        If True, check that the first step is an improvement and
        use bisection if it is not.  If False, return after the
        first step regardless.

    Notes
    -----
    ``func``, ``grad``, and ``hess`` have argument signature (x,
    model), where ``x`` is a point in the parameter space and
    ``model`` is the model being fit.

    If the log-likelihood for the model is exactly quadratic, the
    global minimum is returned in one step.  Otherwise numerical
    bisection is used.

    Returns
    -------
    The argmin of the objective function.
    """

    # Minimize L(x) + L1_wt*abs(x), where L(x) is the smooth part of
    # the objective restricted to one coordinate.  Take a quadratic
    # step first; the soft-threshold solution below is exact when L is
    # quadratic, otherwise fall back to bisection.
    x = start
    f = func(x, model)
    b = grad(x, model)
    c = hess(x, model)
    d = b - c*x

    # The optimum is achieved by hard thresholding to zero
    if L1_wt > np.abs(d):
        return 0.

    # x + h is the minimizer of Q(x) + L1_wt*abs(x)
    if d >= 0:
        h = (L1_wt - b) / c
    elif d < 0:
        h = -(L1_wt + b) / c
    else:
        return np.nan

    # If the new point is not uphill for the target function, take it
    # and return.  This check is somewhat expensive and unnecessary
    # when the log-likelihood is exactly quadratic.
    if not check_step:
        return x + h

    f1 = func(x + h, model) + L1_wt*np.abs(x + h)
    if f1 <= f + L1_wt*np.abs(x) + 1e-10:
        return x + h

    # The quadratic step did not improve the objective; use bisection.
    from scipy.optimize import brent
    x_opt = brent(func, args=(model,), brack=(x-1, x+1), tol=tol)
    return x_opt


class RegularizedResults(Results):
    """
    Results for models estimated using regularization

    Parameters
    ----------
    model : Model
        The model instance used to estimate the parameters.
    params : ndarray
        The estimated (regularized) parameters.
    """
    def __init__(self, model, params):
        super().__init__(model, params)

    @cache_readonly
    def fittedvalues(self):
        """
        The predicted values from the model at the estimated parameters.
        """
        return self.model.predict(self.params)


class RegularizedResultsWrapper(wrap.ResultsWrapper):
    _attrs = {
        'params': 'columns',
        'resid': 'rows',
        'fittedvalues': 'rows',
    }
    _wrap_attrs = _attrs
wrap.populate_wrapper(RegularizedResultsWrapper,
                      RegularizedResults)