TeamHG-Memex/eli5 · xgboost.py
def _check_booster_args(xgb, is_regression=None):
    # type: (Any, Optional[bool]) -> Tuple[Booster, Optional[bool]]
    if isinstance(xgb, Booster):
        booster = xgb
    else:
        if hasattr(xgb, 'get_booster'):
            booster = xgb.get_booster()
        else:  # xgb < 0.7
            booster = xgb.booster()
        _is_regression = isinstance(xgb, XGBRegressor)
        if is_regression is not None and is_regression != _is_regression:
            raise ValueError(
                'Inconsistent is_regression={} passed. '
                'You don\'t have to pass it when using scikit-learn API'
                .format(is_regression))
        is_regression = _is_regression
    return booster, is_regression
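A minimal usage sketch (assuming xgboost is installed and that `_check_booster_args` is imported from eli5's xgboost module; the data and estimator below are illustrative): with the scikit-learn API the helper infers `is_regression` from the estimator class, while a raw Booster leaves it as whatever the caller passed.

import numpy as np
from xgboost import XGBRegressor

X = np.random.rand(20, 3)
y = np.random.rand(20)
reg = XGBRegressor(n_estimators=5).fit(X, y)

# scikit-learn API: is_regression is inferred from the estimator class
booster, is_regression = _check_booster_args(reg)
print(is_regression)   # True

# raw Booster: is_regression stays None unless supplied explicitly
booster, is_regression = _check_booster_args(reg.get_booster())
print(is_regression)   # None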
Similar code snippets
1. tmoerman/arboreto · core.py
Match rating: 50.43%
def fit_model(regressor_type,
              regressor_kwargs,
              tf_matrix,
              target_gene_expression,
              early_stop_window_length=EARLY_STOP_WINDOW_LENGTH,
              seed=DEMON_SEED):
    """
    :param regressor_type: string. Case insensitive.
    :param regressor_kwargs: a dictionary of key-value pairs that configures the regressor.
    :param tf_matrix: the predictor matrix (transcription factor matrix) as a numpy array.
    :param target_gene_expression: the target (y) gene expression to predict as a function of the tf_matrix (X).
    :param early_stop_window_length: window length of the early stopping monitor.
    :param seed: (optional) random seed for the regressors.
    :return: a trained regression model.
    """
    regressor_type = regressor_type.upper()

    assert tf_matrix.shape[0] == len(target_gene_expression)

    def do_sklearn_regression():
        regressor = SKLEARN_REGRESSOR_FACTORY[regressor_type](random_state=seed, **regressor_kwargs)

        with_early_stopping = is_oob_heuristic_supported(regressor_type, regressor_kwargs)

        if with_early_stopping:
            regressor.fit(tf_matrix, target_gene_expression, monitor=EarlyStopMonitor(early_stop_window_length))
        else:
            regressor.fit(tf_matrix, target_gene_expression)

        return regressor

    if is_sklearn_regressor(regressor_type):
        return do_sklearn_regression()
    # elif is_xgboost_regressor(regressor_type):
    #     raise ValueError('XGB regressor not yet supported')
    else:
        raise ValueError('Unsupported regressor type: {0}'.format(regressor_type))
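A hedged usage sketch (the 'GBM' key, which arboreto maps to scikit-learn's GradientBoostingRegressor, and the kwargs below are illustrative assumptions, not arboreto's tuned defaults):

import numpy as np

# hypothetical expression data: 200 observations, 10 transcription factors
tf_matrix = np.random.rand(200, 10)
target_gene_expression = np.random.rand(200)

model = fit_model('GBM',
                  {'n_estimators': 50, 'max_depth': 3, 'learning_rate': 0.01},
                  tf_matrix,
                  target_gene_expression,
                  seed=42)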
2. bsolomon1124/pyfinance · general.py
Match rating: 48.26%
def variance_inflation_factor(regressors, hasconst=False):
    """Calculate variance inflation factor (VIF) for each all `regressors`.

    A wrapper/modification of statsmodels:
    statsmodels.stats.outliers_influence.variance_inflation_factor

    One recommendation is that if VIF is greater than 5, then the explanatory
    variable `x` is highly collinear with the other explanatory
    variables, and the parameter estimates will have large standard errors
    because of this. [source: StatsModels]

    Parameters
    ----------
    regressors: DataFrame
        DataFrame containing the entire set of regressors
    hasconst : bool, default False
        If False, a constant column will be added to `regressors` for use
        in OLS.

    Example
    -------
    # Generate some data
    from datetime import date
    from pandas_datareader.data import DataReader as dr

    syms = {'TWEXBMTH' : 'usd',
            'T10Y2YM' : 'term_spread',
            'PCOPPUSDM' : 'copper'
           }
    start = date(2000, 1, 1)
    data = (dr(syms.keys(), 'fred', start)
            .pct_change()
            .dropna())
    data = data.rename(columns = syms)

    print(variance_inflation_factor(data))
    usd            1.31609
    term_spread    1.03793
    copper         1.37055
    dtype: float64
    """

    if not hasconst:
        regressors = add_constant(regressors, prepend=False)
    k = regressors.shape[1]

    def vif_sub(x, regressors):
        x_i = regressors.iloc[:, x]
        mask = np.arange(k) != x
        x_not_i = regressors.iloc[:, mask]
        rsq = linear_model.OLS(x_i, x_not_i, missing="drop").fit().rsquared_adj
        vif = 1.0 / (1.0 - rsq)
        return vif

    vifs = pd.Series(np.arange(k), index=regressors.columns)
    vifs = vifs.apply(vif_sub, args=(regressors,))

    # Find the constant column (probably called 'const', but not necessarily)
    # and drop it.  `is_nonzero_const` borrowed from statsmodels.add_constant
    is_nonzero_const = np.ptp(regressors.values, axis=0) == 0
    is_nonzero_const &= np.all(regressors != 0.0, axis=0)
    vifs.drop(vifs.index[is_nonzero_const], inplace=True)
    return vifs
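A self-contained variant of the docstring example that avoids the FRED download, using synthetic data (column names are made up for illustration; the correlated pair should show clearly inflated VIFs):

import numpy as np
import pandas as pd

rng = np.random.RandomState(0)
n = 500
x1 = rng.normal(size=n)
x2 = 0.8 * x1 + rng.normal(scale=0.5, size=n)  # strongly correlated with x1
x3 = rng.normal(size=n)                        # roughly independent
data = pd.DataFrame({'x1': x1, 'x2': x2, 'x3': x3})

print(variance_inflation_factor(data))
# expect VIFs well above 1 for x1 and x2, close to 1 for x3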
3. dmlc/xgboost · sklearn.py
Match rating: 47.73%
def __setstate__(self, state):
        # backward compatibility code
        # load booster from raw if it is raw
        # the booster now supports pickle
        bst = state["_Booster"]
        if bst is not None and not isinstance(bst, Booster):
            state["_Booster"] = Booster(model_file=bst)
        self.__dict__.update(state)
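A hedged sketch of the round trip this shim supports: `pickle.loads` goes through `__setstate__`, where a raw-bytes booster (as stored by old xgboost versions) would be rebuilt into a Booster; with a current xgboost the model below simply pickles and restores directly.

import pickle
import numpy as np
import xgboost

X = np.random.rand(50, 4)
y = np.random.rand(50)
model = xgboost.XGBRegressor(n_estimators=5).fit(X, y)

restored = pickle.loads(pickle.dumps(model))  # unpickling calls __setstate__
print(isinstance(restored.get_booster(), xgboost.Booster))  # True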
4. mnubo/kubernetes-py · LoadBalancerStatus.py
Match rating: 47.1%
def ingress(self, ingress=None):
        if not is_valid_list(ingress, LoadBalancerIngress):
            raise SyntaxError('LoadBalancerStatus: ingress: [ {0} ] is invalid.'.format(ingress))
        self._ingress = ingress
5. wdm0006/sklearn-extensions · elm.py
Match rating: 47.0%
def fit(self, X, y):
        """
        Fit the model using X, y as training data.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape [n_samples, n_features]
            Training vectors, where n_samples is the number of samples
            and n_features is the number of features.

        y : array-like of shape [n_samples, n_outputs]
            Target values (class labels in classification, real numbers in
            regression)

        Returns
        -------
        self : object

            Returns an instance of self.
        """
        rhl = self._create_random_layer()
        self._genelm_regressor = GenELMRegressor(hidden_layer=rhl,
                                                 regressor=self.regressor)
        self._genelm_regressor.fit(X, y)
        return self
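A minimal usage sketch, assuming this `fit` belongs to the ELMRegressor-style wrapper in sklearn-extensions and that its constructor works with defaults (both are assumptions, not confirmed by the snippet):

import numpy as np

X = np.random.rand(100, 5)
y = np.random.rand(100)

# fit() builds a random hidden layer internally and delegates to GenELMRegressor
model = ELMRegressor()       # hypothetical class name for the snippet's owner
model.fit(X, y)
print(model.predict(X[:3]))  # assumes the usual sklearn-style predict()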
6. mattjj/pyslds · util.py
Match rating: 46.44%
def diag_regression_logprior(regression):
    from scipy.stats import multivariate_normal, gamma
    A = regression.A
    sigmasq = regression.sigmasq_flat
    J, h, alpha, beta = \
        regression.J_0, regression.h_0, regression.alpha_0, regression.beta_0
    Sigma = np.linalg.inv(J)
    mu = Sigma.dot(h)

    lp = 0
    for d in range(regression.D_out):
        lp += multivariate_normal(mu, Sigma).logpdf(A[d])
        lp += gamma(alpha, scale=1./beta).logpdf(1. / sigmasq[d])
    return lp
7. google/mobly · android_device.py
Match rating: 46.12%
def reboot(self):
        """Reboots the device.

        Generally one should use this method to reboot the device instead of
        directly calling `adb.reboot`. Because this method gracefully handles
        the teardown and restoration of running services.

        This method is blocking and only returns when the reboot has completed
        and the services restored.

        Raises:
            Error: Waiting for completion timed out.
        """
        if self.is_bootloader:
            self.fastboot.reboot()
            return
        with self.handle_reboot():
            self.adb.reboot()
8. nucypher/constantSorrow · constants.py
Match rating: 46.08%
def bool_value(self, bool_value):
        if self.__repr_content is not None:
            if bool(self) is not bool(bool_value):
                raise ValueError("Based on the set representation, {} was previously {}; can't change to {}.".format(
                    self.__name,
                    bool(self),
                    bool(bool_value)))

        if self.__bool_repr is not None:
            if bool(self) is not bool(bool_value):
                raise ValueError("The specified bool value for {} was previously {}; can't change to {}.".format(
                    self.__name,
                    bool(self),
                    bool(
                        bool_value)))

        self.__bool_repr = bool(bool_value)
        return self
9. espressif/esptool · esptool.py
Match rating: 44.79%
def flash_defl_finish(self, reboot=False):
        if not reboot and not self.IS_STUB:
            # skip sending flash_finish to ROM loader, as this
            # exits the bootloader. Stub doesn't do this.
            return
        pkt = struct.pack('<I', int(not reboot))
        self.check_command("leave compressed flash mode", self.ESP_FLASH_DEFL_END, pkt)
        self.in_bootloader = False
10. Erotemic/ubelt · util_format.py
Match rating: 44.62%
def _rectify_countdown_or_bool(count_or_bool):
    """
    Used by recursive functions to specify at which level to turn a bool on.
    Counting down yields True, True, ..., False;
    counting up yields False, False, False, ..., True.

    Args:
        count_or_bool (bool or int): if positive and an integer, it will count
            down, otherwise it will remain the same.

    Returns:
        int or bool: count_or_bool_

    CommandLine:
        python -m utool.util_str --test-_rectify_countdown_or_bool

    Example:
        >>> from ubelt.util_format import _rectify_countdown_or_bool  # NOQA
        >>> count_or_bool = True
        >>> a1 = (_rectify_countdown_or_bool(2))
        >>> a2 = (_rectify_countdown_or_bool(1))
        >>> a3 = (_rectify_countdown_or_bool(0))
        >>> a4 = (_rectify_countdown_or_bool(-1))
        >>> a5 = (_rectify_countdown_or_bool(-2))
        >>> a6 = (_rectify_countdown_or_bool(True))
        >>> a7 = (_rectify_countdown_or_bool(False))
        >>> a8 = (_rectify_countdown_or_bool(None))
        >>> result = [a1, a2, a3, a4, a5, a6, a7, a8]
        >>> print(result)
        [1, 0, 0, -1, -2, True, False, False]
    """
    if count_or_bool is True or count_or_bool is False:
        count_or_bool_ = count_or_bool
    elif isinstance(count_or_bool, int):
        if count_or_bool == 0:
            return 0
        elif count_or_bool > 0:
            count_or_bool_ = count_or_bool - 1
        else:
            # We don't count up negatives anymore
            count_or_bool_ = count_or_bool
    else:
        count_or_bool_ = False
    return count_or_bool_