Python xgboost.__version__ Examples

The following are 4 code examples of xgboost.__version__, drawn from open-source projects. The source file, originating project, and license for each example are listed in the header above it.
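
Note that __version__ is a string attribute of the xgboost module, not a callable, so it is read rather than invoked:

import xgboost

# __version__ is a plain str such as "1.7.6"; compare or print it, don't call it
print(xgboost.__version__)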
Example #1
Source File: utils.py    From onnxconverter-common with MIT License
import warnings
from distutils.version import LooseVersion


def xgboost_installed():
    """
    Checks that *xgboost* is available.
    """
    try:
        import xgboost  # noqa: F401
    except ImportError:
        return False
    from xgboost.core import _LIB
    try:
        _LIB.XGBoosterDumpModelEx
    except AttributeError:
        # The version is not recent enough even though it is version 0.6.
        # You need to install xgboost from github and not from pypi.
        return False
    from xgboost import __version__
    vers = LooseVersion(__version__)
    allowed = LooseVersion('0.7')
    if vers < allowed:
        warnings.warn('The converter works for xgboost >= 0.7. Earlier versions might not.')
    return True 
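
A minimal usage sketch for the guard above; the surrounding print statements are just illustrative:

if xgboost_installed():
    import xgboost
    print("xgboost", xgboost.__version__, "is available for conversion")
else:
    print("xgboost missing or too old; skipping xgboost-specific conversion")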
Example #2
Source File: model_wrapper.py    From AMPL with MIT License
def create_model_wrapper(params, featurizer, ds_client=None):
    """Factory function for creating Model objects of the correct subclass for params.model_type.

    Args:
        params (Namespace) : Parameters passed to the model pipeline
        featurizer (Featurization): Object managing the featurization of compounds
        ds_client (DatastoreClient): Interface to the file datastore

    Returns:
        model (pipeline.Model): Wrapper for DeepChem, sklearn or other model.

    Raises:
        ValueError: Only params.model_type = 'NN', 'RF' or 'xgboost' is supported.
    """
    if params.model_type == 'NN':
        return DCNNModelWrapper(params, featurizer, ds_client)
    elif params.model_type == 'RF':
        return DCRFModelWrapper(params, featurizer, ds_client)
    elif params.model_type == 'xgboost':
        if not xgboost_supported:
            raise Exception("Unable to import xgboost. \
                             The xgboost package needs to be installed to use an xgboost model. \
                             Installation: \
                             from pip: pip3 install xgboost \
                             livermore compute (lc): /usr/mic/bio/anaconda3/bin/pip install xgboost --user \
                             twintron-blue (TTB): /opt/conda/bin/pip install xgboost --user"
                            )
        # float(xgb.__version__) raises ValueError on three-component versions
        # such as "1.0.0"; compare (major, minor) instead. xgboost's 0.x
        # releases were numbered 0.80/0.81/0.82/0.90, so 0.9 maps to (0, 90).
        elif tuple(int(v) for v in xgb.__version__.split('.')[:2]) < (0, 90):
            raise Exception(f"xgboost must be >= 0.9 for GPU support. \
                             Current version = {xgb.__version__} \
                             Installation: \
                             from pip: pip3 install --upgrade xgboost \
                             livermore compute (lc): /usr/mic/bio/anaconda3/bin/pip install --upgrade xgboost --user \
                             twintron-blue (TTB): /opt/conda/bin/pip install --upgrade xgboost --user"
                            )
        else:
            return DCxgboostModelWrapper(params, featurizer, ds_client)
    else:
        raise ValueError("Unknown model_type %s" % params.model_type)

# **************************************************************************************** 
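
Note that float(xgb.__version__) cannot parse three-component versions such as "1.0.0", which is why a (major, minor) comparison is safer; a short illustration, assuming xgboost is installed:

import xgboost as xgb

# Split "1.7.6" into integer components; float("1.7.6") would raise ValueError
major, minor = (int(v) for v in xgb.__version__.split('.')[:2])
print((major, minor) >= (0, 90))  # xgboost's last 0.x release was numbered 0.90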
Example #3
Source File: xgboost.py    From mljar-supervised with MIT License
def __init__(self, params):
        super(XgbAlgorithm, self).__init__(params)
        self.library_version = xgb.__version__

        self.explain_level = params.get("explain_level", 0)
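        # "additional" and "logger" below are module-level objects defined
        # elsewhere in this source file (a dict of defaults and a logging.Logger).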
        self.boosting_rounds = additional.get("max_rounds", 10000)
        self.max_iters = 1
        self.early_stopping_rounds = additional.get("early_stopping_rounds", 50)

        self.learner_params = {
            "tree_method": "hist",
            "booster": "gbtree",
            "objective": self.params.get("objective"),
            "eval_metric": self.params.get("eval_metric"),
            "eta": self.params.get("eta", 0.01),
            "max_depth": self.params.get("max_depth", 1),
            "min_child_weight": self.params.get("min_child_weight", 1),
            "subsample": self.params.get("subsample", 0.8),
            "colsample_bytree": self.params.get("colsample_bytree", 0.8),
            "silent": self.params.get("silent", 1),
            "seed": self.params.get("seed", 1),
        }

        # check https://github.com/dmlc/xgboost/issues/5637
        if self.learner_params["seed"] > 2147483647:
            self.learner_params["seed"] = self.learner_params["seed"] % 2147483647
        if "num_class" in self.params:  # multiclass classification
            self.learner_params["num_class"] = self.params.get("num_class")

        self.best_ntree_limit = 0
        logger.debug("XgbLearner __init__") 
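
The seed clamp above exists because xgboost rejects random seeds larger than the signed 32-bit maximum (the linked issue #5637); a quick illustration of the modulo fix:

seed = 10**12  # larger than 2**31 - 1 == 2147483647
if seed > 2147483647:
    seed %= 2147483647
print(seed)  # now fits in a signed 32-bit integer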
Example #4
Source File: xgboost.py    From mlflow with Apache License 2.0
def get_default_conda_env():
    """
    :return: The default Conda environment for MLflow Models produced by calls to
             :func:`save_model()` and :func:`log_model()`.
    """
    import xgboost as xgb

    return _mlflow_conda_env(
        additional_conda_deps=None,
        # XGBoost is not yet available via the default conda channels, so we install it via pip
        additional_pip_deps=[
            "xgboost=={}".format(xgb.__version__),
        ],
        additional_conda_channels=None)
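
A minimal sketch of the pip requirement this helper pins, assuming xgboost is installed; the printed version is illustrative:

import xgboost as xgb

# Pinning the running xgboost version lets the saved MLflow model environment reproduce it
print("xgboost=={}".format(xgb.__version__))  # e.g. "xgboost==1.7.6"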