Python xgboost.XGBModel() Examples

The following are 6 code examples of xgboost.XGBModel(), taken from open-source projects. The source project and file are noted above each example. You may also want to check out the other available functions and classes of the xgboost module.
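
Before the project-specific examples, here is a minimal sketch of the scikit-learn-style interface that xgboost.XGBModel exposes (XGBRegressor and XGBClassifier subclass it). The synthetic data and parameter values below are illustrative only and do not come from any of the examples.

# Minimal sketch of the XGBModel scikit-learn-style API (illustrative data).
import numpy as np
from xgboost import XGBModel

X = np.random.rand(100, 5)
y = np.random.rand(100)

model = XGBModel(n_estimators=50, objective='reg:squarederror')
model.fit(X, y)
preds = model.predict(X)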
Example #1
Source File: automl.py    From Kaggler with MIT License
def optimize_hyperparam(self, X, y, test_size=.2, n_eval=100):
    # Hold out a validation set for early stopping and scoring.
    X_trn, X_val, y_trn, y_val = train_test_split(X, y, test_size=test_size, shuffle=self.shuffle)

    def objective(hyperparams):
        # Train an XGBModel with the candidate hyperparameters and early stopping.
        model = XGBModel(n_estimators=self.n_est, **self.params, **hyperparams)
        model.fit(X=X_trn, y=y_trn,
                  eval_set=[(X_val, y_val)],
                  eval_metric=self.metric,
                  early_stopping_rounds=self.n_stop,
                  verbose=False)
        # Score at the best iteration; loss_sign flips metrics that should be maximized.
        score = model.evals_result()['validation_0'][self.metric][model.best_iteration] * self.loss_sign

        return {'loss': score, 'status': STATUS_OK, 'model': model}

    # Minimize the objective over self.space with the Tree-structured Parzen Estimator.
    trials = Trials()
    best = hyperopt.fmin(fn=objective, space=self.space, trials=trials,
                         algo=tpe.suggest, max_evals=n_eval, verbose=1,
                         rstate=self.random_state)

    hyperparams = space_eval(self.space, best)
    return hyperparams, trials
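
The search space referenced as self.space is defined elsewhere in the Kaggler class and is not part of this excerpt. For context, a hyperopt space for XGBoost hyperparameters is typically built from hp expressions along the following lines; the parameter names and ranges here are illustrative assumptions, not Kaggler's actual space.

# Illustrative hyperopt search space for XGBoost (not Kaggler's actual space).
import numpy as np
from hyperopt import hp

space = {
    'max_depth': hp.choice('max_depth', [3, 4, 5, 6, 7, 8]),
    'learning_rate': hp.loguniform('learning_rate', np.log(0.01), np.log(0.3)),
    'subsample': hp.uniform('subsample', 0.5, 1.0),
    'colsample_bytree': hp.uniform('colsample_bytree', 0.5, 1.0),
    'min_child_weight': hp.quniform('min_child_weight', 1, 10, 1),
}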
Example #2
Source File: automl.py    From Kaggler with MIT License
def fit(self, X, y):
    # Refit on the selected features with the best number of estimators found during tuning.
    self.model = XGBModel(n_estimators=self.n_best, **self.params)
    self.model.fit(X=X[self.features], y=y, eval_metric='mae', verbose=False)
    return self
Example #3
Source File: base.py    From pandas-ml with BSD 3-Clause "New" or "Revised" License
def plot_importance(self, ax=None, height=0.2,
                    xlim=None, title='Feature importance',
                    xlabel='F score', ylabel='Features',
                    grid=True, **kwargs):
    """Plot feature importance based on the fitted trees.

    Parameters
    ----------
    ax : matplotlib Axes, default None
        Target axes instance. If None, a new figure and axes will be created.
    height : float, default 0.2
        Bar height, passed to ax.barh().
    xlim : tuple, default None
        Tuple passed to axes.xlim().
    title : str, default "Feature importance"
        Axes title. To disable, pass None.
    xlabel : str, default "F score"
        X axis title label. To disable, pass None.
    ylabel : str, default "Features"
        Y axis title label. To disable, pass None.
    grid : bool, default True
        Whether to add a grid to the axes.
    kwargs :
        Other keywords passed to ax.barh().

    Returns
    -------
    ax : matplotlib Axes
    """
    import xgboost as xgb

    if not isinstance(self._df.estimator, xgb.XGBModel):
        raise ValueError('estimator must be XGBRegressor or XGBClassifier')
    return xgb.plot_importance(self._df.estimator,
                               ax=ax, height=height, xlim=xlim, title=title,
                               xlabel=xlabel, ylabel=ylabel, grid=grid, **kwargs)
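
The wrapper above delegates to xgboost's own plotting helper. A minimal standalone sketch of that underlying call, with synthetic data that is illustrative only:

# Standalone sketch of xgb.plot_importance on a fitted estimator (illustrative data).
import numpy as np
import xgboost as xgb
import matplotlib.pyplot as plt

X = np.random.rand(100, 5)
y = np.random.rand(100)
reg = xgb.XGBRegressor(n_estimators=20).fit(X, y)
ax = xgb.plot_importance(reg, height=0.2, xlabel='F score')
plt.show()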
Example #4
Source File: base.py    From pandas-ml with BSD 3-Clause "New" or "Revised" License
def to_graphviz(self, num_trees=0, rankdir='UT',
                yes_color='#0000FF', no_color='#FF0000', **kwargs):
    """Convert the specified tree to a graphviz instance. IPython can automatically
    plot the returned graphviz instance. Otherwise, you should call the .render()
    method of the returned graphviz instance.

    Parameters
    ----------
    num_trees : int, default 0
        Specify the ordinal number of the target tree.
    rankdir : str, default "UT"
        Passed to graphviz via graph_attr.
    yes_color : str, default '#0000FF'
        Edge color when the node condition is met.
    no_color : str, default '#FF0000'
        Edge color when the node condition is not met.
    kwargs :
        Other keywords passed to the graphviz graph_attr.

    Returns
    -------
    graph : graphviz instance
    """
    import xgboost as xgb

    if not isinstance(self._df.estimator, xgb.XGBModel):
        raise ValueError('estimator must be XGBRegressor or XGBClassifier')
    return xgb.to_graphviz(self._df.estimator,
                           num_trees=num_trees, rankdir=rankdir,
                           yes_color=yes_color, no_color=no_color, **kwargs)
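
As the docstring notes, outside IPython the returned graphviz instance has to be rendered explicitly. A minimal standalone sketch of the underlying call, assuming the graphviz package and binaries are installed; the data and output name are illustrative.

# Standalone sketch of xgb.to_graphviz followed by an explicit render (illustrative).
import numpy as np
import xgboost as xgb

X = np.random.rand(100, 5)
y = np.random.rand(100)
reg = xgb.XGBRegressor(n_estimators=5).fit(X, y)
graph = xgb.to_graphviz(reg, num_trees=0)
graph.render('tree0')  # writes the dot source and a rendered file next to it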
Example #5
Source File: base.py    From pandas-ml with BSD 3-Clause "New" or "Revised" License
def plot_tree(self, num_trees=0, rankdir='UT', ax=None, **kwargs):
    """Plot the specified tree.

    Parameters
    ----------
    num_trees : int, default 0
        Specify the ordinal number of the target tree.
    rankdir : str, default "UT"
        Passed to graphviz via graph_attr.
    ax : matplotlib Axes, default None
        Target axes instance. If None, a new figure and axes will be created.
    kwargs :
        Other keywords passed to to_graphviz.

    Returns
    -------
    ax : matplotlib Axes
    """
    import xgboost as xgb

    if not isinstance(self._df.estimator, xgb.XGBModel):
        raise ValueError('estimator must be XGBRegressor or XGBClassifier')
    return xgb.plot_tree(self._df.estimator, ax=ax,
                         num_trees=num_trees, rankdir=rankdir, **kwargs)
Example #6
Source File: xgboost_migration.py    From h2o4gpu with Apache License 2.0
def load_pkl(name):
    """Load an xgboost model from a pickle file and convert it from version
    0.90 if necessary.

    :param name: path to the pickle file
    :return:
        XGBoost model
    """
    import pickle
    import xgboost
    with open(name, 'rb') as f:
        try:
            model = pickle.load(f)
            return model
        except xgboost.core.XGBoostError as e:
            if "Check failed: header == serialisation_header_" in str(e):
                # The pickle was produced by xgboost 0.90: unpickle it with the
                # bundled legacy package, then re-save and re-load it with the
                # current one.
                # pylint: disable=unused-import
                import xgboost_prev
                import tempfile

                class Unpickler(pickle.Unpickler):
                    def find_class(self, module, name):
                        # Redirect xgboost.* modules to the legacy xgboost_prev copy.
                        if module.startswith("xgboost"):
                            return pickle.Unpickler.find_class(
                                self, module.replace(
                                    "xgboost", "xgboost_prev"),
                                name)
                        return pickle.Unpickler.find_class(self, module, name)

                f.seek(0)
                model = Unpickler(f).load()
                # Round-trip through xgboost's native model format to migrate it.
                temp_file = tempfile.NamedTemporaryFile(
                    prefix='xgboost_migration', suffix='.model')
                model.save_model(temp_file.name)
                migrated_model = xgboost.XGBModel()
                migrated_model.load_model(temp_file.name)
                return migrated_model
            raise
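
A short usage sketch for the helper above; the file path is illustrative and assumes a model pickled with an older xgboost.

# Hypothetical usage: transparently migrate a model pickled under xgboost 0.90.
model = load_pkl('legacy_xgb_model.pkl')
print(type(model))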