Python xgboost.plot_importance() Examples
The following are 6 code examples of xgboost.plot_importance(), collected from open-source projects; each example lists its source file and license.
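Before the project examples, here is a minimal, self-contained sketch of the basic call. The toy data, feature names, and parameter values are illustrative, not taken from any of the projects below:

import matplotlib.pyplot as plt
import numpy as np
import xgboost as xgb

# Toy regression data; any fitted model would do.
rng = np.random.RandomState(0)
X = rng.rand(100, 5)
y = 2 * X[:, 0] + rng.rand(100)

dtrain = xgb.DMatrix(X, label=y, feature_names=["f%d" % i for i in range(5)])
model = xgb.train({"objective": "reg:squarederror"}, dtrain, num_boost_round=10)

# plot_importance returns a matplotlib Axes with one horizontal bar per feature.
ax = xgb.plot_importance(model)
plt.show()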
Example #1
Source File: xgb_utils.py From kaggle-HomeDepot with MIT License
def plot_importance(self):
    ax = xgb.plot_importance(self.model)
    self.save_topn_features()
    return ax
Example #2
Source File: xgb_utils.py From kaggle-HomeDepot with MIT License
def save_topn_features(self, fname="XGBRegressor_topn_features.txt", topn=-1):
    ax = xgb.plot_importance(self.model)
    # plot_importance draws the most important feature at the top of the bar
    # chart; get_yticklabels() returns labels bottom-to-top, so reverse them
    # to get a most-important-first ordering.
    yticklabels = ax.get_yticklabels()[::-1]
    if topn == -1:
        topn = len(yticklabels)
    else:
        topn = min(topn, len(yticklabels))
    with open(fname, "w") as f:
        for i in range(topn):
            f.write("%s\n" % yticklabels[i].get_text())
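The snippet above recovers feature names from the plot's y-tick labels. As a sketch (not part of the project), the same ranking can be read directly from the booster with Booster.get_score(), skipping matplotlib entirely:

# `model` stands in for self.model above: a fitted Booster, or
# model.get_booster() if you start from an XGBRegressor/XGBClassifier.
scores = model.get_score(importance_type="weight")
ranked = sorted(scores, key=scores.get, reverse=True)
with open("topn_features.txt", "w") as f:  # file name is illustrative
    for name in ranked[:10]:
        f.write("%s\n" % name)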
Example #3
Source File: xgb_utils.py From kaggle-HomeDepot with MIT License
def plot_importance(self):
    ax = xgb.plot_importance(self.model)
    self.save_topn_features()
    return ax
Example #4
Source File: xgb_utils.py From kaggle-HomeDepot with MIT License
def save_topn_features(self, fname="XGBClassifier_topn_features.txt", topn=10):
    ax = xgb.plot_importance(self.model)
    yticklabels = ax.get_yticklabels()[::-1]
    if topn == -1:
        topn = len(yticklabels)
    else:
        topn = min(topn, len(yticklabels))
    with open(fname, "w") as f:
        for i in range(topn):
            f.write("%s\n" % yticklabels[i].get_text())
Example #5
Source File: xgbbasemodel.py From Supply-demand-forecasting with MIT License
def run_train_validation(self):
    x_train, y_train, x_validation, y_validation = self.get_train_validationset()
    dtrain = xgb.DMatrix(x_train, label=y_train, feature_names=x_train.columns)
    dvalidation = xgb.DMatrix(x_validation, label=y_validation, feature_names=x_validation.columns)
    self.set_xgb_parameters()

    evals = [(dtrain, 'train'), (dvalidation, 'eval')]
    model = xgb.train(self.xgb_params, dtrain, evals=evals, **self.xgb_learning_params)

    xgb.plot_importance(model)
    plt.show()

    print("features used:\n {}".format(self.get_used_features()))
    return
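A side note, not taken from the project above: plot_importance ranks features by importance_type, which defaults to "weight" (how often a feature is used to split). "gain" and "cover" often produce noticeably different rankings:

# Same fitted model, three different importance metrics.
xgb.plot_importance(model, importance_type="weight")  # number of splits using the feature (default)
xgb.plot_importance(model, importance_type="gain")    # average loss reduction from those splits
xgb.plot_importance(model, importance_type="cover")   # average number of samples those splits affect
plt.show()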
Example #6
Source File: base.py From pandas-ml with BSD 3-Clause "New" or "Revised" License
def plot_importance(self, ax=None, height=0.2,
                    xlim=None, title='Feature importance',
                    xlabel='F score', ylabel='Features',
                    grid=True, **kwargs):
    """Plot importance based on fitted trees.

    Parameters
    ----------
    ax : matplotlib Axes, default None
        Target axes instance. If None, new figure and axes will be created.
    height : float, default 0.2
        Bar height, passed to ax.barh()
    xlim : tuple, default None
        Tuple passed to axes.xlim()
    title : str, default "Feature importance"
        Axes title. To disable, pass None.
    xlabel : str, default "F score"
        X axis title label. To disable, pass None.
    ylabel : str, default "Features"
        Y axis title label. To disable, pass None.
    grid : bool, default True
        Whether to add a grid to the axes.
    kwargs :
        Other keywords passed to ax.barh()

    Returns
    -------
    ax : matplotlib Axes
    """
    import xgboost as xgb

    if not isinstance(self._df.estimator, xgb.XGBModel):
        raise ValueError('estimator must be XGBRegressor or XGBClassifier')
    # Pass grid through rather than hard-coding it, so grid=False is honored.
    return xgb.plot_importance(self._df.estimator,
                               ax=ax, height=height, xlim=xlim, title=title,
                               xlabel=xlabel, ylabel=ylabel, grid=grid, **kwargs)
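The wrapper simply forwards its arguments to xgboost.plot_importance, so the same customization is available when calling the library directly. A usage sketch, with a fitted `model` and illustrative styling values:

import matplotlib.pyplot as plt
import xgboost as xgb

fig, ax = plt.subplots(figsize=(8, 6))
xgb.plot_importance(model, ax=ax, height=0.4,
                    title="Feature importance", xlabel="F score",
                    ylabel="Features", grid=True,
                    max_num_features=10)  # cap the plot at the 10 highest-scoring features
plt.show()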