Python sklearn.isotonic.IsotonicRegression() Examples
The following are 9 code examples of sklearn.isotonic.IsotonicRegression(), collected from open-source projects. You can go to the original project or source file by following the link above each example, and you may also want to browse the other functions and classes available in the sklearn.isotonic module.
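Before the project-specific examples, here is a minimal, self-contained sketch of the basic fit/predict pattern. It is not taken from any of the projects below; the data and parameter choices are illustrative only.

import numpy as np
from sklearn.isotonic import IsotonicRegression

# Noisy but roughly increasing data.
rng = np.random.RandomState(0)
x = np.arange(50, dtype=float)
y = x + rng.normal(scale=5.0, size=50)

# Fit a non-decreasing step function; queries outside the training range are clipped.
ir = IsotonicRegression(increasing=True, out_of_bounds='clip')
y_fit = ir.fit_transform(x, y)           # monotonic fitted values at the training points
y_new = ir.predict([-1.0, 10.5, 99.0])   # out-of-range inputs are clipped to the fit's endpoints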
Example #1
Source File: test_isotonic.py From pandas-ml with BSD 3-Clause "New" or "Revised" License
def test_IsotonicRegression(self):
    # disable at this moment
    return
    """
    data = np.abs(np.random.randn(100))
    data = data.cumsum()
    df = pdml.ModelFrame(np.arange(len(data)), target=data)

    mod1 = df.isotonic.IsotonicRegression()
    mod2 = isotonic.IsotonicRegression()

    # df.fit(mod1)
    # mod2.fit(iris.data)

    # result = df.predict(mod1)
    # expected = mod2.predict(iris.data)
    # self.assertIsInstance(result, pdml.ModelSeries)
    # self.assert_numpy_array_almost_equal(result.values, expected)
    """
Example #2
Source File: Variogram.py From scikit-gstat with MIT License
def _build_harmonized_model(self):
    x = self.bins
    y = self.experimental

    _x = x[~np.isnan(y)]
    _y = y[~np.isnan(y)]
    regr = IsotonicRegression(increasing=True).fit(_x, _y)

    # create the model function
    def harmonize(x):
        """Monotonized Variogram

        Return the isotonic harmonized experimental variogram.
        This means the experimental variogram is monotonic after harmonization.

        The harmonization is done following Hinterding (2003), using the
        PAVA algorithm (Barlow and Bartholomew, 1972).

        Returns
        -------
        gamma : numpy.ndarray
            monotonized experimental variogram

        References
        ----------
        Barlow, R., D. Bartholomew, et al. (1972): Statistical Inference Under
        Order Restrictions. John Wiley and Sons, New York.

        Hinterding, A. (2003): Entwicklung hybrider Interpolationsverfahren für
        den automatisierten Betrieb am Beispiel meteorologischer Größen.
        Dissertation, Institut für Geoinformatik, Westfälische
        Wilhelms-Universität Münster, IfGIprints, Münster. ISBN: 3-936616-12-4

        """
        if isinstance(x, (list, tuple, np.ndarray)):
            return regr.transform(x)
        else:
            return regr.transform([x])

    return harmonize
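As a hedged usage sketch (not part of the scikit-gstat source shown above), the returned closure could be applied to the variogram's own lag bins; the name V for a fitted skgstat.Variogram instance is illustrative only.

# V is assumed to be a fitted skgstat.Variogram instance (illustrative name).
harmonize = V._build_harmonized_model()
gamma = harmonize(V.bins)   # experimental variogram, made monotonically non-decreasing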
Example #3
Source File: test_isotonic.py From pandas-ml with BSD 3-Clause "New" or "Revised" License
def test_objectmapper(self):
    df = pdml.ModelFrame([])
    self.assertIs(df.isotonic.IsotonicRegression,
                  isotonic.IsotonicRegression)
Example #4
Source File: classification.py From metropolis-hastings-gans with Apache License 2.0
def __init__(self):
    self.clf = IsotonicRegression(y_min=0.0, y_max=1.0, out_of_bounds='clip')
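Only the constructor is shown above. As a rough sketch of how such a wrapper is typically used for probability calibration (the data below is illustrative and not from the metropolis-hastings-gans project), the clipping keeps predictions inside [0, 1] even for scores outside the training range.

import numpy as np
from sklearn.isotonic import IsotonicRegression

scores = np.array([0.1, 0.35, 0.4, 0.8])   # uncalibrated classifier scores
labels = np.array([0, 1, 0, 1])            # binary outcomes

clf = IsotonicRegression(y_min=0.0, y_max=1.0, out_of_bounds='clip')
clf.fit(scores, labels)
probs = clf.predict([-0.2, 0.5, 1.3])      # out-of-range scores are clipped; outputs stay in [0, 1]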
Example #5
Source File: after_treat.py From CO2MPAS-TA with European Union Public License 1.1
def calibrate_after_treatment_speed_model(
        times, after_treatment_warm_up_phases, after_treatment_speeds_delta,
        is_hybrid=False):
    """
    Calibrates the engine after treatment speed model.

    :param times:
        Time vector [s].
    :type times: numpy.array

    :param after_treatment_warm_up_phases:
        Phases when engine speed is affected by the after treatment warm up [-].
    :type after_treatment_warm_up_phases: numpy.array

    :param after_treatment_speeds_delta:
        Engine speed delta due to the after treatment warm up [RPM].
    :type after_treatment_speeds_delta: numpy.array

    :param is_hybrid:
        Is the vehicle hybrid?
    :type is_hybrid: bool

    :return:
        After treatment speed model.
    :rtype: function
    """
    if after_treatment_warm_up_phases.any():
        from sklearn.isotonic import IsotonicRegression
        x, y, model = [], [], IsotonicRegression(increasing=False)
        for i, j in co2_utl.index_phases(after_treatment_warm_up_phases):
            x.extend(times[i:j + 1] - (times[i] if is_hybrid else 0.0))
            y.extend(after_treatment_speeds_delta[i:j + 1])
        # noinspection PyUnresolvedReferences
        return model.fit(x, y).predict
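Note that when after_treatment_warm_up_phases contains no phase at all, the if branch is skipped and the function implicitly returns None. Otherwise it returns the bound predict method of a decreasing isotonic fit, mapping time (measured from the start of each warm-up phase for hybrids, absolute time otherwise) to an engine-speed delta in RPM.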
Example #6
Source File: after_treat.py From CO2MPAS-TA with European Union Public License 1.1
def calibrate_after_treatment_power_model(
        times, after_treatment_warm_up_phases, engine_powers_out,
        is_hybrid=False):
    """
    Calibrates the engine after treatment power model.

    :param times:
        Time vector [s].
    :type times: numpy.array

    :param after_treatment_warm_up_phases:
        Phases when engine speed is affected by the after treatment warm up [-].
    :type after_treatment_warm_up_phases: numpy.array

    :param engine_powers_out:
        Engine power vector [kW].
    :type engine_powers_out: numpy.array

    :param is_hybrid:
        Is the vehicle hybrid?
    :type is_hybrid: bool

    :return:
        After treatment power model.
    :rtype: function
    """
    if after_treatment_warm_up_phases.any():
        from sklearn.isotonic import IsotonicRegression
        x, y = [], []
        for i, j in co2_utl.index_phases(after_treatment_warm_up_phases):
            t = times[i:j + 1] - (times[i] if is_hybrid else 0.0)
            x.extend(t)
            y.extend(co2_utl.median_filter(t, engine_powers_out[i:j + 1], 4))
        # noinspection PyUnresolvedReferences
        return IsotonicRegression().fit(x, np.maximum(0, y)).predict
Example #7
Source File: gspv.py From CO2MPAS-TA with European Union Public License 1.1
def _gspv_interpolate_cloud(powers, velocities):
    from sklearn.isotonic import IsotonicRegression
    from scipy.interpolate import InterpolatedUnivariateSpline

    regressor = IsotonicRegression()
    regressor.fit(powers, velocities)
    x = np.linspace(min(powers), max(powers))
    y = regressor.predict(x)
    return InterpolatedUnivariateSpline(x, y, k=1, ext=3)


# noinspection PyMissingOrEmptyDocstring,PyPep8Naming
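The helper above samples the isotonic fit on a regular grid (np.linspace defaults to 50 points) and wraps it in a degree-1 spline with constant extrapolation outside the power range (ext=3). A hedged usage sketch with synthetic data, not from the CO2MPAS project:

import numpy as np

powers = np.array([5.0, 10.0, 20.0, 40.0, 60.0])         # [kW]
velocities = np.array([20.0, 35.0, 50.0, 90.0, 120.0])   # [km/h]

spline = _gspv_interpolate_cloud(powers, velocities)
v_mid = spline(30.0)    # linear interpolation inside the power range
v_out = spline(-5.0)    # ext=3: held constant at the boundary value outside the range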
Example #8
Source File: calibration.py From carl with BSD 3-Clause "New" or "Revised" License
def fit(self, T, y, sample_weight=None):
    """Fit using `T`, `y` as training data.

    Parameters
    ----------
    * `T` [array-like, shape=(n_samples,)]:
        Training data.

    * `y` [array-like, shape=(n_samples,)]:
        Training target.

    * `sample_weight` [array-like, shape=(n_samples,), optional]:
        Weights. If set to None, all weights will be set to 1.

    Returns
    -------
    * `self` [object]:
        `self`.

    Notes
    -----
    `T` is stored for future use, as `predict` needs T to interpolate
    new input data.
    """
    # Check input
    T = column_or_1d(T)

    # Fit isotonic regression
    self.ir_ = IsotonicRegression(y_min=self.y_min,
                                  y_max=self.y_max,
                                  increasing=self.increasing,
                                  out_of_bounds="clip")
    self.ir_.fit(T, y, sample_weight=sample_weight)

    # Interpolators
    if self.interpolation:
        p = self.ir_.transform(T)

        change_mask1 = (p - np.roll(p, 1)) > 0
        change_mask2 = np.roll(change_mask1, -1)
        change_mask1[0] = True
        change_mask1[-1] = True
        change_mask2[0] = True
        change_mask2[-1] = True

        self.interp1_ = interp1d(T[change_mask1], p[change_mask1],
                                 bounds_error=False,
                                 fill_value=(0., 1.))
        self.interp2_ = interp1d(T[change_mask2], p[change_mask2],
                                 bounds_error=False,
                                 fill_value=(0., 1.))

    return self
Example #9
Source File: calibration.py From fklearn with Apache License 2.0
def isotonic_calibration_learner(df: pd.DataFrame,
                                 target_column: str = "target",
                                 prediction_column: str = "prediction",
                                 output_column: str = "calibrated_prediction",
                                 y_min: float = 0.0,
                                 y_max: float = 1.0) -> LearnerReturnType:
    """
    Fits a single feature isotonic regression to the dataset.

    Parameters
    ----------

    df : pandas.DataFrame
        A Pandas' DataFrame with features and target columns.
        The model will be trained to predict the target column
        from the features.

    target_column : str
        The name of the column in `df` that should be used as target for the model.
        This column should be binary, since this is a classification model.

    prediction_column : str
        The name of the column with the uncalibrated predictions from the model.

    output_column : str
        The name of the column with the calibrated predictions from the model.

    y_min : float
        Lower bound of Isotonic Regression.

    y_max : float
        Upper bound of Isotonic Regression.
    """

    clf = IsotonicRegression(y_min=y_min, y_max=y_max, out_of_bounds='clip')
    clf.fit(df[prediction_column], df[target_column])

    def p(new_df: pd.DataFrame) -> pd.DataFrame:
        return new_df.assign(**{output_column: clf.predict(new_df[prediction_column])})

    p.__doc__ = learner_pred_fn_docstring("isotonic_calibration_learner")

    log = {'isotonic_calibration_learner': {
        'output_column': output_column,
        'target_column': target_column,
        'prediction_column': prediction_column,
        'package': "sklearn",
        'package_version': sklearn.__version__,
        'training_samples': len(df)},
        'object': clf}

    return p, p(df), log
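A hedged usage sketch of the returned learner (the data is made up; the column names are the defaults from the signature above):

import pandas as pd

train = pd.DataFrame({"prediction": [0.1, 0.35, 0.4, 0.8],
                      "target":     [0,   1,    0,   1]})

predict_fn, calibrated_train, log = isotonic_calibration_learner(train)

scored = pd.DataFrame({"prediction": [0.05, 0.5, 0.95]})
calibrated = predict_fn(scored)   # adds a "calibrated_prediction" column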