Python sklearn.base.is_regressor() Examples
The following are 5 code examples of sklearn.base.is_regressor(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module sklearn.base, or try the search function.
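As a quick orientation before the project examples, here is a minimal sketch of what is_regressor() returns for a few common scikit-learn estimators. The estimators chosen here are illustrative and are not taken from any of the projects below.

from sklearn.base import is_regressor
from sklearn.linear_model import LinearRegression, LogisticRegression

print(is_regressor(LinearRegression()))    # True: LinearRegression is a regressor
print(is_regressor(LogisticRegression()))  # False: LogisticRegression is a classifier
print(is_regressor("not an estimator"))    # False: arbitrary objects are not regressors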
Example #1
Source File: engine.py From hyperparameter_hunter with MIT License | 6 votes
def base_estimator(self, value):
    # Build `base_estimator` if string given
    if isinstance(value, str):
        value = cook_estimator(
            value, space=self.space, random_state=self.rng.randint(0, np.iinfo(np.int32).max)
        )

    # Check if regressor
    if not is_regressor(value) and value is not None:
        raise ValueError(f"`base_estimator` must be a regressor. Got {value}")
    # Treat per second acquisition function specially
    is_multi_regressor = isinstance(value, MultiOutputRegressor)
    if self.acq_func.endswith("ps") and not is_multi_regressor:
        value = MultiOutputRegressor(value)

    self._base_estimator = value
Example #2
Source File: __init__.py From oddt with BSD 3-Clause "New" or "Revised" License | 6 votes
def __init__(self, models):
    """Proxy class to build an ensemble of models with an API as one

    Parameters
    ----------
    models: array
        An array of models
    """
    self._models = models if len(models) else None
    if self._models is not None:
        if is_classifier(self._models[0]):
            check_type = is_classifier
            self._scoring_fun = accuracy_score
        elif is_regressor(self._models[0]):
            check_type = is_regressor
            self._scoring_fun = r2_score
        else:
            raise ValueError('Expected regressors or classifiers,'
                             ' got %s instead' % type(self._models[0]))
        for model in self._models:
            if not check_type(model):
                raise ValueError('Different types of models found, provide'
                                 ' either regressors or classifiers.')
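The type-dispatch step in the example above (pick a metric based on whether the models are classifiers or regressors) can be reduced to a small standalone sketch. pick_scoring_fun is a hypothetical helper name used here for illustration; it is not part of oddt or scikit-learn.

from sklearn.base import is_classifier, is_regressor
from sklearn.metrics import accuracy_score, r2_score

def pick_scoring_fun(model):
    # Return the metric that matches the estimator type.
    if is_classifier(model):
        return accuracy_score
    if is_regressor(model):
        return r2_score
    raise ValueError('Expected a regressor or classifier, got %s instead' % type(model))

# e.g. pick_scoring_fun(LinearRegression()) returns r2_score,
# while pick_scoring_fun(LogisticRegression()) returns accuracy_score.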
Example #3
Source File: _stack.py From sktime with BSD 3-Clause "New" or "Revised" License | 5 votes
def _check_final_regressor(self):
    if not is_regressor(self.final_regressor):
        raise ValueError(f"`final_regressor` should be a regressor, "
                         f"but found: {self.final_regressor}")
Example #4
Source File: sklearn_patches.py From tslearn with BSD 2-Clause "Simplified" License | 4 votes
def check_fit_idempotent(name, estimator_orig):
    # Check that est.fit(X) is the same as est.fit(X).fit(X). Ideally we would
    # check that the estimated parameters during training (e.g. coefs_) are
    # the same, but having a universal comparison function for those
    # attributes is difficult and full of edge cases. So instead we check that
    # predict(), predict_proba(), decision_function() and transform() return
    # the same results.

    check_methods = ["predict", "transform", "decision_function",
                     "predict_proba"]
    rng = np.random.RandomState(0)

    if estimator_orig._get_tags()['non_deterministic']:
        msg = name + ' is non deterministic'
        raise SkipTest(msg)

    estimator = clone(estimator_orig)
    set_random_state(estimator)
    if 'warm_start' in estimator.get_params().keys():
        estimator.set_params(warm_start=False)

    n_samples = 100
    X, _ = _create_small_ts_dataset()
    X = X.reshape((X.shape[0], X.shape[1]))
    X = pairwise_estimator_convert_X(X, estimator)
    if is_regressor(estimator_orig):
        y = rng.normal(size=n_samples)
    else:
        y = rng.randint(low=0, high=2, size=n_samples)

    train, test = next(ShuffleSplit(test_size=.2, random_state=rng).split(X))
    X_train, y_train = _safe_split(estimator, X, y, train)
    X_test, y_test = _safe_split(estimator, X, y, test, train)

    # Fit for the first time
    estimator.fit(X_train, y_train)

    result = {method: getattr(estimator, method)(X_test)
              for method in check_methods
              if hasattr(estimator, method)}

    # Fit again
    set_random_state(estimator)
    estimator.fit(X_train, y_train)

    for method in check_methods:
        if hasattr(estimator, method):
            new_result = getattr(estimator, method)(X_test)
            if np.issubdtype(new_result.dtype, np.floating):
                tol = 2 * np.finfo(new_result.dtype).eps
            else:
                tol = 2 * np.finfo(np.float64).eps
            assert_allclose_dense_sparse(
                result[method], new_result, atol=max(tol, 1e-9),
                rtol=max(tol, 1e-7),
                err_msg="Idempotency check failed for method {}".format(method)
            )
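The is_regressor() branch in check_fit_idempotent decides what kind of target vector to synthesize before fitting. A minimal, self-contained sketch of just that step follows; make_target is a hypothetical helper, not part of tslearn or scikit-learn.

import numpy as np
from sklearn.base import is_regressor

def make_target(estimator, n_samples, seed=0):
    # Mirror the y-generation branch above: continuous targets for
    # regressors, binary class labels for everything else.
    rng = np.random.RandomState(seed)
    if is_regressor(estimator):
        return rng.normal(size=n_samples)
    return rng.randint(low=0, high=2, size=n_samples)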
Example #5
Source File: sklearn_patches.py From tslearn with BSD 2-Clause "Simplified" License | 4 votes
def yield_all_checks(name, estimator):
    tags = estimator._get_tags()
    if "2darray" not in tags["X_types"]:
        warnings.warn("Can't test estimator {} which requires input "
                      " of type {}".format(name, tags["X_types"]),
                      SkipTestWarning)
        return
    if tags["_skip_test"]:
        warnings.warn("Explicit SKIP via _skip_test tag for estimator "
                      "{}.".format(name), SkipTestWarning)
        return

    yield from _yield_checks(name, estimator)
    if is_classifier(estimator):
        yield from _yield_classifier_checks(name, estimator)
    if is_regressor(estimator):
        yield from _yield_regressor_checks(name, estimator)
    if hasattr(estimator, 'transform'):
        if not tags["allow_variable_length"]:
            # Transformer tests ensure that shapes are the same at fit and
            # transform time, hence we need to skip them for estimators that
            # allow variable-length inputs
            yield from _yield_transformer_checks(name, estimator)
    if isinstance(estimator, ClusterMixin):
        yield from _yield_clustering_checks(name, estimator)
    if is_outlier_detector(estimator):
        yield from _yield_outliers_checks(name, estimator)
    # We are not strict on presence/absence of the 3rd dimension
    # yield check_fit2d_predict1d
    if not tags["non_deterministic"]:
        yield check_methods_subset_invariance
    yield check_fit2d_1sample
    yield check_fit2d_1feature
    yield check_fit1d
    yield check_get_params_invariance
    yield check_set_params
    yield check_dict_unchanged
    yield check_dont_overwrite_parameters
    yield check_fit_idempotent
    if (is_classifier(estimator) or
            is_regressor(estimator) or
            isinstance(estimator, ClusterMixin)):
        if tags["allow_variable_length"]:
            yield check_different_length_fit_predict_transform