Python sklearn.gaussian_process.kernels.RBF Examples
The following are 13 code examples of sklearn.gaussian_process.kernels.RBF(). Each example is annotated with its original project, source file, and license. You may also want to check out all available functions/classes of the module sklearn.gaussian_process.kernels.
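Before the project examples, here is a minimal, self-contained sketch of the kernel's basic usage; the toy data and hyperparameter values below are invented for illustration:

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, ConstantKernel

# Toy 1-D regression data (hypothetical).
X = np.linspace(0, 10, 20)[:, np.newaxis]
y = np.sin(X).ravel()

# ConstantKernel scales the RBF; length_scale controls smoothness.
kernel = ConstantKernel(1.0) * RBF(length_scale=1.0)
gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=5)
gp.fit(X, y)

# Predict with uncertainty; kernel_ holds the optimized hyperparameters.
y_pred, y_std = gp.predict(X, return_std=True)
print(gp.kernel_)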
Example #1
Source File: lib_classifier.py From Realtime-Action-Recognition with MIT License
def _init_all_models(self):
    self.names = ["Nearest Neighbors", "Linear SVM", "RBF SVM",
                  "Gaussian Process", "Decision Tree", "Random Forest",
                  "Neural Net", "AdaBoost", "Naive Bayes", "QDA"]
    self.model_name = None
    self.classifiers = [
        KNeighborsClassifier(5),
        SVC(kernel="linear", C=10.0),
        SVC(gamma=0.01, C=1.0, verbose=True),
        GaussianProcessClassifier(1.0 * RBF(1.0)),
        DecisionTreeClassifier(max_depth=5),
        RandomForestClassifier(
            max_depth=30, n_estimators=100, max_features="auto"),
        MLPClassifier((20, 30, 40)),  # Neural Net
        AdaBoostClassifier(),
        GaussianNB(),
        QuadraticDiscriminantAnalysis()]
Example #2
Source File: test_gpc.py From Mastering-Elasticsearch-7.0 with MIT License
def test_random_starts():
    # Test that an increasing number of random-starts of GP fitting only
    # increases the log marginal likelihood of the chosen theta.
    n_samples, n_features = 25, 2
    rng = np.random.RandomState(0)
    X = rng.randn(n_samples, n_features) * 2 - 1
    y = (np.sin(X).sum(axis=1) + np.sin(3 * X).sum(axis=1)) > 0
    kernel = C(1.0, (1e-2, 1e2)) \
        * RBF(length_scale=[1e-3] * n_features,
              length_scale_bounds=[(1e-4, 1e+2)] * n_features)
    last_lml = -np.inf
    for n_restarts_optimizer in range(5):
        gp = GaussianProcessClassifier(
            kernel=kernel, n_restarts_optimizer=n_restarts_optimizer,
            random_state=0).fit(X, y)
        lml = gp.log_marginal_likelihood(gp.kernel_.theta)
        assert_greater(lml, last_lml - np.finfo(np.float32).eps)
        last_lml = lml
Example #3
Source File: gaussianproc.py From pyFTS with GNU General Public License v3.0
def __init__(self, **kwargs):
    super(GPR, self).__init__(**kwargs)
    self.name = "GPR"
    self.detail = "Gaussian Process Regression"
    self.is_high_order = True
    self.has_point_forecasting = True
    self.has_interval_forecasting = True
    self.has_probability_forecasting = True
    self.uod_clip = False
    self.benchmark_only = True
    self.min_order = 1
    self.alpha = kwargs.get("alpha", 0.05)
    self.data = None
    self.lscale = kwargs.get('length_scale', 1)
    self.kernel = ConstantKernel(1.0) * RBF(length_scale=self.lscale)
    self.model = GaussianProcessRegressor(kernel=self.kernel, alpha=.05,
                                          n_restarts_optimizer=10,
                                          normalize_y=False)
    #self.model_fit = None
Example #4
Source File: test_gpc.py From twitter-stock-recommendation with MIT License
def test_random_starts():
    # Test that an increasing number of random-starts of GP fitting only
    # increases the log marginal likelihood of the chosen theta.
    n_samples, n_features = 25, 2
    rng = np.random.RandomState(0)
    X = rng.randn(n_samples, n_features) * 2 - 1
    y = (np.sin(X).sum(axis=1) + np.sin(3 * X).sum(axis=1)) > 0
    kernel = C(1.0, (1e-2, 1e2)) \
        * RBF(length_scale=[1e-3] * n_features,
              length_scale_bounds=[(1e-4, 1e+2)] * n_features)
    last_lml = -np.inf
    for n_restarts_optimizer in range(5):
        gp = GaussianProcessClassifier(
            kernel=kernel, n_restarts_optimizer=n_restarts_optimizer,
            random_state=0).fit(X, y)
        lml = gp.log_marginal_likelihood(gp.kernel_.theta)
        assert_greater(lml, last_lml - np.finfo(np.float32).eps)
        last_lml = lml
Example #5
Source File: track_lib.py From TNT with GNU General Public License v3.0
def GP_regression(tr_x, tr_y, test_x):
    # Fit a linear trend by least squares (via the pseudo-inverse) to use
    # as the mean function.
    A = np.ones((len(tr_x), 2))
    A[:, 0] = tr_x[:, 0]
    p = np.matmul(np.linalg.pinv(A), tr_y)
    mean_tr_y = np.matmul(A, p)
    A = np.ones((len(test_x), 2))
    A[:, 0] = test_x[:, 0]
    mean_test_y = np.matmul(A, p)
    # The GP models the residuals around the linear trend.
    kernel = ConstantKernel(100, (1e-5, 1e5)) * RBF(1, (1e-5, 1e5)) \
        + RBF(1, (1e-5, 1e5))
    gp = GaussianProcessRegressor(kernel=kernel, alpha=1,
                                  n_restarts_optimizer=9)
    gp.fit(tr_x, tr_y - mean_tr_y)
    test_y, sigma = gp.predict(test_x, return_std=True)
    test_y = test_y + mean_test_y
    return test_y
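A hypothetical call, reusing the imports from the example above; the toy arrays are invented, and the function expects 2-D inputs whose first column holds the coordinate:

# Hypothetical usage with toy data (e.g. frame indices vs. positions).
tr_x = np.arange(10, dtype=float).reshape(-1, 1)
tr_y = 2.0 * tr_x[:, 0] + np.random.randn(10) * 0.1
test_x = np.array([[10.0], [11.0], [12.0]])
pred_y = GP_regression(tr_x, tr_y, test_x)  # trend + GP residual forecast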
Example #6
Source File: lib_classifier.py From Realtime-Action-Recognition with MIT License
def __init__(self):
    self._init_all_models()

    # self.clf = self._choose_model("Nearest Neighbors")
    # self.clf = self._choose_model("Linear SVM")
    # self.clf = self._choose_model("RBF SVM")
    # self.clf = self._choose_model("Gaussian Process")
    # self.clf = self._choose_model("Decision Tree")
    # self.clf = self._choose_model("Random Forest")
    self.clf = self._choose_model("Neural Net")
Example #7
Source File: gaussian_process.py From BTB with MIT License
def __init_metamodel__(self, length_scale=1):
    if self._model_kwargs is None:
        self._model_kwargs = {}
    self._model_kwargs['kernel'] = RBF(length_scale=length_scale)
Example #8
Source File: acquisition_function.py From polyaxon with Apache License 2.0
def get_gaussian_process(config, random_generator):
    if not isinstance(config, GaussianProcessConfig):
        raise ValueError("Received a non valid configuration.")

    if GaussianProcessesKernels.is_rbf(config.kernel):
        kernel = RBF(length_scale=config.length_scale)
    else:
        kernel = Matern(length_scale=config.length_scale, nu=config.nu)

    return GaussianProcessRegressor(
        kernel=kernel,
        n_restarts_optimizer=config.num_restarts_optimizer,
        random_state=random_generator,
    )
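For reference, the two kernels this helper chooses between can also be built directly; the hyperparameter values below are invented:

from sklearn.gaussian_process.kernels import RBF, Matern

rbf = RBF(length_scale=1.0)
# Matern with nu=1.5 gives once-differentiable sample paths;
# as nu -> infinity, the Matern kernel converges to the RBF kernel.
matern = Matern(length_scale=1.0, nu=1.5)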
Example #9
Source File: kernels.py From chocolate with BSD 3-Clause "New" or "Revised" License
def __init__(self, space):
    self.space = space
    self.k = kernels.ConstantKernel() * kernels.RBF()
Example #10
Source File: test_regression.py From fklearn with Apache License 2.0
def test_gp_regression_learner():
    df_train = pd.DataFrame({
        'id': ["id1", "id2", "id3", "id4"],
        'x1': [10.0, 13.0, 10.0, 13.0],
        "x2": [0, 1, 1, 0],
        'y': [2.3, 4.0, 100.0, -3.9]
    })

    df_test = pd.DataFrame({
        'id': ["id4", "id4", "id5", "id6"],
        'x1': [12.0, 1000.0, -4.0, 0.0],
        "x2": [1, 1, 0, 1],
        'y': [1.3, -4.0, 0.0, 49]
    })

    from sklearn.gaussian_process.kernels import RBF, WhiteKernel, DotProduct

    kernel = RBF() + WhiteKernel() + DotProduct()

    learner = gp_regression_learner(features=["x1", "x2"],
                                    target="y",
                                    kernel=kernel,
                                    alpha=0.1,
                                    extra_variance="fit",
                                    return_std=True,
                                    extra_params=None,
                                    prediction_column="prediction")

    predict_fn, pred_train, log = learner(df_train)
    pred_test = predict_fn(df_test)

    expected_col_train = df_train.columns.tolist() + ["prediction", "prediction_std"]
    expected_col_test = df_test.columns.tolist() + ["prediction", "prediction_std"]

    assert Counter(expected_col_train) == Counter(pred_train.columns.tolist())
    assert Counter(expected_col_test) == Counter(pred_test.columns.tolist())
    assert (pred_test.columns == pred_train.columns).all()
    assert "prediction" in pred_test.columns
Example #11
Source File: learners.py From M-LOOP with MIT License
def create_gaussian_process(self):
    ''' Create the initial Gaussian process. '''
    if self.cost_has_noise:
        gp_kernel = skk.RBF(length_scale=self.length_scale) \
            + skk.WhiteKernel(noise_level=self.noise_level)
    else:
        gp_kernel = skk.RBF(length_scale=self.length_scale)
    if self.update_hyperparameters:
        self.gaussian_process = skg.GaussianProcessRegressor(
            kernel=gp_kernel,
            n_restarts_optimizer=self.hyperparameter_searches)
    else:
        self.gaussian_process = skg.GaussianProcessRegressor(
            kernel=gp_kernel, optimizer=None)
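The same kernel-selection pattern can be sketched outside the class; the flag and hyperparameter values below are made up:

from sklearn import gaussian_process as skg
from sklearn.gaussian_process import kernels as skk

cost_has_noise = True  # hypothetical flag
gp_kernel = skk.RBF(length_scale=1.0)
if cost_has_noise:
    # WhiteKernel absorbs observation noise in the cost signal.
    gp_kernel = gp_kernel + skk.WhiteKernel(noise_level=0.1)

# optimizer=None keeps the hyperparameters fixed instead of refitting them.
gp = skg.GaussianProcessRegressor(kernel=gp_kernel, optimizer=None)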
Example #12
Source File: datasets.py From aboleth with Apache License 2.0
def gp_draws(ntrain, ntest, kern=RBF(length_scale=0.5), noise=0.1,
             xmin=-10, xmax=10):
    r"""Generate a random (noisy) draw from a Gaussian Process.

    Parameters
    ----------
    ntrain : int
        number of training points to generate
    ntest : int
        number of testing points to generate
    kern : sklearn.gaussian_process.kernels kernel
        kernel to generate data from
    noise : float
        Gaussian noise (standard deviation) to add to GP draws
    xmin : float
        minimum extent of inputs, X
    xmax : float
        maximum extent of inputs, X

    Returns
    -------
    Xtrain : ndarray of shape (ntrain, 1) of training inputs
    Ytrain : ndarray of shape (ntrain, 1) of noisy training targets
    Xtest : ndarray of shape (ntest, 1) of testing inputs
    ftest : ndarray of shape (ntest, 1) of noise-free testing targets
    """
    randgen = np.random.RandomState(next(seedgen))
    # Note: this mapping covers (xmin, xmax] only for symmetric bounds
    # (xmin == -xmax), as with the defaults.
    Xtrain = randgen.rand(ntrain)[:, np.newaxis] * (xmin - xmax) - xmin
    Xtest = np.linspace(xmin, xmax, ntest)[:, np.newaxis]
    Xcat = np.vstack((Xtrain, Xtest))

    # Sample a function from the GP prior using an SVD-based square root
    # of the joint kernel matrix.
    K = kern(Xcat, Xcat)
    U, S, V = np.linalg.svd(K)
    L = U.dot(np.diag(np.sqrt(S))).dot(V)
    f = randgen.randn(ntrain + ntest).dot(L)

    Ytrain = f[0:ntrain] + randgen.randn(ntrain) * noise
    ftest = f[ntrain:]

    Xtrain = Xtrain.astype(np.float32)
    Ytrain = Ytrain[:, np.newaxis].astype(np.float32)
    Xtest = Xtest.astype(np.float32)
    ftest = ftest[:, np.newaxis].astype(np.float32)
    return Xtrain, Ytrain, Xtest, ftest
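A hypothetical call; note that the function relies on a module-level seedgen generator from aboleth's test utilities:

# Hypothetical usage: 50 noisy training points, 100 noise-free test points.
Xtr, Ytr, Xte, fte = gp_draws(ntrain=50, ntest=100, noise=0.05)
print(Xtr.shape, Ytr.shape, Xte.shape, fte.shape)  # (50, 1) (50, 1) (100, 1) (100, 1)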
Example #13
Source File: calibration_gaussian_emulator.py From CityEnergyAnalyst with MIT License
def gaussian_emulator(locator, config):
    """
    This is a Gaussian process linear emulator. It is used to create a surrogate model of CEA
    whose output is either rmse or cvrmse. For more details on the work behind this, please
    check: Rysanek A., Fonseca A., Schlueter, A. Bayesian calibration of Dynamic Building
    Energy Models. Applied Energy 2017.

    :param locator: pointer to location of CEA files
    :param samples: matrix m x n with samples simulated from CEA. m is the number of input variables in [0,1]
    :param cv_rmse: array with results of cv_rmse after running n samples.
    :param building_name: name of building whose calibration process is being acted upon
    :return: file with database of emulator stored in locator.get_calibration_cvrmse_file(building_name)
    """
    # INITIALIZE TIMER
    t0 = time.clock()

    # Local variables
    building_name = config.single_calibration.building
    building_load = config.single_calibration.load
    with open(locator.get_calibration_problem(building_name, building_load), 'r') as input_file:
        problem = pickle.load(input_file)
    samples_norm = problem["samples_norm"]
    target = problem["cv_rmse"]

    # Kernel with parameters given in the GPML book for the Gaussian surrogate
    # models. The hyperparameters are optimized, so you can get anything here.
    # RBF: radial basis function (you can have many; this is one).
    k1 = 5**2 * RBF(length_scale=1e-5)  # long-term smooth rising trend
    k2 = 5**2 * RBF(length_scale=0.000415) \
        * ExpSineSquared(length_scale=3.51e-5, periodicity=0.000199)  # seasonal component
    k3 = 316**2 * RationalQuadratic(length_scale=3.54, alpha=1e+05)  # medium-term irregularity
    k4 = 316**2 * RBF(length_scale=4.82) + WhiteKernel(noise_level=0.43)  # noise terms
    kernel = k1 + k2 + k3 + k4

    # Give the data to the regressor, then fit the GP to the observations.
    # It takes 30 min - 1 h.
    gp = GaussianProcessRegressor(kernel=kernel, alpha=1e-7, normalize_y=True,
                                  n_restarts_optimizer=2)
    gp.fit(samples_norm, target)

    # This is the result.
    joblib.dump(gp, locator.get_calibration_gaussian_emulator(building_name, building_load))
    time_elapsed = time.clock() - t0
    print('done - time elapsed: %.2f seconds' % time_elapsed)