Python scipy.optimize.fmin_cobyla() Examples
The following are 9 code examples of scipy.optimize.fmin_cobyla(). The project and source file each example was taken from are noted above the code. You may also want to check out all available functions/classes of the module scipy.optimize, or try the search function.
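For orientation, here is a minimal, self-contained sketch of the fmin_cobyla call itself. The objective and constraint below are made up for illustration and are not taken from any of the projects listed:

import numpy as np
from scipy.optimize import fmin_cobyla

def objective(x):
    # Squared distance from the point (1, 2).
    return (x[0] - 1) ** 2 + (x[1] - 2) ** 2

def constraint(x):
    # COBYLA treats a constraint as satisfied when it returns >= 0,
    # so this enforces x[0] + x[1] <= 2.
    return 2 - x[0] - x[1]

x_opt = fmin_cobyla(objective, [0.0, 0.0], cons=[constraint], rhoend=1e-7)
print(x_opt)  # approximately [0.5, 1.5]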
Example #1
Source File: multi_tissue_convex_optimizer.py From dmipy with MIT License
def __call__(self, data, x0):
    params = x0 * self.model.scales_for_optimization
    params_dict = self.model.parameter_vector_to_parameters(params)
    phi = self.model(self.acquisition_scheme,
                     quantity="stochastic cost function", **params_dict)
    phi *= self.S0_tissue_responses
    if self.model.N_models == 1:
        vf_x0 = [1.]
    else:
        vf_x0 = x0[-self.model.N_models:]
    vf = fmin_cobyla(self.cobyla_cost_function, x0=vf_x0,
                     cons=[cobyla_positivity_constraint],
                     args=(phi, data), maxfun=2000)
    return vf
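The helpers self.cobyla_cost_function and cobyla_positivity_constraint are defined elsewhere in dmipy and are not shown here. As a rough, hypothetical sketch of the pattern (names and formulas assumed, not the dmipy implementation), the cost could be a least-squares fit of the volume fractions and the constraint could simply return the fractions so COBYLA keeps them non-negative:

import numpy as np
from scipy.optimize import fmin_cobyla

def cobyla_cost_function(vf, phi, data):
    # Hypothetical cost: squared residual of the mixture signal phi @ vf
    # against the measured data.
    return np.sum((np.dot(phi, vf) - data) ** 2)

def cobyla_positivity_constraint(vf, *args):
    # fmin_cobyla forwards the same extra args to constraints unless
    # consargs is given, so accept and ignore them here.
    return vf  # feasible when every volume fraction is >= 0

vf = fmin_cobyla(cobyla_cost_function, x0=[0.5, 0.5],
                 cons=[cobyla_positivity_constraint],
                 args=(np.eye(2), np.array([0.3, 0.7])), maxfun=2000)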
Example #2
Source File: cobyla_optimizer.py From qiskit-aqua with Apache License 2.0
def __init__(self,
             rhobeg: float = 1.0,
             rhoend: float = 1e-4,
             maxfun: int = 1000,
             disp: Optional[int] = None,
             catol: float = 2e-4) -> None:
    """Initializes the CobylaOptimizer.

    This initializer takes the algorithmic parameters of COBYLA and stores them
    for later use of ``fmin_cobyla`` when :meth:`solve` is invoked.
    This optimizer can be applied to find a (local) optimum for problems
    consisting of only continuous variables.

    Args:
        rhobeg: Reasonable initial changes to the variables.
        rhoend: Final accuracy in the optimization (not precisely guaranteed).
            This is a lower bound on the size of the trust region.
        disp: Controls the frequency of output; 0 implies no output.
            Feasible values are {0, 1, 2, 3}.
        maxfun: Maximum number of function evaluations.
        catol: Absolute tolerance for constraint violations.
    """
    self._rhobeg = rhobeg
    self._rhoend = rhoend
    self._maxfun = maxfun
    self._disp = disp
    self._catol = catol
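The initializer above only stores the settings; they map one-to-one onto keyword arguments of fmin_cobyla when solve is later invoked. A minimal, self-contained illustration of passing all five (the objective, starting point, and constraint are placeholders, not qiskit-aqua code):

import numpy as np
from scipy.optimize import fmin_cobyla

def objective(x):
    return (x[0] + 1) ** 2 + x[1] ** 2

def nonneg(x):
    return x  # feasible when every component is >= 0

x = fmin_cobyla(objective, np.array([1.0, 1.0]), cons=[nonneg],
                rhobeg=1.0, rhoend=1e-4, maxfun=1000, disp=0, catol=2e-4)
print(x)  # roughly [0.0, 0.0]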
Example #3
Source File: blender.py From kaggle_otto with BSD 3-Clause "New" or "Revised" License
def get_weights():
    # Read validation labels
    _, labels, _, _, _ = utils.load_data()
    skf = StratifiedKFold(labels, n_folds=5, random_state=23)
    test_index = None
    for _, test_idx in skf:
        test_index = (np.append(test_index, test_idx)
                      if test_index is not None else test_idx)
    val_labels = labels[test_index]
    # Read predictions on validation set
    val_predictions = []
    prediction_files = utils.get_prediction_files()
    for preds_file in prediction_files:
        vp = np.genfromtxt(os.path.join(consts.BLEND_PATH, preds_file),
                           delimiter=',')
        val_predictions.append(vp)
    # Minimize blending function
    p0 = [1.] * len(prediction_files)
    p = fmin_cobyla(error, p0, args=(val_predictions, val_labels),
                    cons=[constraint], rhoend=1e-5)
    return p
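The error and constraint callables used above are defined elsewhere in the project. A plausible shape for them, purely as an illustration and not the kaggle_otto code, is a log loss on the weighted blend plus a non-negativity constraint on the weights:

import numpy as np

def error(weights, predictions, labels):
    # Hypothetical blending loss: multiclass log loss of the weighted
    # average of the per-model predictions (labels as integer class ids).
    blend = np.average(predictions, axis=0, weights=weights)
    blend = np.clip(blend, 1e-15, 1 - 1e-15)
    return -np.mean(np.log(blend[np.arange(len(labels)), labels]))

def constraint(weights, *args):
    # fmin_cobyla forwards the same args to constraints by default;
    # returning the smallest weight keeps all weights >= 0.
    return np.min(weights)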
Example #4
Source File: gaussian_process.py From Splunking-Crime with GNU Affero General Public License v3.0
def __init__(self, regr='constant', corr='squared_exponential', beta0=None,
             storage_mode='full', verbose=False, theta0=1e-1,
             thetaL=None, thetaU=None, optimizer='fmin_cobyla',
             random_start=1, normalize=True,
             nugget=10. * MACHINE_EPSILON, random_state=None):
    self.regr = regr
    self.corr = corr
    self.beta0 = beta0
    self.storage_mode = storage_mode
    self.verbose = verbose
    self.theta0 = theta0
    self.thetaL = thetaL
    self.thetaU = thetaU
    self.normalize = normalize
    self.nugget = nugget
    self.optimizer = optimizer
    self.random_start = random_start
    self.random_state = random_state
Example #5
Source File: gaussian_process.py From twitter-stock-recommendation with MIT License
def __init__(self, regr='constant', corr='squared_exponential', beta0=None,
             storage_mode='full', verbose=False, theta0=1e-1,
             thetaL=None, thetaU=None, optimizer='fmin_cobyla',
             random_start=1, normalize=True,
             nugget=10. * MACHINE_EPSILON, random_state=None):
    self.regr = regr
    self.corr = corr
    self.beta0 = beta0
    self.storage_mode = storage_mode
    self.verbose = verbose
    self.theta0 = theta0
    self.thetaL = thetaL
    self.thetaU = thetaU
    self.normalize = normalize
    self.nugget = nugget
    self.optimizer = optimizer
    self.random_start = random_start
    self.random_state = random_state
Example #6
Source File: methods.py From Localization with MIT License
def lse(cA, mode='2D', cons=True):
    l = len(cA)
    r = [w.r for w in cA]
    c = [w.c for w in cA]
    S = sum(r)
    W = [(S - w) / ((l - 1) * S) for w in r]
    p0 = gx.point(0, 0, 0)  # Initialized point
    for i in range(l):
        p0 = p0 + W[i] * c[i]
    if mode == '2D' or mode == 'Earth1':
        x0 = num.array([p0.x, p0.y])
    elif mode == '3D':
        x0 = num.array([p0.x, p0.y, p0.z])
    else:
        raise cornerCases('Mode not supported:' + mode)
    if mode == 'Earth1':
        fg1 = 1
    else:
        fg1 = 0
    if cons:
        print('GC-LSE geolocating...')
        if not is_disjoint(cA, fg=fg1):
            cL = []
            for q in range(l):
                # Bind the loop index via a default argument so every
                # constraint closure keeps its own q.
                def ff(x, q=q):
                    return r[q] - Norm(x, c[q].std(), mode=mode)
                cL.append(ff)
            res = fmin_cobyla(sum_error, x0, cL, args=(c, r, mode),
                              consargs=(), rhoend=1e-5)
            ans = res
        else:
            raise cornerCases('Disjoint')
    else:
        print('LSE Geolocating...')
        res = minimize(sum_error, x0, args=(c, r, mode), method='BFGS')
        ans = res.x
    return gx.point(ans)
Example #7
Source File: test_cobyla.py From Computable with MIT License
def test_simple(self):
    """ fmin_cobyla """
    x = fmin_cobyla(self.fun, self.x0, [self.con1, self.con2], rhobeg=1,
                    rhoend=1e-5, iprint=0, maxfun=100)
    assert_allclose(x, self.solution, atol=1e-4)
Example #8
Source File: test_cobyla.py From GraphicDesignPatternByPython with MIT License
def test_simple(self):
    # use disp=True as smoke test for gh-8118
    x = fmin_cobyla(self.fun, self.x0, [self.con1, self.con2], rhobeg=1,
                    rhoend=1e-5, maxfun=100, disp=True)
    assert_allclose(x, self.solution, atol=1e-4)
Example #9
Source File: test_cobyla.py From GraphicDesignPatternByPython with MIT License
def test_vector_constraints():
    # test that fmin_cobyla and minimize can take a combination
    # of constraints, some returning a number and others an array
    def fun(x):
        return (x[0] - 1)**2 + (x[1] - 2.5)**2

    def fmin(x):
        return fun(x) - 1

    def cons1(x):
        a = np.array([[1, -2, 2], [-1, -2, 6], [-1, 2, 2]])
        return np.array([a[i, 0] * x[0] + a[i, 1] * x[1] + a[i, 2]
                         for i in range(len(a))])

    def cons2(x):
        return x  # identity, acts as bounds x > 0

    x0 = np.array([2, 0])
    cons_list = [fun, cons1, cons2]

    xsol = [1.4, 1.7]
    fsol = 0.8

    # testing fmin_cobyla
    sol = fmin_cobyla(fun, x0, cons_list, rhoend=1e-5)
    assert_allclose(sol, xsol, atol=1e-4)

    sol = fmin_cobyla(fun, x0, fmin, rhoend=1e-5)
    assert_allclose(fun(sol), 1, atol=1e-4)

    # testing minimize
    constraints = [{'type': 'ineq', 'fun': cons} for cons in cons_list]
    sol = minimize(fun, x0, constraints=constraints, tol=1e-5)
    assert_allclose(sol.x, xsol, atol=1e-4)
    assert_(sol.success, sol.message)
    assert_allclose(sol.fun, fsol, atol=1e-4)

    constraints = {'type': 'ineq', 'fun': fmin}
    sol = minimize(fun, x0, constraints=constraints, tol=1e-5)
    assert_allclose(sol.fun, 1, atol=1e-4)