Python scipy.optimize.OptimizeResult() Examples

The following are 26 code examples of scipy.optimize.OptimizeResult(), collected from open-source projects. The source file, project, and license are listed above each example. You may also want to check out all available functions/classes of the module scipy.optimize.
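Before looking at the examples, note the property most of them rely on: scipy.optimize.OptimizeResult is a dict subclass whose entries are also exposed as attributes, so fields such as x, fun, success, and nfev can be attached freely. A minimal sketch:

from scipy.optimize import OptimizeResult

# Fields can be supplied as keyword arguments at construction time...
res = OptimizeResult(x=[1.0, 2.0], fun=0.25, success=True)

# ...or attached afterwards as attributes, which are stored as dict entries.
res.nfev = 42
res.message = "demo"

assert res["fun"] == res.fun        # dict access and attribute access agree
print(sorted(res.keys()))           # ['fun', 'message', 'nfev', 'success', 'x']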
Example #1
Source File: optim.py    From GPflowOpt with Apache License 2.0
def optimize(self, objectivefx, **kwargs):
        """
        Optimize a given function f over a domain.

        The optimizer class supports interruption: if Ctrl+C is pressed during the optimization, the last best
        point is returned.

        The actual optimization routine is defined in _optimize, which subclasses must implement.

        :param objectivefx: callable taking one argument, a 2D numpy array. The number of columns corresponds to
            the dimensionality of the input domain.
        :return: OptimizeResult reporting the results.
        """
        objective = ObjectiveWrapper(objectivefx, **self._wrapper_args)
        try:
            result = self._optimize(objective, **kwargs)
        except KeyboardInterrupt:
            result = OptimizeResult(x=objective._previous_x,
                                    success=False,
                                    message="Caught KeyboardInterrupt, returning last good value.")
        result.x = np.atleast_2d(result.x)
        result.nfev = objective.counter
        return result 
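The interrupt-handling pattern of Example #1 is not specific to GPflowOpt. Below is a self-contained sketch of the same idea, with a made-up random-search loop standing in for the real _optimize routine:

import numpy as np
from scipy.optimize import OptimizeResult

def interruptible_minimize(fun, x0, n_iter=1000, step=0.1):
    """Toy random search that reports its best point even on Ctrl+C."""
    best_x = np.asarray(x0, dtype=float)
    best_f = fun(best_x)
    nfev = 1
    try:
        for _ in range(n_iter):
            cand = best_x + step * np.random.randn(*best_x.shape)
            f = fun(cand)
            nfev += 1
            if f < best_f:
                best_x, best_f = cand, f
    except KeyboardInterrupt:
        return OptimizeResult(x=best_x, fun=best_f, nfev=nfev, success=False,
                              message="Caught KeyboardInterrupt, returning last good value.")
    return OptimizeResult(x=best_x, fun=best_f, nfev=nfev, success=True, message="OK")

res = interruptible_minimize(lambda x: float(np.sum(x ** 2)), [1.0, 2.0])
print(res.x, res.fun, res.nfev)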
Example #2
Source File: optim.py    From GPflowOpt with Apache License 2.0
def optimize(self, objectivefx):
        """
        The StagedOptimizer overrides the default behaviour of optimize(): it passes the best point of the previous
        stage to the next stage. If the optimization is interrupted or fails, this process stops and the
        OptimizeResult is returned.
        """

        results = []
        for current, following in zip(self.optimizers[:-1], self.optimizers[1:]):
            result = current.optimize(objectivefx)
            results.append(result)
            if not result.success:
                result.message += " StagedOptimizer interrupted after {0}.".format(current.__class__.__name__)
                break
            following.set_initial(self._best_x(results)[0])

        if result.success:
            result = self.optimizers[-1].optimize(objectivefx)
            results.append(result)

        result.nfev = sum(r.nfev for r in results)
        result.nstages = len(results)
        if any(r.success for r in results):
            result.x, result.fun = self._best_x(results)
        return result 
Example #3
Source File: bo.py    From GPflowOpt with Apache License 2.0
def optimize(self, objectivefx, n_iter=20):
        """
        Run Bayesian optimization for a number of iterations.
        
        Before the loop is initiated, all points retrieved by :meth:`~.optim.Optimizer.get_initial` are evaluated
        on the objective and black-box constraints. These points are then added to the acquisition function
        by calling :meth:`~.acquisition.Acquisition.set_data` (and hence passed to the underlying models).

        In each iteration, a new data point is selected for evaluation by optimizing an acquisition function;
        the new observation then updates the models.

        :param objectivefx: (list of) expensive black-box objective and constraint functions. For evaluation, the
            responses of all the expensive functions are aggregated column-wise.
            Unlike the typical :class:`~.optim.Optimizer` interface, these functions should not return gradients.
        :param n_iter: number of iterations to run
        :return: OptimizeResult object
        """
        fxs = np.atleast_1d(objectivefx)
        return super(BayesianOptimizer, self).optimize(lambda x: self._evaluate_objectives(x, fxs), n_iter=n_iter) 
Example #4
Source File: utils.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def load(filename, **kwargs):
    """
    Reconstruct a skopt optimization result from a file
    persisted with skopt.dump.

    .. note::
        Notice that the loaded optimization result can be missing
        the objective function (`.specs['args']['func']`) if `skopt.dump`
        was called with `store_objective=False`.

    Parameters
    ----------
    filename : string or `pathlib.Path`
        The path of the file from which to load the optimization result.

    **kwargs : other keyword arguments
        All other keyword arguments will be passed to `joblib.load`.

    Returns
    -------
    res : `OptimizeResult`, scipy object
        Reconstructed OptimizeResult instance.
    """
    return load_(filename, **kwargs) 
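A short usage sketch for the dump/load pair; the toy objective and file name are arbitrary choices for illustration:

from skopt import gp_minimize, dump, load

# Minimize a toy objective, persist the result, then reload it.
res = gp_minimize(lambda x: (x[0] - 0.3) ** 2, [(-1.0, 1.0)], n_calls=10, random_state=0)
dump(res, "result.gz")          # compression is inferred from the extension
restored = load("result.gz")
assert restored.fun == res.fun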
Example #5
Source File: api.py    From dl2 with MIT License
def basinhopping(constraint_solve, constraint_check, variables, bounds, args):
    x0, shapes, shapes_flat = vars_to_x(variables)
    
    def loss_fn(x):
        x_to_vars(x, variables, shapes_flat, shapes)
        return constraint_solve.to_diffsat(cache=True).loss(args)

    def local_optimization_step(fun, x0, *losargs, **loskwargs):
        loss_before = loss_fn(x0)
        inner_opt(constraint_solve, constraint_check, variables, bounds, args)
        r = spo.OptimizeResult()
        r.x, _, _ = vars_to_x(variables)
        loss_after = constraint_solve.to_diffsat(cache=True).loss(args)
        r.success = not (loss_before == loss_after and not constraint_check.to_diffsat(cache=True).satisfy(args))
        r.fun = loss_after
        return r

    def check_basinhopping(x, f, accept):
        if abs(f) <= 10 * args.eps_check:
            x_, _, _ = vars_to_x(variables)
            x_to_vars(x, variables, shapes_flat, shapes)
            if constraint_check.to_diffsat(cache=True).satisfy(args):
                return True
            else:
                x_to_vars(x_, variables, shapes_flat, shapes)
        return False
    
    minimizer_kwargs = {}
    minimizer_kwargs['method'] = local_optimization_step

    satisfied = constraint_check.to_diffsat(cache=True).satisfy(args)
    if satisfied:
        return True
    spo.basinhopping(loss_fn, x0, niter=1000, minimizer_kwargs=minimizer_kwargs, callback=check_basinhopping,
                     T=args.basinhopping_T, stepsize=args.basinhopping_stepsize)
    return constraint_check.to_diffsat(cache=True).satisfy(args) 
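Example #5 works because scipy.optimize.minimize accepts a callable as method (and basinhopping forwards minimizer_kwargs to minimize), provided the callable returns an OptimizeResult. A stripped-down sketch of that hook, with a deliberately trivial "local minimizer":

import numpy as np
import scipy.optimize as spo

def identity_step(fun, x0, args=(), **options):
    # A do-nothing local minimizer: report the current point as the result.
    return spo.OptimizeResult(x=np.asarray(x0), fun=fun(x0), success=True, nfev=1)

res = spo.basinhopping(lambda x: np.sum(x ** 2), x0=np.ones(3), niter=5,
                       minimizer_kwargs={"method": identity_step})
print(res.x, res.fun)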
Example #6
Source File: _basinhopping.py    From Splunking-Crime with GNU Affero General Public License v3.0
def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False):
        self.x = np.copy(x0)
        self.minimizer = minimizer
        self.step_taking = step_taking
        self.accept_tests = accept_tests
        self.disp = disp

        self.nstep = 0

        # initialize return object
        self.res = scipy.optimize.OptimizeResult()
        self.res.minimization_failures = 0

        # do initial minimization
        minres = minimizer(self.x)
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")
        self.x = np.copy(minres.x)
        self.energy = minres.fun
        if self.disp:
            print("basinhopping step %d: f %g" % (self.nstep, self.energy))

        # initialize storage class
        self.storage = Storage(minres)

        if hasattr(minres, "nfev"):
            self.res.nfev = minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev = minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev = minres.nhev 
Example #7
Source File: test_engine.py    From hyperparameter_hunter with MIT License
def test_returns_result_object():
    # TODO: Refactor - Use PyTest
    base_estimator = ExtraTreesRegressor(random_state=2)
    opt = Optimizer([(-2.0, 2.0)], base_estimator, n_initial_points=1, acq_optimizer="sampling")
    result = opt.tell([1.5], 2.0)

    assert isinstance(result, OptimizeResult)
    assert len(result.x_iters) == len(result.func_vals)
    assert np.min(result.func_vals) == result.fun 
Example #8
Source File: helpers.py    From mfea-ii with MIT License
def get_optimization_results(t, population, factorial_cost, scalar_fitness, skill_factor, message):
  K = len(set(skill_factor))
  N = len(population) // 2
  results = []
  for k in range(K):
    result         = OptimizeResult()
    x, fun         = get_best_individual(population, factorial_cost, scalar_fitness, skill_factor, k)
    result.x       = x
    result.fun     = fun
    result.message = message
    result.nit     = t
    result.nfev    = (t + 1) * N
    results.append(result)
  return results 
Example #9
Source File: test_common.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def check_result_callable(res):
    """
    Check that the result instance is set right at every callable call.
    """
    assert(isinstance(res, OptimizeResult))
    assert_equal(len(res.x_iters), len(res.func_vals))
    assert_equal(np.min(res.func_vals), res.fun) 
Example #10
Source File: test_optimizer.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_returns_result_object():
    base_estimator = ExtraTreesRegressor(random_state=2)
    opt = Optimizer([(-2.0, 2.0)], base_estimator, n_initial_points=1,
                    acq_optimizer="sampling")
    result = opt.tell([1.5], 2.)

    assert isinstance(result, OptimizeResult)
    assert_equal(len(result.x_iters), len(result.func_vals))
    assert_equal(np.min(result.func_vals), result.fun) 
Example #11
Source File: utils.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def eval_callbacks(callbacks, result):
    """Evaluate list of callbacks on result.

    The return values of the `callbacks` are ORed together to give the
    overall decision on whether or not the optimization procedure should
    continue.

    Parameters
    ----------
    callbacks : list of callables
        Callbacks to evaluate.

    result : `OptimizeResult`, scipy object
        Optimization result object passed to each callback.

    Returns
    -------
    decision : bool
        Decision of the callbacks whether or not to keep optimizing
    """
    stop = False
    if callbacks:
        for c in callbacks:
            decision = c(result)
            if decision is not None:
                stop = stop or decision

    return stop 
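A small usage sketch for eval_callbacks as defined above (it is also available as skopt.utils.eval_callbacks); the threshold callback is a made-up example:

from scipy.optimize import OptimizeResult

def stop_below(threshold):
    # Ask the optimization loop to stop once the best value drops below threshold.
    def callback(result):
        return result.fun < threshold
    return callback

res = OptimizeResult(fun=0.05)
print(eval_callbacks([stop_below(0.1)], res))   # True -> stop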
Example #12
Source File: science_utils.py    From oopt-gnpy with BSD 3-Clause "New" or "Revised" License
def _int_spontaneous_raman(self, z_array, raman_matrix, alphap_fiber, freq_array,
                               cr_raman_matrix, freq_diff, ase_bc, bn_array, temperature):
        spontaneous_raman_scattering = OptimizeResult()

        simulation = Simulation.get_simulation()
        sim_params = simulation.sim_params

        dx = sim_params.raman_params.space_resolution
        h = ph.value('Planck constant')
        kb = ph.value('Boltzmann constant')

        power_ase = np.nan * np.ones(raman_matrix.shape)
        int_pump = cumtrapz(raman_matrix, z_array, dx=dx, axis=1, initial=0)

        for f_ind, f_ase in enumerate(freq_array):
            cr_raman = cr_raman_matrix[f_ind, :]
            vibrational_loss = f_ase / freq_array[:f_ind]
            eta = 1 / (np.exp((h * freq_diff[f_ind, f_ind + 1:]) / (kb * temperature)) - 1)

            int_fiber_loss = -alphap_fiber[f_ind] * z_array
            int_raman_loss = np.sum((cr_raman[:f_ind] * vibrational_loss * int_pump[:f_ind, :].transpose()).transpose(),
                                    axis=0)
            int_raman_gain = np.sum((cr_raman[f_ind + 1:] * int_pump[f_ind + 1:, :].transpose()).transpose(), axis=0)

            int_gain_loss = int_fiber_loss + int_raman_gain + int_raman_loss

            new_ase = np.sum((cr_raman[f_ind + 1:] * (1 + eta) * raman_matrix[f_ind + 1:, :].transpose()).transpose()
                             * h * f_ase * bn_array[f_ind], axis=0)

            bc_evolution = ase_bc[f_ind] * np.exp(int_gain_loss)
            ase_evolution = np.exp(int_gain_loss) * cumtrapz(new_ase *
                                                             np.exp(-int_gain_loss), z_array, dx=dx, initial=0)

            power_ase[f_ind, :] = bc_evolution + ase_evolution

        spontaneous_raman_scattering.x = 2 * power_ase
        return spontaneous_raman_scattering 
Example #13
Source File: nonnegative.py    From civisml-extensions with BSD 3-Clause "New" or "Revised" License
def fit(self, X, y, sample_weight=None):
        """Fit non-negative linear model.

        Parameters
        ----------
        X : numpy array or sparse matrix of shape [n_samples, n_features]
            Training data
        y : numpy array of shape [n_samples,]
            Target values
        sample_weight : numpy array of shape [n_samples]
            Individual weights for each sample

        Returns
        -------
        self : returns an instance of self.

        """
        X, y = check_X_y(X, y, y_numeric=True, multi_output=False)

        if sample_weight is not None and np.atleast_1d(sample_weight).ndim > 1:
            raise ValueError("Sample weights must be 1D array or scalar")

        X, y, X_offset, y_offset, X_scale = self._preprocess_data(
            X, y, fit_intercept=self.fit_intercept, normalize=self.normalize,
            copy=self.copy_X, sample_weight=sample_weight)

        if sample_weight is not None:
            # Sample weight can be implemented via a simple rescaling.
            X, y = _rescale_data(X, y, sample_weight)

        self.coef_, result = nnls(X, y.squeeze())

        if np.all(self.coef_ == 0):
            raise ConvergenceWarning("All coefficients estimated to be zero in"
                                     " the non-negative least squares fit.")

        self._set_intercept(X_offset, y_offset, X_scale)
        self.opt_result_ = OptimizeResult(success=True, status=0, x=self.coef_,
                                          fun=result)
        return self 
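The fit method above pairs scipy.optimize.nnls, which returns the coefficient vector and the residual norm, with a hand-built OptimizeResult. The same combination in isolation, on toy data:

import numpy as np
from scipy.optimize import OptimizeResult, nnls

A = np.array([[1.0, 0.0], [1.0, 1.0], [0.0, 2.0]])
b = np.array([1.0, 2.0, 3.0])

coef, resid = nnls(A, b)   # non-negative least squares: min ||A @ x - b|| subject to x >= 0
opt_result = OptimizeResult(success=True, status=0, x=coef, fun=resid)
print(opt_result.x, opt_result.fun)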
Example #14
Source File: max_likelihood.py    From copulae with MIT License
def fit(self, method):
        """
        Fits the copula with the Maximum Likelihood Estimator

        Parameters
        ----------
        method: {'ml', 'mpl'}
            This will determine the variance estimate

        Returns
        -------
        ndarray
            Estimated parameters for the copula

        """

        res: OptimizeResult = minimize(self.copula_log_lik, self.initial_params, **self.optim_options)

        if not res['success']:
            if self.verbose >= 1:
                warn_no_convergence()
            return

        estimate = res['x']
        self.copula.params = estimate

        method = f"Maximum {'pseudo-' if method == 'mpl' else ''}likelihood"
        self.copula.fit_smry = FitSummary(estimate, method, res['fun'], len(self.data), self.optim_options, res)

        return estimate 
Example #15
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_lower_f_accepted(self):
        new_minres = OptimizeResult()
        new_minres.x = self.x0 + 1
        new_minres.fun = self.f0 - 1

        ret = self.storage.update(new_minres)
        minres = self.storage.get_lowest()
        assert_(self.x0 != minres.x)
        assert_(self.f0 != minres.fun)
        assert_(ret) 
Example #16
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def setup_method(self):
        self.x0 = np.array(1)
        self.f0 = 0

        minres = OptimizeResult()
        minres.x = self.x0
        minres.fun = self.f0

        self.storage = Storage(minres) 
Example #17
Source File: test_optimize.py    From GraphicDesignPatternByPython with MIT License
def test_custom(self):
        # This function comes from the documentation example.
        def custmin(fun, x0, args=(), maxfev=None, stepsize=0.1,
                maxiter=100, callback=None, **options):
            bestx = x0
            besty = fun(x0)
            funcalls = 1
            niter = 0
            improved = True
            stop = False

            while improved and not stop and niter < maxiter:
                improved = False
                niter += 1
                for dim in range(np.size(x0)):
                    for s in [bestx[dim] - stepsize, bestx[dim] + stepsize]:
                        testx = np.copy(bestx)
                        testx[dim] = s
                        testy = fun(testx, *args)
                        funcalls += 1
                        if testy < besty:
                            besty = testy
                            bestx = testx
                            improved = True
                    if callback is not None:
                        callback(bestx)
                    if maxfev is not None and funcalls >= maxfev:
                        stop = True
                        break

            return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter,
                                           nfev=funcalls, success=(niter > 1))

        x0 = [1.35, 0.9, 0.8, 1.1, 1.2]
        res = optimize.minimize(optimize.rosen, x0, method=custmin,
                                options=dict(stepsize=0.05))
        assert_allclose(res.x, 1.0, rtol=1e-4, atol=1e-4) 
Example #18
Source File: test_gp.py    From flare with MIT License
def test_train_failure(self, all_gps, params, mocker):
        """
        Tests the case when 'L-BFGS-B' fails due to a linear algebra error and
        training falls back to BFGS
        """
        # Sets up mocker for scipy minimize. Note that we are mocking
        # 'flare.gp.minimize' because of how the imports are done in gp
        x_result = np.random.rand()
        fun_result = np.random.rand()
        jac_result = np.random.rand()
        train_result = OptimizeResult(x=x_result, fun=fun_result,
                                      jac=jac_result)

        side_effects = [np.linalg.LinAlgError(), train_result]
        mocker.patch('flare.gp.minimize', side_effect=side_effects)
        two_body_gp = all_gps[True]
        two_body_gp.set_L_alpha = mocker.Mock()

        # Executes training
        two_body_gp.algo = 'L-BFGS-B'
        two_body_gp.train()

        # Assert that everything happened as expected
        assert(flare.gp.minimize.call_count == 2)

        calls = flare.gp.minimize.call_args_list
        args, kwargs = calls[0]
        assert(kwargs['method'] == 'L-BFGS-B')

        args, kwargs = calls[1]
        assert(kwargs['method'] == 'BFGS')

        two_body_gp.set_L_alpha.assert_called_once()
        assert(two_body_gp.hyps == x_result)
        assert(two_body_gp.likelihood == -1 * fun_result)
        assert(two_body_gp.likelihood_gradient == -1 * jac_result) 
Example #19
Source File: optim.py    From GPflowOpt with Apache License 2.0
def _optimize(self, objective):
        points = self._get_eval_points()
        evaluations = objective(points)
        idx_best = np.argmin(evaluations, axis=0)

        return OptimizeResult(x=points[idx_best, :],
                              success=True,
                              fun=evaluations[idx_best, :],
                              nfev=points.shape[0],
                              message="OK") 
Example #20
Source File: minuit.py    From python-mle with MIT License
def fmin_minuit(func, x0, names=None, verbose=False):
    inits = dict()

    if verbose:
        print_level = 2
    else:
        print_level = 0

    if names is None:
        names = map(lambda x: 'param' + str(x), range(len(x0)))
    else:
        assert(len(x0) == len(names))

    for n, x in zip(names, x0):
        inits[n] = x
        # TODO use a method to set this correctly
        inits['error_' + n] = 1

    m = Minuit(Min_Func(func, names), print_level=print_level, errordef=1, **inits)
    a, b = m.migrad()

    return OptimizeResult(
        x=m.values,
        fun=a['fval'],
        edm=a['edm'],
        nfev=a['nfcn'],
        is_valid=a['is_valid'],
        has_valid_parameters=a['has_valid_parameters'],
    ) 
Example #21
Source File: least_squares.py    From lambda-packs with MIT License
def call_minpack(fun, x0, jac, ftol, xtol, gtol, max_nfev, x_scale, diff_step):
    n = x0.size

    if diff_step is None:
        epsfcn = EPS
    else:
        epsfcn = diff_step**2

    # Compute MINPACK's `diag`, which is inverse of our `x_scale` and
    # ``x_scale='jac'`` corresponds to ``diag=None``.
    if isinstance(x_scale, string_types) and x_scale == 'jac':
        diag = None
    else:
        diag = 1 / x_scale

    full_output = True
    col_deriv = False
    factor = 100.0

    if jac is None:
        if max_nfev is None:
            # n squared to account for Jacobian evaluations.
            max_nfev = 100 * n * (n + 1)
        x, info, status = _minpack._lmdif(
            fun, x0, (), full_output, ftol, xtol, gtol,
            max_nfev, epsfcn, factor, diag)
    else:
        if max_nfev is None:
            max_nfev = 100 * n
        x, info, status = _minpack._lmder(
            fun, jac, x0, (), full_output, col_deriv,
            ftol, xtol, gtol, max_nfev, factor, diag)

    f = info['fvec']

    if callable(jac):
        J = jac(x)
    else:
        J = np.atleast_2d(approx_derivative(fun, x))

    cost = 0.5 * np.dot(f, f)
    g = J.T.dot(f)
    g_norm = norm(g, ord=np.inf)

    nfev = info['nfev']
    njev = info.get('njev', None)

    status = FROM_MINPACK_TO_COMMON[status]
    active_mask = np.zeros_like(x0, dtype=int)

    return OptimizeResult(
        x=x, cost=cost, fun=f, jac=J, grad=g, optimality=g_norm,
        active_mask=active_mask, nfev=nfev, njev=njev, status=status) 
Example #22
Source File: test_optimize.py    From GraphicDesignPatternByPython with MIT License
def test_minimize_scalar_custom(self):
        # This function comes from the documentation example.
        def custmin(fun, bracket, args=(), maxfev=None, stepsize=0.1,
                maxiter=100, callback=None, **options):
            bestx = (bracket[1] + bracket[0]) / 2.0
            besty = fun(bestx)
            funcalls = 1
            niter = 0
            improved = True
            stop = False

            while improved and not stop and niter < maxiter:
                improved = False
                niter += 1
                for testx in [bestx - stepsize, bestx + stepsize]:
                    testy = fun(testx, *args)
                    funcalls += 1
                    if testy < besty:
                        besty = testy
                        bestx = testx
                        improved = True
                if callback is not None:
                    callback(bestx)
                if maxfev is not None and funcalls >= maxfev:
                    stop = True
                    break

            return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter,
                                           nfev=funcalls, success=(niter > 1))

        res = optimize.minimize_scalar(self.fun, bracket=(0, 4), method=custmin,
                                       options=dict(stepsize=0.05))
        assert_allclose(res.x, self.solution, atol=1e-6) 
Example #23
Source File: utils.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def create_result(Xi, yi, space=None, rng=None, specs=None, models=None):
    """
    Initialize an `OptimizeResult` object.

    Parameters
    ----------
    Xi : list of lists, shape (n_iters, n_features)
        Location of the minimum at every iteration.

    yi : array-like, shape (n_iters,)
        Minimum value obtained at every iteration.

    space : Space instance, optional
        Search space.

    rng : RandomState instance, optional
        State of the random state.

    specs : dict, optional
        Call specifications.

    models : list, optional
        List of fit surrogate models.

    Returns
    -------
    res : `OptimizeResult`, scipy object
        OptimizeResult instance with the required information.
    """
    res = OptimizeResult()
    yi = np.asarray(yi)
    if np.ndim(yi) == 2:
        res.log_time = np.ravel(yi[:, 1])
        yi = np.ravel(yi[:, 0])
    best = np.argmin(yi)
    res.x = Xi[best]
    res.fun = yi[best]
    res.func_vals = yi
    res.x_iters = Xi
    res.models = models
    res.space = space
    res.random_state = rng
    res.specs = specs
    return res 
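A quick usage sketch for create_result as defined above, on toy data:

Xi = [[0.1], [0.4], [0.9]]
yi = [0.5, 0.2, 0.8]
res = create_result(Xi, yi)
print(res.x, res.fun)   # [0.4] 0.2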
Example #24
Source File: utils.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def dump(res, filename, store_objective=True, **kwargs):
    """
    Store an skopt optimization result into a file.

    Parameters
    ----------
    res : `OptimizeResult`, scipy object
        Optimization result object to be stored.

    filename : string or `pathlib.Path`
        The path of the file in which it is to be stored. The compression
        method corresponding to one of the supported filename extensions ('.z',
        '.gz', '.bz2', '.xz' or '.lzma') will be used automatically.

    store_objective : boolean, default=True
        Whether the objective function should be stored. Set `store_objective`
        to `False` if your objective function (`.specs['args']['func']`) is
        unserializable (i.e. if an exception is raised when trying to serialize
        the optimization result).

        Notice that if `store_objective` is set to `False`, a deep copy of the
        optimization result is created, potentially leading to performance
        problems if `res` is very large. If the objective function is not
        critical, one can delete it before calling `skopt.dump()` and thus
        avoid deep copying of `res`.

    **kwargs : other keyword arguments
        All other keyword arguments will be passed to `joblib.dump`.
    """
    if store_objective:
        dump_(res, filename, **kwargs)

    elif 'func' in res.specs['args']:
        # If the user does not want to store the objective and it is indeed
        # present in the provided object, then create a deep copy of it and
        # remove the objective function before dumping it with joblib.dump.
        res_without_func = deepcopy(res)
        del res_without_func.specs['args']['func']
        dump_(res_without_func, filename, **kwargs)

    else:
        # If the user does not want to store the objective and it is already
        # missing in the provided object, dump it without copying.
        dump_(res, filename, **kwargs) 
Example #25
Source File: utils.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def expected_minimum(res, n_random_starts=20, random_state=None):
    """Compute the minimum over the predictions of the last surrogate model.
    Uses `expected_minimum_random_sampling` with `n_random_starts` = 100000,
    when the space contains any categorical values.

    .. note::
        The returned minimum may not necessarily be an accurate
        prediction of the minimum of the true objective function.

    Parameters
    ----------
    res : `OptimizeResult`, scipy object
        The optimization result returned by a `skopt` minimizer.

    n_random_starts : int, default=20
        The number of random starts for the minimization of the surrogate
        model.

    random_state : int, RandomState instance, or None (default)
        Set random state to something other than None for reproducible
        results.

    Returns
    -------
    x : list
        location of the minimum.
    fun : float
        the surrogate function value at the minimum.
    """
    if res.space.is_partly_categorical:
        return expected_minimum_random_sampling(res, n_random_starts=100000,
                                                random_state=random_state)

    def func(x):
        reg = res.models[-1]
        x = res.space.transform(x.reshape(1, -1))
        return reg.predict(x.reshape(1, -1))[0]

    xs = [res.x]
    if n_random_starts > 0:
        xs.extend(res.space.rvs(n_random_starts, random_state=random_state))

    best_x = None
    best_fun = np.inf

    for x0 in xs:
        r = sp_minimize(func, x0=x0, bounds=res.space.bounds)

        if r.fun < best_fun:
            best_x = r.x
            best_fun = r.fun

    return [v for v in best_x], best_fun 
Example #26
Source File: bo.py    From GPflowOpt with Apache License 2.0
def _create_bo_result(self, success, message):
        """
        Analyzes all data evaluated during the optimization and returns an `OptimizeResult`. Constraints are taken
        into account. The contents of x, fun, and constraints depend on the detected scenario:

        - single-objective: the best optimum of the feasible samples (if none, the optimum of the infeasible samples)
        - multi-objective: the Pareto set of the feasible samples
        - only constraints: all the feasible samples (can be empty)

        In all cases, if no sample satisfies all the constraints, a message is set and success=False.

        Note that the feasibility check is based on the model predictions, while the constraints field contains
        actual data values.

        :param success: Optimization successful? (True/False)
        :param message: return message
        :return: OptimizeResult object
        """
        X, Y = self.acquisition.data

        # Filter on constraints
        valid = self.acquisition.feasible_data_index()

        # Extract the samples that satisfy all constraints
        if np.any(valid):
            X = X[valid, :]
            Y = Y[valid, :]
        else:
            success = False
            message = "No evaluations satisfied all the constraints"

        # Split between objectives and constraints
        Yo = Y[:, self.acquisition.objective_indices()]
        Yc = Y[:, self.acquisition.constraint_indices()]

        # Differentiate between different scenarios
        if Yo.shape[1] == 1:  # Single-objective: minimum
            idx = np.argmin(Yo)
        elif Yo.shape[1] > 1:  # Multi-objective: Pareto set
            _, dom = non_dominated_sort(Yo)
            idx = dom == 0
        else:  # Constraint satisfaction problem: all samples satisfying the constraints
            idx = np.arange(Yc.shape[0])

        return OptimizeResult(x=X[idx, :],
                              success=success,
                              fun=Yo[idx, :],
                              constraints=Yc[idx, :],
                              message=message) 