Python scipy.optimize.fmin_tnc() Examples

The following are 19 code examples of scipy.optimize.fmin_tnc(), collected from open-source projects. The original project and source file are noted above each example. You may also want to check out the other available functions and classes of the scipy.optimize module.
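Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the call pattern they all share. fmin_tnc() is the legacy functional interface to SciPy's bound-constrained truncated Newton (TNC) optimizer: when no separate fprime is given and approx_grad is not set, the objective callable must return both the function value and its gradient, and fmin_tnc() returns a tuple (x, nfeval, rc) of the solution, the number of function evaluations, and an integer return code.

import numpy as np
from scipy import optimize

def f_and_grad(x):
    # f(x) = (x0 - 3)^2 + (x1 + 1)^2, together with its analytic gradient
    f = (x[0] - 3.0) ** 2 + (x[1] + 1.0) ** 2
    g = np.array([2.0 * (x[0] - 3.0), 2.0 * (x[1] + 1.0)])
    return f, g

x, nfeval, rc = optimize.fmin_tnc(f_and_grad, [0.0, 0.0],
                                  bounds=[(0, 2), (-2, 2)],
                                  messages=0)  # messages=0 suppresses output
# The unconstrained minimum is (3, -1); with the bounds above, x ends up near (2, -1).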
Example #1
Source File: optimization.py    From paramz with BSD 3-Clause "New" or "Revised" License
def get_optimizer(f_min):
    """Return the optimizer class whose registry key contains `f_min`
    as a (case-insensitive) substring."""

    optimizers = {'fmin_tnc': opt_tnc,
                  'simplex': opt_simplex,
                  'lbfgsb': opt_lbfgsb,
                  'org-bfgs': opt_bfgs,
                  'scg': opt_SCG,
                  'adadelta': Opt_Adadelta,
                  'rprop': RProp,
                  'adam': Adam}

    #if rasm_available:
    #    optimizers['rasmussen'] = opt_rasm

    for opt_name in sorted(optimizers.keys()):
        if opt_name.lower().find(f_min.lower()) != -1:
            return optimizers[opt_name]

    raise KeyError('No optimizer was found matching the name: %s' % f_min)
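A brief (hypothetical) usage note: the lookup is a case-insensitive substring match against the registry keys, so any fragment of a key selects the corresponding class, e.g.:

Optimizer = get_optimizer('tnc')   # matches the 'fmin_tnc' key
assert Optimizer is opt_tnc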
Example #2
Source File: test_tnc.py    From GraphicDesignPatternByPython with MIT License
def test_tnc45(self):
        fg, x, bounds = self.fg45, [2] * 5, [(0, 1), (0, 2), (0, 3),
                                             (0, 4), (0, 5)]
        xopt = [1, 2, 3, 4, 5]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f45(x), self.f45(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #3
Source File: optimization.py    From paramz with BSD 3-Clause "New" or "Revised" License
def opt(self, x_init, f_fp=None, f=None, fp=None):
        """Run the TNC optimizer."""
        tnc_rcstrings = ['Local minimum', 'Converged', 'XConverged',
                         'Maximum number of f evaluations reached',
                         'Line search failed', 'Function is constant']

        assert f_fp is not None, "TNC requires f_fp"

        opt_dict = {}
        if self.xtol is not None:
            opt_dict['xtol'] = self.xtol
        if self.ftol is not None:
            opt_dict['ftol'] = self.ftol
        if self.gtol is not None:
            opt_dict['pgtol'] = self.gtol

        opt_result = optimize.fmin_tnc(f_fp, x_init, messages=self.messages,
                                       maxfun=self.max_f_eval, **opt_dict)
        self.x_opt = opt_result[0]
        self.f_opt = f_fp(self.x_opt)[0]
        self.funct_eval = opt_result[1]
        self.status = tnc_rcstrings[opt_result[2]]
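The wrapper above passes f_fp directly as the objective, so f_fp must return the objective value and its gradient as a pair, which is what fmin_tnc() expects when fprime is None and approx_grad is not set. A minimal sketch of a compatible callable (illustrative only, not part of paramz):

import numpy as np

def f_fp(x):
    # Returns (objective, gradient); fmin_tnc unpacks both from a single call.
    f = float(np.sum(x ** 2))
    grad = 2.0 * x
    return f, grad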
Example #4
Source File: test_tnc.py    From GraphicDesignPatternByPython with MIT License
def test_tnc38(self):
        fg, x, bounds = self.fg38, np.array([-3, -1, -3, -1]), [(-10, 10)]*4
        xopt = [1]*4

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f38(x), self.f38(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #5
Source File: test_tnc.py    From GraphicDesignPatternByPython with MIT License
def test_tnc5(self):
        fg, x, bounds = self.fg5, [0, 0], [(-1.5, 4), (-3, 3)]
        xopt = [-0.54719755119659763, -1.5471975511965976]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f5(x), self.f5(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #6
Source File: test_tnc.py    From GraphicDesignPatternByPython with MIT License
def test_tnc3(self):
        fg, x, bounds = self.fg3, [10, 1], ([-np.inf, None], [0.0, None])
        xopt = [0, 0]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f3(x), self.f3(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #7
Source File: test_tnc.py    From GraphicDesignPatternByPython with MIT License
def test_tnc2(self):
        fg, x, bounds = self.fg1, [-2, 1], ([-np.inf, None], [1.5, None])
        xopt = [-1.2210262419616387, 1.5]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #8
Source File: test_tnc.py    From GraphicDesignPatternByPython with MIT License
def test_tnc1c(self):
        x, bounds = [-2, 1], ([-np.inf, None], [-1.5, None])
        xopt = [1, 1]

        x, nf, rc = optimize.fmin_tnc(self.f1, x, fprime=self.g1,
                                      bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #9
Source File: test_tnc.py    From GraphicDesignPatternByPython with MIT License
def test_tnc1b(self):
        x, bounds = [-2, 1], ([-np.inf, None], [-1.5, None])
        xopt = [1, 1]

        x, nf, rc = optimize.fmin_tnc(self.f1, x, approx_grad=True,
                                      bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f1(x), self.f1(xopt), atol=1e-4,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #10
Source File: test_tnc.py    From GraphicDesignPatternByPython with MIT License
def test_tnc1(self):
        fg, x, bounds = self.fg1, [-2, 1], ([-np.inf, None], [-1.5, None])
        xopt = [1, 1]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds, args=(100.0, ),
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #11
Source File: test_tnc.py    From Computable with MIT License
def test_tnc45(self):
        fg, x, bounds = self.fg45, [2] * 5, [(0, 1), (0, 2), (0, 3),
                                             (0, 4), (0, 5)]
        xopt = [1, 2, 3, 4, 5]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f45(x), self.f45(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #12
Source File: test_tnc.py    From Computable with MIT License
def test_tnc38(self):
        fg, x, bounds = self.fg38, np.array([-3, -1, -3, -1]), [(-10, 10)]*4
        xopt = [1]*4

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f38(x), self.f38(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #13
Source File: test_tnc.py    From Computable with MIT License
def test_tnc5(self):
        fg, x, bounds = self.fg5, [0, 0], [(-1.5, 4), (-3, 3)]
        xopt = [-0.54719755119659763, -1.5471975511965976]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f5(x), self.f5(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #14
Source File: test_tnc.py    From Computable with MIT License
def test_tnc3(self):
        fg, x, bounds = self.fg3, [10, 1], ([-np.inf, None], [0.0, None])
        xopt = [0, 0]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f3(x), self.f3(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #15
Source File: test_tnc.py    From Computable with MIT License
def test_tnc2(self):
        fg, x, bounds = self.fg1, [-2, 1], ([-np.inf, None], [1.5, None])
        xopt = [-1.2210262419616387, 1.5]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #16
Source File: test_tnc.py    From Computable with MIT License
def test_tnc1c(self):
        x, bounds = [-2, 1], ([-np.inf, None], [-1.5, None])
        xopt = [1, 1]

        x, nf, rc = optimize.fmin_tnc(self.f1, x, fprime=self.g1,
                                      bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #17
Source File: test_tnc.py    From Computable with MIT License
def test_tnc1b(self):
        x, bounds = [-2, 1], ([-np.inf, None], [-1.5, None])
        xopt = [1, 1]

        x, nf, rc = optimize.fmin_tnc(self.f1, x, approx_grad=True,
                                      bounds=bounds,
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f1(x), self.f1(xopt), atol=1e-4,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #18
Source File: test_tnc.py    From Computable with MIT License
def test_tnc1(self):
        fg, x, bounds = self.fg1, [-2, 1], ([-np.inf, None], [-1.5, None])
        xopt = [1, 1]

        x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds, args=(100.0, ),
                                      messages=optimize.tnc.MSG_NONE,
                                      maxfun=200)

        assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8,
                        err_msg="TNC failed with status: " +
                                optimize.tnc.RCSTRINGS[rc]) 
Example #19
Source File: param_search.py    From catch with MIT License
def _optimize_loss(probe_counts, loss_fn, bounds, x0,
                   initial_eps=10.0, step_size=0.001,
                   interp_fn_type='standard'):
    """Optimize loss function with barrier.

    This uses scipy's optimize.fmin_tnc to minimize the loss function
    in which the barrier is weighted by eps. It repeatedly minimizes
    the loss while decreasing eps so that, by the last iteration, the
    weight on the barrier is very small. On each iteration, it starts
    the initial guess/position at the solution to the previous iteration.

    Args:
        probe_counts: dict giving number of probes required for each
            dataset and choice of parameters
        loss_fn: the loss function provided by _make_loss_fn
        bounds: bounds on the parameter values provided by _make_param_bounds_*
        x0: the initial guess of parameter values (i.e., starting position)
        initial_eps: weight of the barrier on the first iteration
        step_size: epsilon value provided to optimize.fmin_tnc
        interp_fn_type: 'standard' (only perform interpolation on mismatches
            and cover_extension parameters) or 'nd' (use scipy's interpolate
            package to interpolate over n-dimensions)

    Returns:
        list of length (number of datasets)*(number of parameters) where
        x_i is the (i % N)'th parameter of the (i/N)'th dataset,
        for i=0,1,2,... where N=(number of datasets)
    """
    eps = initial_eps
    while eps >= 0.01:
        x0_probe_count = ic._make_total_probe_count_across_datasets_fn(
            probe_counts, interp_fn_type=interp_fn_type)(x0)
        logger.info(("Starting an iteration with eps=%f, with x0 yielding %f "
                "probes"), eps, x0_probe_count)

        sol, nfeval, rc = optimize.fmin_tnc(loss_fn, x0, bounds=bounds,
                                            args=(eps,),
                                            approx_grad=True,
                                            epsilon=step_size, disp=1, maxfun=2500)

        if rc in [0, 1, 2]:
            # rc == 0 indicates reaching the local minimum, and rc == 1 or
            # rc == 2 indicates the function value converged
            logger.info("  Iteration was successful")
        else:
            logger.info("  Iteration failed to converge!")

        x0 = sol
        eps = 0.1 * eps

    return sol
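Stripped of the probe-count bookkeeping, the pattern in _optimize_loss reduces to the following self-contained sketch: a barrier term weighted by eps is shrunk by a factor of 10 on each outer iteration, and every fmin_tnc() call is warm-started from the previous solution. The quadratic objective and log-barrier here are illustrative stand-ins, not catch's actual loss function.

import numpy as np
from scipy import optimize

def loss(x, eps):
    # Illustrative objective plus a log-barrier that keeps x strictly above 0.1.
    objective = np.sum((x - 1.0) ** 2)
    barrier = -np.sum(np.log(x - 0.1))
    return objective + eps * barrier

x0 = np.array([0.5, 0.5])
bounds = [(0.11, 10.0)] * 2
eps = 10.0
while eps >= 0.01:
    sol, nfeval, rc = optimize.fmin_tnc(loss, x0, bounds=bounds, args=(eps,),
                                        approx_grad=True, epsilon=0.001,
                                        messages=0, maxfun=2500)
    x0 = sol        # warm-start the next, less heavily barriered problem
    eps *= 0.1
# As eps shrinks, sol approaches the unbarriered minimizer [1, 1].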