Python hyperopt.tpe.suggest() Examples

The following are 28 code examples of hyperopt.tpe.suggest(). You can go to the original project or source file by following the links above each example, check out all available functions/classes of the module hyperopt.tpe, or try the search function.
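Before the project examples, here is a minimal, self-contained sketch of the typical pattern: tpe.suggest is passed as the algo argument to hyperopt.fmin, which minimizes an objective over a search space (the objective here is a toy function for illustration):

from hyperopt import fmin, tpe, hp, Trials

def objective(x):
    # fmin minimizes, so return the quantity to be made small
    return (x - 3) ** 2

trials = Trials()
best = fmin(fn=objective,
            space=hp.uniform('x', -10, 10),
            algo=tpe.suggest,
            max_evals=50,
            trials=trials)
print(best)  # e.g. {'x': 3.02...}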
Example #1
Source File: test_hyopt.py    From kopt with MIT License
def test_compilefn_train_test_split(tmpdir):
    db_name = "test"
    exp_name = "test2"
    fn = CompileFN(db_name, exp_name,
                   data_fn=data.data,
                   model_fn=model.build_model,
                   optim_metric="acc",
                   optim_metric_mode="max",
                   # eval
                   valid_split=.5,
                   stratified=False,
                   random_state=True,
                   save_dir="/tmp/")
    hyper_params = {
        "data": {},
        "shared": {"max_features": 100, "maxlen": 20},
        "model": {"filters": hp.choice("m_filters", (2, 5)),
                  "hidden_dims": 3,
                  },
        "fit": {"epochs": 1}
    }
    fn_test(fn, hyper_params, tmp_dir=str(tmpdir))
    trials = Trials()
    best = fmin(fn, hyper_params, trials=trials, algo=tpe.suggest, max_evals=2)
    assert isinstance(best, dict) 
Example #2
Source File: automl.py    From kddcup2019-automl with MIT License
def hyperopt_lightgbm(X: pd.DataFrame, y: pd.Series, params: Dict, config: Config):
    X_train, X_val, y_train, y_val = data_split(X, y, test_size=0.5)
    train_data = lgb.Dataset(X_train, label=y_train)
    valid_data = lgb.Dataset(X_val, label=y_val)

    space = {
        "max_depth": hp.choice("max_depth", np.arange(2, 10, 1, dtype=int)),
        # smaller than 2^(max_depth)
        "num_leaves": hp.choice("num_leaves", np.arange(4, 200, 4, dtype=int)),
        "feature_fraction": hp.quniform("feature_fraction", 0.2, 0.8, 0.1),
        # "bagging_fraction": hp.quniform("bagging_fraction", 0.2, 0.8, 0.1),
        # "bagging_freq": hp.choice("bagging_freq", np.linspace(0, 10, 2, dtype=int)),
        # "scale_pos_weight":hp.uniform('scale_pos_weight',1.0, 10.0),
        # "colsample_by_tree":hp.uniform("colsample_bytree",0.5,1.0),
        "min_child_weight": hp.quniform('min_child_weight', 2, 50, 2),
        "reg_alpha": hp.uniform("reg_alpha", 2.0, 8.0),
        "reg_lambda": hp.uniform("reg_lambda", 2.0, 8.0),
        "learning_rate": hp.quniform("learning_rate", 0.05, 0.4, 0.01),
        # "learning_rate": hp.loguniform("learning_rate", np.log(0.04), np.log(0.5)),
        #
        "min_data_in_leaf": hp.choice('min_data_in_leaf', np.arange(200, 2000, 100, dtype=int)),
        #"is_unbalance": hp.choice("is_unbalance", [True])
    }

    def objective(hyperparams):
        model = lgb.train({**params, **hyperparams}, train_data, 300,
                          valid_data, early_stopping_rounds=45, verbose_eval=0)

        score = model.best_score["valid_0"][params["metric"]]

        # fmin minimizes the loss, so negate the score (higher metric values are better)
        return {'loss': -score, 'status': STATUS_OK}

    trials = Trials()
    best = hyperopt.fmin(fn=objective, space=space, trials=trials,
                         algo=tpe.suggest, max_evals=150, verbose=1,
                         rstate=np.random.RandomState(1))

    hyperparams = space_eval(space, best)
    log(f"auc = {-trials.best_trial['result']['loss']:0.4f} {hyperparams}")
    return hyperparams 
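Note that for hp.choice parameters, fmin returns the index of the chosen option rather than the option itself, which is why the example above passes best through space_eval before logging. A toy illustration (values are made up):

from hyperopt import hp, space_eval

space = {"max_depth": hp.choice("max_depth", [2, 4, 8])}
best = {"max_depth": 2}          # index returned by fmin, not the value
print(space_eval(space, best))   # -> {'max_depth': 8}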
Example #3
Source File: automl.py    From Kaggler with MIT License
def optimize_hyperparam(self, X, y, test_size=.2, n_eval=100):
        X_trn, X_val, y_trn, y_val = train_test_split(X, y, test_size=test_size, shuffle=self.shuffle)

        train_data = lgb.Dataset(X_trn, label=y_trn)
        valid_data = lgb.Dataset(X_val, label=y_val)

        def objective(hyperparams):
            model = lgb.train({**self.params, **hyperparams}, train_data, self.n_est,
                              valid_data, early_stopping_rounds=self.n_stop, verbose_eval=0)

            score = model.best_score["valid_0"][self.metric] * self.loss_sign

            return {'loss': score, 'status': STATUS_OK, 'model': model}

        trials = Trials()
        best = hyperopt.fmin(fn=objective, space=self.space, trials=trials,
                             algo=tpe.suggest, max_evals=n_eval, verbose=1,
                             rstate=self.random_state)

        hyperparams = space_eval(self.space, best)
        return hyperparams, trials 
Example #4
Source File: bayesian_optimizer.py    From pykg2vec with MIT License
def optimize(self):
        """Function that performs bayesian optimization"""
        trials = Trials()

        self._best_result = fmin(fn=self._get_loss, space=self.search_space, trials=trials,
                                 algo=tpe.suggest, max_evals=self.max_evals)
        
        columns = list(self.search_space.keys())   
        results = pd.DataFrame(columns=['iteration'] + columns + ['loss'])
        
        for idx, trial in enumerate(trials.trials):
            row = [idx]
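            # trial['misc']['vals'] stores each parameter as a one-element list
            # (index values for hp.choice), hence the unwrapping before space_eval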
            translated_eval = space_eval(self.search_space, {k: v[0] for k, v in trial['misc']['vals'].items()})
            for k in columns:
                row.append(translated_eval[k])
            row.append(trial['result']['loss'])
            results.loc[idx] = row

        path = self.config_local.path_result / self.model_name
        path.mkdir(parents=True, exist_ok=True)
        results.to_csv(str(path / "trials.csv"), index=False)
        
        self._logger.info(results)
        self._logger.info('Found golden setting:')
        self._logger.info(space_eval(self.search_space, self._best_result)) 
Example #5
Source File: automl.py    From Kaggler with MIT License
def optimize_hyperparam(self, X, y, test_size=.2, n_eval=100):
        X_trn, X_val, y_trn, y_val = train_test_split(X, y, test_size=test_size, shuffle=self.shuffle)

        def objective(hyperparams):
            model = XGBModel(n_estimators=self.n_est, **self.params, **hyperparams)
            model.fit(X=X_trn, y=y_trn,
                      eval_set=[(X_val, y_val)],
                      eval_metric=self.metric,
                      early_stopping_rounds=self.n_stop,
                      verbose=False)
            score = model.evals_result()['validation_0'][self.metric][model.best_iteration] * self.loss_sign

            return {'loss': score, 'status': STATUS_OK, 'model': model}

        trials = Trials()
        best = hyperopt.fmin(fn=objective, space=self.space, trials=trials,
                             algo=tpe.suggest, max_evals=n_eval, verbose=1,
                             rstate=self.random_state)

        hyperparams = space_eval(self.space, best)
        return hyperparams, trials 
Example #6
Source File: autotune.py    From scVI with MIT License
def __init__(
        self,
        logging_queue: multiprocessing.Queue,
        queue: multiprocessing.Queue,
        objective_hyperopt: Callable,
        space: dict,
        exp_key: str,
        mongo_url: str = "localhost:1234/scvi_db",
        algo: Callable = tpe.suggest,
        max_evals: int = 100,
        show_progressbar: bool = False,
    ):
        super().__init__(name="Fmin")
        self.logging_queue = logging_queue
        self.queue = queue
        self.objective_hyperopt = objective_hyperopt
        self.space = space
        self.mongo_url = mongo_url
        self.exp_key = exp_key
        self.algo = algo
        self.max_evals = max_evals
        self.show_progressbar = show_progressbar 
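The mongo_url and exp_key fields above belong to hyperopt's MongoDB-backed parallel mode: fmin accepts a MongoTrials in place of an in-process Trials, and separate hyperopt-mongo-worker processes pull jobs from the database. A hedged sketch of that API (host, port, and database name are placeholders):

from hyperopt import fmin, tpe, hp
from hyperopt.mongoexp import MongoTrials

# note the required '/jobs' suffix on the collection URL
trials = MongoTrials('mongo://localhost:1234/scvi_db/jobs', exp_key='exp1')
best = fmin(fn=lambda x: x ** 2, space=hp.uniform('x', -10, 10),
            algo=tpe.suggest, max_evals=50, trials=trials)
# workers are started separately, e.g.:
#   hyperopt-mongo-worker --mongo=localhost:1234/scvi_db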
Example #7
Source File: hyperopt_trainer.py    From pirateAI with MIT License
def run_hyperopt(self, max_eval, space):
        """
        Runs the hyperopt trainer
        :param max_eval: (int) max evaluations to carry out when running hyperopt
        :param space: (dict) dictionary of hyperparameter space to explore
        :return: dictionary of best fit models by dna
        """
        # Reset run parameters
        self._max_eval = max_eval
        self._results = {}
        self._eval_idx = 0

        # Hyperopt is picky about the function handle
        def model_handle(params):
            return self.model(params)

        # Run the hyperparameter optimization
        _ = fmin(fn=model_handle, space=space, algo=tpe.suggest, max_evals=max_eval)
        return self._results 
Example #8
Source File: average.py    From fnc-1 with Apache License 2.0
def run():

    param_space = {

            'w0': 1.0,
            'w1': hp.quniform('w1', 0.01, 2.0, 0.01),
            'max_evals': 800
            }
    
    
    trial_counter = 0
    trials = Trials()
    objective = lambda p: hyperopt_wrapper(p)
    best_params = fmin(objective, param_space, algo=tpe.suggest,\
        trials = trials, max_evals=param_space["max_evals"])
    
    print('best parameters:')
    for k, v in best_params.items():
        print("%s: %s" % (k, v))

    trial_loss = np.asarray(trials.losses(), dtype=float)
    best_loss = min(trial_loss)
    print('best loss:', best_loss)
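The Trials object used above records every evaluation; trials.losses() returns one loss per trial and trials.best_trial holds the full document of the best one. A quick self-contained sketch:

import numpy as np
from hyperopt import fmin, tpe, hp, Trials

trials = Trials()
fmin(fn=lambda x: x ** 2, space=hp.uniform('x', -5, 5),
     algo=tpe.suggest, max_evals=10, trials=trials)

losses = np.asarray(trials.losses(), dtype=float)  # one loss per evaluation
print(losses.min(), trials.best_trial['result'])   # best loss and its result dict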
Example #9
Source File: task.py    From HRERE with MIT License
def run(self):
        trials = Trials()
        best = fmin(self._obj, self.model_param_space._build_space(),
                tpe.suggest, self.max_evals, trials)
        best_params = space_eval(self.model_param_space._build_space(), best)
        best_params = self.model_param_space._convert_into_param(best_params)
        trial_loss = np.asarray(trials.losses(), dtype=float)
        best_ind = np.argmin(trial_loss)
        best_ap = trial_loss[best_ind]
        best_loss = trials.trial_attachments(trials.trials[best_ind])["loss"]
        best_acc = trials.trial_attachments(trials.trials[best_ind])["acc"]
        self.logger.info("-" * 50)
        self.logger.info("Best Average Precision: %.3f" % best_ap)
        self.logger.info("with Loss %.3f, Accuracy %.3f" % (best_loss, best_acc))
        self.logger.info("Best Param:")
        self.task._print_param_dict(best_params)
        self.logger.info("-" * 50) 
Example #10
Source File: autotune.py    From scVI with MIT License
def __init__(
        self,
        logging_queue: multiprocessing.Queue,
        queue: multiprocessing.Queue,
        objective_hyperopt: Callable,
        exp_key: str,
        space: dict,
        algo: Callable = tpe.suggest,
        max_evals: int = 100,
        fmin_timer: float = None,
        mongo_url: str = "localhost:1234/scvi_db",
    ):
        super().__init__(name="Fmin Launcher")
        self.logging_queue = logging_queue
        self.queue = queue
        self.objective_hyperopt = objective_hyperopt
        self.exp_key = exp_key
        self.space = space
        self.algo = algo
        self.max_evals = max_evals
        self.fmin_timer = fmin_timer
        self.mongo_url = mongo_url 
Example #11
Source File: test_hyopt.py    From kopt with MIT License
def test_compilefn_cross_val(tmpdir):
    db_name = "test"
    exp_name = "test2"
    fn = CompileFN(db_name, exp_name,
                   cv_n_folds=3,
                   stratified=False,
                   random_state=True,
                   data_fn=data.data,
                   model_fn=model.build_model,
                   optim_metric="loss",
                   optim_metric_mode="min",
                   save_dir="/tmp/")
    hyper_params = {
        "data": {},
        "shared": {"max_features": 100, "maxlen": 20},
        "model": {"filters": hp.choice("m_filters", (2, 5)),
                  "hidden_dims": 3,
                  },
        "fit": {"epochs": 1}
    }
    fn_test(fn, hyper_params, tmp_dir=str(tmpdir))
    trials = Trials()
    best = fmin(fn, hyper_params, trials=trials, algo=tpe.suggest, max_evals=2)
    assert isinstance(best, dict) 
Example #12
Source File: bot.py    From ebisu with MIT License
def params_search(self):
        """
 ˜      function to search params
        """
        def objective(args):
            logger.info(f"Params : {args}")
            try:
                self.params = args
                self.exchange = BitMexBackTest()
                self.exchange.on_update(self.bin_size, self.strategy)
                profit_factor = self.exchange.win_profit/self.exchange.lose_loss
                logger.info(f"Profit Factor : {profit_factor}")
                ret = {
                    'status': STATUS_OK,
                    'loss': 1/profit_factor
                }
            except Exception as e:
                ret = {
                    'status': STATUS_FAIL
                }

            return ret

        trials = Trials()
        best_params = fmin(objective, self.options(), algo=tpe.suggest, trials=trials, max_evals=200)
        logger.info(f"Best params is {best_params}")
        logger.info(f"Best profit factor is {1/trials.best_trial['result']['loss']}") 
Example #13
Source File: hyperopt_optimizer.py    From bayesmark with Apache License 2.0
def _suggest(self):
        """Helper function to `suggest` that does the work of calling
        `hyperopt` via its dumb API.
        """
        new_ids = self.trials.new_trial_ids(1)
        assert len(new_ids) == 1
        self.trials.refresh()

        seed = random_seed(self.random)
        new_trials = tpe.suggest(new_ids, self.domain, self.trials, seed)
        assert len(new_trials) == 1

        self.trials.insert_trial_docs(new_trials)
        self.trials.refresh()

        new_trial, = new_trials  # extract singleton
        return new_trial 
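The bayesmark helper above drives tpe.suggest directly instead of going through fmin. A minimal end-to-end sketch of that pattern, assuming hyperopt internals (Domain and JOB_STATE_DONE live in hyperopt.base and are version-dependent); each trial is marked finished by hand, just as the observe() method in a later example does:

from hyperopt import hp, tpe, Trials, STATUS_OK
from hyperopt.base import Domain, JOB_STATE_DONE

def objective(x):
    return float((x - 3) ** 2)

space = hp.uniform("x", -10, 10)
domain = Domain(objective, space)
trials = Trials()

for seed in range(20):
    new_ids = trials.new_trial_ids(1)
    new_trials = tpe.suggest(new_ids, domain, trials, seed)
    trials.insert_trial_docs(new_trials)
    trials.refresh()
    trial = trials.trials[-1]
    x = trial["misc"]["vals"]["x"][0]      # the suggested point
    trial["result"] = {"loss": objective(x), "status": STATUS_OK}
    trial["state"] = JOB_STATE_DONE
    trials.refresh()

print(trials.best_trial["result"])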
Example #14
Source File: test_functional_api.py    From hyperas with MIT License
def test_functional_api():
    X_train, Y_train, X_test, Y_test = data()
    best_run, best_model = optim.minimize(model=model,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=1,
                                          trials=Trials(),
                                          verbose=False)
    best_run, best_model = optim.minimize(model=model_multi_line_arguments,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=1,
                                          trials=Trials(),
                                          verbose=False) 
Example #15
Source File: test_lr_plateau.py    From hyperas with MIT License
def test_advanced_callbacks():
    X_train, Y_train, X_test, Y_test = data()
    best_run, best_model = optim.minimize(model=create_model,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=1,
                                          trials=Trials(),
                                          verbose=False) 
Example #16
Source File: test_e2e.py    From hyperas with MIT License
def test_simple():
    X_train, Y_train, X_test, Y_test = data()
    trials = Trials()
    best_run, best_model = optim.minimize(model=model,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=1,
                                          trials=trials,
                                          verbose=False) 
Example #17
Source File: test_e2e.py    From hyperas with MIT License
def test_ensemble():
    X_train, X_test, Y_train, Y_test = data()
    optim.best_ensemble(nb_ensemble_models=2,
                        model=model,
                        data=data,
                        algo=rand.suggest,
                        max_evals=1,
                        trials=Trials(),
                        voting='hard') 
Example #18
Source File: hyperopt.py    From BTB with MIT License
def hyperopt_tpe(scoring_function, tunable_hyperparameters, iterations):
    """Tree-structured Parzen Estimator"""
    return _hyperopt_tuning_function(
        tpe.suggest,
        scoring_function,
        tunable_hyperparameters,
        iterations
    ) 
Example #19
Source File: base_worker.py    From BOAH with Apache License 2.0
def run_tpe(self, num_iterations):
        """
            Wrapper around TPE to return a HpBandSter Result object to integrate better with the other methods
        """
        try:
            from hyperopt import fmin, tpe, hp, STATUS_OK, Trials
        except ImportError:
            raise ImportError('To run TPE, please install the hyperopt package!')
        except:
            raise

        def tpe_objective(config):
            loss = self.evaluate_and_log(config, budget=self.max_budget)
            return {'config': config,
                    'loss': loss,
                    'status': STATUS_OK}

        space = self.tpe_configspace()
        trials = Trials()
        best = fmin(tpe_objective,
                space=space,
                algo=tpe.suggest,
                max_evals=num_iterations,
                trials=trials)
        return self.get_result()
Example #20
Source File: cash.py    From pyodds with MIT License
def model_selector(self, max_evals=50):
        trials = Trials()
        best_clf = fmin(self.f, CUMULATIVE_SEARCH_SPACE, algo=tpe.suggest,
                        max_evals=max_evals, trials=trials)
        config = space_eval(CUMULATIVE_SEARCH_SPACE, best_clf)
        print(config)
        return construct_classifier(config) 
Example #21
Source File: parameter_search.py    From Multitask4Veracity with MIT License
def parameter_search(ntrials, objective_function, fname):

    
    search_space= { 'num_dense_layers': hp.choice('nlayers', [1,2]),
                    'num_dense_units': hp.choice('num_dense', [300, 400,
                                                               500, 600]), 
                    'num_epochs': hp.choice('num_epochs', [50]),
                    'num_lstm_units': hp.choice('num_lstm_units', [100, 200,
                                                                    300]),
                    'num_lstm_layers': hp.choice('num_lstm_layers', [1,2]),
                    'learn_rate': hp.choice('learn_rate', [1e-4, 1e-3]), 
                    'batchsize': hp.choice('batchsize', [32]),
                    'l2reg': hp.choice('l2reg', [ 1e-3])
                 
    }
    
    trials = Trials()
    
    best = fmin(objective_function,
        space=search_space,
        algo=tpe.suggest,
        max_evals=ntrials,
        trials=trials)
    
    params = trials.best_trial['result']['Params']
    
    directory = "output"
    if not os.path.exists(directory):
        os.mkdir(directory)
    
    f = open('output/trials_'+fname+'.txt', "wb")
    pickle.dump(trials, f)
    f.close()
    
    filename = 'output/bestparams_'+fname+'.txt'
    f = open(filename, "wb")
    pickle.dump(params, f)
    f.close()
    
    return params 
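Pickling the Trials object, as above, also enables warm restarts: handing a previously populated Trials back to fmin with a larger max_evals continues the search rather than starting over (max_evals counts total trials). A self-contained sketch:

import pickle
from hyperopt import fmin, tpe, hp, Trials

space = hp.uniform('x', -10, 10)
trials = Trials()
fmin(lambda x: x ** 2, space, algo=tpe.suggest, max_evals=10, trials=trials)
with open('trials.pkl', 'wb') as f:
    pickle.dump(trials, f)

# later: reload and run 10 more evaluations of the same search
with open('trials.pkl', 'rb') as f:
    trials = pickle.load(f)
fmin(lambda x: x ** 2, space, algo=tpe.suggest,
     max_evals=len(trials.trials) + 10, trials=trials)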
Example #22
Source File: task.py    From NFETC with MIT License
def run(self):
        trials = Trials()
        best = fmin(self._obj, self.model_param_space._build_space(), tpe.suggest, self.max_evals, trials)
        best_params = space_eval(self.model_param_space._build_space(), best)
        best_params = self.model_param_space._convert_into_param(best_params)
        trial_loss = np.asarray(trials.losses(), dtype=float)
        best_ind = np.argmin(trial_loss)
        best_loss = -trial_loss[best_ind]
        best_pacc = trials.trial_attachments(trials.trials[best_ind])["pacc"]
        # best_eacc = trials.trial_attachments(trials.trials[best_ind])["eacc"]
        self.logger.info("-" * 50)
        self.logger.info("Best Exact Accuracy %.3f with Parital Accuracy %.3f" % (best_loss, best_pacc))
        self.logger.info("Best Param:")
        self.task._print_param_dict(best_params)
        self.logger.info("-" * 50) 
Example #23
Source File: hyperopt_optimizer.py    From bayesmark with Apache License 2.0
def suggest(self, n_suggestions=1):
        """Make `n_suggestions` suggestions for what to evaluate next.

        This requires the user observe all previous suggestions before calling
        again.

        Parameters
        ----------
        n_suggestions : int
            The number of suggestions to return.

        Returns
        -------
        next_guess : list of dict
            List of `n_suggestions` suggestions to evaluate the objective
            function. Each suggestion is a dictionary where each key
            corresponds to a parameter being optimized.
        """
        assert n_suggestions >= 1, "invalid value for n_suggestions"

        # Get the new trials; it seems hyperopt either uses random search or
        # guesses one at a time anyway, so we might as well call serially.
        new_trials = [self._suggest() for _ in range(n_suggestions)]

        X = []
        for trial in new_trials:
            x_guess = self.cleanup_guess(trial["misc"]["vals"])
            X.append(x_guess)

            # Build lookup to get original trial object
            x_guess_ = HyperoptOptimizer.hashable_dict(x_guess)
            assert x_guess_ not in self.trial_id_lookup, "the suggestions should not already be in the trial dict"
            self.trial_id_lookup[x_guess_] = trial["tid"]

        assert len(X) == n_suggestions
        return X 
Example #24
Source File: hyperopt_optimizer.py    From bayesmark with Apache License 2.0
def observe(self, X, y):
        """Feed the observations back to hyperopt.

        Parameters
        ----------
        X : list of dict-like
            Places where the objective function has already been evaluated.
            Each suggestion is a dictionary where each key corresponds to a
            parameter being optimized.
        y : array-like, shape (n,)
            Corresponding values where objective has been evaluated.
        """
        assert len(X) == len(y)

        for x_guess, y_ in zip(X, y):
            x_guess_ = HyperoptOptimizer.hashable_dict(x_guess)
            assert x_guess_ in self.trial_id_lookup, "Appears to be guess that did not originate from suggest"
            trial_id = self.trial_id_lookup.pop(x_guess_)
            trial = self.get_trial(trial_id)
            assert self.cleanup_guess(trial["misc"]["vals"]) == x_guess, "trial ID not consistent with x values stored"

            # Cast to float to ensure native type
            result = {"loss": float(y_), "status": STATUS_OK}
            trial["state"] = JOB_STATE_DONE
            trial["result"] = result
        # hyperopt.fmin.FMinIter.serial_evaluate only does one refresh at end
        # of loop of a bunch of evals, so we will do the same thing here.
        self.trials.refresh() 
Example #25
Source File: gaussian_process.py    From PES-Learn with BSD 3-Clause "New" or "Revised" License
def optimize_model(self):
        print("Beginning hyperparameter optimization...")
        print("Trying {} combinations of hyperparameters".format(self.hp_maxit))
        print("Training with {} points (Full dataset contains {} points).".format(self.ntrain, self.n_datapoints))
        print("Using {} training set point sampling.".format(self.sampler))
        print("Errors are root-mean-square error in wavenumbers (cm-1)")
        self.hyperopt_trials = Trials()
        self.itercount = 1  # keep track of hyperopt iterations 
        if self.input_obj.keywords['rseed']:
            rstate = np.random.RandomState(self.input_obj.keywords['rseed'])
        else:
            rstate = None
        best = fmin(self.hyperopt_model,
                    space=self.hyperparameter_space,
                    algo=tpe.suggest,
                    max_evals=self.hp_maxit*2,
                    rstate=rstate, 
                    show_progressbar=False,
                    trials=self.hyperopt_trials)
        hyperopt_complete()
        print("Best performing hyperparameters are:")
        final = space_eval(self.hyperparameter_space, best)
        print(str(sorted(final.items())))
        self.optimal_hyperparameters  = dict(final)
        # obtain final model from best hyperparameters
        print("Fine-tuning final model architecture...")
        self.build_model(self.optimal_hyperparameters, nrestarts=10, maxit=1000)
        print("Final model performance (cm-1):")
        self.test_error = self.vet_model(self.model)
        self.save_model(self.optimal_hyperparameters) 
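A note on the rstate argument used above: it seeds the TPE search for reproducibility. Older hyperopt releases expect a numpy RandomState (as in this example), while newer releases expect a numpy Generator; the exact version cutoff is not pinned down here:

import numpy as np
from hyperopt import fmin, tpe, hp

space = hp.uniform('x', -10, 10)
# older hyperopt (matches the example above):
best = fmin(lambda x: x ** 2, space, algo=tpe.suggest, max_evals=10,
            rstate=np.random.RandomState(42))
# newer hyperopt expects a Generator instead:
# best = fmin(lambda x: x ** 2, space, algo=tpe.suggest, max_evals=10,
#             rstate=np.random.default_rng(42))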
Example #26
Source File: test_hyperopt.py    From docker-python with Apache License 2.0
def test_find_min(self):
        best = fmin(
            fn=lambda x: x ** 2,
            space=hp.uniform('x', -10, 10),
            algo=tpe.suggest,
            max_evals=1,
        )
        self.assertIn('x', best) 
Example #27
Source File: Stock_Prediction_Model_XgBoost.py    From StockRecommendSystem with MIT License
def best_model(self):
        algo = partial(tpe.suggest, n_startup_jobs=1)
        best = fmin(self.GBM, space=self.paras.hyper_opt, algo=algo, max_evals=20)
        print("best", best)
        return best 
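partial(tpe.suggest, n_startup_jobs=1) above overrides a TPE keyword: the first n_startup_jobs trials are sampled at random before the Parzen estimators take over, and the hyperopt default of 20 would make a 20-evaluation search purely random. A minimal sketch:

from functools import partial
from hyperopt import fmin, hp, tpe

# model-based suggestions kick in after 5 random startup trials
algo = partial(tpe.suggest, n_startup_jobs=5)
best = fmin(fn=lambda x: (x - 3) ** 2, space=hp.uniform('x', -10, 10),
            algo=algo, max_evals=20)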
Example #28
Source File: Stock_Prediction_Model_Stateless_LSTM.py    From StockRecommendSystem with MIT License
def best_model(self, X_train, y_train, X_test, y_test):
        self.train_x = X_train
        self.train_y = y_train
        self.test_x  = X_test
        self.test_y  = y_test

        algo = partial(tpe.suggest, n_startup_jobs=1)
        best = fmin(self.LSTM, space=self.paras.hyper_opt, algo=algo, max_evals=20)
        print("best", best)
        return best