Python hyperopt.STATUS_FAIL Examples
The following are 5 code examples of hyperopt.STATUS_FAIL. Note that STATUS_FAIL is a module-level constant, not a callable: an objective function returns it in the 'status' field of its result dict to mark a trial as failed, and hyperopt then excludes that trial when selecting the best result. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module hyperopt, or try the search function.
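Before the project examples, here is a minimal, self-contained sketch of the basic pattern. It is not taken from any of the projects below; the search space, penalty loss, and failure condition are illustrative only:

import math
from hyperopt import fmin, tpe, hp, Trials, STATUS_OK, STATUS_FAIL

def objective(x):
    # Mark infeasible points as failed; hyperopt ignores failed trials
    # when it picks the best result.
    if x < 0:
        return {'loss': 0.0, 'status': STATUS_FAIL}
    return {'loss': math.sqrt(x), 'status': STATUS_OK}

trials = Trials()
best = fmin(fn=objective, space=hp.uniform('x', -1.0, 1.0),
            algo=tpe.suggest, max_evals=20, trials=trials)
print(best)

Because only STATUS_OK trials are considered when reporting the best result, a failed point can never be returned as the optimum.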
Example #1
Source File: neural_network.py From PES-Learn with BSD 3-Clause "New" or "Revised" License | 6 votes
def hyperopt_model(self, params):
    """
    A Hyperopt-friendly wrapper for build_model
    """
    # skip building this model if hyperparameter combination already attempted
    for i in self.hyperopt_trials.results:
        if 'memo' in i:
            if params == i['memo']:
                return {'loss': i['loss'], 'status': STATUS_OK, 'memo': 'repeat'}
    if self.itercount > self.hp_maxit:
        return {'loss': 0.0, 'status': STATUS_FAIL, 'memo': 'max iters reached'}
    error_test, error_valid = self.build_model(params)
    self.itercount += 1
    if np.isnan(error_valid):
        return {'loss': 1e5, 'status': STATUS_FAIL, 'memo': 'nan'}
    else:
        return {'loss': error_valid, 'status': STATUS_OK, 'memo': params}
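Two conditions fail a trial here: exhausting the iteration budget (hp_maxit) and a NaN validation error. In both cases the 'memo' field records the reason, and the same field is used to skip hyperparameter combinations that were already attempted.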
Example #2
Source File: automl.py From KDDCup2019_admin with MIT License | 5 votes
def hyperopt_lightgbm(X_train: pd.DataFrame, y_train: pd.Series, params: Dict,
                      config: Config, max_evals=10):
    X_train, X_test, y_train, y_test = data_split_by_time(X_train, y_train, test_size=0.2)
    X_train, X_val, y_train, y_val = data_split_by_time(X_train, y_train, test_size=0.3)
    train_data = lgb.Dataset(X_train, label=y_train)
    valid_data = lgb.Dataset(X_val, label=y_val)

    space = {
        "learning_rate": hp.loguniform("learning_rate", np.log(0.01), np.log(0.5)),
        #"max_depth": hp.choice("max_depth", [-1, 2, 3, 4, 5, 6]),
        "max_depth": hp.choice("max_depth", [1, 2, 3, 4, 5, 6]),
        "num_leaves": hp.choice("num_leaves", np.linspace(10, 200, 50, dtype=int)),
        "feature_fraction": hp.quniform("feature_fraction", 0.5, 1.0, 0.1),
        "bagging_fraction": hp.quniform("bagging_fraction", 0.5, 1.0, 0.1),
        "bagging_freq": hp.choice("bagging_freq", np.linspace(0, 50, 10, dtype=int)),
        "reg_alpha": hp.uniform("reg_alpha", 0, 2),
        "reg_lambda": hp.uniform("reg_lambda", 0, 2),
        "min_child_weight": hp.uniform('min_child_weight', 0.5, 10),
    }

    def objective(hyperparams):
        if config.time_left() < 50:
            return {'status': STATUS_FAIL}
        else:
            model = lgb.train({**params, **hyperparams}, train_data, 100,
                              valid_data, early_stopping_rounds=10, verbose_eval=0)
            pred = model.predict(X_test)
            score = roc_auc_score(y_test, pred)
            #score = model.best_score["valid_0"][params["metric"]]

            # in classification, less is better
            return {'loss': -score, 'status': STATUS_OK}

    trials = Trials()
    best = hyperopt.fmin(fn=objective, space=space, trials=trials,
                         algo=tpe.suggest, max_evals=max_evals, verbose=1,
                         rstate=np.random.RandomState(1))

    hyperparams = space_eval(space, best)
    log(f"auc = {-trials.best_trial['result']['loss']:0.4f} {hyperparams}")
    return hyperparams
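Here STATUS_FAIL serves as a time-budget guard: once config.time_left() drops below 50 seconds, the objective returns before training a model. The failed result omits the 'loss' key entirely, which hyperopt accepts because a loss value is only required for STATUS_OK trials.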
Example #3
Source File: gaussian_process.py From PES-Learn with BSD 3-Clause "New" or "Revised" License | 5 votes
def hyperopt_model(self, params):
    # skip building this model if hyperparameter combination already attempted
    for i in self.hyperopt_trials.results:
        if 'memo' in i:
            if params == i['memo']:
                return {'loss': i['loss'], 'status': STATUS_OK, 'memo': 'repeat'}
    if self.itercount > self.hp_maxit:
        return {'loss': 0.0, 'status': STATUS_FAIL, 'memo': 'max iters reached'}
    self.build_model(params)
    error_test = self.vet_model(self.model)
    self.itercount += 1
    return {'loss': error_test, 'status': STATUS_OK, 'memo': params}
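This mirrors the memoization-and-budget pattern of Example #1, applied to a Gaussian process model rather than a neural network; here the only STATUS_FAIL path is the iteration cap.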
Example #4
Source File: optimize.py From starsem2018-entity-linking with Apache License 2.0 | 5 votes
def optimize(training_config, model_config, train_data, dev_data, eval_dataset, logger):
    trials = hy.Trials()
    atexit.register(lambda: wrap_up_optimization(trials, training_config['optimize.save.history'], logger))

    logger.debug("Loading embeddings")
    embedding_matrix, element2idx = utils.load_word_embeddings(model_config['word.embeddings'])
    entities_embedding_matrix, entity2idx, rels_embedding_matrix, rel2idx = \
        utils.load_kb_embeddings(model_config['kb.embeddings'])

    def optimization_trial(sampled_parameters):
        global trials_counter, dev, train
        try:
            logger.info("** Trial: {}/{} ** ".format(trials_counter, training_config['optimize.num.trails']))
            trials_counter += 1
            sampled_parameters['negative.weight.epoch'] = int(sampled_parameters['negative.weight.epoch'])
            model_trial = getattr(models, training_config.get('model.type', "VectorModel"))(
                parameters={**model_config, **sampled_parameters}, logger=logger)
            model_trial.prepare_model(embedding_matrix=embedding_matrix, element2idx=element2idx,
                                      entities_embedding_matrix=entities_embedding_matrix, entity2idx=entity2idx,
                                      rels_embedding_matrix=rels_embedding_matrix, rel2idx=rel2idx)
            if train is None and dev is None:
                dev = (model_trial.encode_batch(dev_data[:-1]), dev_data[-1])
                train = (model_trial.encode_batch(train_data[:-1]), train_data[-1])
            results = model_trial.train(train, dev=dev,
                                        eval_on_dataset=lambda: eval_dataset.eval(
                                            MLLinker(model=model_trial, logger=logger), verbose=False))
            results['actual_loss'] = results['v_loss']
            results['loss'] = 1.0 - results['v_f1']
            return {**results, 'status': hy.STATUS_OK, 'sampled.parameters': sampled_parameters}
        except Exception as ex:
            logger.error(ex)
            return {'loss': -1, 'status': hy.STATUS_FAIL, 'sampled.parameters': sampled_parameters}

    hy.fmin(optimization_trial,
            optimization_space,
            algo=hy.rand.suggest,
            max_evals=training_config['optimize.num.trails'],
            trials=trials,
            verbose=1)
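Wrapping the entire trial in try/except and returning STATUS_FAIL from the handler keeps a long optimization run alive when a single configuration crashes, while still recording the sampled parameters for later inspection.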
Example #5
Source File: optimization.py From nlu-hyperopt with Apache License 2.0 | 4 votes
def run_trial(space):
    """The objective function is pickled and transferred to the workers.
    Hence, this function has to contain all the imports we need.
    """
    data_dir = os.environ.get("INPUT_DATA_DIRECTORY", "./data")
    model_dir = os.environ.get("INPUT_MODEL_DIRECTORY", "./models")
    target_metric = os.environ.get("INPUT_TARGET_METRIC", "f1_score")

    if target_metric not in AVAILABLE_METRICS:
        logger.error("The metric '{}' is not in the available metrics. "
                     "Please use one of the available metrics: {}."
                     "".format(target_metric, AVAILABLE_METRICS))
        return {"loss": 1, "status": STATUS_FAIL}

    logger.debug("Search space: {}".format(space))

    # The epoch has to be an int since `tqdm` otherwise will cause an exception.
    if "epochs" in space:
        space["epochs"] = int(space["epochs"])

    with open(os.path.join(data_dir, "template_config.yml")) as f:
        config_yml = f.read().format(**space)
        config = read_yaml(config_yml)
        config = rasa.nlu.config.load(config)

    trainer = Trainer(config)
    training_data = load_data(os.path.join(data_dir, "train.md"))
    test_data_path = os.path.join(data_dir, "validation.md")

    # wrap train and eval in try/except in case
    # nlu_hyperopt proposes an invalid combination of params
    try:
        model = trainer.train(training_data)
        model_path = trainer.persist(model_dir)

        if target_metric is None or target_metric == "threshold_loss":
            loss = _get_threshold_loss(model, test_data_path)
        else:
            loss = _get_nlu_evaluation_loss(model_path, target_metric, test_data_path)
        return {"loss": loss, "status": STATUS_OK}
    except Exception as e:
        logger.error(e)
        return {"loss": 1, "status": STATUS_FAIL}
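As a hypothetical follow-up to any of the examples above (assuming a Trials instance named trials, as in the sketch after the introduction), the recorded results can be filtered to see how many proposals failed:

failed = [r for r in trials.results if r['status'] == STATUS_FAIL]
print("{} of {} trials failed".format(len(failed), len(trials.results)))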