Python config.config.Config() Examples

The following are 6 code examples of config.config.Config(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module config.config, or try the search function.
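
Before the project-specific examples, here is a minimal sketch of the pattern they all share: a Config object is constructed once (from parsed command-line options, as in Example #6 below) and then handed to every helper. This is only a sketch under stated assumptions: it presumes the ner_with_dependency project is importable, and the --mode option stands in for that project's full argument list.

import argparse
from config.config import Config  # the class shown in the examples below

# Minimal sketch of the usual construction pattern (see Example #6).
# The real project accepts many more options in parse_arguments();
# --mode is used here only as an illustrative placeholder.
parser = argparse.ArgumentParser(description="Config usage sketch")
parser.add_argument("--mode", type=str, default="train")
opt = parser.parse_args()

conf = Config(opt)                            # wrap the parsed options
print("batch size:", conf.batch_size)         # attributes such as batch_size,
print("optimizer:", conf.optimizer)           # optimizer and learning_rate are
print("learning rate:", conf.learning_rate)   # what the helpers below consume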
Example #1
Source File: main.py From ner_with_dependency with GNU General Public License v3.0 | 6 votes |
def evaluate(config: Config, model: NNCRF, batch_insts_ids, name: str, insts: List[Instance]):
    ## evaluation
    metrics = np.asarray([0, 0, 0], dtype=int)
    batch_id = 0
    batch_size = config.batch_size
    for batch in batch_insts_ids:
        one_batch_insts = insts[batch_id * batch_size:(batch_id + 1) * batch_size]
        sorted_batch_insts = sorted(one_batch_insts, key=lambda inst: len(inst.input.words), reverse=True)
        batch_max_scores, batch_max_ids = model.decode(batch)
        metrics += eval.evaluate_num(sorted_batch_insts, batch_max_ids, batch[-2], batch[1], config.idx2labels)
        batch_id += 1
    p, total_predict, total_entity = metrics[0], metrics[1], metrics[2]
    precision = p * 1.0 / total_predict * 100 if total_predict != 0 else 0
    recall = p * 1.0 / total_entity * 100 if total_entity != 0 else 0
    fscore = 2.0 * precision * recall / (precision + recall) if precision != 0 or recall != 0 else 0
    print("[%s set] Precision: %.2f, Recall: %.2f, F1: %.2f" % (name, precision, recall, fscore), flush=True)
    return [precision, recall, fscore]
Example #2
Source File: main.py From ner_with_dependency with GNU General Public License v3.0 | 5 votes |
def get_optimizer(config: Config, model: nn.Module):
    params = model.parameters()
    if config.optimizer.lower() == "sgd":
        print(colored("Using SGD: lr is: {}, L2 regularization is: {}".format(config.learning_rate, config.l2), 'yellow'))
        return optim.SGD(params, lr=config.learning_rate, weight_decay=float(config.l2))
    elif config.optimizer.lower() == "adam":
        print(colored("Using Adam", 'yellow'))
        return optim.Adam(params)
    else:
        print("Illegal optimizer: {}".format(config.optimizer))
        exit(1)
Example #3
Source File: main.py From ner_with_dependency with GNU General Public License v3.0 | 5 votes |
def batching_list_instances(config: Config, insts: List[Instance]):
    train_num = len(insts)
    batch_size = config.batch_size
    total_batch = train_num // batch_size + 1 if train_num % batch_size != 0 else train_num // batch_size
    batched_data = []
    for batch_id in range(total_batch):
        one_batch_insts = insts[batch_id * batch_size:(batch_id + 1) * batch_size]
        batched_data.append(simple_batching(config, one_batch_insts))
    return batched_data
Example #4
Source File: main.py From ner_with_dependency with GNU General Public License v3.0 | 5 votes |
def test_model(config: Config, test_insts):
    dep_model_name = config.dep_model.name
    if config.dep_model == DepModelType.dggcn:
        dep_model_name += '(' + str(config.num_gcn_layers) + "," + str(config.gcn_dropout) + "," + str(config.gcn_mlp_layers) + ")"
    model_name = "model_files/lstm_{}_{}_crf_{}_{}_{}_dep_{}_elmo_{}_{}_gate_{}_epoch_{}_lr_{}_comb_{}.m".format(config.num_lstm_layer, config.hidden_dim, config.dataset, config.affix, config.train_num, dep_model_name, config.context_emb.name, config.optimizer.lower(), config.edge_gate, config.num_epochs, config.learning_rate, config.interaction_func)
    res_name = "results/lstm_{}_{}_crf_{}_{}_{}_dep_{}_elmo_{}_{}_gate_{}_epoch_{}_lr_{}_comb_{}.results".format(config.num_lstm_layer, config.hidden_dim, config.dataset, config.affix, config.train_num, dep_model_name, config.context_emb.name, config.optimizer.lower(), config.edge_gate, config.num_epochs, config.learning_rate, config.interaction_func)
    model = NNCRF(config)
    model.load_state_dict(torch.load(model_name))
    model.eval()
    test_batches = batching_list_instances(config, test_insts)
    evaluate(config, model, test_batches, "test", test_insts)
    write_results(res_name, test_insts)
Example #5
Source File: app_config.py From rubbergod with GNU General Public License v3.0 | 5 votes |
def get_attr(attr_key: str):
    """
    Helper method for getting values from config override or config template.
    """
    if not hasattr(ConfigOverride.Config, attr_key):
        return getattr(config_template.Config, attr_key)
    return getattr(ConfigOverride.Config, attr_key)
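
The helper above prefers a value defined on ConfigOverride.Config and falls back to config_template.Config otherwise. Below is a self-contained sketch of that fallback; _Template, _Override, and the attribute names are hypothetical stand-ins, not the rubbergod modules themselves.

# Hypothetical stand-ins for config_template.Config and ConfigOverride.Config.
class _Template:
    prefix = "!"
    description = "default description"

class _Override:
    description = "customised description"  # overrides the template value

def get_attr_demo(attr_key: str):
    # Same lookup order as get_attr above: override first, then template.
    if not hasattr(_Override, attr_key):
        return getattr(_Template, attr_key)
    return getattr(_Override, attr_key)

print(get_attr_demo("prefix"))       # "!" -- falls back to the template
print(get_attr_demo("description"))  # "customised description" -- override wins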
Example #6
Source File: main.py From ner_with_dependency with GNU General Public License v3.0 | 4 votes |
def main():
    parser = argparse.ArgumentParser(description="Dependency-Guided LSTM CRF implementation")
    opt = parse_arguments(parser)
    conf = Config(opt)
    reader = Reader(conf.digit2zero)
    setSeed(opt, conf.seed)

    trains = reader.read_conll(conf.train_file, -1, True)
    devs = reader.read_conll(conf.dev_file, conf.dev_num, False)
    tests = reader.read_conll(conf.test_file, conf.test_num, False)

    if conf.context_emb != ContextEmb.none:
        print('Loading the {} vectors for all datasets.'.format(conf.context_emb.name))
        conf.context_emb_size = reader.load_elmo_vec(conf.train_file.replace(".sd", "").replace(".ud", "").replace(".sud", "").replace(".predsd", "").replace(".predud", "").replace(".stud", "").replace(".ssd", "") + "." + conf.context_emb.name + ".vec", trains)
        reader.load_elmo_vec(conf.dev_file.replace(".sd", "").replace(".ud", "").replace(".sud", "").replace(".predsd", "").replace(".predud", "").replace(".stud", "").replace(".ssd", "") + "." + conf.context_emb.name + ".vec", devs)
        reader.load_elmo_vec(conf.test_file.replace(".sd", "").replace(".ud", "").replace(".sud", "").replace(".predsd", "").replace(".predud", "").replace(".stud", "").replace(".ssd", "") + "." + conf.context_emb.name + ".vec", tests)

    conf.use_iobes(trains + devs + tests)
    conf.build_label_idx(trains)

    conf.build_deplabel_idx(trains + devs + tests)
    print("# deplabels: ", len(conf.deplabels))
    print("dep label 2idx: ", conf.deplabel2idx)

    conf.build_word_idx(trains, devs, tests)
    conf.build_emb_table()
    conf.map_insts_ids(trains + devs + tests)

    print("num chars: " + str(conf.num_char))
    # print(str(config.char2idx))
    print("num words: " + str(len(conf.word2idx)))
    # print(config.word2idx)

    if opt.mode == "train":
        if conf.train_num != -1:
            random.shuffle(trains)
            trains = trains[:conf.train_num]
        learn_from_insts(conf, conf.num_epochs, trains, devs, tests)
    else:
        ## Load the trained model.
        test_model(conf, tests)
        # pass
    print(opt.mode)