Python data.get_data() Examples

The following are two code examples of data.get_data(), drawn from open-source projects. The source project and file for each example are noted above it. You may also want to check out the other available functions and classes of the data module.
Example #1
Source File: run_posenet.py    From deep-camera-relocalization with MIT License
import os

# get_data and batch_size are defined elsewhere in the module.
def load_data(data_dir, data_file, max_num_images=-1):
  data_path = os.path.join(data_dir, data_file)
  if max_num_images >= 0:
    data_source = get_data(data_path, data_dir, max_num_images)
  else:
    data_source = get_data(data_path, data_dir)
  num_images = len(data_source.images)
  # Ceiling division; // keeps the batch count an integer on Python 3.
  num_batches = (num_images + batch_size - 1) // batch_size
  print('num_images', num_images, 'batch_size', batch_size, 'num_batches', num_batches)
  return data_source
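As a usage note, here is a minimal sketch of how load_data might be called. The dataset path, index file name, and batch_size value below are illustrative assumptions, not part of the original project.

batch_size = 32  # assumed module-level global read by load_data

# Hypothetical paths; substitute your own dataset directory and index file.
data_source = load_data('/path/to/dataset', 'dataset_train.txt', max_num_images=1000)
print(len(data_source.images), 'images loaded')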
Example #2
Source File: main.py    From TranSummar with MIT License
def predict(model, modules, consts, options):
    print("start predicting,")
    model.eval()
    options["has_y"] = TESTING_DATASET_CLS.HAS_Y
    if options["beam_decoding"]:
        print("using beam search")
    else:
        print("using greedy search")
    rebuild_dir(cfg.cc.BEAM_SUMM_PATH)
    rebuild_dir(cfg.cc.BEAM_GT_PATH)
    rebuild_dir(cfg.cc.GROUND_TRUTH_PATH)
    rebuild_dir(cfg.cc.SUMM_PATH)

    print("loading test set...")
    if options["model_selection"]:
        xy_list = pickle.load(open(cfg.cc.VALIDATE_DATA_PATH + "pj1000.pkl", "rb")) 
    else:
        xy_list = pickle.load(open(cfg.cc.TESTING_DATA_PATH + "test.pkl", "rb")) 
    batch_list, num_files, num_batches = datar.batched(len(xy_list), options, consts)

    print("num_files = ", num_files, ", num_batches = ", num_batches)
    
    running_start = time.time()
    partial_num = 0
    total_num = 0
    si = 0
    for idx_batch in range(num_batches):
        test_idx = batch_list[idx_batch]
        batch_raw = [xy_list[xy_idx] for xy_idx in test_idx]
        batch = datar.get_data(batch_raw, modules, consts, options)
        
        assert len(test_idx) == batch.x.shape[1]  # local batch size

        word_emb, padding_mask = model.encode(torch.LongTensor(batch.x).to(options["device"]))

        if options["beam_decoding"]:
            for idx_s in range(len(test_idx)):
                if options["copy"]:
                    inputx = (torch.LongTensor(batch.x_ext[:, idx_s]).to(options["device"]),
                              torch.FloatTensor(batch.x_mask[:, idx_s, :]).to(options["device"]),
                              word_emb[:, idx_s, :], padding_mask[:, idx_s],
                              batch.y[:, idx_s], [batch.len_y[idx_s]], batch.original_summarys[idx_s],
                              batch.max_ext_len, batch.x_ext_words[idx_s])
                else:
                    inputx = (torch.LongTensor(batch.x[:, idx_s]).to(options["device"]),
                              word_emb[:, idx_s, :], padding_mask[:, idx_s],
                              batch.y[:, idx_s], [batch.len_y[idx_s]], batch.original_summarys[idx_s])

                beam_decode(si, inputx, model, modules, consts, options)
                si += 1
        else:
            # Greedy decoding is not implemented in this snippet.
            pass

        testing_batch_size = len(test_idx)
        partial_num += testing_batch_size
        total_num += testing_batch_size
        if partial_num >= consts["testing_print_size"]:
            print(total_num, "summs are generated")
            partial_num = 0
    print(si, total_num)
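The call to datar.get_data above packs a list of raw examples into a single batch object with time-major tensors (note the assert that batch.x.shape[1] equals the local batch size). A minimal sketch of that padding-and-batching pattern follows; get_data_sketch, the pad_id default, and the toy token ids are hypothetical and only illustrate the idea, not the actual TranSummar implementation.

import numpy as np

def get_data_sketch(batch_raw, pad_id=0):
    # batch_raw: list of (x, y) pairs, each a list of token ids.
    max_len_x = max(len(x) for x, _ in batch_raw)
    x = np.full((max_len_x, len(batch_raw)), pad_id, dtype=np.int64)
    x_mask = np.zeros((max_len_x, len(batch_raw), 1), dtype=np.float32)
    for j, (tokens, _) in enumerate(batch_raw):
        x[:len(tokens), j] = tokens       # time-major layout, like batch.x above
        x_mask[:len(tokens), j, 0] = 1.0  # 1.0 for real tokens, 0.0 for padding
    return x, x_mask

x, x_mask = get_data_sketch([([5, 7, 9], [1]), ([4, 2], [1])])
print(x.shape)  # (3, 2): sequence length x batch size, matching the assert above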