Python six.moves.cPickle.HIGHEST_PROTOCOL Examples

The following are 30 code examples of six.moves.cPickle.HIGHEST_PROTOCOL(). Each example comes from an open source project; the source file, project, and license are noted above each snippet. You may also want to check out the other available functions and classes of the six.moves.cPickle module.
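As a quick orientation before the project examples, here is a minimal, self-contained sketch (the file name example.pkl is illustrative) that round-trips a dictionary through six.moves.cPickle using HIGHEST_PROTOCOL. The constant resolves to the newest protocol the running interpreter supports (2 on Python 2; 4 or 5 on modern Python 3, depending on the version):

import os
from six.moves import cPickle

data = {'weights': [0.1, 0.2, 0.3], 'label': 'example'}

# Pickle files must always be opened in binary mode.
with open('example.pkl', 'wb') as f:
    cPickle.dump(data, f, protocol=cPickle.HIGHEST_PROTOCOL)

with open('example.pkl', 'rb') as f:
    restored = cPickle.load(f)

assert restored == data
os.remove('example.pkl')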
Example #1
Source File: cmodule.py    From attention-lvcsr with MIT License
def save_pkl(self):
        """
        Dump this object into its `key_pkl` file.

        May raise a cPickle.PicklingError if such an exception is raised at
        pickle time (in which case a warning is also displayed).

        """
        # Note that writing in binary mode is important under Windows.
        try:
            with open(self.key_pkl, 'wb') as f:
                pickle.dump(self, f, protocol=pickle.HIGHEST_PROTOCOL)
        except pickle.PicklingError:
            _logger.warning("Cache leak due to unpickle-able key data %s",
                            self.keys)
            os.remove(self.key_pkl)
            raise 
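The PicklingError branch above triggers when the key data contains something that cannot be serialized, such as a lambda. A minimal standalone sketch of how that exception surfaces (not using the cache key type from attention-lvcsr):

from io import BytesIO
from six.moves import cPickle

try:
    cPickle.dump(lambda x: x, BytesIO(), cPickle.HIGHEST_PROTOCOL)
except cPickle.PicklingError as err:
    # Functions are pickled by reference; the lookup fails for lambdas.
    print('unpicklable:', err)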
Example #2
Source File: read_PascalVocData.py    From FCN-GoogLeNet with MIT License
def read_dataset(data_dir):
    pickle_filename = "PascalVoc.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_tarfile=True)
        PascalVoc_folder = "VOCdevkit"
        result = create_image_lists(os.path.join(data_dir, PascalVoc_folder))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['training']
        validation_records = result['validation']
        del result

    return training_records, validation_records 
Example #3
Source File: cmodule.py    From D-VAE with MIT License
def save_pkl(self):
        """
        Dump this object into its `key_pkl` file.

        May raise a cPickle.PicklingError if such an exception is raised at
        pickle time (in which case a warning is also displayed).

        """
        # Note that writing in binary mode is important under Windows.
        try:
            with open(self.key_pkl, 'wb') as f:
                pickle.dump(self, f, protocol=pickle.HIGHEST_PROTOCOL)
        except pickle.PicklingError:
            _logger.warning("Cache leak due to unpickle-able key data %s",
                            self.keys)
            os.remove(self.key_pkl)
            raise 
Example #4
Source File: read_MITSceneParsingData.py    From FCN-GoogLeNet with MIT License
def read_dataset(data_dir):
    pickle_filename = "MITSceneParsing.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        SceneParsing_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, SceneParsing_folder))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['training']
        validation_records = result['validation']
        del result

    return training_records, validation_records 
Example #5
Source File: read_LaMemDataset.py    From Colorization.tensorflow with MIT License
def read_dataset(data_dir):
    pickle_filename = "lamem.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_tarfile=True)
        lamem_folder = (DATA_URL.split("/")[-1]).split(os.path.extsep)[0]
        result = {'images': create_image_lists(os.path.join(data_dir, lamem_folder))}
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['images']
        del result

    return training_records 
Example #6
Source File: reader.py    From Fully-Convolutional-Networks with MIT License
def read_dataset(data_dir):
    pickle_filename = "MITSceneParsing.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        SceneParsing_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, SceneParsing_folder))
        print ("> [SPD] Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("> [SPD] Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['training']
        validation_records = result['validation']
        del result

    return training_records, validation_records 
Example #7
Source File: test_pickle_store.py    From arctic with GNU Lesser General Public License v2.1
def test_pickle_store_future_version():
    data = {'foo': b'abcdefghijklmnopqrstuvwxyz'}
    version = {'_id': sentinel._id,
               'blob': '__chunked__VERSION_ONE_MILLION'}
    coll = Mock()
    arctic_lib = Mock()
    datap = compressHC(cPickle.dumps(data, protocol=cPickle.HIGHEST_PROTOCOL))
    data_1 = datap[0:5]
    data_2 = datap[5:]
    coll.find.return_value = [{'data': Binary(data_1),
                               'symbol': 'sentinel.symbol',
                               'segment': 0},
                              {'data': Binary(data_2),
                               'symbol': 'sentinel.symbol',
                               'segment': 1},
                              ]
    arctic_lib.get_top_level_collection.return_value = coll

    ps = PickleStore()
    with pytest.raises(UnsupportedPickleStoreVersion) as e:
        ps.read(arctic_lib, version, sentinel.symbol)
    assert('unsupported version of pickle store' in str(e.value)) 
Example #8
Source File: test_pickle_store.py    From arctic with GNU Lesser General Public License v2.1
def test_pickle_chunk_V1_read():
    data = {'foo': b'abcdefghijklmnopqrstuvwxyz'}
    version = {'_id': sentinel._id,
               'blob': '__chunked__'}
    coll = Mock()
    arctic_lib = Mock()
    datap = compressHC(cPickle.dumps(data, protocol=cPickle.HIGHEST_PROTOCOL))
    data_1 = datap[0:5]
    data_2 = datap[5:]
    coll.find.return_value = [{'data': Binary(data_1),
                               'symbol': 'sentinel.symbol',
                               'segment': 0},
                              {'data': Binary(data_2),
                               'symbol': 'sentinel.symbol',
                               'segment': 1},
                              ]
    arctic_lib.get_top_level_collection.return_value = coll

    ps = PickleStore()
    assert(data == ps.read(arctic_lib, version, sentinel.symbol)) 
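Both of these arctic tests exercise the same storage idea: the pickled payload is compressed and split into small segments, which the store later concatenates and unpickles. Stripped of the Mock scaffolding and arctic's compressHC compression, the underlying round trip looks roughly like this (the 5-byte segment size mirrors the tests):

from six.moves import cPickle

data = {'foo': b'abcdefghijklmnopqrstuvwxyz'}
blob = cPickle.dumps(data, protocol=cPickle.HIGHEST_PROTOCOL)

# Split into 5-byte segments, as the tests do with data_1 and data_2.
segments = [blob[i:i + 5] for i in range(0, len(blob), 5)]

# The store reassembles the segments in order before unpickling.
assert cPickle.loads(b''.join(segments)) == data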
Example #9
Source File: 1_prepare_pickle_200.py    From Neural-Network-Programming-with-TensorFlow with MIT License
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
  dataset_names = []
  for folder in data_folders:
    set_filename = folder + '.pickle'
    dataset_names.append(set_filename)
    if os.path.exists(set_filename) and not force:
      # You may override by setting force=True.
      print('%s already present - Skipping pickling.' % set_filename)
    else:
      print('Pickling %s.' % set_filename)
      dataset = load_letter(folder, min_num_images_per_class)
      try:
        with open(set_filename, 'wb') as f:
          pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
      except Exception as e:
        print('Unable to save data to', set_filename, ':', e)
  
  return dataset_names 
Example #10
Source File: 1_prepare_pickle.py    From Neural-Network-Programming-with-TensorFlow with MIT License
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
  dataset_names = []
  for folder in data_folders:
    set_filename = folder + '.pickle'
    dataset_names.append(set_filename)
    if os.path.exists(set_filename) and not force:
      # You may override by setting force=True.
      print('%s already present - Skipping pickling.' % set_filename)
    else:
      print('Pickling %s.' % set_filename)
      dataset = load_letter(folder, min_num_images_per_class)
      try:
        with open(set_filename, 'wb') as f:
          pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
      except Exception as e:
        print('Unable to save data to', set_filename, ':', e)
  
  return dataset_names 
Example #11
Source File: prepare_notmnist.py    From Neural-Network-Programming-with-TensorFlow with MIT License
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
  dataset_names = []
  for folder in data_folders:
    set_filename = folder + '.pickle'
    dataset_names.append(set_filename)
    if os.path.exists(set_filename) and not force:
      print('%s already present - Skipping pickling.' % set_filename)
    else:
      print('Pickling %s.' % set_filename)
      dataset = load_letter(folder, min_num_images_per_class)
      try:
        with open(set_filename, 'wb') as f:
          #pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
          print(pickle.HIGHEST_PROTOCOL)
          pickle.dump(dataset, f, 2)
      except Exception as e:
        print('Unable to save data to', set_filename, ':', e)
  
  return dataset_names 
Example #12
Source File: test_pickle.py    From twitter-stock-recommendation with MIT License
def test_simple():
    fig = plt.figure()
    pickle.dump(fig, BytesIO(), pickle.HIGHEST_PROTOCOL)

    ax = plt.subplot(121)
    pickle.dump(ax, BytesIO(), pickle.HIGHEST_PROTOCOL)

    ax = plt.axes(projection='polar')
    plt.plot(np.arange(10), label='foobar')
    plt.legend()

    pickle.dump(ax, BytesIO(), pickle.HIGHEST_PROTOCOL)

#    ax = plt.subplot(121, projection='hammer')
#    pickle.dump(ax, BytesIO(), pickle.HIGHEST_PROTOCOL)

    plt.figure()
    plt.bar(x=np.arange(10), height=np.arange(10))
    pickle.dump(plt.gca(), BytesIO(), pickle.HIGHEST_PROTOCOL)

    fig = plt.figure()
    ax = plt.axes()
    plt.plot(np.arange(10))
    ax.set_yscale('log')
    pickle.dump(fig, BytesIO(), pickle.HIGHEST_PROTOCOL) 
Example #13
Source File: notmnist_prepare_data.py    From deep-learning-samples with The Unlicense
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
  dataset_names = []
  for folder in data_folders:
    set_filename = folder + '.pickle'
    dataset_names.append(set_filename)
    if os.path.exists(set_filename) and not force:
      # You may override by setting force=True.
      print('%s already present - Skipping pickling.' % set_filename)
    else:
      print('Pickling %s.' % set_filename)
      dataset = load_letter(folder, min_num_images_per_class)
      try:
        with open(set_filename, 'wb') as f:
          pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
      except Exception as e:
        print('Unable to save data to', set_filename, ':', e)

  return dataset_names 
Example #14
Source File: data_process.py    From malayalam-character-recognition with MIT License
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
    dataset_names = []
    for folder in data_folders:
        set_filename = folder + pickle_extension
        dataset_names.append(folder)
        if os.path.exists(set_filename) and not force:
            # You may override by setting force=True.
            print('%s already present - Skipping pickling.' % set_filename)
        else:
            # print('Pickling %s.' % set_filename)
            dataset = load_letter(folder, min_num_images_per_class)
            try:
                with open(set_filename, 'wb') as f:
                    Pickle.dump(dataset, f, Pickle.HIGHEST_PROTOCOL)
            except Exception as e:
                print('Unable to save data to', set_filename, ':', e)

    return dataset_names 
Example #15
Source File: test_pickle.py    From neural-network-animation with MIT License
def recursive_pickle(top_obj):
    """
    Recursively pickle all of the given objects subordinates, starting with
    the deepest first. **Very** handy for debugging pickling issues, but
    also very slow (as it literally pickles each object in turn).

    Handles circular object references gracefully.

    """
    objs = depth_getter(top_obj)
    # sort by depth then by nest_info
    objs = sorted(six.itervalues(objs), key=lambda val: (-val[0], val[2]))

    for _, obj, location in objs:
#        print('trying %s' % location)
        try:
            pickle.dump(obj, BytesIO(), pickle.HIGHEST_PROTOCOL)
        except Exception as err:
            print(obj)
            print('Failed to pickle %s. \n Type: %s. Traceback '
                  'follows:' % (location, type(obj)))
            raise 
Example #16
Source File: prepro_ngrams_bak.py    From NeuralBabyTalk with MIT License
def main(params):

  det_train_path = 'data/coco/annotations/instances_train2014.json'
  det_val_path = 'data/coco/annotations/instances_val2014.json'

  coco_det_train = COCO(det_train_path)
  coco_det_val = COCO(det_val_path)

  info = json.load(open(params['dict_json'], 'r'))
  imgs = json.load(open(params['input_json'], 'r'))

  itow = info['ix_to_word']
  wtoi = {w:i for i,w in itow.items()}
  wtod = {w:i+1 for w,i in info['wtod'].items()} # word to detection
  dtoi = {w:i+1 for i,w in enumerate(wtod.keys())} # detection to index
  wtol = info['wtol']
  ctol = {c:i+1 for i, c in enumerate(coco_det_train.cats.keys())}

  # imgs = imgs['images']

  ngram_idxs, ref_len = build_dict(imgs, info, wtoi, wtod, dtoi, wtol, ctol, coco_det_train, coco_det_val, params)

  # cPickle.dump({'document_frequency': ngram_words, 'ref_len': ref_len}, open(params['output_pkl']+'-words.p','wb'), protocol=cPickle.HIGHEST_PROTOCOL)
  # Note: the pickle output file must be opened in binary mode ('wb').
  cPickle.dump({'document_frequency': ngram_idxs, 'ref_len': ref_len}, open(params['output_pkl']+'-idxs.p','wb'), protocol=cPickle.HIGHEST_PROTOCOL) 
Example #17
Source File: prepro_ngrams_flickr30k.py    From NeuralBabyTalk with MIT License
def main(params):

  info = json.load(open(params['dict_json'], 'r'))
  imgs = json.load(open(params['input_json'], 'r'))

  itow = info['ix_to_word']
  wtoi = {w:i for i,w in itow.items()}
  wtod = {w:i+1 for w,i in info['wtod'].items()} # word to detection
  # dtoi = {w:i+1 for i,w in enumerate(wtod.keys())} # detection to index
  dtoi = wtod
  wtol = info['wtol']
  itod = {i:w for w,i in dtoi.items()}

  # imgs = imgs['images']

  ngram_idxs, ref_len = build_dict(imgs, info, wtoi, wtod, dtoi, wtol, itod, params)

  # cPickle.dump({'document_frequency': ngram_words, 'ref_len': ref_len}, open(params['output_pkl']+'-words.p','wb'), protocol=cPickle.HIGHEST_PROTOCOL)
  # Note: the pickle output file must be opened in binary mode ('wb').
  cPickle.dump({'document_frequency': ngram_idxs, 'ref_len': ref_len}, open(params['output_pkl']+'-idxs.p','wb'), protocol=cPickle.HIGHEST_PROTOCOL) 
Example #18
Source File: peda.py    From GdbPlugins with GNU General Public License v3.0
def save_snapshot(self, filename=None):
        """
        Save a snapshot of current process to file
        Warning: this is not thread safe, do not use with multithread program

        Args:
            - filename: target file to save snapshot

        Returns:
            - Bool
        """
        if not filename:
            filename = self.get_config_filename("snapshot")

        snapshot = self.take_snapshot()
        if not snapshot:
            return False
        # dump to file
        fd = open(filename, "wb")
        pickle.dump(snapshot, fd, pickle.HIGHEST_PROTOCOL)
        fd.close()

        return True 
Example #19
Source File: 1_notmnist.py    From udacity-deep-learning with GNU General Public License v3.0
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
    dataset_names = []
    for folder in data_folders:
        set_filename = folder + '.pickle'
        dataset_names.append(set_filename)
        if os.path.exists(set_filename) and not force:
            # You may override by setting force=True.
            print('%s already present - Skipping pickling.' % set_filename)
        else:
            print('Pickling %s.' % set_filename)
            dataset = load_letter(folder, min_num_images_per_class)
            try:
                with open(set_filename, 'wb') as f:
                    pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
            except Exception as e:
                print('Unable to save data to', set_filename, ':', e)

    return dataset_names 
Example #20
Source File: 1_prepare_pickle_200_greyscale.py    From Neural-Network-Programming-with-TensorFlow with MIT License
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
  dataset_names = []
  for folder in data_folders:
    set_filename = folder + '.pickle'
    dataset_names.append(set_filename)
    if os.path.exists(set_filename) and not force:
      # You may override by setting force=True.
      print('%s already present - Skipping pickling.' % set_filename)
    else:
      print('Pickling %s.' % set_filename)
      dataset = load_letter(folder, min_num_images_per_class)
      try:
        with open(set_filename, 'wb') as f:
          pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
      except Exception as e:
        print('Unable to save data to', set_filename, ':', e)
  
  return dataset_names 
Example #21
Source File: networkx_graph.py    From vitrage with Apache License 2.0
def write_gpickle(self):
        return cPickle.dumps(self._g, cPickle.HIGHEST_PROTOCOL) 
Example #22
Source File: eventmgr.py    From pycbc with GNU General Public License v3.0
def save_state(self, tnum_finished, filename):
        """Save the current state of the background buffers"""
        from pycbc.io.hdf import dump_state

        self.tnum_finished = tnum_finished
        logging.info('Writing checkpoint file at template %s', tnum_finished)
        fp = h5py.File(filename, 'w')
        dump_state(self, fp, protocol=cPickle.HIGHEST_PROTOCOL)
        fp.close() 
Example #23
Source File: load_data.py    From TextFlow with MIT License
def process_data(base_path, dataset, min_note=21, note_range=88):
    output = os.path.join(base_path, dataset.filename)
    if os.path.exists(output):
        try:
            with open(output, "rb") as f:
                return pickle.load(f)
        except (ValueError, UnicodeDecodeError):
            # Assume python env has changed.
            # Recreate pickle file in this env's format.
            os.remove(output)

    print("processing raw data - {} ...".format(dataset.name))
    data = pickle.load(urlopen(dataset.url))
    processed_dataset = {}
    for split, data_split in data.items():
        processed_dataset[split] = {}
        n_seqs = len(data_split)
        processed_dataset[split]['sequence_lengths'] = torch.zeros(n_seqs, dtype=torch.long)
        processed_dataset[split]['sequences'] = []
        for seq in range(n_seqs):
            seq_length = len(data_split[seq])
            processed_dataset[split]['sequence_lengths'][seq] = seq_length
            processed_sequence = torch.zeros((seq_length, note_range))
            for t in range(seq_length):
                note_slice = torch.tensor(list(data_split[seq][t]), dtype=torch.int64) - min_note
                slice_length = len(note_slice)
                if slice_length > 0:
                    processed_sequence[t, note_slice] = torch.ones(slice_length)
            processed_dataset[split]['sequences'].append(processed_sequence)
        print(split)
        print(n_seqs)
        print(processed_dataset[split]['sequence_lengths'])
        print(processed_dataset[split]['sequence_lengths'].max())
        print(processed_dataset[split]['sequences'][0][0], processed_dataset[split]['sequences'][0].shape)
    pickle.dump(processed_dataset, open(output, "wb"), pickle.HIGHEST_PROTOCOL)
    print("dumped processed data to %s" % output)


# this logic will be initiated upon import 
Example #24
Source File: embedding.py    From word-embeddings-benchmarks with MIT License
def save(self, fname):
        """Save a pickled version of the embedding into `fname`."""

        vec = self.vectors
        voc = self.vocabulary.getstate()
        state = (voc, vec)
        with open(fname, 'wb') as f:
            pickle.dump(state, f, protocol=pickle.HIGHEST_PROTOCOL) 
Example #25
Source File: data_handlers.py    From feagen with BSD 2-Clause "Simplified" License
def write_data(self, result_dict):
        for key, val in six.viewitems(result_dict):
            pickle_path = os.path.join(self.pickle_dir, key + ".pkl")
            with SimpleTimer("Writing generated data %s to pickle file" % key,
                             end_in_new_line=False), \
                    open(pickle_path, "wb") as fp:
                cPickle.dump(val, fp, protocol=cPickle.HIGHEST_PROTOCOL) 
Example #26
Source File: sequitur.py    From sequitur-g2p with GNU General Public License v2.0
def checkpoint(self, context):
        print('checkpointing', file=context.log)
        import cPickle as pickle
        fname = self.checkpointFile % context.iteration
        f = open(fname, 'wb')
        pickle.dump((self, context), f, pickle.HIGHEST_PROTOCOL)
        f.close()


# =========================================================================== 
Example #27
Source File: read_celebADataset.py    From WassersteinGAN.tensorflow with MIT License
def read_dataset(data_dir):
    pickle_filename = "celebA.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        # utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        celebA_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        dir_path = os.path.join(data_dir, celebA_folder)
        if not os.path.exists(dir_path):
            print ("CelebA dataset needs to be downloaded and unzipped manually")
            print ("Download from: %s" % DATA_URL)
            raise ValueError("Dataset not found")

        result = create_image_lists(dir_path)
        print ("Training set: %d" % len(result['train']))
        print ("Test set: %d" % len(result['test']))
        print ("Validation set: %d" % len(result['validation']))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        celebA = CelebA_Dataset(result)
        del result
    return celebA 
Example #28
Source File: xpediteData.py    From Xpedite with Apache License 2.0
def commit(self):
    """Commits accumulated data to the xpedite data file"""
    offset = 0
    layout = {}

    for key in self.dataTable:
      if not isinstance(self.dataTable[key].data, str):
        isMarshalled = True
        self.dataTable[key].binData = pickle.dumps(self.dataTable[key].data, pickle.HIGHEST_PROTOCOL)
      else:
        isMarshalled = False
        self.dataTable[key].binData = self.dataTable[key].data

      dataSize = len(self.dataTable[key].binData)
      layout[key] = LayoutEntry(offset, isMarshalled, dataSize)
      offset += dataSize

    with open(self.dataFile, 'wb') as binFile:
      pTable = pickle.dumps(layout, pickle.HIGHEST_PROTOCOL)
      pTableSize = len(pTable)

      #convert to bytes
      binBuffer = create_string_buffer(8)
      struct.pack_into('i', binBuffer, 0, pTableSize)
      binFile.write(binBuffer)
      binFile.write(pTable)

      for key in self.dataTable:
        binFile.write(self.dataTable[key].binData) 
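The commit above lays the file out as an 8-byte header carrying the pickled layout table's size, followed by the pickled table, followed by the raw data blobs at their recorded offsets. A hypothetical reader for that layout (not part of Xpedite; the LayoutEntry attribute names offset, isMarshalled, and dataSize are assumed from the constructor call above) would mirror those steps:

import struct
from six.moves import cPickle

def read_entry(data_file, key):
    """Read back one entry from a file written by commit() above."""
    with open(data_file, 'rb') as bin_file:
        header = bin_file.read(8)  # fixed-size header buffer
        table_size = struct.unpack_from('i', header)[0]
        layout = cPickle.loads(bin_file.read(table_size))
        entry = layout[key]
        # Entry offsets are relative to the start of the data region.
        bin_file.seek(8 + table_size + entry.offset)
        raw = bin_file.read(entry.dataSize)
        return cPickle.loads(raw) if entry.isMarshalled else raw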
Example #29
Source File: io.py    From Context-aware-ZSR with MIT License
def save_object(obj, file_name):
    """Save a Python object by pickling it."""
    file_name = os.path.abspath(file_name)
    with open(file_name, 'wb') as f:
        pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL) 
Example #30
Source File: io.py    From pcl.pytorch with MIT License
def save_object(obj, file_name):
    """Save a Python object by pickling it."""
    file_name = os.path.abspath(file_name)
    with open(file_name, 'wb') as f:
        pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)