Python dill.load() Examples

The following are 30 code examples of dill.load(), drawn from open-source projects. Each example notes its source file, project, and license so you can trace it back to the original code. You may also want to look over the other functions and classes available in the dill module.
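Before working through the examples, here is a minimal round trip with dill.dump() and dill.load(). Unlike the standard pickle module, dill can also serialize objects such as lambdas and locally defined functions; the file name below is illustrative.

import dill

# dill handles objects the standard pickle module rejects,
# such as lambdas and nested functions.
square = lambda x: x * x

with open('square.pkl', 'wb') as f:
    dill.dump(square, f)

with open('square.pkl', 'rb') as f:
    loaded = dill.load(f)

assert loaded(4) == 16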
Example #1
Source File: simple.py    From learning2run with MIT License
def load(path, num_cpu=16):
    """Load act function that was returned by learn function.

    Parameters
    ----------
    path: str
        path to the act function pickle
    num_cpu: int
        number of cpus to use for executing the policy

    Returns
    -------
    act: ActWrapper
        function that takes a batch of observations
        and returns actions.
    """
    return ActWrapper.load(path, num_cpu=num_cpu) 
Example #2
Source File: deferred.py    From toil with Apache License 2.0
def _runAllDeferredFunctions(self, fileObj):
        """
        Read and run deferred functions until EOF from the given open file.
        """

        try:
            while True:
                # Load each function
                deferredFunction = dill.load(fileObj)
                logger.debug("Loaded deferred function %s" % repr(deferredFunction))
                # Run it
                self._runDeferredFunction(deferredFunction)
        except EOFError:
            # This is expected and means we read all the complete entries.
            logger.debug("Out of deferred functions!")
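The loop above is the standard way to read back a stream of objects written with repeated dill.dump() calls: dill.load() raises EOFError once no complete entry remains in the file. A minimal sketch of both sides (file name and objects are illustrative):

import dill

# Write several objects back to back into a single file.
with open('stream.pkl', 'wb') as out:
    for item in ({'a': 1}, [2, 3], 'four'):
        dill.dump(item, out)

# Read them back until the file is exhausted; dill.load() raises
# EOFError once no complete pickle entry remains.
items = []
with open('stream.pkl', 'rb') as src:
    try:
        while True:
            items.append(dill.load(src))
    except EOFError:
        pass

assert items == [{'a': 1}, [2, 3], 'four']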
Example #3
Source File: base.py    From batchflow with Apache License 2.0
def __init__(self, *args, **kwargs):
        self.full_config = Config()
        self.session = kwargs.get('session', None)
        self.graph = tf.Graph() if self.session is None else self.session.graph
        self._graph_context = None
        self._train_lock = threading.Lock()

        # Parameters of batch processing: splitting batches into parts and/or using multiple devices to process data
        self.microbatch = None
        self.devices = []
        self.leading_device = None
        self.device_to_scope = {}
        self.scope_to_device = {}
        self.multi_device = False

        # Private storage for often used tensors
        self._attrs = dict()

        # Save/load things
        self._saver = None
        self.preserve = ['_attrs', 'microbatch',
                         'devices', 'leading_device', 'device_to_scope', 'scope_to_device', 'multi_device']

        super().__init__(*args, **kwargs) 
Example #4
Source File: research.py    From batchflow with Apache License 2.0
def __init__(self):
        self.executables = OrderedDict()
        self.loaded = False # TODO: Think about it. Do we need load?
        self.branches = 1
        self.trials = 2
        self.workers = 1
        self.bar = False
        self.name = 'research'
        self.worker_class = PipelineWorker
        self.devices = None
        self.domain = None
        self.n_iters = None
        self.timeout = 5
        self.n_configs = None
        self.n_reps = None
        self.repeat_each = None
        self.logger = FileLogger()

        # update parameters for config. None or dict with keys (function, params, cache)
        self._update_config = None
        # update parameters for domain. None or dict with keys (function, each)
        self._update_domain = None
        self.n_updates = 0 
Example #5
Source File: procedure_continuous_tasks.py    From action-branching-agents with MIT License
def load(path, num_cpu=16):
    """Load act function that was returned by learn function.

    Parameters
    ----------
    path: str
        path to the act function pickle
    num_cpu: int
        number of cpus to use for executing the policy

    Returns
    -------
    act: ActWrapper
        function that takes a batch of observations
        and returns actions.
    """
    return ActWrapper.load(path, num_cpu=num_cpu) 
Example #6
Source File: simple.py    From rl-attack with MIT License
def load(path, num_cpu=16):
    """Load act function that was returned by learn function.

    Parameters
    ----------
    path: str
        path to the act function pickle
    num_cpu: int
        number of cpus to use for executing the policy

    Returns
    -------
    act: ActWrapper
        function that takes a batch of observations
        and returns actions.
    """
    return ActWrapper.load(path, num_cpu=num_cpu) 
Example #7
Source File: celery.py    From flask-unchained with MIT License
def _register_dill(self):
        def encode(obj, dumper=dill_dumps):
            return dumper(obj, protocol=pickle_protocol)

        def decode(s):
            return pickle_loads(str_to_bytes(s), load=dill_load)

        registry.register(
            name='dill',
            encoder=encode,
            decoder=decode,
            content_type='application/x-python-serialize',
            content_encoding='binary'
        )

    # the same as upstream, but we need to copy it here so we can access it 
Example #8
Source File: simple.py    From rl-attack-detection with MIT License
def load(path, num_cpu=16):
    """Load act function that was returned by learn function.

    Parameters
    ----------
    path: str
        path to the act function pickle
    num_cpu: int
        number of cpus to use for executing the policy

    Returns
    -------
    act: ActWrapper
        function that takes a batch of observations
        and returns actions.
    """
    return ActWrapper.load(path, num_cpu=num_cpu) 
Example #9
Source File: dqfd.py    From A-Guide-to-DeepMinds-StarCraft-AI-Environment with Apache License 2.0
def load(path, act_params, num_cpu=16):
  """Load act function that was returned by learn function.

  Parameters
  ----------
  path: str
      path to the act function pickle
  act_params: dict
      parameters used to build the act function
  num_cpu: int
      number of cpus to use for executing the policy

  Returns
  -------
  act: ActWrapper
      function that takes a batch of observations
      and returns actions.
  """
  return ActWrapper.load(path, num_cpu=num_cpu, act_params=act_params) 
Example #10
Source File: deepq_mineral_shards.py    From A-Guide-to-DeepMinds-StarCraft-AI-Environment with Apache License 2.0
def load(path, act_params, num_cpu=16):
  """Load act function that was returned by learn function.

  Parameters
  ----------
  path: str
      path to the act function pickle
  act_params: dict
      parameters used to build the act function
  num_cpu: int
      number of cpus to use for executing the policy

  Returns
  -------
  act: ActWrapper
      function that takes a batch of observations
      and returns actions.
  """
  return ActWrapper.load(path, num_cpu=num_cpu, act_params=act_params) 
Example #11
Source File: base_model.py    From MatchZoo with Apache License 2.0
def load_embedding_matrix(
        self,
        embedding_matrix: np.ndarray,
        name: str = 'embedding'
    ):
        """
        Load an embedding matrix.

        Load an embedding matrix into the model's embedding layer. The name
        of the embedding layer is specified by `name`. For models with only
        one embedding layer, set `name='embedding'` when creating the keras
        layer, and use the default `name` when loading the matrix. For models
        with more than one embedding layer, initialize each keras layer with
        a different layer name, and set `name` accordingly to load a matrix
        into the chosen layer.

        :param embedding_matrix: Embedding matrix to be loaded.
        :param name: Name of the layer. (default: 'embedding')
        """
        self.get_embedding_layer(name).set_weights([embedding_matrix]) 
Example #12
Source File: test_lambda.py    From lambdify with Apache License 2.0
def test_create(self, mock):

        value = 1
        function_name = 'test_function'

        @Lambda(name=function_name, bucket='test', key='test', client=self.client)
        def foo():
            return value

        package = DeploymentPackage(foo)

        zfp = zipfile.ZipFile(StringIO(package.zip_bytes(foo.dumped_code)), "r")
        func = dill.load(zfp.open('.lambda.dump'))
        self.assertEqual(func(), value)

        resp_create = foo.create()
        self.assertEqual(resp_create['FunctionName'], function_name)

        # moto doesn't support ZipFile-only lambda deployments, while
        # aws doesn't allow other arguments when specifying the ZipFile argument
        #resp_get = foo.get()
        #self.assertEqual(resp_get['Configuration']['FunctionName'], function_name) 
Example #13
Source File: train.py    From attention-is-all-you-need-pytorch with MIT License
def prepare_dataloaders(opt, device):
    batch_size = opt.batch_size
    with open(opt.data_pkl, 'rb') as f:
        data = pickle.load(f)

    opt.max_token_seq_len = data['settings'].max_len
    opt.src_pad_idx = data['vocab']['src'].vocab.stoi[Constants.PAD_WORD]
    opt.trg_pad_idx = data['vocab']['trg'].vocab.stoi[Constants.PAD_WORD]

    opt.src_vocab_size = len(data['vocab']['src'].vocab)
    opt.trg_vocab_size = len(data['vocab']['trg'].vocab)

    #========= Preparing Model =========#
    if opt.embs_share_weight:
        assert data['vocab']['src'].vocab.stoi == data['vocab']['trg'].vocab.stoi, \
            'To share word embeddings, the src/trg word2idx tables must be the same.'

    fields = {'src': data['vocab']['src'], 'trg':data['vocab']['trg']}

    train = Dataset(examples=data['train'], fields=fields)
    val = Dataset(examples=data['valid'], fields=fields)

    train_iterator = BucketIterator(train, batch_size=batch_size, device=device, train=True)
    val_iterator = BucketIterator(val, batch_size=batch_size, device=device)

    return train_iterator, val_iterator 
Example #14
Source File: core.py    From jaxnet with Apache License 2.0
def load(path: Path):
    with path.open('rb') as file:
        return dill.load(file) 
Example #15
Source File: nonCachingFileStore.py    From toil with Apache License 2.0
def _readJobState(jobStateFileName):
        with open(jobStateFileName, 'rb') as fH:
            state = dill.load(fH)
        return state 
Example #16
Source File: abstractFileStore.py    From toil with Apache License 2.0
def _load(cls, fileName):
            """
            Load the state of the cache from the state file

            :param str fileName: Path to the cache state file.
            :return: An instance of the state as a namespace.
            :rtype: _StateFile
            """
            # Read the value from the cache state file, then initialize an instance of
            # _CacheState with it.
            with open(fileName, 'rb') as fH:
                infoDict = dill.load(fH)
            return cls(infoDict) 
Example #17
Source File: utils_models.py    From auto_ml with MIT License
def insert_deep_learning_model(pipeline_step, file_name):
    # This is where we saved the random_name for this model
    random_name = pipeline_step.model
    # Load the Keras model here
    keras_file_name = file_name[:-5] + random_name + '_keras_deep_learning_model.h5'

    model = keras_load_model(keras_file_name)

    # Put the model back in place so that we can still use it to get predictions without having to load it back in from disk
    return model 
Example #18
Source File: __init__.py    From lifetimes with MIT License
def load_model(self, path):
        """
        Load model with dill package.

        Parameters
        ----------
        path: str
            From what path load model.

        """
        with open(path, "rb") as in_file:
            self.__dict__.update(dill.load(in_file).__dict__) 
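Note the design choice here: updating self.__dict__ from the unpickled object restores the fitted state in place, so the caller keeps the same object identity rather than receiving a new instance.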
Example #19
Source File: train.py    From aivivn-tone with MIT License
def resume_in_parts(self, train_parts, val, val_iterator, batch_size, save_path):
        checkpoint = torch.load(save_path)
        self.model.load_state_dict(checkpoint["model_state_dict"])
        self.model.to(device)
        self.optimizer.load_state_dict(checkpoint["optimizer_state_dict"])
        self.scheduler.load_state_dict(checkpoint["scheduler_state_dict"])
        start_epoch = checkpoint["epoch"] + 1
        self.train_in_parts(train_parts, val, val_iterator, batch_size, start_epoch=start_epoch) 
Example #20
Source File: utils_models.py    From auto_ml with MIT License
def load_ml_model(file_name):

    with open(file_name, 'rb') as read_file:
        base_pipeline = dill.load(read_file)

    if isinstance(base_pipeline, utils_categorical_ensembling.CategoricalEnsembler):
        for step in base_pipeline.transformation_pipeline.named_steps:
            pipeline_step = base_pipeline.transformation_pipeline.named_steps[step]

            try:
                if pipeline_step.get('model_name', 'reallylongnonsensicalstring')[:12] == 'DeepLearning':
                    pipeline_step.model = insert_deep_learning_model(pipeline_step, file_name)
            except AttributeError:
                pass

        for step in base_pipeline.trained_models:
            pipeline_step = base_pipeline.trained_models[step]

            try:
                if pipeline_step.get('model_name', 'reallylongnonsensicalstring')[:12] == 'DeepLearning':
                    pipeline_step.model = insert_deep_learning_model(pipeline_step, file_name)
            except AttributeError:
                pass

    else:

        for step in base_pipeline.named_steps:
            pipeline_step = base_pipeline.named_steps[step]
            try:
                if pipeline_step.get('model_name', 'reallylongnonsensicalstring')[:12] == 'DeepLearning':
                    pipeline_step.model = insert_deep_learning_model(pipeline_step, file_name)
            except AttributeError:
                pass

    return base_pipeline

# Keeping this here for legacy support 
Example #21
Source File: feature_sampler.py    From VerifAI with BSD 3-Clause "New" or "Revised" License
def restoreFromFile(path):
        with open(path, 'rb') as infile:
            allState = dill.load(infile)
            randState, numpyRandState, sampler = allState
            random.setstate(randState)
            np.random.set_state(numpyRandState)
            return sampler 
Example #22
Source File: utils.py    From bootcamp with Apache License 2.0
def load_pickle(file):
    if not os.path.exists(file):
        return None
    logger.info(f'Loading PKL file: {file}.')
    with open(file, 'rb') as r:
        return dill.load(r) 
Example #23
Source File: Process.py    From Transformer with Apache License 2.0
def create_fields(opt):
    
    spacy_langs = ['en', 'fr', 'de', 'es', 'pt', 'it', 'nl']
    if opt.src_lang not in spacy_langs:
        print('invalid src language: ' + opt.src_lang + '; supported languages: ' + str(spacy_langs))
    if opt.trg_lang not in spacy_langs:
        print('invalid trg language: ' + opt.trg_lang + '; supported languages: ' + str(spacy_langs))
    
    print("loading spacy tokenizers...")
    
    t_src = tokenize(opt.src_lang)
    t_trg = tokenize(opt.trg_lang)

    TRG = data.Field(lower=True, tokenize=t_trg.tokenizer, init_token='<sos>', eos_token='<eos>')
    SRC = data.Field(lower=True, tokenize=t_src.tokenizer)

    if opt.load_weights is not None:
        try:
            print("loading presaved fields...")
            SRC = pickle.load(open(f'{opt.load_weights}/SRC.pkl', 'rb'))
            TRG = pickle.load(open(f'{opt.load_weights}/TRG.pkl', 'rb'))
        except Exception:
            print("error opening SRC.pkl and TRG.pkl field files, please ensure they are in " + opt.load_weights + "/")
            quit()
        
    return SRC, TRG
Example #24
Source File: tv.py    From Kairos with GNU General Public License v3.0
def get_browser_instance(browser=None):
    result = browser
    if os.path.exists(FILENAME):
        with open(FILENAME, 'rb') as f:
            result = dill.load(f)
    return result 
Example #25
Source File: procedure_continuous_tasks.py    From action-branching-agents with MIT License
def load(path, num_cpu=16):
        with open(path, "rb") as f:
            model_data, act_params = dill.load(f)
        act = deepq.build_act(**act_params)
        sess = U.make_session(num_cpu=num_cpu)
        sess.__enter__()
        with tempfile.TemporaryDirectory() as td:
            arc_path = os.path.join(td, "packed.zip")
            with open(arc_path, "wb") as f:
                f.write(model_data)

            zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED).extractall(td)
            U.load_state(os.path.join(td, "model"))

        return ActWrapper(act, act_params) 
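In this example dill.load() recovers a tuple of raw checkpoint bytes and the parameters needed to rebuild the policy graph; the bytes are then unpacked into a temporary directory so TensorFlow can restore the saved session state from it.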
Example #26
Source File: serialization.py    From kubeface with Apache License 2.0
def load(fd):
    return dill.load(fd) 
Example #27
Source File: fileIO.py    From bayesloop with MIT License
def load(filename):
    """
    Load an instance of a bayesloop study class that was saved using the bayesloop.save() function.

    Args:
        filename(str): Path + filename to stored bayesloop study

    Returns:
        Study instance
    """
    with open(filename, 'rb') as f:
        S = dill.load(f)
    print('+ Successfully loaded study.')

    return S 
Example #28
Source File: base_preprocessor.py    From MatchZoo with Apache License 2.0
def load_preprocessor(dirpath: typing.Union[str, Path]) -> 'mz.DataPack':
    """
    Load the fitted `context`. The reverse function of :meth:`save`.

    :param dirpath: directory path of the saved model.
    :return: a :class:`DSSMPreprocessor` instance.
    """
    dirpath = Path(dirpath)

    data_file_path = dirpath.joinpath(BasePreprocessor.DATA_FILENAME)
    with open(data_file_path, 'rb') as data_file:
        return dill.load(data_file)
Example #29
Source File: data_pack.py    From MatchZoo with Apache License 2.0
def load_data_pack(dirpath: typing.Union[str, Path]) -> DataPack:
    """
    Load a :class:`DataPack`. The reverse function of :meth:`save`.

    :param dirpath: directory path of the saved model.
    :return: a :class:`DataPack` instance.
    """
    dirpath = Path(dirpath)

    data_file_path = dirpath.joinpath(DataPack.DATA_FILENAME)
    with open(data_file_path, 'rb') as data_file:
        dp = dill.load(data_file)

    return dp 
Example #30
Source File: base_model.py    From MatchZoo with Apache License 2.0
def load_model(dirpath: typing.Union[str, Path]) -> BaseModel:
    """
    Load a model. The reverse function of :meth:`BaseModel.save`.

    :param dirpath: directory path of the saved model
    :return: a :class:`BaseModel` instance

    Example:

            >>> import matchzoo as mz
            >>> model = mz.models.Naive()
            >>> model.guess_and_fill_missing_params(verbose=0)
            >>> model.build()
            >>> model.save('my-model')
            >>> model.params.keys() == mz.load_model('my-model').params.keys()
            True
            >>> import shutil
            >>> shutil.rmtree('my-model')

    """
    dirpath = Path(dirpath)

    params_path = dirpath.joinpath(BaseModel.PARAMS_FILENAME)
    weights_path = dirpath.joinpath(BaseModel.BACKEND_WEIGHTS_FILENAME)

    with open(params_path, mode='rb') as params_file:
        params = dill.load(params_file)

    model_instance = params['model_class'](params=params)
    model_instance.build()
    model_instance.compile()
    model_instance.backend.load_weights(weights_path)
    return model_instance