Python cloudpickle.load() Examples

The following are 30 code examples of cloudpickle.load(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the cloudpickle module, or try the search function.
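Before the project snippets, here is a minimal, self-contained sketch (not taken from any of the projects below) of the round trip these examples rely on: cloudpickle.dump() writes an object to a file opened in binary mode, and cloudpickle.load() reads it back. The file name and the lambda are illustrative only.

import cloudpickle

# A lambda that references a module-level name: the standard pickle module
# cannot serialize lambdas at all, but cloudpickle serializes the function by value.
scale = 3
transform = lambda x: x * scale

# Write the object with cloudpickle.dump() ...
with open("transform.pkl", "wb") as f:
    cloudpickle.dump(transform, f)

# ... and read it back with cloudpickle.load() from a file opened in "rb" mode.
with open("transform.pkl", "rb") as f:
    restored = cloudpickle.load(f)

print(restored(4))  # -> 12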
Example #1
Source File: keras.py    From mlflow with Apache License 2.0
def _load_model(model_path, keras_module, **kwargs):
    keras_models = importlib.import_module(keras_module.__name__ + ".models")
    custom_objects = kwargs.pop("custom_objects", {})
    custom_objects_path = None
    if os.path.isdir(model_path):
        if os.path.isfile(os.path.join(model_path, _CUSTOM_OBJECTS_SAVE_PATH)):
            custom_objects_path = os.path.join(model_path, _CUSTOM_OBJECTS_SAVE_PATH)
        model_path = os.path.join(model_path, _MODEL_SAVE_PATH)
    if custom_objects_path is not None:
        import cloudpickle
        with open(custom_objects_path, "rb") as in_f:
            pickled_custom_objects = cloudpickle.load(in_f)
            pickled_custom_objects.update(custom_objects)
            custom_objects = pickled_custom_objects
    from distutils.version import StrictVersion
    if StrictVersion(keras_module.__version__.split('-')[0]) >= StrictVersion("2.2.3"):
        # NOTE: Keras 2.2.3 does not work with unicode paths in python2. Pass in h5py.File instead
        # of string to avoid issues.
        import h5py
        with h5py.File(os.path.abspath(model_path), "r") as model_path:
            return keras_models.load_model(model_path, custom_objects=custom_objects, **kwargs)
    else:
        # NOTE: Older versions of Keras only handle filepath.
        return keras_models.load_model(model_path, custom_objects=custom_objects, **kwargs) 
Example #2
Source File: elmo.py    From qb with MIT License
def load(cls, directory: str):
        with open(os.path.join(directory, 'elmo.pkl'), 'rb') as f:
            params = cloudpickle.load(f)

        guesser = ElmoGuesser(params['config_num'])
        guesser.class_to_i = params['class_to_i']
        guesser.i_to_class = params['i_to_class']
        guesser.random_seed = params['random_seed']
        guesser.dropout = params['dropout']
        guesser.model = ElmoModel(len(guesser.i_to_class))
        guesser.model.load_state_dict(torch.load(
            os.path.join(directory, 'elmo.pt'), map_location=lambda storage, loc: storage
        ))
        guesser.model.eval()
        if CUDA:
            guesser.model = guesser.model.cuda()
        return guesser 
Example #3
Source File: base_class.py    From stable-baselines with MIT License
def _load_from_file_cloudpickle(load_path):
        """Legacy code for loading older models stored with cloudpickle

        :param load_path: (str or file-like) where from to load the file
        :return: (dict, OrderedDict) Class parameters and model parameters
        """
        if isinstance(load_path, str):
            if not os.path.exists(load_path):
                if os.path.exists(load_path + ".pkl"):
                    load_path += ".pkl"
                else:
                    raise ValueError("Error: the file {} could not be found".format(load_path))

            with open(load_path, "rb") as file_:
                data, params = cloudpickle.load(file_)
        else:
            # Here load_path is a file-like object, not a path
            data, params = cloudpickle.load(load_path)

        return data, params 
Example #4
Source File: tf_intent_classifer.py    From ai-chatbot-framework with MIT License
def load(self, models_dir):
        try:
            del self.model

            tf.keras.backend.clear_session()

            self.model = tf.keras.models.load_model(
                os.path.join(models_dir, "tf_intent_model.hd5"), compile=True)

            self.graph = tf.get_default_graph()

            print("Tf model loaded")

            with open(os.path.join(models_dir, "labels.pkl"), 'rb') as f:
                self.label_encoder = cloudpickle.load(f)
                print("Labels model loaded")

        except IOError:
            return False 
Example #5
Source File: base_class.py    From stable-baselines with MIT License
def load(cls, load_path, env=None, custom_objects=None, **kwargs):
        """
        Load the model from file

        :param load_path: (str or file-like) the saved parameter location
        :param env: (Gym Environment) the new environment to run the loaded model on
            (can be None if you only need prediction from a trained model)
        :param custom_objects: (dict) Dictionary of objects to replace
            upon loading. If a variable is present in this dictionary as a
            key, it will not be deserialized and the corresponding item
            will be used instead. Similar to custom_objects in
            `keras.models.load_model`. Useful when you have an object in
            file that can not be deserialized.
        :param kwargs: extra arguments to change the model when loading
        """
        raise NotImplementedError() 
Example #6
Source File: base_class.py    From stable-baselines with MIT License
def _setup_load_operations(self):
        """
        Create tensorflow operations for loading model parameters
        """
        # Assume tensorflow graphs are static -> check
        # that we only call this function once
        if self._param_load_ops is not None:
            raise RuntimeError("Parameter load operations have already been created")
        # For each loadable parameter, create appropiate
        # placeholder and an assign op, and store them to
        # self.load_param_ops as dict of variable.name -> (placeholder, assign)
        loadable_parameters = self.get_parameter_list()
        # Use OrderedDict to store order for backwards compatibility with
        # list-based params
        self._param_load_ops = OrderedDict()
        with self.graph.as_default():
            for param in loadable_parameters:
                placeholder = tf.placeholder(dtype=param.dtype, shape=param.shape)
                # param.name is unique (tensorflow variables have unique names)
                self._param_load_ops[param.name] = (placeholder, param.assign(placeholder)) 
Example #7
Source File: dagger.py    From imitation with MIT License
def extend_and_update(self, **train_kwargs) -> int:
        """Extend internal batch of data and train.

        Specifically, this method will load new transitions (if necessary), train
        the model for a while, and advance the round counter. If there are no fresh
        demonstrations in the demonstration directory for the current round, then
        this will raise a `NeedsDemosException` instead of training or advancing
        the round counter. In that case, the user should call
        `.get_trajectory_collector()` and use the returned
        `InteractiveTrajectoryCollector` to produce a new set of demonstrations for
        the current interaction round.

        Arguments:
          **train_kwargs: arguments to pass to `BC.train()`.

        Returns:
          round_num: new round number after advancing the round counter.
        """
        tf.logging.info("Loading demonstrations")
        self._try_load_demos()
        tf.logging.info(f"Training at round {self.round_num}")
        self.bc_trainer.train(**train_kwargs)
        self.round_num += 1
        tf.logging.info(f"New round number is {self.round_num}")
        return self.round_num 
Example #8
Source File: simple.py    From DRL_DeliveryDuel with MIT License
def load(path):
        with open(path, "rb") as f:
            model_data, act_params = cloudpickle.load(f)
        act = deepq.build_act(**act_params)
        tf_config = tf.ConfigProto()
        tf_config.gpu_options.allow_growth = True
        sess = tf.Session(config=tf_config)
        sess.__enter__()
        with tempfile.TemporaryDirectory() as td:
            arc_path = os.path.join(td, "packed.zip")
            with open(arc_path, "wb") as f:
                f.write(model_data)

            zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED).extractall(td)
            load_state(os.path.join(td, "model"))

        return ActWrapper(act, act_params) 
Example #9
Source File: pposgd_sensor.py    From midlevel-reps with MIT License
def load(path):
    with open(path, "rb") as f:
        model_data = cloudpickle.load(f)
    sess = U.get_session()
    sess.__enter__()
    with tempfile.TemporaryDirectory() as td:
        arc_path = os.path.join(td, "packed.zip")
        with open(arc_path, "wb") as f:
            f.write(model_data)

        zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED).extractall(td)
        U.load_state(os.path.join(td, "model"))
    #return ActWrapper(act, act_params) 
Example #10
Source File: __init__.py    From Rasa_NLU_Chi with Apache License 2.0
def class_from_module_path(module_path):
    """Given the module name and path of a class, tries to retrieve the class.

    The loaded class can be used to instantiate new objects. """
    import importlib

    # load the module, will raise ImportError if module cannot be loaded
    if "." in module_path:
        module_name, _, class_name = module_path.rpartition('.')
        m = importlib.import_module(module_name)
        # get the class, will raise AttributeError if class cannot be found
        return getattr(m, class_name)
    else:
        return globals()[module_path] 
Example #11
Source File: 讀者.py    From Librian with Mozilla Public License 2.0
def 編譯(self, s):  # "compile": parse the script file at path s into a 劇本 (script) object
        with 讀txt.讀(s) as f:
            return 劇本(liber.load(f), s)
Example #12
Source File: 讀者.py    From Librian with Mozilla Public License 2.0
def 讀檔(self, 路徑):  # "load save file": restore the pickled reader state from the given path
        try:
            with open(路徑, 'rb') as f:
                data = pickle.load(f)['虛擬機狀態']  # saved virtual-machine state
                角色.角色表 = data['角色表']  # character table
                鏡頭.鏡頭對應 = data['鏡頭對應']  # camera mapping
                self.狀態 = data['讀者狀態']  # reader state
                self.狀態.額外信息 = ('load',)
                self.劇本棧 = data['劇本棧']  # script stack
                self.箱庭 = data['箱庭']  # sandbox (scene state)
        except Exception as e:
            logging.warning('讀檔失敗……因爲%s' % e)  # "failed to load save file because %s"
Example #13
Source File: simple.py    From deeprl-baselines with MIT License
def load(path):
        with open(path, "rb") as f:
            model_data, act_params = cloudpickle.load(f)
        act = deepq.build_act(**act_params)
        sess = tf.Session()
        sess.__enter__()
        with tempfile.TemporaryDirectory() as td:
            arc_path = os.path.join(td, "packed.zip")
            with open(arc_path, "wb") as f:
                f.write(model_data)

            zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED).extractall(td)
            U.load_state(os.path.join(td, "model"))

        return ActWrapper(act, act_params) 
Example #14
Source File: __init__.py    From Rasa_NLU_Chi with Apache License 2.0
def read_yaml(content):
    fix_yaml_loader()
    return yaml.load(content) 
Example #15
Source File: __init__.py    From Rasa_NLU_Chi with Apache License 2.0
def read_yaml_file(filename):
    fix_yaml_loader()
    return yaml.load(read_file(filename, "utf-8")) 
Example #16
Source File: __init__.py    From Rasa_NLU_Chi with Apache License 2.0
def pycloud_unpickle(file_name):
    # type: (Text) -> Any
    """Unpickle an object from file using cloudpickle."""
    from future.utils import PY2
    import cloudpickle

    with io.open(file_name, 'rb') as f:  # pragma: no test
        if PY2:
            return cloudpickle.load(f)
        else:
            return cloudpickle.load(f, encoding="latin-1") 
Example #17
Source File: simple.py    From BackpropThroughTheVoidRL with MIT License
def load(path):
        with open(path, "rb") as f:
            model_data, act_params = cloudpickle.load(f)
        act = deepq.build_act(**act_params)
        sess = tf.Session()
        sess.__enter__()
        with tempfile.TemporaryDirectory() as td:
            arc_path = os.path.join(td, "packed.zip")
            with open(arc_path, "wb") as f:
                f.write(model_data)

            zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED).extractall(td)
            U.load_state(os.path.join(td, "model"))

        return ActWrapper(act, act_params) 
Example #18
Source File: simple.py    From BackpropThroughTheVoidRL with MIT License
def load(path):
    """Load act function that was returned by learn function.

    Parameters
    ----------
    path: str
        path to the act function pickle

    Returns
    -------
    act: ActWrapper
        function that takes a batch of observations
        and returns actions.
    """
    return ActWrapper.load(path) 
Example #19
Source File: pipeline_invoke_python.py    From models with Apache License 2.0
def _initialize_upon_import():
    model_pkl_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'model.pkl')

    # Load pickled model from model directory
    with open(model_pkl_path, 'rb') as fh:
        restored_model = pickle.load(fh)

    return restored_model


# This is called unconditionally at *module import time*...
# Note:  this _model isn't used right now.  see _invoke() code below
# TODO:  Use the values in the trained/pickled model 
Example #20
Source File: pposgd_fuse.py    From midlevel-reps with MIT License
def load(path):
    with open(path, "rb") as f:
        model_data = cloudpickle.load(f)
    sess = U.get_session()
    sess.__enter__()
    with tempfile.TemporaryDirectory() as td:
        arc_path = os.path.join(td, "packed.zip")
        with open(arc_path, "wb") as f:
            f.write(model_data)

        zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED).extractall(td)
        U.load_state(os.path.join(td, "model"))
    # return ActWrapper(act, act_params) 
Example #21
Source File: pposgd_simple.py    From midlevel-reps with MIT License
def load(path):
    with open(path, "rb") as f:
        model_data = cloudpickle.load(f)
    sess = U.get_session()
    sess.__enter__()
    with tempfile.TemporaryDirectory() as td:
        arc_path = os.path.join(td, "packed.zip")
        with open(arc_path, "wb") as f:
            f.write(model_data)

        zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED).extractall(td)
        U.load_state(os.path.join(td, "model"))
    #return ActWrapper(act, act_params) 
Example #22
Source File: deepq.py    From baselines with MIT License
def load_act(path):
        with open(path, "rb") as f:
            model_data, act_params = cloudpickle.load(f)
        act = deepq.build_act(**act_params)
        sess = tf.Session()
        sess.__enter__()
        with tempfile.TemporaryDirectory() as td:
            arc_path = os.path.join(td, "packed.zip")
            with open(arc_path, "wb") as f:
                f.write(model_data)

            zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED).extractall(td)
            load_variables(os.path.join(td, "model"))

        return ActWrapper(act, act_params) 
Example #23
Source File: submission.py    From ramp-workflow with BSD 3-Clause "New" or "Revised" License
def pickle_model(fold_output_path, trained_workflow, model_name='model.pkl'):
    """Pickle and reload trained workflow.

    If workflow can't be pickled, print warning and return original workflow.

    Parameters
    ----------
    fold_output_path : str
        the path into which the model will be pickled
    trained_workflow : a rampwf.workflow
        the workflow to be pickled
    model_name : str (default='model.pkl')
        the file name of the pickled workflow
    Returns
    -------
    trained_workflow : a rampwf.workflow
        either the input workflow or the pickled and reloaded workflow
    """
    msg = "Warning: model can't be pickled."
    model_file = os.path.join(fold_output_path, model_name)
    try:
        with open(model_file, 'wb') as pickle_file:
            cloudpickle.dump(trained_workflow, pickle_file)
    except pickle.PicklingError as e:
        print_warning(msg)
        print_warning(e)
        return trained_workflow
    else:
        # check if dumped trained_workflow can be loaded
        try:
            with open(model_file, 'rb') as pickle_file:
                trained_workflow = cloudpickle.load(pickle_file)
        except Exception as e:
            print_warning(msg)
            print_warning(e)

    return trained_workflow 
Example #24
Source File: starspace_intent_classifier.py    From ai-chatbot-framework with MIT License
def process(self, query, INTENT_RANKING_LENGTH=5):
        """Return the most likely intent and its similarity to the input."""

        message = self.transform(query)

        intent = {"name": None, "confidence": 0.0}
        intent_ranking = []

        if self.session is None:
            app.logger.error("There is no trained tf.session: "
                             "component is either not trained or "
                             "didn't receive enough training data")

        else:
            # get features (bag of words) for a message
            X = message.get("text_features").reshape(1, -1)

            # stack encoded_all_intents on top of each other
            # to create candidates for test examples
            all_Y = self._create_all_Y(X.shape[0])

            # load tf graph and session
            intent_ids, message_sim = self._calculate_message_sim(X, all_Y)

            if intent_ids.size > 0:
                intent = {"intent": self.inv_intent_dict[intent_ids[0]],
                          "confidence": message_sim[0]}

                ranking = list(zip(list(intent_ids), message_sim))

                ranking = ranking[:INTENT_RANKING_LENGTH]

                intent_ranking = [{"intent": self.inv_intent_dict[intent_idx],
                                   "confidence": score}
                                  for intent_idx, score in ranking]

        return intent, intent_ranking 
Example #25
Source File: sklearn_intent_classifer.py    From ai-chatbot-framework with MIT License
def load(self, PATH):
        """
        load trained model from given path
        :param PATH:
        :return:
        """
        try:
            with open(PATH, 'rb') as f:
                self.model = cloudpickle.load(f)
        except IOError:
            return False 
Example #26
Source File: sklearn_intent_classifer.py    From ai-chatbot-framework with MIT License
def __init__(self):

        self.model = None

        self.spacynlp = spacy.load('en')

        self.stopwords = set(STOP_WORDS +
                             ["n't", "'s", "'m", "ca"] +
                             list(ENGLISH_STOP_WORDS))

        self.punctuations = " ".join(string.punctuation).split(" ") + \
                            ["-----", "---", "...", "'ve"] 
Example #27
Source File: tf_intent_classifer.py    From ai-chatbot-framework with MIT License
def __init__(self):
        self.model = None
        self.nlp = spacy.load('en')
        self.label_encoder = LabelBinarizer()
        self.graph = None 
Example #28
Source File: simple.py    From self-imitation-learning with MIT License
def load(path):
    """Load act function that was returned by learn function.

    Parameters
    ----------
    path: str
        path to the act function pickle

    Returns
    -------
    act: ActWrapper
        function that takes a batch of observations
        and returns actions.
    """
    return ActWrapper.load(path) 
Example #29
Source File: simple.py    From self-imitation-learning with MIT License
def load(path):
        with open(path, "rb") as f:
            model_data, act_params = cloudpickle.load(f)
        act = deepq.build_act(**act_params)
        sess = tf.Session()
        sess.__enter__()
        with tempfile.TemporaryDirectory() as td:
            arc_path = os.path.join(td, "packed.zip")
            with open(arc_path, "wb") as f:
                f.write(model_data)

            zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED).extractall(td)
            load_state(os.path.join(td, "model"))

        return ActWrapper(act, act_params) 
Example #30
Source File: __init__.py    From rasa_nlu with Apache License 2.0
def pycloud_unpickle(file_name: Text) -> Any:
    """Unpickle an object from file using cloudpickle."""
    import cloudpickle

    with io.open(file_name, 'rb') as f:  # pragma: no test
        return cloudpickle.load(f, encoding="latin-1")