Python jsonpickle.encode() Examples
The following are 30
code examples of jsonpickle.encode().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module jsonpickle, or try the search function.
Example #1
Source File: tasks_api.py From a2ml with Apache License 2.0 | 7 votes |
def with_context(params, proc):
    """Build a task context from *params*, run *proc* with it, and package the result.

    :param params: dict of task parameters; ``'args'``/``'kwargs'`` are defaulted
        if absent, and ``'tmp_file_to_remove'`` (if present) triggers async cleanup.
    :param proc: callable taking the created context and returning the task result.
    :return: dict with the raw ``'response'`` and the jsonpickled context ``'config'``.
    """
    ctx = create_context(params)
    # Ensure downstream consumers always find 'args' / 'kwargs' keys.
    if 'args' not in params:
        params['args'] = []
    if 'kwargs' not in params:
        params['kwargs'] = {}
    res = proc(ctx)
    if 'tmp_file_to_remove' in params:
        # Fire-and-forget task; presumably removes an uploaded temp file — TODO confirm.
        remove_tmp_file.delay(params['tmp_file_to_remove'])
    # Flip the context back to client-side mode before returning its config.
    ctx.set_runs_on_server(False)
    ctx.config.set('use_server', True, config_name='config')
    return {'response': res, 'config': jsonpickle.encode(ctx.config)}
Example #2
Source File: objcls.py From sia-cog with MIT License | 7 votes |
def predict(imagepath, target_x, target_y, name, model):
    """Classify an image (local path or URL) with *model* and return JSON-safe predictions.

    :param imagepath: local file path or http/https/ftp URL of the image.
    :param target_x: target width the image is resized to.
    :param target_y: target height the image is resized to.
    :param name: model name, used to pick pre/post-processing — assumed to match
        processInputImage/decodePrediction keys; TODO confirm.
    :param model: a model exposing ``predict`` on a (1, H, W, C) batch — presumably Keras.
    :return: list of {"synset", "text", "prediction"} dicts (prediction is a percent,
        rounded to 2 decimals), made JSON-plain via jsonpickle round-trip.
    :raises Exception: if a local *imagepath* does not exist.
    """
    if imagepath.startswith('http://') or imagepath.startswith('https://') or imagepath.startswith('ftp://'):
        # Remote image: download then resize in-memory.
        response = requests.get(imagepath)
        img = Image.open(BytesIO(response.content))
        img = img.resize((target_x, target_y))
    else:
        if not os.path.exists(imagepath):
            raise Exception('Input image file does not exist')
        img = image.load_img(imagepath, target_size=(target_x, target_y))
    x = image.img_to_array(img)
    # Add the batch dimension expected by model.predict.
    x = np.expand_dims(x, axis=0)
    x = processInputImage(name, x)
    preds = decodePrediction(name, model.predict(x))
    result = []
    # preds[0] holds (synset_id, label_text, score) tuples for the single batch item.
    for p in preds[0]:
        result.append({"synset": p[0], "text": p[1], "prediction": float("{0:.2f}".format((p[2] * 100)))})
    return json.loads(jsonpickle.encode(result, unpicklable=False))
Example #3
Source File: test_openlibrary.py From openlibrary-client with GNU Affero General Public License v3.0 | 6 votes |
def test_load_by_isbn(self, mock_get):
    """Edition.get(isbn=...) resolves an ISBN via the books API, then fetches
    the edition and author JSON, and the result matches the expected fixture."""
    isbn_key = 'ISBN:0137903952'
    isbn_bibkeys = {
        isbn_key: {
            'info_url': "https://openlibrary.org/books/%s/Artificial_intelligence" % self.target_olid
        }
    }
    # Three sequential HTTP JSON payloads: bibkey lookup, edition, author.
    mock_get.return_value.json.side_effect = [isbn_bibkeys, self.raw_edition.copy(), self.raw_author.copy()]
    # jsonpickle round-trip normalizes the Edition object to plain JSON data.
    actual = json.loads(jsonpickle.encode(self.ol.Edition.get(isbn=u'0137903952')))
    # Verify the exact request sequence the client is expected to make.
    mock_get.assert_has_calls([
        call("%s/api/books.json?bibkeys=%s" % (self.ol.base_url, isbn_key)),
        call().raise_for_status(),
        call().json(),
        call("%s/books/%s.json" % (self.ol.base_url, self.target_olid)),
        call().raise_for_status(),
        call().json(),
        call("%s/authors/OL440500A.json" % self.ol.base_url),
        call().raise_for_status(),
        call().json()
    ])
    self.assertEqual(actual, self.expected,
                     "Data didn't match for ISBN lookup: \n%s\n\nversus:\n\n %s" % (actual, self.expected))
Example #4
Source File: nltkmgr.py From sia-cog with MIT License | 6 votes |
def tokenize(data, language="english", filterStopWords=False, tagging=False):
    """Tokenize *data* with NLTK into sentences and words, optionally filtering
    stopwords and POS-tagging.

    :param data: the input text.
    :param language: NLTK language name for the tokenizers/stopword list.
    :param filterStopWords: when truthy, drop stopwords from the word tokens.
    :param tagging: when truthy, POS-tag the (filtered) word tokens.
    :return: {"sent_token", "word_token", "pos_tag"} as plain JSON data.
    """
    tags = []
    # Punctuation/symbol tokens always stripped from the word list.
    # A set gives O(1) membership tests instead of scanning a list per token.
    filterChars = {",", ".", "?", ";", ":", "'", "!", "@", "#", "$", "%", "&",
                   "*", "(", ")", "+", "{", "}", "[", "]", "\\", "|"}
    sent_token = nltk.tokenize.sent_tokenize(data, language)
    word_token = nltk.tokenize.word_tokenize(data, language)
    word_token = [w for w in word_token if w not in filterChars]
    if filterStopWords:
        stop_words = set(stopwords.words(language))
        word_token = [w for w in word_token if w not in stop_words]
    if tagging:
        tags = nltk.pos_tag(word_token)
    result = {"sent_token": sent_token, "word_token": word_token, "pos_tag": tags}
    # jsonpickle round-trip coerces any non-JSON-native values to plain data.
    return json.loads(jsonpickle.encode(result, unpicklable=False))
Example #5
Source File: thrift_converter.py From lightstep-tracer-python with MIT License | 6 votes |
def create_report(self, runtime, span_records):
    """Assemble a Thrift ReportRequest, JSON-encoding each span log payload.

    Payloads that fail to encode are replaced with the JSON_FAIL sentinel
    rather than aborting the report.

    :param runtime: Thrift runtime descriptor for the report.
    :param span_records: span records to ship.
    :return: the populated ttypes.ReportRequest.
    """
    report = ttypes.ReportRequest(runtime, span_records, None)
    for span in report.span_records:
        if not span.log_records:
            continue
        # enumerate() instead of list.index(log): index() is O(n) per log and
        # returns the first match, which is wrong when duplicate records exist.
        for index, log in enumerate(span.log_records):
            if log.payload_json is not None:
                try:
                    log.payload_json = jsonpickle.encode(
                        log.payload_json,
                        unpicklable=False,
                        make_refs=False,
                        max_depth=constants.JSON_MAX_DEPTH)
                except Exception:
                    # Narrowed from a bare `except:` so KeyboardInterrupt /
                    # SystemExit are no longer swallowed.
                    log.payload_json = jsonpickle.encode(constants.JSON_FAIL)
            span.log_records[index] = log
    return report
Example #6
Source File: qtable.py From Dollynator with GNU Lesser General Public License v3.0 | 6 votes |
def create_child_qtable(self, provider, option, transaction_hash, child_index):
    """
    Creates the QTable configuration for the child agent. This is done by copying
    the own QTable configuration and including the new host provider, the parent
    name and the transaction hash.

    :param provider: provider name used for the child's starting VPS state.
    :param option: provider option used for the child's starting VPS state.
    :param transaction_hash: the transaction hash the child is bought with.
    :param child_index: index appended to this agent's tree name to form the child's tree name.
    """
    next_state = VPSState(provider=provider, option=option)
    # Child tree name: "<parent tree>.<child_index>".
    tree = self.tree + "." + str(child_index)
    dictionary = {
        "environment": self.environment,
        "qtable": self.qtable,
        "providers_offers": self.providers_offers,
        "self_state": next_state,
        "transaction_hash": transaction_hash,
        "tree": tree
    }
    filename = os.path.join(user_config_dir(), 'Child_QTable.json')
    with open(filename, 'w') as json_file:
        # jsonpickle produces a JSON string, which json.dump then writes as a
        # JSON string value (double-encoded) — the reader presumably reverses both steps.
        encoded_dictionary = jsonpickle.encode(dictionary)
        json.dump(encoded_dictionary, json_file)
Example #7
Source File: qtable.py From Dollynator with GNU Lesser General Public License v3.0 | 6 votes |
def write_dictionary(self):
    """Persist this agent's QTable state to QTable.json in the user config directory."""
    target = os.path.join(user_config_dir(), 'QTable.json')
    snapshot = {
        "environment": self.environment,
        "qtable": self.qtable,
        "providers_offers": self.providers_offers,
        "self_state": self.self_state,
        "tree": self.tree
    }
    with open(target, 'w') as json_file:
        # jsonpickle string, written as a JSON string value via json.dump.
        json.dump(jsonpickle.encode(snapshot), json_file)
Example #8
Source File: nltkmgr.py From sia-cog with MIT License | 6 votes |
def synset(data):
    """Look up WordNet synsets for *data* and return senses, synonyms and antonyms.

    :param data: the word to look up.
    :return: {"list": [sense dicts], "synonyms": [...], "antonyms": [...]}
        as plain JSON-compatible data.
    """
    result = {}
    syns = wordnet.synsets(data)
    # Renamed from `list`/`l`: never shadow the builtin `list`.
    senses = []
    for s in syns:
        r = {}
        r["name"] = s.name()
        r["lemma"] = s.lemmas()[0].name()
        r["definition"] = s.definition()
        r["examples"] = s.examples()
        senses.append(r)
    result["list"] = senses
    synonyms = []
    antonyms = []
    for syn in syns:
        for lemma in syn.lemmas():
            synonyms.append(lemma.name())
            if lemma.antonyms():
                antonyms.append(lemma.antonyms()[0].name())
    result["synonyms"] = synonyms
    result["antonyms"] = antonyms
    return json.loads(jsonpickle.encode(result, unpicklable=False))
Example #9
Source File: intentapi.py From sia-cog with MIT License | 6 votes |
def predictint():
    """Flask handler: run intent prediction on the 'data' query parameter.

    Always returns HTTP-style JSON {"statuscode", "message", "result"};
    failures are reported with statuscode 500 rather than raised.
    """
    message = "Success"
    code = 200
    result = []
    try:
        # NOTE(review): `start` is referenced in the except block — it is only
        # safe because this assignment is the first statement in the try.
        start = datetime.utcnow()
        data = request.args.get('data')
        result = intentanalyzer.predict(data)
        # jsonpickle round-trip flattens the prediction to plain JSON data.
        result = json.loads(jsonpickle.encode(result, unpicklable=False))
        logmgr.LogPredSuccess("intent", constants.ServiceTypes.LangIntent, start)
    except Exception as e:
        code = 500
        message = str(e)
        logmgr.LogPredError("intent", constants.ServiceTypes.LangIntent, start, message)
    return jsonify({"statuscode": code, "message": message, "result": result})
Example #10
Source File: submission.py From Liked-Saved-Image-Downloader with MIT License | 6 votes |
def writeOutSubmissionsAsHtml(redditList, file):
    """Render each submission's HTML and write a complete UTF-8 page to *file*.

    :param redditList: iterable of objects exposing getHtml().
    :param file: binary-mode writable file object.
    """
    body = u''.join(entry.getHtml() + u'\n' for entry in redditList)
    page = u"""<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Reddit Saved Comments</title>
</head>
<body>
{0}
</body>
</html>
""".format(body)
    file.write(page.encode('utf8'))
Example #11
Source File: dbconn.py From DueUtil with GNU General Public License v3.0 | 5 votes |
def insert_object(id, pickleable_object):
    """Upsert *pickleable_object* (jsonpickled) into the collection named after its type.

    Blank ids are rejected silently — there is nothing sensible to key on.
    """
    if not id.strip():
        return
    collection = conn()[type(pickleable_object).__name__]
    payload = {'data': jsonpickle.encode(pickleable_object)}
    collection.update({'_id': id}, {"$set": payload}, upsert=True)
Example #12
Source File: submission.py From Liked-Saved-Image-Downloader with MIT License | 5 votes |
def writeOutSubmissionsAsXML(redditList, file):
    """Write each submission to *file* wrapped in <submission> tags, as UTF-8 bytes.

    :param redditList: iterable of objects exposing getXML().
    :param file: binary-mode writable file object.
    """
    for entry in redditList:
        record = u''.join([u'<submission>\n', entry.getXML(), u'</submission>\n'])
        file.write(record.encode('utf8'))
Example #13
Source File: core.py From chepy with GNU General Public License v3.0 | 5 votes |
def _pickle_class(self, obj: Any) -> Any:
    """Pickle *obj* to JSON with jsonpickle, then load it back as plain Python data.

    Args:
        obj (Any): Any object.

    Returns:
        Any: Unpickled JSON as a Python object.
    """
    encoded = jsonpickle.encode(obj, unpicklable=True)
    return ujson.loads(encoded)
Example #14
Source File: vk_chat.py From vk-messages-bot with MIT License | 5 votes |
def to_json(self):
    """Serialize this object to a jsonpickle JSON string."""
    encoded = jsonpickle.encode(self)
    return encoded
Example #15
Source File: client.py From vk-messages-bot with MIT License | 5 votes |
def to_json(self):
    """Return this client serialized as a jsonpickle JSON string."""
    serialized = jsonpickle.encode(self)
    return serialized
Example #16
Source File: vk_user.py From vk-messages-bot with MIT License | 5 votes |
def to_json(self):
    """Return this user serialized as a jsonpickle JSON string."""
    as_json = jsonpickle.encode(self)
    return as_json
Example #17
Source File: specs.py From console with MIT License | 5 votes |
def json_of_spec(spec):
    """Convert *spec* into plain JSON-compatible data (no jsonpickle type markers)."""
    encoded = jsonpickle.encode(spec, unpicklable=False)
    return json.loads(encoded)
Example #18
Source File: session.py From EvilTwinFramework with GNU General Public License v2.0 | 5 votes |
def save_session(self, folder_path):
    """Write every session_data entry to '<folder_path><key>.data' as jsonpickle JSON.

    Best-effort: a failure on one key is reported and the remaining keys are
    still written. *folder_path* is concatenated as-is, so it is expected to
    end with a path separator (kept to preserve the original caller contract).
    """
    for key in self.session_data.keys():
        path = folder_path + key + ".data"
        try:
            with open(path, "w") as data_file:
                data_file.write(encode(self.session_data[key]))
        except Exception as e:
            # Fixed Python-2 `print x` statements — syntax errors under Python 3.
            print(e)
            print("[-] Error trying to write '{}' data to '{}'.".format(key, path))
Example #19
Source File: test_dialogues.py From rasa_core with Apache License 2.0 | 5 votes |
def test_dialogue_serialisation(filename):
    """A dialogue file, restored into a tracker and jsonpickled back out,
    must equal the JSON stored on disk (round-trip stability)."""
    dialogue_json = utils.read_file(filename)
    restored = json.loads(dialogue_json)
    tracker = tracker_from_dialogue_file(filename)
    # Encode the tracker's dialogue and decode to plain data for comparison.
    en_de_coded = json.loads(jsonpickle.encode(tracker.as_dialogue()))
    assert restored == en_de_coded
Example #20
Source File: featurizers.py From rasa_core with Apache License 2.0 | 5 votes |
def persist(self, path):
    """Serialize this featurizer with jsonpickle into <path>/featurizer.json."""
    target = os.path.join(path, "featurizer.json")
    utils.create_dir_for_file(target)
    serialized = jsonpickle.encode(self)
    with open(target, 'w', encoding="utf-8") as out:
        # noinspection PyTypeChecker
        out.write(str(serialized))
Example #21
Source File: featurizers.py From rasa_core with Apache License 2.0 | 5 votes |
def _featurize_states(
        self,
        trackers_as_states: List[List[Dict[Text, float]]]
) -> Tuple[np.ndarray, List[int]]:
    """Create X: encode every tracker's state sequence into a feature array.

    Returns the stacked features and the true (pre-padding) dialogue lengths.
    """
    features = []
    true_lengths = []
    for tracker_states in trackers_as_states:
        dialogue_len = len(tracker_states)
        # len(trackers_as_states) = 1 means
        # it is called during prediction or we have
        # only one story, so no padding is needed
        if len(trackers_as_states) > 1:
            tracker_states = self._pad_states(tracker_states)
        story_features = [self.state_featurizer.encode(state)
                          for state in tracker_states]
        features.append(story_features)
        true_lengths.append(dialogue_len)
    # noinspection PyPep8Naming
    X = np.array(features)
    return X, true_lengths
Example #22
Source File: featurizers.py From rasa_core with Apache License 2.0 | 5 votes |
def encode(self, state: Dict[Text, float]) -> np.ndarray:
    """Encode a state dict into a dense feature vector.

    The vector is laid out as [user vocab | slots | bot vocab]; each state
    entry's probability is added at the matching index. Unknown features are
    logged and skipped. A None/partial state yields a vector of -1s.
    """
    if not self.num_features:
        raise Exception("LabelTokenizerSingleStateFeaturizer "
                        "was not prepared before encoding.")
    if state is None or None in state:
        # Sentinel encoding for "no state available".
        return np.ones(self.num_features, dtype=np.int32) * -1
    # we are going to use floats and convert to int later if possible
    # NOTE(review): np.float is removed in NumPy 1.24+ — consider np.float64.
    used_features = np.zeros(self.num_features, dtype=np.float)
    using_only_ints = True
    for state_name, prob in state.items():
        using_only_ints = using_only_ints and utils.is_int(prob)
        if state_name in self.user_labels:
            if PREV_PREFIX + ACTION_LISTEN_NAME in state:
                # else we predict next action from bot action and memory
                for t in state_name.split(self.split_symbol):
                    used_features[self.user_vocab[t]] += prob
        elif state_name in self.slot_labels:
            # Slots live directly after the user-vocab section.
            offset = len(self.user_vocab)
            idx = self.slot_labels.index(state_name)
            used_features[offset + idx] += prob
        elif state_name[len(PREV_PREFIX):] in self.bot_labels:
            # Previous-bot-action features: strip the prefix and tokenize.
            action_name = state_name[len(PREV_PREFIX):]
            for t in action_name.split(self.split_symbol):
                offset = len(self.user_vocab) + len(self.slot_labels)
                idx = self.bot_vocab[t]
                used_features[offset + idx] += prob
        else:
            logger.warning("Feature '{}' could not be found in "
                           "feature map.".format(state_name))
    if using_only_ints:
        # this is an optimization - saves us a bit of memory
        return used_features.astype(np.int32)
    else:
        return used_features
Example #23
Source File: featurizers.py From rasa_core with Apache License 2.0 | 5 votes |
def encode(self, state: Dict[Text, float]) -> np.ndarray:
    """Abstract hook: subclasses must turn a state dict into a feature vector."""
    raise NotImplementedError("SingleStateFeaturizer must have the capacity to "
                              "encode states to a feature vector")
Example #24
Source File: __init__.py From rasa_core with Apache License 2.0 | 5 votes |
def __eq__(self, other):
    """AgentUttered events are equal when text and jsonpickled data match."""
    if not isinstance(other, AgentUttered):
        return False
    mine = (self.text, jsonpickle.encode(self.data))
    theirs = (other.text, jsonpickle.encode(other.data))
    return mine == theirs
Example #25
Source File: __init__.py From rasa_core with Apache License 2.0 | 5 votes |
def __hash__(self):
    """Hash on the key together with the jsonpickled value."""
    encoded_value = jsonpickle.encode(self.value)
    return hash((self.key, encoded_value))
Example #26
Source File: __init__.py From rasa_core with Apache License 2.0 | 5 votes |
def __eq__(self, other):
    """BotUttered events are equal when text and jsonpickled data match."""
    if not isinstance(other, BotUttered):
        return False
    return (self.text, jsonpickle.encode(self.data)) == (
        other.text, jsonpickle.encode(other.data))
Example #27
Source File: __init__.py From rasa_core with Apache License 2.0 | 5 votes |
def __hash__(self):
    """Hash on the text together with the jsonpickled data payload."""
    encoded_data = jsonpickle.encode(self.data)
    return hash((self.text, encoded_data))
Example #28
Source File: __init__.py From rasa_core with Apache License 2.0 | 5 votes |
def __eq__(self, other):
    """UserUttered events are equal when text, intent name and jsonpickled
    entities all match."""
    if not isinstance(other, UserUttered):
        return False

    def fingerprint(event):
        return (event.text, event.intent.get("name"),
                [jsonpickle.encode(ent) for ent in event.entities])

    return fingerprint(self) == fingerprint(other)
Example #29
Source File: hubs_api.py From web2board with GNU Lesser General Public License v3.0 | 5 votes |
def _serialize_object(self, obj2ser):
    """jsonpickle-encode *obj2ser* using this hub's configured serialization options."""
    options = self.serialization_args
    return jsonpickle.encode(obj2ser, **options)
Example #30
Source File: core.py From chepy with GNU General Public License v3.0 | 5 votes |
def _convert_to_bytes(self) -> bytes:
    """Coerce the current state into bytes.

    Call this instead of touching `self.state` directly in methods that need
    a bytes view of the state.

    Raises:
        NotImplementedError: If type coercion isn't available for the current
            state type.
    """
    state = self.state
    if isinstance(state, bytes):
        return state
    if isinstance(state, str):
        return state.encode()
    # int, dict and list all stringify the same way before encoding.
    if isinstance(state, (int, dict, list)):
        return str(state).encode()
    if isinstance(state, bool):  # pragma: no cover
        # Unreachable in practice: bool is a subclass of int, so the branch
        # above already handles it (with the identical result). Kept to
        # mirror the original dispatch.
        return str(state).encode()
    if isinstance(state, bytearray):
        return bytes(state)
    # todo check more types here
    raise NotImplementedError  # pragma: no cover