Python networkx.readwrite.json_graph.node_link_data() Examples
The following are 20 code examples of networkx.readwrite.json_graph.node_link_data(). Each one is taken from an open-source project; the project and source file are noted above each example. You may also want to check out the other functions and classes of the networkx.readwrite.json_graph module.
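Before the project examples, here is a minimal sketch (not taken from any of the projects below) of what node_link_data() produces and how it round-trips with node_link_graph():

import json
import networkx as nx
from networkx.readwrite import json_graph

G = nx.Graph()
G.add_edge('a', 'b', weight=3)

# node_link_data() returns a plain dict with 'nodes' and 'links' lists,
# so it can be handed straight to json.dumps().
data = json_graph.node_link_data(G)
print(json.dumps(data, indent=2))

# node_link_graph() is the inverse and rebuilds an equivalent graph.
G2 = json_graph.node_link_graph(data)

Every example that follows ultimately builds such a dict and feeds it to json.dump(), a web front end, or another serialisation layer.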
Example #1
Source File: phylogeny_utils.py From treeomics with GNU General Public License v3.0 | 6 votes |
def save_json_tree(filepath, tree):
    """
    Transform inferred phylogeny to JSON object and save it to a file
    :param filepath: path to the output file
    :param tree: reconstructed phylogeny given as networkx DiGraph object
    """
    # create simplified JSON tree
    out_ids = dict()
    out_tree = nx.DiGraph()
    for out_id, node in enumerate(tree.nodes()):
        out_ids[node] = out_id
        out_tree.add_node(out_id, name=tree.node[node]['name'])

    for u, v in tree.edges():
        out_tree.add_edge(out_ids[u], out_ids[v], value=len(tree.edge[u][v]['muts']))

    # create json output from reconstructed phylogeny
    json_data = json_graph.node_link_data(out_tree)

    # json object to output file
    with open(filepath, 'w') as json_file:
        json.dump(json_data, json_file, indent=4)

    logger.info('Create JSON file from reconstructed phylogeny: {}.'.format(filepath))
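Note that tree.node[node] and tree.edge[u][v] in this example are networkx 1.x accessors; on networkx 2.x the equivalent lookups are tree.nodes[node]['name'] and tree.edges[u, v]['muts'].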
Example #2
Source File: transformer.py From kgx with BSD 3-Clause "New" or "Revised" License | 6 votes |
def serialize(g: nx.MultiDiGraph) -> Dict:
    """
    Convert networkx.MultiDiGraph as a dictionary.

    Parameters
    ----------
    g: networkx.MultiDiGraph
        Graph to convert as a dictionary

    Returns
    -------
    dict
        A dictionary
    """
    data = json_graph.node_link_data(g)
    return data
Example #3
Source File: plotmpld3.py From psst with MIT License | 6 votes |
def __init__(self, G, pos, ax, gravity=1, link_distance=20, charge=-30, node_size=5,
             link_strength=1, friction=0.9, draggable=True):

    if pos is None:
        pass

    self.dict_ = {"type": "networkxd3forcelayout",
                  "graph": node_link_data(G),
                  "ax_id": mpld3.utils.get_id(ax),
                  "gravity": gravity,
                  "charge": charge,
                  "friction": friction,
                  "link_distance": link_distance,
                  "link_strength": link_strength,
                  "draggable": draggable,
                  "nominal_radius": node_size}
Example #4
Source File: smoothness.py From CS-GNN with MIT License | 6 votes |
def compute_label_smoothness(path, rate=0.):
    G_org = json_graph.node_link_graph(json.load(open(path+'-G.json')))
    # G_org = remove_unlabeled(G_org)
    if nx.is_directed(G_org):
        G_org = G_org.to_undirected()
    class_map = json.load(open(path+'-class_map.json'))
    for k, v in class_map.items():
        if type(v) != list:
            class_map = convert_list(class_map)
        break
    labels = convert_ndarray(class_map)
    labels = np.squeeze(label_to_vector(labels))

    # smooth
    G_org = label_broadcast(G_org, labels, rate)
    with open(path+'-G_'+str(rate)+'.json', 'w') as f:
        f.write(json.dumps(json_graph.node_link_data(G_org)))

    edge_num = G_org.number_of_edges()
    G = pygsp.graphs.Graph(nx.adjacency_matrix(G_org))
    smoothness = 0
    for src, dst in G_org.edges():
        if labels[src] != labels[dst]:
            smoothness += 1

    print('The smoothness is: ', 2*smoothness/edge_num)
Example #5
Source File: saver.py From IRCLogParser with GNU General Public License v3.0 | 5 votes |
def save_js_arc(reduced_CC_graph, channels_hash, output_directory, output_file_name):
    """ Saves the nx_graph as a js file with variable name communities,
    it is used in index.html to generate the arc graph

    Args:
        nx_graph: a networkx graph, here it is the reduced community community graph
        channels_hash(dict): list of channel names
        output_drectory(str): location where to save the file
        output_file_name(str): name of the file to be saved

    Returns:
        null
    """
    check_if_dir_exists(output_directory)  # create output directory if doesn't exist
    copy2("./lib/protovis/" + "arc_graph.html", output_directory)  # copy required files to output_directory
    copy2("./lib/protovis/" + "ex.css", output_directory)
    copy2("./lib/protovis/" + "protovis-r3.2.js", output_directory)

    jsondict = json_graph.node_link_data(reduced_CC_graph)
    max_weight_val = max(item['weight'] for item in jsondict['links'])

    # the key names in the jsondict_top_channels are kept as the following so that index.html can render it
    jsondict_top_channels = {}
    jsondict_top_channels['nodes'] = [{'nodeName': channels_hash[int(node['id']) - config.STARTING_HASH_CHANNEL]}
                                      for node in jsondict['nodes']]
    jsondict_top_channels['links'] = [{'source': link['source'], 'target': link['target'],
                                       'value': int(link['weight'] * config.EXPANSION_PARAMETER / float(max_weight_val))}
                                      for link in jsondict['links']]

    with open(output_directory + output_file_name, 'w') as f:
        f.write("var communities =")
        json.dump(jsondict_top_channels, f)
Example #6
Source File: graphs.py From PyCV-time with MIT License | 5 votes |
def buildGraph(simList, plot=False):
    g = distanceGraph(simList)
    trim(g, 0.15)

    #mst = nx.minimum_spanning_tree(g.to_undirected())
    #el = [(i, o, w) for (i, o, w) in g.edges_iter(data=True)
    #      if (i, o) in mst.edges()
    #      or (o, i) in mst.edges()]
    #g = nx.DiGraph()
    #g.add_edges_from(el)

    if plot:
        nx.draw_networkx(g, with_labels=True)
        plt.show()

    return json.dumps(json_graph.node_link_data(g))
Example #7
Source File: networkx_graph.py From vitrage with Apache License 2.0 | 5 votes |
def json_output_graph(self, **kwargs):
    """supports both 1.10<=networkx<2.0 and networx>=2.0 by returning the
    same json output regardless networx version

    :return: graph in json format
    """
    # TODO(annarez): when we decide to support networkx 2.0 with
    # versioning of Vitrage, we can move part of the logic to vitrageclient
    node_link_data = json_graph.node_link_data(self._g)
    node_link_data.update(kwargs)

    vitrage_id_to_index = dict()
    for index, node in enumerate(node_link_data['nodes']):
        vitrage_id_to_index[node[VProps.VITRAGE_ID]] = index
        if VProps.ID in self._g.nodes[node[VProps.ID]]:
            node[VProps.ID] = self._g.nodes[node[VProps.ID]][VProps.ID]
        node[VProps.GRAPH_INDEX] = index

    vers = nx.__version__
    if vers >= '2.0':
        for i in range(len(node_link_data['links'])):
            node_link_data['links'][i]['source'] = vitrage_id_to_index[
                node_link_data['links'][i]['source']]
            node_link_data['links'][i]['target'] = vitrage_id_to_index[
                node_link_data['links'][i]['target']]

    if kwargs.get('raw', False):
        return node_link_data
    else:
        return json.dumps(node_link_data)
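The remapping above exists because of a networkx API change: before 2.0, node_link_data() emitted links whose 'source' and 'target' were positional indices into the 'nodes' list, while from 2.0 onward they are the node identifiers themselves. A minimal standalone sketch of the same remapping (the graph and node names are illustrative, not from Vitrage):

import networkx as nx
from networkx.readwrite import json_graph

G = nx.Graph()
G.add_edge('alarm-1', 'host-3')

data = json_graph.node_link_data(G)

# On networkx >= 2.0 the links reference node ids; map them back to
# positions in data['nodes'] for consumers that expect 1.x-style indices.
id_to_index = {node['id']: i for i, node in enumerate(data['nodes'])}
for link in data['links']:
    link['source'] = id_to_index[link['source']]
    link['target'] = id_to_index[link['target']]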
Example #8
Source File: network.py From jsonmapping with MIT License | 5 votes |
def to_dict(self):
    return json_graph.node_link_data(self.graph)
Example #9
Source File: rede_cnpj.py From CNPJ-full with GNU General Public License v3.0 | 5 votes |
def json_G(self, G):
    return json_graph.node_link_data(G)
Example #10
Source File: __init__.py From EDeN with MIT License | 5 votes |
def serialize_graph(graph):
    """Make string."""
    json_data = json_graph.node_link_data(graph)
    serial_data = json.dumps(json_data,
                             separators=(',', ':'),
                             indent=4,
                             cls=SetEncoder)
    return serial_data
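The cls=SetEncoder argument matters because node_link_data() copies node and edge attributes verbatim into the output dict, and those attributes may hold values (presumably Python sets in EDeN's case) that json.dumps() cannot serialise on its own. SetEncoder itself is defined elsewhere in EDeN; a minimal illustrative encoder of the same kind might look like this:

import json

class SetEncoder(json.JSONEncoder):
    """Illustrative only: serialise sets as lists so json.dumps accepts them."""
    def default(self, obj):
        if isinstance(obj, set):
            return list(obj)
        return json.JSONEncoder.default(self, obj)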
Example #11
Source File: viz_graph.py From ibeis with Apache License 2.0 | 5 votes |
def tryout_web_graphs(self, infr):
    """
    https://plot.ly/python/
    http://bokeh.pydata.org/en/latest/
    pip install bokeh

    Notes:
        http://www.coppelia.io/2014/07/an-a-to-z-of-extra-features-for-the-d3-force-layout/
        http://andrewmellor.co.uk/blog/articles/2014/12/14/d3-networks/
        pip install plotly  # eww need to sign up and get a key
        http://igraph.org/

    import mpld3
    mpld3.save_html(fig, open('fig.html', 'w'))
    mpld3.save_json(fig, open('fig.json', 'w'))
    fig = pt.gcf()
    """
    #import plottool_ibeis as pt
    # http://andrewmellor.co.uk/blog/articles/2014/12/14/d3-networks/
    from networkx.readwrite import json_graph
    G = infr.graph
    data = json_graph.node_link_data(G)
    json_text = ut.to_json(data, pretty=True)
    ut.writeto('graph.json', json_text)

    ut.editfile('graph.json')
    ut.startfile('d3_example.html')

    # d3_location = ut.grab_zipped_url('https://github.com/d3/d3/releases/download/v3.5.17/d3.zip')
    # python -m SimpleHTTPServer 8000
Example #12
Source File: views.py From arguman.org with GNU Affero General Public License v3.0 | 5 votes |
def get_bundle(self, user):
    graph = self.build_graph(user)
    return json_graph.node_link_data(graph)
Example #13
Source File: node_link_data.py From EDeN with MIT License | 5 votes |
def eden_to_node_link_file(graph_list, fname):
    """Takes a list of networkx graphs and writes a serialised node_link_data JSON file."""
    with open(fname, 'w') as f:
        serials = eden_to_node_link_data(graph_list)
        for serial in serials:
            f.write('%s\n' % serial)
Example #14
Source File: node_link_data.py From EDeN with MIT License | 5 votes |
def eden_to_node_link_data(graph_list):
    """Takes a list of networkx graphs and yields serialised node_link_data JSON strings."""
    for G in graph_list:
        json_data = json_graph.node_link_data(G)
        serial_data = json.dumps(json_data)
        yield serial_data
Example #15
Source File: node_link_data.py From EDeN with MIT License | 5 votes |
def _node_link_data_to_eden(serialized_list):
    """Takes a string list in the serialised node_link_data JSON format and yields networkx graphs."""
    for serial_data in serialized_list:
        py_obj = json.loads(serial_data)
        graph = json_graph.node_link_graph(py_obj)
        yield graph
Example #16
Source File: node_link_data.py From EDeN with MIT License | 5 votes |
def node_link_data_to_eden(input=None, options=dict()):
    """Takes a string list in the serialised node_link_data JSON format and yields networkx graphs.

    Parameters
    ----------
    input : string
        A pointer to the data source.
    """
    return _node_link_data_to_eden(util.read(input))
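Taken together, Examples #13 through #16 implement a simple line-delimited JSON round trip for lists of graphs: one node_link_data JSON string per line going out, one graph per line coming back. A hedged usage sketch, assuming the EDeN helpers quoted above are importable and using placeholder graphs:

import networkx as nx

graphs = [nx.path_graph(3), nx.cycle_graph(4)]

# Write one serialised node_link_data dict per line.
eden_to_node_link_file(graphs, 'graphs.jsonl')

# Read them back: iterating the file yields one JSON string per line,
# which _node_link_data_to_eden() turns back into networkx graphs.
with open('graphs.jsonl') as f:
    restored = list(_node_link_data_to_eden(f))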
Example #17
Source File: vis_topic.py From corex_topic with Apache License 2.0 | 4 votes |
def vis_hierarchy(corexes, column_label=None, max_edges=100, prefix='topics', n_anchors=0):
    """Visualize a hierarchy of representations."""
    if column_label is None:
        column_label = list(map(str, range(corexes[0].alpha.shape[1])))

    # make l1 label
    alpha = corexes[0].alpha
    mis = corexes[0].mis
    l1_labels = []
    annotate = lambda q, s: q if s > 0 else '~' + q
    for j in range(corexes[0].n_hidden):
        # inds = np.where(alpha[j] * mis[j] > 0)[0]
        inds = np.where(alpha[j] >= 1.)[0]
        inds = inds[np.argsort(-alpha[j, inds] * mis[j, inds])]
        group_number = str('red_') + str(j) if j < n_anchors else str(j)
        label = group_number + ':' + ' '.join([annotate(column_label[ind], corexes[0].sign[j, ind])
                                               for ind in inds[:6]])
        label = textwrap.fill(label, width=25)
        l1_labels.append(label)

    # Construct non-tree graph
    weights = [corex.alpha.clip(0, 1) * corex.mis for corex in corexes[1:]]
    node_weights = [corex.tcs for corex in corexes[1:]]
    g = make_graph(weights, node_weights, l1_labels, max_edges=max_edges)

    # Display pruned version
    h = g.copy()  # trim(g.copy(), max_parents=max_parents, max_children=max_children)
    edge2pdf(h, prefix + '/graphs/graph_prune_' + str(max_edges), labels='label', directed=True, makepdf=True)

    # Display tree version
    tree = g.copy()
    tree = trim(tree, max_parents=1, max_children=False)
    edge2pdf(tree, prefix + '/graphs/tree', labels='label', directed=True, makepdf=True)

    # Output JSON files
    try:
        import os
        #copyfile(os.path.dirname(os.path.realpath(__file__)) + '/tests/d3_files/force.html', prefix + '/graphs/force.html')
        copyfile(os.path.dirname(os.path.realpath('tests')) + '/tests/d3_files/force.html', prefix + '/graphs/force.html')
    except:
        print("Couldn't find 'force.html' file for visualizing d3 output")

    import json
    from networkx.readwrite import json_graph
    mapping = dict([(n, tree.node[n].get('label', str(n))) for n in tree.nodes()])
    tree = nx.relabel_nodes(tree, mapping)
    json.dump(json_graph.node_link_data(tree), safe_open(prefix + '/graphs/force.json', 'w+'))
    json.dump(json_graph.node_link_data(h), safe_open(prefix + '/graphs/force_nontree.json', 'w+'))
    return g
Example #18
Source File: Spreadsheet.py From koala with GNU General Public License v3.0 | 4 votes |
def asdict(self):
    data = json_graph.node_link_data(self.G)

    def cell_to_dict(cell):
        if isinstance(cell.range, RangeCore):
            range = cell.range
            value = {
                "cells": range.addresses,
                "values": range.values,
                "nrows": range.nrows,
                "ncols": range.ncols
            }
        else:
            value = cell.value

        node = {
            "address": cell.address(),
            "formula": cell.formula,
            "value": value,
            "python_expression": cell.python_expression,
            "is_named_range": cell.is_named_range,
            "should_eval": cell.should_eval
        }
        return node

    # save nodes as simple objects
    nodes = []
    for node in data["nodes"]:
        cell = node["id"]
        nodes.append(cell.asdict())

    links = []
    for el in data['links']:
        link = {key: cell.address() for key, cell in el.items()}
        links.append(link)

    data["nodes"] = nodes
    data["links"] = links
    data["outputs"] = self.outputs
    data["inputs"] = self.inputs
    data["named_ranges"] = self.named_ranges

    return data
Example #19
Source File: io_autocnetgraph.py From plio with The Unlicense | 4 votes |
def save(network, projectname):
    """
    Save an AutoCNet candiate graph to disk in a compressed file.  The
    graph adjacency structure is stored as human readable JSON and all
    potentially large numpy arrays are stored as compressed binary.  The
    project archive is a standard .zip file that can have any ending,
    e.g., <projectname>.project, <projectname>.zip, <projectname>.myname.

    TODO: This func. writes a intermediary .npz to disk when saving.
    Can we write the .npz to memory?

    Parameters
    ----------
    network : object
              The AutoCNet Candidate Graph object

    projectname : str
                  The PATH to the output file.
    """
    # Convert the graph into json format
    js = json_graph.node_link_data(network)

    with ZipFile(projectname, 'w') as pzip:
        js_str = json.dumps(js, cls=NumpyEncoder, sort_keys=True, indent=4)
        pzip.writestr('graph.json', js_str)

        # Write the array node_attributes to hdf
        for n, data in network.nodes_iter(data=True):
            grp = data['node_id']
            np.savez('{}.npz'.format(data['node_id']),
                     descriptors=data.descriptors,
                     _keypoints=data._keypoints,
                     _keypoints_idx=data._keypoints.index,
                     _keypoints_columns=data._keypoints.columns)
            pzip.write('{}.npz'.format(data['node_id']))
            os.remove('{}.npz'.format(data['node_id']))

        # Write the array edge attributes to hdf
        for s, d, data in network.edges_iter(data=True):
            if s > d:
                s, d = d, s
            grp = str((s, d))
            np.savez('{}_{}.npz'.format(s, d),
                     matches=data.matches,
                     matches_idx=data.matches.index,
                     matches_columns=data.matches.columns,
                     _masks=data._masks,
                     _masks_idx=data._masks.index,
                     _masks_columns=data._masks.columns)
            pzip.write('{}_{}.npz'.format(s, d))
            os.remove('{}_{}.npz'.format(s, d))
Example #20
Source File: app.py From dataiku-contrib with Apache License 2.0 | 4 votes |
def draw_graph():
    #get data
    project_key = dataiku.default_project_key()
    similarity = float(request.args.get('similarity'))
    node_source = request.args.get('node_source')
    node_target = request.args.get('node_target')
    interactions = request.args.get('interactions')
    dataset = request.args.get('dataset')
    name = project_key + '.' + dataset
    print name

    df = dataiku.Dataset(name).get_dataframe()
    df = df[df[interactions] > similarity]
    df = df[[node_source, node_target, interactions]]
    df.columns = ['source', 'target', 'weight']
    print "%d rows" % df.shape[0]

    G = nx.Graph()
    G.add_edges_from(zip(df.source, df.target))
    print nx.info(G)

    # degree
    for node, val in dict(nx.degree(G)).iteritems():
        G.node[node]['degree'] = val

    # pagerank
    for node, val in dict(nx.pagerank(G)).iteritems():
        G.node[node]['pagerank'] = val

    # connected components
    components = sorted(nx.connected_components(G), key=len, reverse=True)
    for component, nodes in enumerate(components):
        for node in nodes:
            G.node[node]['cc'] = component

    # community
    partition = best_partition(G)
    for node, cluster in dict(partition).iteritems():
        G.node[node]['community'] = cluster

    # convert to JSON
    data = json_graph.node_link_data(G)

    # fix for networkx>=2.0 change of API
    if nx.__version__ > 2:
        dict_name_id = {data["nodes"][i]["id"]: i for i in xrange(len(data["nodes"]))}
        for link in data["links"]:
            link["source"] = dict_name_id[link["source"]]
            link["target"] = dict_name_id[link["target"]]

    return json.dumps({"status": "ok", "graph": data})