Python tensorflow.contrib.tensorboard.plugins.projector.ProjectorConfig() Examples
The following are 27 code examples of tensorflow.contrib.tensorboard.plugins.projector.ProjectorConfig(), collected from open-source projects; the project and source file are noted above each example. You may also want to check out all available functions/classes of the module tensorflow.contrib.tensorboard.plugins.projector.
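Every example below follows the same basic recipe: create a ProjectorConfig, call config.embeddings.add() once per embedding variable, set tensor_name to the variable's name and metadata_path to a TSV file of per-row labels, then pass the config and a summary writer to projector.visualize_embeddings(), which writes the projector_config.pbtxt file that TensorBoard reads at startup. The following is a minimal, self-contained sketch of that recipe, assuming a TF 1.x environment where tensorflow.contrib is available; the log directory and label names are hypothetical:

import os

import numpy as np
import tensorflow as tf
from tensorflow.contrib.tensorboard.plugins import projector

LOG_DIR = '/tmp/projector_demo'  # hypothetical log directory

# A toy embedding: 10 rows of 100-dimensional vectors.
embedding_var = tf.Variable(np.random.rand(10, 100), name='demo_embedding')

# One label per row; TensorBoard treats each line as the metadata for that row.
os.makedirs(LOG_DIR, exist_ok=True)
with open(os.path.join(LOG_DIR, 'metadata.tsv'), 'w') as f:
    for i in range(10):
        f.write('item_{}\n'.format(i))

config = projector.ProjectorConfig()
embedding = config.embeddings.add()
embedding.tensor_name = embedding_var.name
embedding.metadata_path = 'metadata.tsv'  # relative to LOG_DIR (several examples below use absolute paths instead)

writer = tf.summary.FileWriter(LOG_DIR)
projector.visualize_embeddings(writer, config)  # writes projector_config.pbtxt

# The projector loads the variable's values from a checkpoint in the log dir.
with tf.Session() as sess:
    sess.run(embedding_var.initializer)
    tf.train.Saver([embedding_var]).save(sess, os.path.join(LOG_DIR, 'model.ckpt'))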
Example #1
Source File: classifier.py From chicksexer with MIT License
def _visualize_embedding(self, model_path, summary_writer):
    """Create metadata file (and its config file) for tensorboard's embedding visualization."""
    metadata_path = os.path.join(model_path, self._tensorboard_dir, _EMBEDDING_METADATA_FILE)

    # create the metadata config file
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = self._nodes['embeddings'].name
    embedding.metadata_path = metadata_path
    projector.visualize_embeddings(summary_writer, config)

    # create metadata file
    with open(metadata_path, 'w', encoding='utf8') as metadata_file:
        metadata_file.write('Character\tID\n')
        for id_, char in enumerate(self._encoder.chars):
            metadata_file.write('{}\t{}\n'.format(char, id_))
Example #2
Source File: experiment.py From neuralmonkey with BSD 3-Clause "New" or "Revised" License
def visualize_embeddings(self) -> None:
    """Insert visualization of embeddings in TensorBoard.

    Visualize the embeddings of `EmbeddedFactorSequence` objects specified
    in the `main.visualize_embeddings` config attribute.
    """
    tb_projector = projector.ProjectorConfig()

    for sequence in self.model.visualize_embeddings:
        for i, (vocabulary, emb_matrix) in enumerate(
                zip(sequence.vocabularies, sequence.embedding_matrices)):

            # TODO when vocabularies will have name parameter, change it
            path = self.get_path("seq.{}-{}.tsv".format(sequence.name, i))
            vocabulary.save_wordlist(path)

            embedding = tb_projector.embeddings.add()
            # pylint: disable=unsubscriptable-object
            embedding.tensor_name = emb_matrix.name
            embedding.metadata_path = path
            # pylint: enable=unsubscriptable-object

    summary_writer = tf.summary.FileWriter(self.model.output)
    projector.visualize_embeddings(summary_writer, tb_projector)
Example #3
Source File: visualize-tensorboard.py From inferbeddings with MIT License
def main(argv):
    embeddings = tf.get_variable('W', shape=[10, 100],
                                 initializer=tf.contrib.layers.xavier_initializer())

    init_op = tf.global_variables_initializer()

    with tf.Session() as session:
        session.run(init_op)

        saver = tf.train.Saver()
        saver.save(session, "model.ckpt", 0)

        summary_writer = tf.summary.FileWriter('.')

        projector_config = projector.ProjectorConfig()
        embedding = projector_config.embeddings.add()
        embedding.tensor_name = embeddings.name

        projector.visualize_embeddings(summary_writer, projector_config)
Example #4
Source File: chatbot_model.py From seq2seq-chatbot with MIT License
def save(self, filename):
    """Saves a checkpoint of the current model weights

    Args:
        filename: Checkpoint filename, such as best_model_checkpoint.ckpt.
            This file must exist within model_dir.
    """
    filepath = path.join(self.model_dir, filename)
    self.saver.save(self.session, filepath)

    config = projector.ProjectorConfig()
    if self.model_hparams.share_embedding:
        shared_embedding = config.embeddings.add()
        shared_embedding.tensor_name = "model/encoder/shared_embeddings_matrix"
        shared_embedding.metadata_path = Vocabulary.SHARED_VOCAB_FILENAME
    else:
        encoder_embedding = config.embeddings.add()
        encoder_embedding.tensor_name = "model/encoder/encoder_embeddings_matrix"
        encoder_embedding.metadata_path = Vocabulary.INPUT_VOCAB_FILENAME

        decoder_embedding = config.embeddings.add()
        decoder_embedding.tensor_name = "model/decoder/decoder_embeddings_matrix"
        decoder_embedding.metadata_path = Vocabulary.OUTPUT_VOCAB_FILENAME

    projector.visualize_embeddings(self.summary_writer, config)
Example #5
Source File: embedding.py From tensorflow-tbcnn with MIT License
def write_embedding_metadata(writer, word2int):
    metadata_path = os.path.join(hyper.train_dir, 'embedding_meta.tsv')
    # dump embedding mapping
    items = sorted(word2int.items(), key=operator.itemgetter(1))
    with open(metadata_path, 'w') as f:
        for item in items:
            print(item[0], file=f)

    config = projector.ProjectorConfig()
    config.model_checkpoint_dir = hyper.train_dir
    # the above line doesn't work yet: TF doesn't support model_checkpoint_dir,
    # so create a symlink from train_dir to log_dir instead
    os.symlink(os.path.join(hyper.train_dir, 'checkpoint'),
               os.path.join(hyper.log_dir, 'checkpoint'))
    embedding = config.embeddings.add()
    embedding.tensor_name = param.get('We').name
    # Link this tensor to its metadata file (e.g. labels).
    embedding.metadata_path = metadata_path
    # Saves a configuration file that TensorBoard will read during startup.
    projector.visualize_embeddings(writer, config)
Example #6
Source File: tf_utils.py From ZNLP with MIT License
def visualize_sample_embeddings(sess, log_dir, words, word2idx, embeddings):
    # embedding -> tf.get_variable()
    list_idx = map(lambda word: word2idx[word], words)
    # sample_embeddings = tf.gather(embeddings, list_idx, name="my_embeddings")
    # sample_embeddings = embeddings[list_idx]

    # make sure the log directory exists before writing metadata into it
    if not os.path.exists(log_dir):
        os.mkdir(log_dir)

    config = projector.ProjectorConfig()
    embedding_conf = config.embeddings.add()

    metadata_path = os.path.join(log_dir, 'metadata.tsv')
    with open(metadata_path, "w") as f:
        [f.write(word + "\n") for word in words]

    embedding_conf.tensor_name = embeddings.name
    embedding_conf.metadata_path = metadata_path

    summary_writer = tf.summary.FileWriter(log_dir, graph=sess.graph)
    projector.visualize_embeddings(summary_writer, config)
    # summary_writer.close()
Example #7
Source File: model.py From long-summarization with Apache License 2.0
def _add_emb_vis(self, embedding_var):
    """Do setup so that we can view word embedding visualization in Tensorboard, as described here:
    https://www.tensorflow.org/get_started/embedding_viz
    Make the vocab metadata file, then make the projector config file pointing to it."""
    train_dir = os.path.join(FLAGS.log_root, "train")
    vocab_metadata_path = os.path.join(train_dir, "vocab_metadata.tsv")
    self._vocab.write_metadata(vocab_metadata_path)  # write metadata file
    summary_writer = tf.summary.FileWriter(train_dir)
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = embedding_var.name
    embedding.metadata_path = vocab_metadata_path
    chkpt_dir = tf.train.latest_checkpoint(train_dir)
    print('chkpt_dir for embeddings: ', chkpt_dir)
    if chkpt_dir:
        config.model_checkpoint_path = chkpt_dir
    else:
        chkpt_dir = train_dir
    projector.visualize_embeddings(summary_writer, config)
Example #8
Source File: Model.py From natural-language-summary-generation-from-structured-data with MIT License
def __get_tensorboard_writer(self, path):
    tensorboard_writer = tf.summary.FileWriter(path, graph=self.graph, filename_suffix=".bot")

    # set the projector's configuration to add the embedding summary also:
    conf = projector.ProjectorConfig()
    embedding_field = conf.embeddings.add()
    embedding_content_label = conf.embeddings.add()

    # set the tensors to these embedding matrices
    embedding_field.tensor_name = self.field_embedding_matrix.name
    embedding_content_label.tensor_name = self.content_label_embedding_matrix.name

    # add the metadata paths to these embedding_summaries:
    embedding_field.metadata_path = os.path.join("..", "Metadata/fields.vocab")
    embedding_content_label.metadata_path = os.path.join("..", "Metadata/content_labels.vocab")

    # save the configuration file for this
    projector.visualize_embeddings(tensorboard_writer, conf)

    # return the so created tensorboard_writer
    return tensorboard_writer

# define the constructor of the graph
Example #9
Source File: autoencoder_t-sne.py From Autoencoder-TensorBoard-t-SNE with MIT License
def generate_embeddings():
    # Load data, train an autoencoder and transform data
    embedded_data, sess = train_autoencoder_and_embed()

    # Input set for Embedded TensorBoard visualization
    # Performed with cpu to conserve memory and processing power
    with tf.device("/cpu:0"):
        embedding = tf.Variable(tf.stack(embedded_data, axis=0), trainable=False, name='embedding')
    sess.run(tf.global_variables_initializer())

    saver = tf.train.Saver()
    writer = tf.summary.FileWriter(FLAGS.log_dir + '/projector', sess.graph)

    # Add embedding tensorboard visualization. Need tensorflow version
    # >= 0.12.0RC0
    config = projector.ProjectorConfig()
    embed = config.embeddings.add()
    embed.tensor_name = 'embedding:0'
    embed.metadata_path = os.path.join(FLAGS.log_dir + '/projector/metadata.tsv')
    embed.sprite.image_path = os.path.join(FLAGS.data_dir + '/mnist_10k_sprite.png')

    # Specify the width and height of a single thumbnail.
    embed.sprite.single_image_dim.extend([28, 28])
    projector.visualize_embeddings(writer, config)

    # We save the embeddings for TensorBoard, setting the global step as
    # the number of data examples
    saver.save(sess, os.path.join(FLAGS.log_dir, 'projector/a_model.ckpt'),
               global_step=NB_TEST_DATA)
    sess.close()
Example #10
Source File: tensorboard_embedding.py From VAE-GAN with MIT License
def __init__(self, config):
    super(TensorboardEmbedding, self).__init__(config)

    self.assets_dir = self.config['assets dir']
    self.log_dir = self.config.get('log dir', 'embedding')
    self.log_dir = os.path.join(self.assets_dir, self.log_dir)
    self.z_shape = list(self.config['z shape'])
    self.x_shape = list(self.config['x shape'])

    self.nb_samples = self.config.get('nb samples', 1000)
    self.batch_size = self.config.get('batch_size', 100)
    self.nb_samples = self.nb_samples // self.batch_size * self.batch_size

    if not os.path.exists(self.log_dir):
        os.mkdir(self.log_dir)

    with open(os.path.join(self.log_dir, 'metadata.tsv'), 'w') as f:
        f.write("Index\tLabel\n")
        for i in range(self.nb_samples):
            f.write("%d\t%d\n" % (i, 0))
        for i in range(self.nb_samples):
            f.write("%d\t%d\n" % (i + self.nb_samples, 1))

    summary_writer = tf.summary.FileWriter(self.log_dir)
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = "test"
    embedding.metadata_path = "metadata.tsv"
    projector.visualize_embeddings(summary_writer, config)

    self.plot_array_var = tf.get_variable('test', shape=[self.nb_samples * 2, int(np.product(self.x_shape))])
    self.saver = tf.train.Saver([self.plot_array_var])
Example #11
Source File: visualize_tb.py From Jacinle with MIT License
def visualize_word_embedding_tb(emb, log_dir):
    # https://stackoverflow.com/questions/41258391/tensorboard-embedding-example#answer-42676076
    if isinstance(emb, tuple):  # embedding, word2idx
        words = sorted(emb[1].keys(), key=lambda x: emb[1][x])
        embedding = np.array(emb[0])
    else:
        words = emb.keys()
        embedding = np.stack([emb[key] for key in words])

    # setup a TensorFlow session
    tf.reset_default_graph()
    embedding_var = tf.Variable(embedding, name='embedding')

    with open(osp.join(log_dir, 'metadata.tsv'), 'w') as f:
        for w in words:
            f.write(w + '\n')

    # create a TensorFlow summary writer
    summary_writer = tf.summary.FileWriter(log_dir, tf.get_default_graph())
    config = projector.ProjectorConfig()
    embedding_conf = config.embeddings.add()
    embedding_conf.tensor_name = embedding_var.name
    embedding_conf.metadata_path = 'metadata.tsv'
    projector.visualize_embeddings(summary_writer, config)

    # save the model
    saver = tf.train.Saver()
    with tf.Session() as sess:
        sess.run(embedding_var.initializer)
        saver.save(sess, osp.join(log_dir, "model.ckpt"))
Example #12
Source File: tensorboard_embedding.py From keras_experiments with The Unlicense
def set_model(self, model):
    if self.embeddings_freq:
        self.saver = tf.train.Saver()

        embeddings_layer_names = self.embeddings_layer_names
        elayers = find_embedding_layers(model.layers)
        if not embeddings_layer_names:
            embeddings_layer_names = [layer.name for layer in elayers]

        embeddings = {layer.name: layer.weights[0]
                      for layer in elayers
                      if layer.name in embeddings_layer_names}

        embeddings_metadata = {}
        if not isinstance(self.embeddings_metadata, str):
            embeddings_metadata = self.embeddings_metadata
        else:
            embeddings_metadata = {layer_name: self.embeddings_metadata
                                   for layer_name in embeddings.keys()}

        config = projector.ProjectorConfig()
        self.embeddings_logs = []

        for layer_name, tensor in embeddings.items():
            embedding = config.embeddings.add()
            embedding.tensor_name = tensor.name

            self.embeddings_logs.append(os.path.join(self.log_dir, layer_name + '.ckpt'))

            if layer_name in embeddings_metadata:
                embedding.metadata_path = embeddings_metadata[layer_name]

        projector.visualize_embeddings(self.writer, config)
Example #13
Source File: components.py From -Learn-Artificial-Intelligence-with-TensorFlow with MIT License
def init_embeddings_projector(vocab_path, tensor_name, logdir):
    """Saves the projector_config.pbtxt file in `logdir`, to be used
    by the TensorBoard Projector.
    """
    # Define the protobuf object that will eventually be saved in text format
    # as projector_config.pbtxt, within `logdir`. TensorBoard will automatically
    # read that file and associate the elements of `tensor_name` with the
    # vocabulary words specified in the `vocab_path` file.
    config = projector.ProjectorConfig(embeddings=[projector.EmbeddingInfo(
        tensor_name=tensor_name, metadata_path=vocab_path)])
    # Call visualize_embeddings to execute the saving of the .pbtxt file.
    writer = tf.summary.FileWriter(logdir)
    projector.visualize_embeddings(writer, config)
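Unlike the other examples, this one builds the whole configuration in a single expression instead of calling config.embeddings.add(). Both styles are equivalent: ProjectorConfig and EmbeddingInfo are protocol buffer messages, and protobuf message constructors in Python accept field values as keyword arguments.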
Example #14
Source File: embeddings.py From tf_autoencoder with Apache License 2.0
def save_as_embedding(data, save_path, metadata_path=None, sprite_image_path=None):
    """Save data as embedding in checkpoint.

    Parameters
    ----------
    data : ndarray
        Data to store as embedding.
    save_path : str
        Path to the checkpoint filename.
    metadata_path : str|None
        Path to meta-data file.
    sprite_image_path : str|None
        Path to sprite images.
    """
    checkpoint_dir = dirname(save_path)
    with tf.Graph().as_default() as g:
        with tf.Session() as sess:
            embedding_var = tf.Variable(data, name='embedding')
            writer = tf.summary.FileWriter(checkpoint_dir, g)
            sess.run(embedding_var.initializer)

            config = projector.ProjectorConfig()
            embedding = config.embeddings.add()
            embedding.tensor_name = embedding_var.name
            if metadata_path is not None:
                embedding.metadata_path = metadata_path
            if sprite_image_path is not None:
                embedding.sprite.image_path = sprite_image_path
                # Specify the width and height of a single thumbnail.
                embedding.sprite.single_image_dim.extend([28, 28])

            projector.visualize_embeddings(writer, config)
            saver_embed = tf.train.Saver([embedding_var])
            saver_embed.save(sess, save_path, 1)
            writer.close()
Example #15
Source File: model.py From unified-summarization with MIT License
def _add_emb_vis(self, embedding_var):
    """Do setup so that we can view word embedding visualization in Tensorboard, as described here:
    https://www.tensorflow.org/get_started/embedding_viz
    Make the vocab metadata file, then make the projector config file pointing to it."""
    train_dir = os.path.join(FLAGS.log_root, "train")
    vocab_metadata_path = os.path.join(train_dir, "vocab_metadata.tsv")
    self._vocab.write_metadata(vocab_metadata_path)  # write metadata file
    summary_writer = tf.summary.FileWriter(train_dir)
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = embedding_var.name
    embedding.metadata_path = vocab_metadata_path
    projector.visualize_embeddings(summary_writer, config)
Example #16
Source File: model.py From pointer-generator with Apache License 2.0
def _add_emb_vis(self, embedding_var):
    """Do setup so that we can view word embedding visualization in Tensorboard, as described here:
    https://www.tensorflow.org/get_started/embedding_viz
    Make the vocab metadata file, then make the projector config file pointing to it."""
    train_dir = os.path.join(FLAGS.log_root, "train")
    vocab_metadata_path = os.path.join(train_dir, "vocab_metadata.tsv")
    self._vocab.write_metadata(vocab_metadata_path)  # write metadata file
    summary_writer = tf.summary.FileWriter(train_dir)
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = embedding_var.name
    embedding.metadata_path = vocab_metadata_path
    projector.visualize_embeddings(summary_writer, config)
Example #17
Source File: 04_word2vec_visualize.py From stanford-tensorflow-tutorials with MIT License
def visualize(self, visual_fld, num_visualize):
    """ run "'tensorboard --logdir='visualization'" to see the embeddings """
    # create the list of num_variable most common words to visualize
    word2vec_utils.most_common_words(visual_fld, num_visualize)

    saver = tf.train.Saver()
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        ckpt = tf.train.get_checkpoint_state(os.path.dirname('checkpoints/checkpoint'))

        # if that checkpoint exists, restore from checkpoint
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(sess, ckpt.model_checkpoint_path)

        final_embed_matrix = sess.run(self.embed_matrix)

        # you have to store embeddings in a new variable
        embedding_var = tf.Variable(final_embed_matrix[:num_visualize], name='embedding')
        sess.run(embedding_var.initializer)

        config = projector.ProjectorConfig()
        summary_writer = tf.summary.FileWriter(visual_fld)

        # add embedding to the config file
        embedding = config.embeddings.add()
        embedding.tensor_name = embedding_var.name

        # link this tensor to its metadata file, in this case the first NUM_VISUALIZE words of vocab
        embedding.metadata_path = 'vocab_' + str(num_visualize) + '.tsv'

        # saves a configuration file that TensorBoard will read during startup.
        projector.visualize_embeddings(summary_writer, config)
        saver_embed = tf.train.Saver([embedding_var])
        saver_embed.save(sess, os.path.join(visual_fld, 'model.ckpt'), 1)
Example #18
Source File: model.py From RLSeq2Seq with MIT License
def _add_emb_vis(self, embedding_var):
    """Do setup so that we can view word embedding visualization in Tensorboard, as described here:
    https://www.tensorflow.org/get_started/embedding_viz
    Make the vocab metadata file, then make the projector config file pointing to it."""
    train_dir = os.path.join(FLAGS.log_root, "train")
    vocab_metadata_path = os.path.join(train_dir, "vocab_metadata.tsv")
    self._vocab.write_metadata(vocab_metadata_path)  # write metadata file
    summary_writer = tf.summary.FileWriter(train_dir)
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = embedding_var.name
    embedding.metadata_path = vocab_metadata_path
    projector.visualize_embeddings(summary_writer, config)
Example #19
Source File: 11_w2v_visual.py From deep-learning-note with MIT License
def visualize(self, visual_fld, num_visualize):
    """ run "'tensorboard --logdir='visualization'" to see the embeddings """
    # create the list of num_variable most common words to visualize
    w2v_utils.most_common_words(visual_fld, num_visualize)

    saver = tf.train.Saver()
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        ckpt = tf.train.get_checkpoint_state(os.path.dirname('data/checkpoints/checkpoint'))

        # if that checkpoint exists, restore from checkpoint
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(sess, ckpt.model_checkpoint_path)

        final_embed_matrix = sess.run(self.embed_matrix)

        # you have to store embeddings in a new variable
        embedding_var = tf.Variable(final_embed_matrix[:num_visualize], name='embedding')
        sess.run(embedding_var.initializer)

        config = projector.ProjectorConfig()
        summary_writer = tf.summary.FileWriter(visual_fld)

        # add embedding to the config file
        embedding = config.embeddings.add()
        embedding.tensor_name = embedding_var.name

        # link this tensor to its metadata file, in this case the first NUM_VISUALIZE words of vocab
        embedding.metadata_path = 'vocab_' + str(num_visualize) + '.tsv'

        # saves a configuration file that TensorBoard will read during startup.
        projector.visualize_embeddings(summary_writer, config)
        saver_embed = tf.train.Saver([embedding_var])
        saver_embed.save(sess, os.path.join(visual_fld, 'model.ckpt'), 1)
Example #20
Source File: embedding_visualization.py From HardRLWithYoutube with MIT License
def visualize_embeddings(embeddings, experiment_name='default'):
    """Save the embeddings to be visualised using t-sne on TensorBoard

    Based on https://medium.com/@vegi/visualizing-higher-dimensional-data-using-t-sne-on-tensorboard-7dbf22682cf2
    """
    tf_embeddings = tf.Variable(np.concatenate(embeddings, 0))

    # Generate metadata
    metadata = 'video_index\tframe_index\n'
    for video_index in range(len(embeddings)):
        for frame_index in range(embeddings[video_index].shape[0]):
            metadata += '{}\t{}\n'.format(video_index, frame_index)
    metadata_path = 'embeddings/{}/labels.tsv'.format(experiment_name)
    with open(metadata_path, 'w') as metadata_file:
        metadata_file.write(metadata)

    with tf.Session() as sess:
        saver = tf.train.Saver([tf_embeddings])
        sess.run(tf_embeddings.initializer)
        saver.save(sess, 'embeddings/{}/embeddings.ckpt'.format(experiment_name))

        config = projector.ProjectorConfig()
        embedding = config.embeddings.add()
        embedding.tensor_name = tf_embeddings.name
        embedding.metadata_path = metadata_path.split('/')[-1]

        projector.visualize_embeddings(tf.summary.FileWriter('embeddings/{}'.format(experiment_name)), config)
Example #21
Source File: model.py From TransferRL with MIT License
def _add_emb_vis(self, embedding_var):
    """Do setup so that we can view word embedding visualization in Tensorboard, as described here:
    https://www.tensorflow.org/get_started/embedding_viz
    Make the vocab metadata file, then make the projector config file pointing to it."""
    train_dir = os.path.join(FLAGS.log_root, "train")
    vocab_metadata_path = os.path.join(train_dir, "vocab_metadata.tsv")
    self._vocab.write_metadata(vocab_metadata_path)  # write metadata file
    summary_writer = tf.summary.FileWriter(train_dir)
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = embedding_var.name
    embedding.metadata_path = vocab_metadata_path
    projector.visualize_embeddings(summary_writer, config)
Example #22
Source File: embeddings_formatter.py From embeddingsviz with MIT License
def add_multiple_embeddings(log_dir, file_list, name_list):
    """ Creates the files necessary for the multiple embeddings
    :param log_dir: destination directory for the model and metadata (the one to which TensorBoard points)
    :param file_list: list of embeddings files
    :param name_list: names of the embeddings files
    :return:
    """
    # setup a TensorFlow session
    tf.reset_default_graph()
    sess = tf.InteractiveSession()
    sess.run(tf.global_variables_initializer())
    config = projector.ProjectorConfig()

    for i, file in enumerate(file_list):
        tensor_name = name_list[i]
        print('creating the embedding with the name ' + tensor_name)
        create_embeddings(sess, log_dir, embedding_file=file, tensor_name=tensor_name)

        # create a TensorFlow summary writer
        summary_writer = tf.summary.FileWriter(log_dir, sess.graph)
        embedding_conf = config.embeddings.add()
        embedding_conf.tensor_name = tensor_name + ':0'
        embedding_conf.metadata_path = os.path.join(tensor_name + '_' + 'metadata.tsv')
        projector.visualize_embeddings(summary_writer, config)

        # save the model
        saver = tf.train.Saver()
        saver.save(sess, os.path.join(log_dir, tensor_name + '_' + "model.ckpt"))

    print('finished successfully!')
Example #23
Source File: mnist_projector_show.py From deep-learning-note with MIT License
def visualisation(final_result):
    # Store the final output-layer vectors in a new variable: because the
    # embedding is backed by a TensorFlow variable, PROJECTOR can only
    # visualize TF variables.
    y = tf.Variable(final_result, name=TENSOR_NAME)
    summary_writer = tf.summary.FileWriter(LOG_DIR)

    # Generate the log via PROJECTOR
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = y.name

    # Point the embedding at the metadata for the original data
    embedding.metadata_path = META_FILE

    # Specify the sprite image and the size of a single thumbnail
    embedding.sprite.image_path = SPRITE_FILE
    embedding.sprite.single_image_dim.extend([28, 28])

    # Write the log
    projector.visualize_embeddings(summary_writer, config)

    # Create a session and write the checkpoint file
    sess = tf.InteractiveSession()
    sess.run(tf.global_variables_initializer())
    saver = tf.train.Saver()
    saver.save(sess, os.path.join(LOG_DIR, "model"), TRAINING_STEPS)

    summary_writer.close()

# The main function first trains the model, then processes the test data, and
# finally writes the output matrix to the log files PROJECTOR needs.
Example #24
Source File: train.py From vqvae-speech with MIT License
def visualize_embeddings(logdir, var_list, tsv_list):
    assert len(var_list) == len(tsv_list), 'Inconsistent length of lists'
    config = projector.ProjectorConfig()
    for v, f in zip(var_list, tsv_list):
        embedding = config.embeddings.add()
        embedding.tensor_name = v.name
        if f is not None:
            _, filename = os.path.split(f)
            meta_tsv = os.path.join(logdir, filename)
            tf.gfile.Copy(f, meta_tsv)
            embedding.metadata_path = filename  # save relative path
    writer = SummaryWriterCache.get(logdir)
    projector.visualize_embeddings(writer, config)
Example #25
Source File: model.py From MAX-Text-Summarizer with Apache License 2.0
def _add_emb_vis(self, embedding_var):
    """Do setup so that we can view word embedding visualization in Tensorboard, as described here:
    https://www.tensorflow.org/get_started/embedding_viz
    Make the vocab metadata file, then make the projector config file pointing to it."""
    train_dir = os.path.join(FLAGS.log_root, "train")
    vocab_metadata_path = os.path.join(train_dir, "vocab_metadata.tsv")
    self._vocab.write_metadata(vocab_metadata_path)  # write metadata file
    summary_writer = tf.summary.FileWriter(train_dir)
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = embedding_var.name
    embedding.metadata_path = vocab_metadata_path
    projector.visualize_embeddings(summary_writer, config)
Example #26
Source File: tf.py From mirror with MIT License
def tensorboard_projector_visualize(sess, all_feat, log_dir, sprite_image=None, image_width=-1, image_height=-1):
    """
    generate necessary model data and config files for tensorboard projector
    :param sess: the current computing session
    :param all_feat: a 2d numpy feature vector of shape [num_features, feature_dimension]
    :param log_dir: the output log folder
    :param sprite_image: (optional) the big image in a row-major fashion for visualization
    :param image_width: (optional) the width for a single data point in sprite image
    :param image_height: (optional) the height for a single data point in sprite image
    :return: None
    """
    # create embedding summary and run it
    summary_writer = tf.summary.FileWriter(log_dir)
    embedding_var = tf.Variable(all_feat, name='feature_embedding')
    sess.run(embedding_var.initializer)

    # create projector config
    config = projector.ProjectorConfig()
    embedding = config.embeddings.add()
    embedding.tensor_name = embedding_var.name

    # adding sprite images
    if sprite_image is not None and image_width != -1 and image_height != -1:
        embedding.sprite.image_path = sprite_image
        embedding.sprite.single_image_dim.extend([image_width, image_height])

    projector.visualize_embeddings(summary_writer, config)

    # save the embedding
    saver_embed = tf.train.Saver([embedding_var])
    saver_embed.save(sess, os.path.join(log_dir, 'embedding_test.ckpt'))
Example #27
Source File: mnist_t-sne.py From mnist-tensorboard-embeddings with MIT License
def generate_embeddings():
    # Import data
    mnist = input_data.read_data_sets(FLAGS.data_dir,
                                      one_hot=True,
                                      fake_data=FLAGS.fake_data)
    sess = tf.InteractiveSession()

    # Input set for Embedded TensorBoard visualization
    # Performed with cpu to conserve memory and processing power
    with tf.device("/cpu:0"):
        embedding = tf.Variable(tf.stack(mnist.test.images[:FLAGS.max_steps], axis=0), trainable=False, name='embedding')

    tf.global_variables_initializer().run()

    saver = tf.train.Saver()
    writer = tf.summary.FileWriter(FLAGS.log_dir + '/projector', sess.graph)

    # Add embedding tensorboard visualization. Need tensorflow version
    # >= 0.12.0RC0
    config = projector.ProjectorConfig()
    embed = config.embeddings.add()
    embed.tensor_name = 'embedding:0'
    embed.metadata_path = os.path.join(FLAGS.log_dir + '/projector/metadata.tsv')
    embed.sprite.image_path = os.path.join(FLAGS.data_dir + '/mnist_10k_sprite.png')

    # Specify the width and height of a single thumbnail.
    embed.sprite.single_image_dim.extend([28, 28])
    projector.visualize_embeddings(writer, config)

    saver.save(sess, os.path.join(FLAGS.log_dir, 'projector/a_model.ckpt'),
               global_step=FLAGS.max_steps)
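To view any of the embeddings configured above, start TensorBoard with --logdir pointing at the directory the FileWriter wrote to (for example tensorboard --logdir=visualization, as the docstrings in the word2vec examples suggest) and open the Projector tab.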