Python tensorflow.python.platform.tf_logging.set_verbosity() Examples
The following are 30 code examples of tensorflow.python.platform.tf_logging.set_verbosity().
You may also want to check out all available functions/classes of the module tensorflow.python.platform.tf_logging.
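Before the project-specific examples, here is a minimal sketch of the call itself. Note that tensorflow.python.platform.tf_logging is an internal module; in public TensorFlow 1.x code the same interface is exposed as tf.logging.

from tensorflow.python.platform import tf_logging as logging

logging.set_verbosity(logging.INFO)  # show INFO-level messages and above
logging.info('This message is now visible.')
logging.set_verbosity(logging.WARN)  # suppress INFO messages from here on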
Example #1
Source File: conll2tree.py From DOTA_models with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
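Note that these snippets are shown without their import blocks. In the SyntaxNet codebase, the conll2tree examples rely on imports roughly along these lines (a sketch; exact module paths may vary across the forks listed here, and to_dict is a helper defined elsewhere in conll2tree.py):

import re

import asciitree
import tensorflow as tf

from tensorflow.python.platform import tf_logging as logging
from syntaxnet import sentence_pb2
from syntaxnet.ops import gen_parser_ops

FLAGS = tf.app.flags.FLAGS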
Example #2
Source File: conll2tree.py From hands-detection with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #3
Source File: conll2tree.py From object_detection_kitti with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #4
Source File: conll2tree.py From ECO-pytorch with BSD 2-Clause "Simplified" License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #5
Source File: conll2tree.py From object_detection_with_tensorflow with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #6
Source File: conll2tree.py From Action_Recognition_Zoo with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #7
Source File: conll2tree.py From AI_Reader with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        print(tr(d))
      if finished:
        break
Example #8
Source File: conll2tree.py From HumanRecognition with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #9
Source File: conll2tree.py From Gun-Detector with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #10
Source File: conll2tree.py From g-tensorflow-models with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #11
Source File: conll2tree.py From yolo_v2 with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #12
Source File: conll2tree.py From multilabel-image-classification-tensorflow with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print('Input: %s' % sentence.text)
        print('Parse:')
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print(pat.sub('', tr_ln))
      if finished:
        break
Example #13
Source File: dragnn_parser.py From syntaxnet-rest-api with GNU General Public License v3.0
def load_model(self, base_dir, master_spec_name, checkpoint_name):
  # Read the master spec.
  master_spec = spec_pb2.MasterSpec()
  with open(os.path.join(base_dir, master_spec_name), "r") as f:
    text_format.Merge(f.read(), master_spec)
  spec_builder.complete_master_spec(master_spec, None, base_dir)
  logging.set_verbosity(logging.WARN)  # Turn off TensorFlow spam.

  # Initialize a graph.
  graph = tf.Graph()
  with graph.as_default():
    hyperparam_config = spec_pb2.GridPoint()
    builder = graph_builder.MasterBuilder(master_spec, hyperparam_config)
    # This is the component that will annotate test sentences.
    annotator = builder.add_annotation(enable_tracing=True)
    # "Savers" can save and load models; here, we're only going to load.
    builder.add_saver()

  sess = tf.Session(graph=graph)
  with graph.as_default():
    # sess.run(tf.global_variables_initializer())
    # sess.run('save/restore_all',
    #          {'save/Const:0': os.path.join(base_dir, checkpoint_name)})
    builder.saver.restore(sess, os.path.join(base_dir, checkpoint_name))

  def annotate_sentence(sentence):
    with graph.as_default():
      return sess.run([annotator['annotations'], annotator['traces']],
                      feed_dict={annotator['input_batch']: [sentence]})

  return annotate_sentence
Example #14
Source File: parser_eval.py From AI_Reader with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    feature_sizes, domain_sizes, embedding_dims, num_actions = sess.run(
        gen_parser_ops.feature_size(task_context=FLAGS.task_context,
                                    arg_prefix=FLAGS.arg_prefix))
  with tf.Session() as sess:
    Eval(sess, num_actions, feature_sizes, domain_sizes, embedding_dims)
Example #15
Source File: parser_eval.py From HumanRecognition with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #16
Source File: parser_eval.py From object_detection_with_tensorflow with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #17
Source File: parser_eval.py From g-tensorflow-models with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #18
Source File: parser_eval.py From object_detection_kitti with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #19
Source File: parser_eval.py From multilabel-image-classification-tensorflow with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #20
Source File: parser_eval.py From hands-detection with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #21
Source File: parser_eval.py From DOTA_models with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #22
Source File: parser_eval.py From ECO-pytorch with BSD 2-Clause "Simplified" License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #23
Source File: parser_eval.py From Action_Recognition_Zoo with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #24
Source File: parser_eval.py From yolo_v2 with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #25
Source File: ParseyPredFace.py From PredPatt with BSD 3-Clause "New" or "Revised" License
def load_model(base_dir, master_spec_name, checkpoint_name):
  """Function to load the syntaxnet models.

  Highly specific to the tutorial format right now.
  """
  # Read the master spec.
  master_spec = spec_pb2.MasterSpec()
  with open(os.path.join(base_dir, master_spec_name), "r") as f:
    text_format.Merge(f.read(), master_spec)
  spec_builder.complete_master_spec(master_spec, None, base_dir)
  logging.set_verbosity(logging.WARN)  # Turn off TensorFlow spam.

  # Initialize a graph.
  graph = tf.Graph()
  with graph.as_default():
    hyperparam_config = spec_pb2.GridPoint()
    builder = graph_builder.MasterBuilder(master_spec, hyperparam_config)
    # This is the component that will annotate test sentences.
    annotator = builder.add_annotation(enable_tracing=True)
    # "Savers" can save and load models; here, we're only going to load.
    builder.add_saver()

  sess = tf.Session(graph=graph)
  with graph.as_default():
    # sess.run(tf.global_variables_initializer())
    # sess.run('save/restore_all',
    #          {'save/Const:0': os.path.join(base_dir, checkpoint_name)})
    builder.saver.restore(sess, os.path.join(base_dir, checkpoint_name))

  def annotate_sentence(sentence):
    with graph.as_default():
      return sess.run([annotator['annotations'], annotator['traces']],
                      feed_dict={annotator['input_batch']: [sentence]})

  return annotate_sentence
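For context, a hypothetical call to the function above might look like the following; the model paths and the serialized sentence variable are illustrative assumptions, not part of the original source:

# Hypothetical usage; directory layout and file names are assumed.
annotate = load_model('/path/to/model', 'master_spec.textproto', 'checkpoint')
annotations, traces = annotate(serialized_sentence)  # serialized Sentence proto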
Example #26
Source File: parser_eval.py From Gun-Detector with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    Eval(sess)
Example #27
Source File: parser_trainer.py From DOTA_models with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)

  if not gfile.IsDirectory(OutputPath('')):
    gfile.MakeDirs(OutputPath(''))

  # Rewrite context.
  RewriteContext()

  # Creates necessary term maps.
  if FLAGS.compute_lexicon:
    logging.info('Computing lexicon...')
    with tf.Session(FLAGS.tf_master) as sess:
      gen_parser_ops.lexicon_builder(task_context=OutputPath('context'),
                                     corpus_name=FLAGS.training_corpus).run()
  with tf.Session(FLAGS.tf_master) as sess:
    feature_sizes, domain_sizes, embedding_dims, num_actions = sess.run(
        gen_parser_ops.feature_size(task_context=OutputPath('context'),
                                    arg_prefix=FLAGS.arg_prefix))

  # Well formed and projectivize.
  if FLAGS.projectivize_training_set:
    logging.info('Preprocessing...')
    with tf.Session(FLAGS.tf_master) as sess:
      source, last = gen_parser_ops.document_source(
          task_context=OutputPath('context'),
          batch_size=FLAGS.batch_size,
          corpus_name=FLAGS.training_corpus)
      sink = gen_parser_ops.document_sink(
          task_context=OutputPath('context'),
          corpus_name='projectivized-training-corpus',
          documents=gen_parser_ops.projectivize_filter(
              gen_parser_ops.well_formed_filter(
                  source, task_context=OutputPath('context')),
              task_context=OutputPath('context')))
      while True:
        tf_last, _ = sess.run([last, sink])
        if tf_last:
          break

  logging.info('Training...')
  with tf.Session(FLAGS.tf_master) as sess:
    Train(sess, num_actions, feature_sizes, domain_sizes, embedding_dims)
Example #28
Source File: parser_trainer.py From multilabel-image-classification-tensorflow with MIT License
def main(unused_argv):
  logging.set_verbosity(logging.INFO)

  if not gfile.IsDirectory(OutputPath('')):
    gfile.MakeDirs(OutputPath(''))

  # Rewrite context.
  RewriteContext()

  # Creates necessary term maps.
  if FLAGS.compute_lexicon:
    logging.info('Computing lexicon...')
    with tf.Session(FLAGS.tf_master) as sess:
      gen_parser_ops.lexicon_builder(task_context=OutputPath('context'),
                                     corpus_name=FLAGS.training_corpus).run()
  with tf.Session(FLAGS.tf_master) as sess:
    feature_sizes, domain_sizes, embedding_dims, num_actions = sess.run(
        gen_parser_ops.feature_size(task_context=OutputPath('context'),
                                    arg_prefix=FLAGS.arg_prefix))

  # Well formed and projectivize.
  if FLAGS.projectivize_training_set:
    logging.info('Preprocessing...')
    with tf.Session(FLAGS.tf_master) as sess:
      source, last = gen_parser_ops.document_source(
          task_context=OutputPath('context'),
          batch_size=FLAGS.batch_size,
          corpus_name=FLAGS.training_corpus)
      sink = gen_parser_ops.document_sink(
          task_context=OutputPath('context'),
          corpus_name='projectivized-training-corpus',
          documents=gen_parser_ops.projectivize_filter(
              gen_parser_ops.well_formed_filter(
                  source, task_context=OutputPath('context')),
              task_context=OutputPath('context')))
      while True:
        tf_last, _ = sess.run([last, sink])
        if tf_last:
          break

  logging.info('Training...')
  with tf.Session(FLAGS.tf_master) as sess:
    Train(sess, num_actions, feature_sizes, domain_sizes, embedding_dims)
Example #29
Source File: parser_trainer.py From yolo_v2 with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)

  if not gfile.IsDirectory(OutputPath('')):
    gfile.MakeDirs(OutputPath(''))

  # Rewrite context.
  RewriteContext()

  # Creates necessary term maps.
  if FLAGS.compute_lexicon:
    logging.info('Computing lexicon...')
    with tf.Session(FLAGS.tf_master) as sess:
      gen_parser_ops.lexicon_builder(task_context=OutputPath('context'),
                                     corpus_name=FLAGS.training_corpus).run()
  with tf.Session(FLAGS.tf_master) as sess:
    feature_sizes, domain_sizes, embedding_dims, num_actions = sess.run(
        gen_parser_ops.feature_size(task_context=OutputPath('context'),
                                    arg_prefix=FLAGS.arg_prefix))

  # Well formed and projectivize.
  if FLAGS.projectivize_training_set:
    logging.info('Preprocessing...')
    with tf.Session(FLAGS.tf_master) as sess:
      source, last = gen_parser_ops.document_source(
          task_context=OutputPath('context'),
          batch_size=FLAGS.batch_size,
          corpus_name=FLAGS.training_corpus)
      sink = gen_parser_ops.document_sink(
          task_context=OutputPath('context'),
          corpus_name='projectivized-training-corpus',
          documents=gen_parser_ops.projectivize_filter(
              gen_parser_ops.well_formed_filter(
                  source, task_context=OutputPath('context')),
              task_context=OutputPath('context')))
      while True:
        tf_last, _ = sess.run([last, sink])
        if tf_last:
          break

  logging.info('Training...')
  with tf.Session(FLAGS.tf_master) as sess:
    Train(sess, num_actions, feature_sizes, domain_sizes, embedding_dims)
Example #30
Source File: parser_trainer.py From g-tensorflow-models with Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)

  if not gfile.IsDirectory(OutputPath('')):
    gfile.MakeDirs(OutputPath(''))

  # Rewrite context.
  RewriteContext()

  # Creates necessary term maps.
  if FLAGS.compute_lexicon:
    logging.info('Computing lexicon...')
    with tf.Session(FLAGS.tf_master) as sess:
      gen_parser_ops.lexicon_builder(task_context=OutputPath('context'),
                                     corpus_name=FLAGS.training_corpus).run()
  with tf.Session(FLAGS.tf_master) as sess:
    feature_sizes, domain_sizes, embedding_dims, num_actions = sess.run(
        gen_parser_ops.feature_size(task_context=OutputPath('context'),
                                    arg_prefix=FLAGS.arg_prefix))

  # Well formed and projectivize.
  if FLAGS.projectivize_training_set:
    logging.info('Preprocessing...')
    with tf.Session(FLAGS.tf_master) as sess:
      source, last = gen_parser_ops.document_source(
          task_context=OutputPath('context'),
          batch_size=FLAGS.batch_size,
          corpus_name=FLAGS.training_corpus)
      sink = gen_parser_ops.document_sink(
          task_context=OutputPath('context'),
          corpus_name='projectivized-training-corpus',
          documents=gen_parser_ops.projectivize_filter(
              gen_parser_ops.well_formed_filter(
                  source, task_context=OutputPath('context')),
              task_context=OutputPath('context')))
      while True:
        tf_last, _ = sess.run([last, sink])
        if tf_last:
          break

  logging.info('Training...')
  with tf.Session(FLAGS.tf_master) as sess:
    Train(sess, num_actions, feature_sizes, domain_sizes, embedding_dims)