Python tensor2tensor.models.transformer.transformer_big() Examples
The following are 16 code examples of tensor2tensor.models.transformer.transformer_big(), collected from open-source projects. Each example notes the source file and project it was taken from. All of the snippets below assume the import: from tensor2tensor.models import transformer.
You may also want to check out all available functions and classes of the module tensor2tensor.models.transformer.
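For reference, transformer_big() itself is a thin override of transformer_base(); in the tensor2tensor source it looks roughly like the sketch below (exact values can differ between versions):

def transformer_big():
  """HParams for transformer big model on WMT."""
  hparams = transformer_base()
  hparams.hidden_size = 1024
  hparams.filter_size = 4096
  hparams.num_heads = 16
  hparams.layer_prepostprocess_dropout = 0.3
  return hparams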
Example #1
Source File: transformer_revnet.py From fine-lm with MIT License
def transformer_revnet_base():
  """Base hparams for TransformerRevnet."""
  hparams = transformer.transformer_big()
  # Use settings from transformer_n_da
  hparams.layer_preprocess_sequence = "n"
  hparams.layer_postprocess_sequence = "da"
  hparams.learning_rate = 0.4
  return hparams
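Every example on this page follows the same pattern: start from the HParams object returned by transformer.transformer_big() and override individual fields. A quick way to inspect the base values the overrides start from (the printed values are those in recent tensor2tensor releases):

from tensor2tensor.models import transformer

hparams = transformer.transformer_big()
print(hparams.hidden_size)  # 1024
print(hparams.num_heads)    # 16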
Example #2
Source File: universal_transformer.py From fine-lm with MIT License
def universal_transformer_base():
  hparams = transformer.transformer_big()
  hparams = update_hparams_for_universal_transformer(hparams)
  return hparams
Example #3
Source File: universal_transformer.py From fine-lm with MIT License
def universal_transformer_big():
  hparams = transformer.transformer_big()
  hparams = update_hparams_for_universal_transformer(hparams)
  hparams.hidden_size = 2048
  hparams.filter_size = 8192
  return hparams
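Relative to transformer_big (hidden_size 1024, filter_size 4096), universal_transformer_big doubles the model width to 2048 and the feed-forward filter size to 8192 on top of the Universal Transformer updates.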
Example #4
Source File: transformer_revnet.py From tensor2tensor with Apache License 2.0
def transformer_revnet_base():
  """Base hparams for TransformerRevnet."""
  hparams = transformer.transformer_big()
  # Use settings from transformer_n_da
  hparams.layer_preprocess_sequence = "n"
  hparams.layer_postprocess_sequence = "da"
  hparams.learning_rate = 0.4
  return hparams
Example #5
Source File: transformer_parallel.py From tensor2tensor with Apache License 2.0
def transformer_big_bs1():
  hparams = transformer.transformer_big()
  hparams.add_hparam("block_size", 1)
  return hparams
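transformer_big() does not define a block_size field, which is why this example uses add_hparam rather than plain attribute assignment; add_hparam registers a new key on the HParams object and raises a ValueError if the key already exists.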
Example #6
Source File: evolved_transformer.py From tensor2tensor with Apache License 2.0
def evolved_transformer_big():
  """Big parameters for Evolved Transformer model on WMT."""
  return add_evolved_transformer_hparams(transformer.transformer_big())
Example #7
Source File: evolved_transformer.py From tensor2tensor with Apache License 2.0
def evolved_transformer_deep():
  """Deep parameters for Evolved Transformer model on WMT."""
  hparams = add_evolved_transformer_hparams(transformer.transformer_big())
  hparams.num_encoder_layers = 9
  hparams.num_decoder_layers = 10
  hparams.hidden_size = 640
  return hparams
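Note the trade-off in evolved_transformer_deep: compared with transformer_big it narrows hidden_size from 1024 to 640 while deepening the network to 9 encoder and 10 decoder layers.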
Example #8
Source File: transformer_revnet.py From BERT with Apache License 2.0
def transformer_revnet_base():
  """Base hparams for TransformerRevnet."""
  hparams = transformer.transformer_big()
  # Use settings from transformer_n_da
  hparams.layer_preprocess_sequence = "n"
  hparams.layer_postprocess_sequence = "da"
  hparams.learning_rate = 0.4
  return hparams
Example #9
Source File: transformer_parallel.py From BERT with Apache License 2.0
def transformer_big_bs1():
  hparams = transformer.transformer_big()
  hparams.add_hparam("block_size", 1)
  return hparams
Example #10
Source File: evolved_transformer.py From BERT with Apache License 2.0
def evolved_transformer_big():
  """Big parameters for Evolved Transformer model on WMT."""
  return add_evolved_transformer_hparams(transformer.transformer_big())
Example #11
Source File: evolved_transformer.py From BERT with Apache License 2.0
def evolved_transformer_deep():
  """Deep parameters for Evolved Transformer model on WMT."""
  hparams = add_evolved_transformer_hparams(transformer.transformer_big())
  hparams.num_encoder_layers = 9
  hparams.num_decoder_layers = 10
  hparams.hidden_size = 640
  return hparams
Example #12
Source File: transformer_revnet.py From training_results_v0.5 with Apache License 2.0
def transformer_revnet_base():
  """Base hparams for TransformerRevnet."""
  hparams = transformer.transformer_big()
  # Use settings from transformer_n_da
  hparams.layer_preprocess_sequence = "n"
  hparams.layer_postprocess_sequence = "da"
  hparams.learning_rate = 0.4
  return hparams
Example #13
Source File: universal_transformer.py From training_results_v0.5 with Apache License 2.0
def universal_transformer_base():
  hparams = transformer.transformer_big()
  hparams = update_hparams_for_universal_transformer(hparams)
  return hparams
Example #14
Source File: universal_transformer.py From training_results_v0.5 with Apache License 2.0
def universal_transformer_big():
  hparams = transformer.transformer_big()
  hparams = update_hparams_for_universal_transformer(hparams)
  hparams.hidden_size = 2048
  hparams.filter_size = 8192
  return hparams
Example #15
Source File: universal_transformer_modified.py From Graph-Transformer with Apache License 2.0
def universal_transformer_base1():
  hparams = transformer.transformer_big()
  hparams = update_hparams_for_universal_transformer(hparams)
  return hparams
Example #16
Source File: universal_transformer_modified.py From Graph-Transformer with Apache License 2.0
def universal_transformer_big1():
  hparams = transformer.transformer_big()
  hparams = update_hparams_for_universal_transformer(hparams)
  hparams.hidden_size = 2048
  hparams.filter_size = 8192
  return hparams
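Once an hparams function like the ones above is registered with @registry.register_hparams, it can be selected by name when training. An illustrative t2t-trainer invocation (the problem name and paths are placeholders, not taken from the examples above):

t2t-trainer \
  --model=transformer \
  --hparams_set=transformer_big \
  --problem=translate_ende_wmt32k \
  --data_dir=$DATA_DIR \
  --output_dir=$OUTPUT_DIR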