Python cntk.combine() Examples
The following are 5 code examples of cntk.combine().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module cntk, or try the search function.
Example #1
Source File: driver.py From DevOps-For-AI-Apps with MIT License | 6 votes |
def init(): """ Initialise ResNet 152 model """ global trainedModel, labelLookup, mem_after_init start = t.default_timer() # Load the model and labels from disk with open(LABEL_FILE, 'r') as f: labelLookup = [l.rstrip() for l in f] # Load model and load the model from brainscript (3rd index) trainedModel = load_model(MODEL_FILE) trainedModel = combine([trainedModel.outputs[3].owner]) end = t.default_timer() loadTimeMsg = "Model loading time: {0} ms".format(round((end - start) * 1000, 2)) logger.info(loadTimeMsg)
Example #2
Source File: train_end2end.py From end2end_AU_speech with MIT License | 5 votes |
def audio_encoder_3(input, model_file, cloning=False): # Load and freeze pre-trained encoder last_layer_name = "t_conv3" model = C.load_model(model_file) input_node = model.find_by_name("input") last_conv = model.find_by_name(last_layer_name) if not last_conv: raise ValueError("the layer does not exist") h = C.combine([last_conv.owner]).clone(C.CloneMethod.clone if cloning else C.CloneMethod.freeze, {input_node: input}) return h
Example #3
Source File: language_understanding.py From nlp-services with MIT License | 5 votes |
def create_criterion_function(model): labels = C.placeholder(name='labels') ce = C.cross_entropy_with_softmax(model, labels) errs = C.classification_error(model, labels) return C.combine([ce, errs]) # (features, labels) -> (loss, metric)
Example #4
Source File: models_setup.py From dnn-model-services with MIT License | 5 votes |
def create_model(model_details, num_classes, input_features, new_prediction_node_name="prediction", freeze=False): # Load the pre-trained classification net and find nodes base_model = cntk.load_model(model_details["model_file"]) feature_node = cntk.logging.find_by_name(base_model, model_details["feature_node_name"]) last_node = cntk.logging.find_by_name(base_model, model_details["last_hidden_node_name"]) if model_details["inception"]: node_outputs = cntk.logging.get_node_outputs(base_model) last_node = node_outputs[5] feature_node = cntk.logging.find_all_with_name(base_model, "")[-5] if model_details["vgg"]: last_node = cntk.logging.find_by_name(base_model, "prob") feature_node = cntk.logging.find_by_name(base_model, "data") # Clone the desired layers with fixed weights cloned_layers = cntk.combine([last_node.owner]).clone( cntk.CloneMethod.freeze if freeze else cntk.CloneMethod.clone, {feature_node: cntk.placeholder(name="features")}, ) # Add new dense layer for class prediction feat_norm = input_features - cntk.Constant(114) cloned_out = cloned_layers(feat_norm) z = cntk.layers.Dense(num_classes, activation=None, name=new_prediction_node_name)(cloned_out) return z # Trains a transfer learning model
Example #5
Source File: cntk_backend.py From keras-lambda with MIT License | 5 votes |
def stop_gradient(variables): return C.stop_gradient(C.combine(variables))