org.nd4j.linalg.learning.AdamUpdater Java Examples

The following examples show how to use org.nd4j.linalg.learning.AdamUpdater. The original project and source file are listed above each example.
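Before the project-specific examples, a minimal self-contained sketch of the workflow they all share may help: build an Adam configuration, allocate a flat state view large enough to hold Adam's two moment vectors, attach it with setStateViewArray, then call applyUpdater on a gradient each iteration (applyUpdater rewrites the gradient in-place into the Adam step). This sketch is not taken from any of the projects below; the class name, the 1 x 10 shape and the hyperparameters are placeholders.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.AdamUpdater;
import org.nd4j.linalg.learning.config.Adam;

public class AdamUpdaterSketch {
    public static void main(String[] args) {
        long[] shape = {1, 10};                            // shape of the parameter/gradient array (placeholder)
        Adam config = new Adam(1e-3, 0.9, 0.999, 1e-8);    // learning rate, beta1, beta2, epsilon

        AdamUpdater updater = new AdamUpdater(config);
        // Adam tracks two moment estimates per parameter, so the flat state view
        // has length config.stateSize(numParams) == 2 * numParams.
        int stateLength = (int) config.stateSize(shape[0] * shape[1]);
        updater.setStateViewArray(Nd4j.zeros(1, stateLength), shape, 'c', true);

        INDArray gradient = Nd4j.rand(1, 10);
        for (int iteration = 0; iteration < 5; iteration++) {
            // applyUpdater overwrites the gradient in-place with the Adam step;
            // a caller would then subtract it from the parameters (see Example #3).
            updater.applyUpdater(gradient, iteration, 0);
        }
    }
}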
Example #1
Source File: AdamLearnerTestCase.java    From jstarcraft-ai with Apache License 2.0
@Override
protected GradientUpdater<?> getOldFunction(long[] shape) {
    Adam configuration = new Adam();
    GradientUpdater<?> oldFunction = new AdamUpdater(configuration);
    int length = (int) (shape[0] * configuration.stateSize(shape[1]));
    INDArray view = Nd4j.zeros(length);
    oldFunction.setStateViewArray(view, shape, 'c', true);
    return oldFunction;
}
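Note that Adam's stateSize(numParams) is 2 * numParams (one slot for each of the two moment estimates), so the length computed above works out to twice the size of the shape[0] x shape[1] parameter array.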
 
Example #2
Source File: NeuralStyleTransfer.java    From Java-Machine-Learning-for-Computer-Vision with MIT License
private AdamUpdater createADAMUpdater() {
    AdamUpdater adamUpdater = new AdamUpdater(new Adam(LEARNING_RATE, BETA_MOMENTUM, BETA2_MOMENTUM, EPSILON));
    adamUpdater.setStateViewArray(Nd4j.zeros(1, 2 * CHANNELS * WIDTH * HEIGHT),
            new int[]{1, CHANNELS, HEIGHT, WIDTH}, 'c',
            true);
    return adamUpdater;
}
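Here the "parameters" being optimized are the pixels of an image, so the state view is sized at 2 * CHANNELS * WIDTH * HEIGHT: two Adam moment values per element of the 1 x CHANNELS x HEIGHT x WIDTH combination image whose shape the state is reshaped to.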
 
Example #3
Source File: NeuralStyleTransfer.java    From dl4j-tutorials with MIT License
private void transferStyle() throws IOException {

    ComputationGraph vgg16FineTune = loadModel();
    INDArray content = loadImage(CONTENT_FILE);
    INDArray style = loadImage(STYLE_FILE);
    INDArray combination = createCombinationImage();
    Map<String, INDArray> activationsContentMap = vgg16FineTune.feedForward(content, true);
    Map<String, INDArray> activationsStyleMap = vgg16FineTune.feedForward(style, true);
    HashMap<String, INDArray> activationsStyleGramMap = buildStyleGramValues(activationsStyleMap);
    AdamUpdater adamUpdater = createADAMUpdater();
    for (int iteration = 0; iteration < ITERATIONS; iteration++) {
        log.info("iteration  " + iteration);

        INDArray[] input = new INDArray[] { combination };
        Map<String, INDArray> activationsCombMap = vgg16FineTune.feedForward(input, true, false);

        INDArray styleBackProb = backPropagateStyles(vgg16FineTune, activationsStyleGramMap, activationsCombMap);
        INDArray backPropContent = backPropagateContent(vgg16FineTune, activationsContentMap, activationsCombMap);
        INDArray backPropAllValues = backPropContent.muli(ALPHA).addi(styleBackProb.muli(BETA));
        adamUpdater.applyUpdater(backPropAllValues, iteration, 0);
        combination.subi(backPropAllValues);

        log.info("Total Loss: " + totalLoss(activationsStyleMap, activationsCombMap, activationsContentMap));
        if (iteration % SAVE_IMAGE_CHECKPOINT == 0) {
            //save image can be found at target/classes/styletransfer/out
            saveImage(combination.dup(), iteration);
        }
    }
}
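This loop is where the updater from Example #2/#4 is actually used: the blended content-plus-style gradient is handed to applyUpdater, which rescales it in-place according to Adam's moment estimates and the iteration count, and the result is then subtracted from the combination image with subi. The updater here drives a standalone pixel optimization rather than a network's weight updates.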
 
Example #4
Source File: NeuralStyleTransfer.java    From dl4j-tutorials with MIT License
private AdamUpdater createADAMUpdater() {
    AdamUpdater adamUpdater = new AdamUpdater(new Adam(LEARNING_RATE, BETA_MOMENTUM, BETA2_MOMENTUM, EPSILON));
    adamUpdater.setStateViewArray(Nd4j.zeros(1, 2 * CHANNELS * WIDTH * HEIGHT),
        new long[]{1, CHANNELS, HEIGHT, WIDTH}, 'c',
        true);
    return adamUpdater;
}
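This is the same createADAMUpdater method as in Example #2; the only difference is that the gradient shape is passed to setStateViewArray as a long[] instead of an int[] (the shape-array type changed between ND4J versions).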
 
Example #5
Source File: Adam.java    From nd4j with Apache License 2.0
@Override
public GradientUpdater instantiate(INDArray viewArray, boolean initializeViewArray) {
    AdamUpdater u = new AdamUpdater(this);
    long[] gradientShape = viewArray.shape();
    gradientShape = Arrays.copyOf(gradientShape, gradientShape.length);
    gradientShape[1] /= 2;
    u.setStateViewArray(viewArray, gradientShape, viewArray.ordering(), initializeViewArray);
    return u;
}
 
Example #6
Source File: Adam.java    From deeplearning4j with Apache License 2.0
@Override
public GradientUpdater instantiate(INDArray viewArray, boolean initializeViewArray) {
    AdamUpdater u = new AdamUpdater(this);
    long[] gradientShape = viewArray.shape();
    gradientShape = Arrays.copyOf(gradientShape, gradientShape.length);
    gradientShape[1] /= 2;
    u.setStateViewArray(viewArray, gradientShape, viewArray.ordering(), initializeViewArray);
    return u;
}
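Examples #5 and #6 show the same Adam.instantiate implementation (the nd4j sources were later folded into the deeplearning4j repository). The view array passed in holds the m and v moment vectors packed along dimension 1, making it twice as wide as the gradient; halving gradientShape[1] recovers the shape of the actual gradient before the state view is attached.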
 
Example #7
Source File: Adam.java    From deeplearning4j with Apache License 2.0
@Override
public GradientUpdater instantiate(Map<String, INDArray> updaterState, boolean initializeStateArrays) {
    AdamUpdater u = new AdamUpdater(this);
    u.setState(updaterState, initializeStateArrays);
    return u;
}
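Unlike the flat-view overload above, this variant restores the updater from a map of named state arrays (Adam's first and second moment estimates) via setState, so previously saved updater state can be reattached instead of being initialized from a single view array.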