org.nd4j.linalg.schedule.StepSchedule Java Examples
The following examples show how to use
org.nd4j.linalg.schedule.StepSchedule.
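Before looking at the examples, it is worth spelling out what the constructor arguments mean. A StepSchedule(scheduleType, initialValue, decayRate, step) yields initialValue * decayRate^floor(i / step), where i is the current iteration or epoch depending on the ScheduleType. The minimal sketch below (the class name and values are illustrative, not taken from any of the projects featured here) shows the resulting stepwise decay:

import org.nd4j.linalg.schedule.ISchedule;
import org.nd4j.linalg.schedule.ScheduleType;
import org.nd4j.linalg.schedule.StepSchedule;

public class StepScheduleDemo {
    public static void main(String[] args) {
        // Start at 0.1 and multiply by 0.5 every 10 iterations
        ISchedule schedule = new StepSchedule(ScheduleType.ITERATION, 0.1, 0.5, 10);

        for (int iter = 0; iter <= 30; iter += 10) {
            // valueAt(iteration, epoch); the epoch argument is ignored for ScheduleType.ITERATION
            System.out.printf("iteration %d -> value %.4f%n", iter, schedule.valueAt(iter, 0));
        }
        // Prints 0.1000, 0.0500, 0.0250, 0.0125
    }
}

So in the examples below, a schedule such as new StepSchedule(ScheduleType.ITERATION, 1e-2, 0.1, 100000) divides the learning rate by 10 every 100,000 iterations.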
Example #1
Source File: DL4JSequenceRecommender.java From inception with Apache License 2.0
private MultiLayerNetwork createConfiguredNetwork(DL4JSequenceRecommenderTraits aTraits, int aEmbeddingsDim)
{
    long start = System.currentTimeMillis();

    // Set up network configuration
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(aTraits.getOptimizationAlgorithm())
            .updater(new Nesterovs(
                    new StepSchedule(ScheduleType.ITERATION, 1e-2, 0.1, 100000), 0.9))
            .biasUpdater(new Nesterovs(
                    new StepSchedule(ScheduleType.ITERATION, 2e-2, 0.1, 100000), 0.9))
            .l2(aTraits.getL2())
            .weightInit(aTraits.getWeightInit())
            .gradientNormalization(aTraits.getGradientNormalization())
            .gradientNormalizationThreshold(aTraits.getGradientNormalizationThreshold())
            .list()
            .layer(0, new Bidirectional(Bidirectional.Mode.ADD, new LSTM.Builder()
                    .nIn(aEmbeddingsDim)
                    .nOut(200)
                    .activation(aTraits.getActivationL0())
                    .build()))
            .layer(1, new RnnOutputLayer.Builder()
                    .nIn(200)
                    .nOut(aTraits.getMaxTagsetSize())
                    .activation(aTraits.getActivationL1())
                    .lossFunction(aTraits.getLossFunction())
                    .build())
            .build();

    // log.info("Network configuration: {}", conf.toYaml());

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    // net.setListeners(new ScoreIterationListener(1));

    log.trace("Setting up the model took {}ms", System.currentTimeMillis() - start);

    return net;
}
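Note that the bias updater gets its own schedule with twice the initial learning rate (2e-2 vs. 1e-2); both schedules decay by the same factor of 0.1 every 100,000 iterations, and both updaters use Nesterov momentum of 0.9.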
Example #2
Source File: AlexNetTrain.java From dl4j-tutorials with MIT License
public static MultiLayerNetwork alexnetModel() {
    /**
     * AlexNet model interpretation based on the original paper
     * "ImageNet Classification with Deep Convolutional Neural Networks"
     * and the referenced imagenetExample code.
     * http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks.pdf
     **/
    double nonZeroBias = 1;
    double dropOut = 0.8;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .weightInit(WeightInit.DISTRIBUTION)
            .dist(new NormalDistribution(0.0, 0.01))
            .activation(Activation.RELU)
            .updater(new Nesterovs(new StepSchedule(ScheduleType.ITERATION, 0.1, 0.1, 100000), 0.9))
            .biasUpdater(new Nesterovs(new StepSchedule(ScheduleType.ITERATION, 0.2, 0.1, 100000), 0.9))
            // normalize to prevent vanishing or exploding gradients
            .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
            //.l2(5 * 1e-4)
            .list()
            .layer(0, convInit("cnn1", channels, 96, new int[]{11, 11}, new int[]{4, 4}, new int[]{3, 3}, 0))
            .layer(1, new LocalResponseNormalization.Builder().name("lrn1").build())
            .layer(2, maxPool("maxpool1", new int[]{3, 3}))
            .layer(3, conv5x5("cnn2", 256, new int[]{1, 1}, new int[]{2, 2}, nonZeroBias))
            .layer(4, new LocalResponseNormalization.Builder().name("lrn2").build())
            .layer(5, maxPool("maxpool2", new int[]{3, 3}))
            .layer(6, conv3x3("cnn3", 384, 0))
            .layer(7, conv3x3("cnn4", 384, nonZeroBias))
            .layer(8, conv3x3("cnn5", 256, nonZeroBias))
            .layer(9, maxPool("maxpool3", new int[]{3, 3}))
            .layer(10, fullyConnected("ffn1", 4096, nonZeroBias, dropOut, new GaussianDistribution(0, 0.005)))
            .layer(11, fullyConnected("ffn2", 4096, nonZeroBias, dropOut, new GaussianDistribution(0, 0.005)))
            .layer(12, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .name("output")
                    .nOut(numLabels)
                    .activation(Activation.SOFTMAX)
                    .build())
            .backprop(true)
            .pretrain(false)
            .setInputType(InputType.convolutional(height, width, channels))
            .build();

    return new MultiLayerNetwork(conf);
}
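The same weight/bias split appears here with initial rates of 0.1 and 0.2, echoing common AlexNet implementations (e.g., the Caffe reference model) that train biases at twice the base learning rate. Be aware that backprop(true) and pretrain(false) are legacy builder calls; later DL4J releases deprecated (and eventually removed) them, as backprop-only training became the default.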
Example #3
Source File: ValidateCuDNN.java From deeplearning4j with Apache License 2.0
@Test
public void validateConvLayersSimpleBN() {
    //Test ONLY BN - no other CuDNN functionality (i.e., DL4J impls for everything else)
    Nd4j.getRandom().setSeed(12345);

    int minibatch = 8;
    int numClasses = 10;
    //imageHeight,imageWidth,channels
    int imageHeight = 48;
    int imageWidth = 48;
    int channels = 3;
    IActivation activation = new ActivationIdentity();
    MultiLayerConfiguration multiLayerConfiguration = new NeuralNetConfiguration.Builder()
            .dataType(DataType.DOUBLE)
            .weightInit(WeightInit.XAVIER).seed(42)
            .activation(new ActivationELU())
            .updater(Nesterovs.builder()
                    .momentum(0.9)
                    .learningRateSchedule(new StepSchedule(
                            ScheduleType.EPOCH, 1e-2, 0.1, 20)).build()).list(
                    new Convolution2D.Builder().nOut(96)
                            .kernelSize(11, 11).biasInit(0.0)
                            .stride(4, 4).build(),
                    new ActivationLayer.Builder().activation(activation).build(),
                    new BatchNormalization.Builder().build(),
                    new Pooling2D.Builder()
                            .poolingType(SubsamplingLayer.PoolingType.MAX)
                            .kernelSize(3, 3).stride(2, 2)
                            .build(),
                    new DenseLayer.Builder()
                            .nOut(128)
                            .biasInit(0.0)
                            .build(),
                    new ActivationLayer.Builder().activation(activation).build(),
                    new OutputLayer.Builder().activation(new ActivationSoftmax())
                            .lossFunction(new LossNegativeLogLikelihood())
                            .nOut(numClasses)
                            .biasInit(0.0)
                            .build())
            .setInputType(InputType.convolutionalFlat(imageHeight, imageWidth, channels))
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(multiLayerConfiguration);
    net.init();

    int[] fShape = new int[]{minibatch, channels, imageHeight, imageWidth};
    int[] lShape = new int[]{minibatch, numClasses};

    List<Class<?>> classesToTest = new ArrayList<>();
    classesToTest.add(org.deeplearning4j.nn.layers.normalization.BatchNormalization.class);

    validateLayers(net, classesToTest, false, fShape, lShape,
            CuDNNValidationUtil.MAX_REL_ERROR, CuDNNValidationUtil.MIN_ABS_ERROR);
}
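Unlike Examples #1 and #2, which pass the schedule directly to the Nesterovs constructor, this test attaches it via Nesterovs.builder().learningRateSchedule(...); the two approaches are equivalent. Also note the ScheduleType.EPOCH: here the rate starts at 1e-2 and is multiplied by 0.1 every 20 epochs rather than every N iterations.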
Example #4
Source File: ValidateCuDNN.java From deeplearning4j with Apache License 2.0
@Test
@Ignore //AB 2019/05/20 - https://github.com/deeplearning4j/deeplearning4j/issues/5088 - ignored to get to "all passing" state for CI, and revisit later
public void validateConvLayersLRN() {
    //Test ONLY LRN - no other CuDNN functionality (i.e., DL4J impls for everything else)
    Nd4j.getRandom().setSeed(12345);

    int minibatch = 8;
    int numClasses = 10;
    //imageHeight,imageWidth,channels
    int imageHeight = 48;
    int imageWidth = 48;
    int channels = 3;
    IActivation activation = new ActivationIdentity();
    MultiLayerConfiguration multiLayerConfiguration = new NeuralNetConfiguration.Builder()
            .dataType(DataType.DOUBLE)
            .weightInit(WeightInit.XAVIER).seed(42)
            .activation(new ActivationELU())
            .updater(Nesterovs.builder()
                    .momentum(0.9)
                    .learningRateSchedule(new StepSchedule(
                            ScheduleType.EPOCH, 1e-2, 0.1, 20)).build()).list(
                    new Convolution2D.Builder().nOut(96)
                            .kernelSize(11, 11).biasInit(0.0)
                            .stride(4, 4).build(),
                    new ActivationLayer.Builder().activation(activation).build(),
                    new LocalResponseNormalization.Builder()
                            .alpha(1e-3).beta(0.75).k(2)
                            .n(5).build(),
                    new Pooling2D.Builder()
                            .poolingType(SubsamplingLayer.PoolingType.MAX)
                            .kernelSize(3, 3).stride(2, 2)
                            .build(),
                    new Convolution2D.Builder().nOut(256)
                            .kernelSize(5, 5).padding(2, 2)
                            .biasInit(0.0)
                            .stride(1, 1).build(),
                    new ActivationLayer.Builder().activation(activation).build(),
                    new OutputLayer.Builder().activation(new ActivationSoftmax())
                            .lossFunction(new LossNegativeLogLikelihood())
                            .nOut(numClasses)
                            .biasInit(0.0)
                            .build())
            .setInputType(InputType.convolutionalFlat(imageHeight, imageWidth, channels))
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(multiLayerConfiguration);
    net.init();

    int[] fShape = new int[]{minibatch, channels, imageHeight, imageWidth};
    int[] lShape = new int[]{minibatch, numClasses};

    List<Class<?>> classesToTest = new ArrayList<>();
    classesToTest.add(org.deeplearning4j.nn.layers.normalization.LocalResponseNormalization.class);

    validateLayers(net, classesToTest, false, fShape, lShape, 1e-2, 1e-2);
}
Example #5
Source File: StepScheduleSpace.java From deeplearning4j with Apache License 2.0
@Override
public ISchedule getValue(double[] parameterValues) {
    return new StepSchedule(scheduleType,
            initialValue.getValue(parameterValues),
            decayRate.getValue(parameterValues),
            step.getValue(parameterValues));
}
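This last example comes from Arbiter, DL4J's hyperparameter-optimization module: StepScheduleSpace is the search-space counterpart of StepSchedule. Its getValue(double[]) resolves the ParameterSpace<Double> for the initial value, decay rate, and step at one candidate point and returns the concrete ISchedule used to build that candidate's network.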