Java Code Examples for org.deeplearning4j.arbiter.optimize.runner.IOptimizationRunner#execute()
The following examples show how to use
org.deeplearning4j.arbiter.optimize.runner.IOptimizationRunner#execute().
You can vote up the examples you like or vote down the ones you don't,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
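All of the examples share one pattern: build an OptimizationConfiguration (a candidate generator, a score function, termination conditions, and usually a data provider/source and a model saver), hand it to a LocalOptimizationRunner, and call execute(), which runs the search to completion. The sketch below shows that shared skeleton using only classes that appear in the examples; `searchSpace` and `scoreFunction` are placeholders for whatever concrete search space and score function an example chooses, and the termination condition is arbitrary:

    //Minimal sketch (assumes some `searchSpace` and `scoreFunction` are already defined):
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(searchSpace);

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .scoreFunction(scoreFunction)
            .terminationConditions(new MaxCandidatesCondition(10))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration);
    runner.execute();    //Blocks until a termination condition is met

    //Once execute() returns, results can be queried from the runner:
    System.out.println("Best score: " + runner.bestScore());
    System.out.println("Candidates completed: " + runner.numCandidatesCompleted());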
Example 1
Source File: TestErrors.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test(timeout = 20000L)
public void testAllInvalidConfigCG() throws Exception {
    //Invalid config - basically check that this actually terminates
    File f = temp.newFolder();
    ComputationGraphSpace mls = new ComputationGraphSpace.Builder()
            .addInputs("in")
            .layer("0", new DenseLayerSpace.Builder().nIn(4).nOut(new FixedValue<>(0))   //INVALID: nOut of 0
                    .activation(Activation.TANH)
                    .build(), "in")
            .layer("1", new OutputLayerSpace.Builder().nOut(3).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "0")
            .setOutputs("1")
            .build();

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls);
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(new TestDataProviderMnist(32, 3))
            .modelSaver(new FileModelSaver(f))
            .scoreFunction(new TestSetLossScoreFunction(true))
            .terminationConditions(new MaxCandidatesCondition(5))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration);
    runner.execute();
}
Example 2
Source File: TestGeneticSearch.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test
public void GeneticSearchCandidateGenerator_getCandidate_ShouldGenerateCandidates() throws Exception {
    ScoreFunction scoreFunction = new BraninFunction.BraninScoreFunction();

    //Define configuration:
    CandidateGenerator candidateGenerator =
            new GeneticSearchCandidateGenerator.Builder(new BraninFunction.BraninSpace(), scoreFunction)
                    .build();

    TestTerminationCondition testTerminationCondition = new TestTerminationCondition();
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .scoreFunction(scoreFunction)
            .terminationConditions(new MaxCandidatesCondition(50), testTerminationCondition)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new BraninFunction.BraninTaskCreator());
    runner.addListeners(new LoggingStatusListener());
    runner.execute();

    Assert.assertFalse(testTerminationCondition.hasAFailedCandidate);
}
Example 3
Source File: TestRandomSearch.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test
public void test() throws Exception {
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, new HashMap<>());

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(new BraninFunction.BraninSpace(), commands);
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .scoreFunction(new BraninFunction.BraninScoreFunction())
            .terminationConditions(new MaxCandidatesCondition(50))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new BraninFunction.BraninTaskCreator());
    runner.addListeners(new LoggingStatusListener());
    runner.execute();

//        System.out.println("----- Complete -----");
}
Example 4
Source File: TestGeneticSearch.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test
public void GeneticSearchCandidateGenerator_getCandidate_GeneticExceptionShouldMarkCandidateAsFailed() {
    ScoreFunction scoreFunction = new BraninFunction.BraninScoreFunction();

    //Define configuration:
    CandidateGenerator candidateGenerator =
            new GeneticSearchCandidateGenerator.Builder(new BraninFunction.BraninSpace(), scoreFunction)
                    .selectionOperator(new TestSelectionOperator()).build();

    TestTerminationCondition testTerminationCondition = new TestTerminationCondition();
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .scoreFunction(scoreFunction)
            .terminationConditions(testTerminationCondition)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new BraninFunction.BraninTaskCreator());
    runner.addListeners(new LoggingStatusListener());
    runner.execute();

    Assert.assertTrue(testTerminationCondition.hasAFailedCandidate);
}
Example 5
Source File: TestErrors.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test(timeout = 20000L)
public void testAllInvalidDataConfigMismatchCG() throws Exception {
    //Valid config - but mismatched with provided data
    File f = temp.newFolder();
    ComputationGraphSpace mls = new ComputationGraphSpace.Builder()
            .addInputs("in")
            .layer("0", new DenseLayerSpace.Builder().nIn(4).nOut(10)
                    .activation(Activation.TANH).build(), "in")
            .addLayer("1", new OutputLayerSpace.Builder().nIn(10).nOut(3).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "0")
            .setOutputs("1")
            .build();

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls);
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(new TestDataProviderMnist(32, 3))
            .modelSaver(new FileModelSaver(f))
            .scoreFunction(new TestSetLossScoreFunction(true))
            .terminationConditions(new MaxCandidatesCondition(5))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());
    runner.execute();
}
Example 6
Source File: TestErrors.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test(timeout = 20000L)
public void testAllInvalidDataConfigMismatch() throws Exception {
    //Valid config - but mismatched with provided data
    File f = temp.newFolder();
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
            .addLayer(new DenseLayerSpace.Builder().nIn(4).nOut(10)
                    .activation(Activation.TANH)
                    .build())
            .addLayer(new OutputLayerSpace.Builder().nIn(10).nOut(3).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls);
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(new TestDataProviderMnist(32, 3))
            .modelSaver(new FileModelSaver(f))
            .scoreFunction(new TestSetLossScoreFunction(true))
            .terminationConditions(new MaxCandidatesCondition(5))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration);
    runner.execute();
}
Example 7
Source File: TestErrors.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test(timeout = 20000L)
public void testAllInvalidConfig() throws Exception {
    //Invalid config - basically check that this actually terminates
    File f = temp.newFolder();
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
            .addLayer(new DenseLayerSpace.Builder().nIn(4).nOut(new FixedValue<>(0))   //INVALID: nOut of 0
                    .activation(Activation.TANH)
                    .build())
            .addLayer(new OutputLayerSpace.Builder().nOut(3).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls);
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(new TestDataProviderMnist(32, 3))
            .modelSaver(new FileModelSaver(f))
            .scoreFunction(new TestSetLossScoreFunction(true))
            .terminationConditions(new MaxCandidatesCondition(5))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration);
    runner.execute();
}
Example 8
Source File: TestMultiLayerSpace.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test
public void testInputTypeBasic() throws Exception {

    ParameterSpace<Integer> layerSizeHyperparam = new IntegerParameterSpace(20, 60);

    MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder().l2(0.0001)
            .weightInit(WeightInit.XAVIER).updater(new Nesterovs())
            .addLayer(new ConvolutionLayerSpace.Builder().kernelSize(5, 5).nIn(1).stride(1, 1)
                    .nOut(layerSizeHyperparam).activation(Activation.IDENTITY).build())
            .addLayer(new SubsamplingLayerSpace.Builder().poolingType(SubsamplingLayer.PoolingType.MAX)
                    .kernelSize(2, 2).stride(2, 2).build())
            .addLayer(new ConvolutionLayerSpace.Builder().kernelSize(5, 5)
                    //Note that nIn need not be specified in later layers
                    .stride(1, 1).nOut(50).activation(Activation.IDENTITY).build())
            .addLayer(new SubsamplingLayerSpace.Builder().poolingType(SubsamplingLayer.PoolingType.MAX)
                    .kernelSize(2, 2).stride(2, 2).build())
            .addLayer(new DenseLayerSpace.Builder().activation(Activation.RELU).nOut(500).build())
            .addLayer(new OutputLayerSpace.Builder()
                    .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
                    .activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1)).build();

    DataProvider dataProvider = new TestDataSetProvider();

    File f = testDir.newFolder();
    if (f.exists())
        f.delete();
    f.mkdir();
    ResultSaver modelSaver = new FileModelSaver(f.getAbsolutePath());

    ScoreFunction scoreFunction = new TestSetAccuracyScoreFunction();

    int maxCandidates = 4;
    TerminationCondition[] terminationConditions =
            new TerminationCondition[] {new MaxCandidatesCondition(maxCandidates)};

    //Given these configuration options, let's put them all together:
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(new RandomSearchGenerator(hyperparameterSpace, null))
            .dataProvider(dataProvider)
            .modelSaver(modelSaver)
            .scoreFunction(scoreFunction)
            .terminationConditions(terminationConditions)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());
    runner.execute();

    assertEquals(maxCandidates, runner.getResults().size());
}
Example 9
Source File: MNISTOptimizationTest.java From deeplearning4j with Apache License 2.0 | 4 votes |
public static void main(String[] args) throws Exception {

    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
            new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(3))
                    .iterationTerminationConditions(
                            new MaxTimeIterationTerminationCondition(5, TimeUnit.MINUTES),
                            new MaxScoreIterationTerminationCondition(4.6) //Random score: -log_e(0.1) ~= 2.3
                    )
                    .scoreCalculator(new DataSetLossCalculator(
                            new MnistDataSetIterator(64, 2000, false, false, true, 123), true))
                    .modelSaver(new InMemoryModelSaver())
                    .build();

    //Define: network config (hyperparameter space)
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
            .l2(new ContinuousParameterSpace(0.0001, 0.05))
            .addLayer(new ConvolutionLayerSpace.Builder().nIn(1)
                            .nOut(new IntegerParameterSpace(5, 30))
                            .kernelSize(new DiscreteParameterSpace<>(new int[] {3, 3},
                                    new int[] {4, 4}, new int[] {5, 5}))
                            .stride(new DiscreteParameterSpace<>(new int[] {1, 1}, new int[] {2, 2}))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.SOFTPLUS,
                                    Activation.LEAKYRELU))
                            .build(),
                    new IntegerParameterSpace(1, 2)) //1-2 identical layers
            .addLayer(new DenseLayerSpace.Builder().nIn(4).nOut(new IntegerParameterSpace(2, 10))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                            .build(),
                    new IntegerParameterSpace(0, 1)) //0 to 1 layers
            .addLayer(new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .earlyStoppingConfiguration(esConf).build();

    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, commands);
    DataProvider dataProvider = new MnistDataSetProvider();

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterMNISTSmall\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(dataProvider)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(new TestSetLossScoreFunction(true))
            .terminationConditions(new MaxTimeCondition(120, TimeUnit.MINUTES),
                    new MaxCandidatesCondition(100))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());

//        ArbiterUIServer server = ArbiterUIServer.getInstance();
//        runner.addListeners(new UIOptimizationRunnerStatusListener(server));

    runner.execute();

    System.out.println("----- COMPLETE -----");
}
Example 10
Source File: TestBasic.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test
@Ignore
public void testBasicMnistCompGraph() throws Exception {

    ComputationGraphSpace cgs = new ComputationGraphSpace.Builder()
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
            .l2(new ContinuousParameterSpace(0.0001, 0.05))
            .addInputs("in")
            .addLayer("0", new ConvolutionLayerSpace.Builder().nIn(1)
                    .nOut(new IntegerParameterSpace(5, 30))
                    .kernelSize(new DiscreteParameterSpace<>(new int[]{3, 3}, new int[]{4, 4}, new int[]{5, 5}))
                    .stride(new DiscreteParameterSpace<>(new int[]{1, 1}, new int[]{2, 2}))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.SOFTPLUS,
                            Activation.LEAKYRELU))
                    .build(), "in")
            .addLayer("1", new DenseLayerSpace.Builder().nOut(new IntegerParameterSpace(32, 128))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                    .build(), "0")
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "1")
            .setOutputs("out")
            .setInputTypes(InputType.convolutionalFlat(28, 28, 1))
            .build();

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(cgs);
    DataProvider dataProvider = new MnistDataSetProvider();

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterUiTestBasicMnistCG\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(dataProvider)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(new TestSetLossScoreFunction(true))
            .terminationConditions(new MaxTimeCondition(120, TimeUnit.MINUTES),
                    new MaxCandidatesCondition(100))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new ComputationGraphTaskCreator());

    StatsStorage ss = new InMemoryStatsStorage();
    StatusListener sl = new ArbiterStatusListener(ss);
    runner.addListeners(sl);

    UIServer.getInstance().attach(ss);

    runner.execute();
    Thread.sleep(100000);
}
Example 11
Source File: HyperParameterTuningArbiterUiExample.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) {

    ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001, 0.01);
    ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5, 11);

    MultiLayerSpace hyperParameterSpace = new MultiLayerSpace.Builder()
            .updater(new AdamSpace(learningRateParam))
            // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
            .addLayer(new DenseLayerSpace.Builder()
                    .activation(Activation.RELU)
                    .nIn(11)
                    .nOut(layerSizeParam)
                    .build())
            .addLayer(new DenseLayerSpace.Builder()
                    .activation(Activation.RELU)
                    .nIn(layerSizeParam)
                    .nOut(layerSizeParam)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .activation(Activation.SIGMOID)
                    .lossFunction(LossFunctions.LossFunction.XENT)
                    .nOut(1)
                    .build())
            .build();

    Map<String, Object> dataParams = new HashMap<>();
    dataParams.put("batchSize", new Integer(10));

    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY,
            HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName());

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

    Properties dataSourceProperties = new Properties();
    dataSourceProperties.setProperty("minibatchSize", "64");

    ResultSaver modelSaver = new FileModelSaver("resources/");
    ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);

    TerminationCondition[] conditions = {
            new MaxTimeCondition(120, TimeUnit.MINUTES),
            new MaxCandidatesCondition(30)
    };

    OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class, dataSourceProperties)
            .modelSaver(modelSaver)
            .scoreFunction(scoreFunction)
            .terminationConditions(conditions)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,
            new MultiLayerNetworkTaskCreator());

    //Store the optimization stats and attach the Arbiter UI:
    StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
    runner.addListeners(new ArbiterStatusListener(ss));
    UIServer.getInstance().attach(ss);
    //runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss)

    runner.execute();

    //Print the best hyperparameters
    double bestScore = runner.bestScore();
    int bestCandidateIndex = runner.bestScoreCandidateIndex();
    int numberOfConfigsEvaluated = runner.numCandidatesCompleted();

    String s = "Best score: " + bestScore + "\n" +
            "Index of model with best score: " + bestCandidateIndex + "\n" +
            "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";
    System.out.println(s);
}
Example 12
Source File: TestBasic.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test
@Ignore
public void testBasicMnistDataSource() throws InterruptedException {

    ParameterSpace<Double> learningRateHyperparam = new ContinuousParameterSpace(0.0001, 0.1);
    ParameterSpace<Integer> layerSizeHyperparam = new IntegerParameterSpace(16, 256);

    MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder()
            .weightInit(WeightInit.XAVIER)
            .l2(0.0001)
            .updater(new SgdSpace(learningRateHyperparam))
            .addLayer(new DenseLayerSpace.Builder()
                    .nIn(784)
                    .activation(Activation.LEAKYRELU)
                    .nOut(layerSizeHyperparam)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .nOut(10)
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .build())
            .build();

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperparameterSpace, null);
    ScoreFunction scoreFunction = new EvaluationScoreFunction(Evaluation.Metric.ACCURACY);
    TerminationCondition[] terminationConditions = {
            new MaxTimeCondition(5, TimeUnit.MINUTES),
            new MaxCandidatesCondition(2)};

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterUiTestBasicMnist\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    if (!f.exists())
        throw new RuntimeException();

    Class<? extends DataSource> ds = MnistDataSource.class;
    Properties dsp = new Properties();
    dsp.setProperty("minibatch", "8");

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataSource(ds, dsp)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(scoreFunction)
            .terminationConditions(terminationConditions)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());

    StatsStorage ss = new InMemoryStatsStorage();
    StatusListener sl = new ArbiterStatusListener(ss);
    runner.addListeners(sl);

    UIServer.getInstance().attach(ss);

    runner.execute();
    Thread.sleep(90000);
}
Example 13
Source File: TestScoreFunctions.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test
public void testROCScoreFunctions() throws Exception {

    for (boolean auc : new boolean[]{true, false}) {
        for (ROCScoreFunction.ROCType rocType : ROCScoreFunction.ROCType.values()) {
            String msg = (auc ? "AUC" : "AUPRC") + " - " + rocType;
            log.info("Starting: " + msg);

            ParameterSpace<Double> lr = new ContinuousParameterSpace(1e-5, 1e-3);

            int nOut = (rocType == ROCScoreFunction.ROCType.ROC ? 2 : 10);
            LossFunctions.LossFunction lf = (rocType == ROCScoreFunction.ROCType.BINARY ?
                    LossFunctions.LossFunction.XENT : LossFunctions.LossFunction.MCXENT);
            Activation a = (rocType == ROCScoreFunction.ROCType.BINARY ? Activation.SIGMOID : Activation.SOFTMAX);

            MultiLayerSpace mls = new MultiLayerSpace.Builder()
                    .trainingWorkspaceMode(WorkspaceMode.NONE)
                    .inferenceWorkspaceMode(WorkspaceMode.NONE)
                    .updater(new AdamSpace(lr))
                    .weightInit(WeightInit.XAVIER)
                    .layer(new OutputLayerSpace.Builder().nIn(784).nOut(nOut)
                            .activation(a)
                            .lossFunction(lf).build())
                    .build();

            CandidateGenerator cg = new RandomSearchGenerator(mls);
            ResultSaver rs = new InMemoryResultSaver();
            ScoreFunction sf = new ROCScoreFunction(rocType,
                    (auc ? ROCScoreFunction.Metric.AUC : ROCScoreFunction.Metric.AUPRC));

            OptimizationConfiguration oc = new OptimizationConfiguration.Builder()
                    .candidateGenerator(cg)
                    .dataProvider(new DP(rocType))
                    .modelSaver(rs)
                    .scoreFunction(sf)
                    .terminationConditions(new MaxCandidatesCondition(3))
                    .rngSeed(12345)
                    .build();

            IOptimizationRunner runner = new LocalOptimizationRunner(oc, new MultiLayerNetworkTaskCreator());
            runner.execute();

            List<ResultReference> list = runner.getResults();

            for (ResultReference rr : list) {
                DataSetIterator testIter = new MnistDataSetIterator(4, 16, false, false, false, 12345);
                testIter.setPreProcessor(new PreProc(rocType));

                OptimizationResult or = rr.getResult();
                MultiLayerNetwork net = (MultiLayerNetwork) or.getResultReference().getResultModel();

                double expScore;
                switch (rocType) {
                    case ROC:
                        if (auc) {
                            expScore = net.doEvaluation(testIter, new ROC())[0].calculateAUC();
                        } else {
                            expScore = net.doEvaluation(testIter, new ROC())[0].calculateAUCPR();
                        }
                        break;
                    case BINARY:
                        if (auc) {
                            expScore = net.doEvaluation(testIter, new ROCBinary())[0].calculateAverageAuc();
                        } else {
                            expScore = net.doEvaluation(testIter, new ROCBinary())[0].calculateAverageAUCPR();
                        }
                        break;
                    case MULTICLASS:
                        if (auc) {
                            expScore = net.doEvaluation(testIter, new ROCMultiClass())[0].calculateAverageAUC();
                        } else {
                            expScore = net.doEvaluation(testIter, new ROCMultiClass())[0].calculateAverageAUCPR();
                        }
                        break;
                    default:
                        throw new RuntimeException();
                }

                DataSetIterator iter = new MnistDataSetIterator(4, 16, false, false, false, 12345);
                iter.setPreProcessor(new PreProc(rocType));

                assertEquals(msg, expScore, or.getScore(), 1e-4);
            }
        }
    }
}
Example 14
Source File: TestDL4JLocalExecution.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test
@org.junit.Ignore
public void testLocalExecutionGridSearch() throws Exception {

    //Define: network config (hyperparameter space)
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01))
            .addLayer(new DenseLayerSpace.Builder().nIn(4).nOut(new IntegerParameterSpace(2, 10))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                            .build(),
                    new IntegerParameterSpace(1, 2)) //1-2 identical layers (except nIn)
            .addLayer(new OutputLayerSpace.Builder().nOut(3).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .numEpochs(3).build();

    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    CandidateGenerator candidateGenerator = new GridSearchCandidateGenerator(mls, 5,
            GridSearchCandidateGenerator.Mode.Sequential, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest/").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(dataProvider)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(new TestSetLossScoreFunction())
            .terminationConditions(new MaxTimeCondition(2, TimeUnit.MINUTES),
                    new MaxCandidatesCondition(100))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration,
            new MultiLayerNetworkTaskCreator(new ClassificationEvaluator()));

    runner.execute();
    System.out.println("----- COMPLETE -----");
}
Example 15
Source File: HyperParameterTuning.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) {

    ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001, 0.01);
    ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5, 11);

    MultiLayerSpace hyperParameterSpace = new MultiLayerSpace.Builder()
            .updater(new AdamSpace(learningRateParam))
            // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
            .addLayer(new DenseLayerSpace.Builder()
                    .activation(Activation.RELU)
                    .nIn(11)
                    .nOut(layerSizeParam)
                    .build())
            .addLayer(new DenseLayerSpace.Builder()
                    .activation(Activation.RELU)
                    .nIn(layerSizeParam)
                    .nOut(layerSizeParam)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .activation(Activation.SIGMOID)
                    .lossFunction(LossFunctions.LossFunction.XENT)
                    .nOut(1)
                    .build())
            .build();

    Map<String, Object> dataParams = new HashMap<>();
    dataParams.put("batchSize", new Integer(10));

    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, ExampleDataSource.class.getCanonicalName());

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

    Properties dataSourceProperties = new Properties();
    dataSourceProperties.setProperty("minibatchSize", "64");

    ResultSaver modelSaver = new FileModelSaver("resources/");
    ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);

    TerminationCondition[] conditions = {
            new MaxTimeCondition(120, TimeUnit.MINUTES),
            new MaxCandidatesCondition(30)
    };

    OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataSource(ExampleDataSource.class, dataSourceProperties)
            .modelSaver(modelSaver)
            .scoreFunction(scoreFunction)
            .terminationConditions(conditions)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,
            new MultiLayerNetworkTaskCreator());

    //Uncomment this if you want to store the model:
    //StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
    //runner.addListeners(new ArbiterStatusListener(ss));
    //UIServer.getInstance().attach(ss);

    runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss)
    runner.execute();

    //Print the best hyperparameters
    double bestScore = runner.bestScore();
    int bestCandidateIndex = runner.bestScoreCandidateIndex();
    int numberOfConfigsEvaluated = runner.numCandidatesCompleted();

    String s = "Best score: " + bestScore + "\n" +
            "Index of model with best score: " + bestCandidateIndex + "\n" +
            "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";
    System.out.println(s);
}
Example 16
Source File: TestGraphLocalExecution.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test
public void testLocalExecution() throws Exception {

    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    //Define: network config (hyperparameter space)
    ComputationGraphSpace mls = new ComputationGraphSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01))
            .addInputs("in")
            .setInputTypes(InputType.feedForward(4))
            .addLayer("layer0", new DenseLayerSpace.Builder().nIn(784).nOut(new IntegerParameterSpace(2, 10))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                    .build(), "in")
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "layer0")
            .setOutputs("out").numEpochs(3).build();

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(dataProvider)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(ScoreFunctions.testSetLoss(true))
            .terminationConditions(new MaxTimeCondition(30, TimeUnit.SECONDS),
                    new MaxCandidatesCondition(3))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration,
            new ComputationGraphTaskCreator(new ClassificationEvaluator()));

    runner.execute();

    assertEquals(0, runner.numCandidatesFailed());
    assertTrue(runner.numCandidatesCompleted() > 0);
}
Example 17
Source File: TestGraphLocalExecution.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test
public void testLocalExecutionMDS() throws Exception {

    //Define: network config (hyperparameter space)
    ComputationGraphSpace mls = new ComputationGraphSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01))
            .addInputs("in")
            .setInputTypes(InputType.feedForward(784))
            .addLayer("layer0", new DenseLayerSpace.Builder().nIn(784).nOut(new IntegerParameterSpace(2, 10))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                    .build(), "in")
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "layer0")
            .setOutputs("out").numEpochs(3).build();

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, null);

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(new TestMdsDataProvider(1, 32))
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(ScoreFunctions.testSetLoss(true))
            .terminationConditions(new MaxTimeCondition(30, TimeUnit.SECONDS),
                    new MaxCandidatesCondition(3))
            .scoreFunction(ScoreFunctions.testSetAccuracy())
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new ComputationGraphTaskCreator());
    runner.execute();

    assertEquals(0, runner.numCandidatesFailed());
    assertTrue(runner.numCandidatesCompleted() > 0);
}
Example 18
Source File: TestGraphLocalExecution.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test
public void testLocalExecutionEarlyStopping() throws Exception {
    EarlyStoppingConfiguration<ComputationGraph> esConf =
            new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(2))
                    .scoreCalculator(new ScoreProvider())
                    .modelSaver(new InMemoryModelSaver()).build();

    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    //Define: network config (hyperparameter space)
    ComputationGraphSpace cgs = new ComputationGraphSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new AdamSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01))
            .addInputs("in")
            .setInputTypes(InputType.feedForward(784))
            .addLayer("first", new DenseLayerSpace.Builder().nIn(784).nOut(new IntegerParameterSpace(2, 10))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                    .build(), "in")
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "first")
            .setOutputs("out").earlyStoppingConfiguration(esConf).build();

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(cgs, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest2CG\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(dataProvider)
            .scoreFunction(ScoreFunctions.testSetF1())
            .modelSaver(new FileModelSaver(modelSavePath))
            .terminationConditions(new MaxTimeCondition(15, TimeUnit.SECONDS),
                    new MaxCandidatesCondition(3))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new ComputationGraphTaskCreator());
    runner.execute();

    assertEquals(0, runner.numCandidatesFailed());
    assertTrue(runner.numCandidatesCompleted() > 0);
}