Java Code Examples for org.deeplearning4j.arbiter.optimize.runner.IOptimizationRunner#addListeners()
The following examples show how to use
org.deeplearning4j.arbiter.optimize.runner.IOptimizationRunner#addListeners() .
You can vote up the examples you find helpful or vote down the ones you don't,
and you can navigate to the original project or source file via the links above each example. Related API usage is listed on the sidebar.
Example 1
Source File: TestRandomSearch.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test public void test() throws Exception { Map<String, Object> commands = new HashMap<>(); commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, new HashMap<>()); //Define configuration: CandidateGenerator candidateGenerator = new RandomSearchGenerator(new BraninFunction.BraninSpace(), commands); OptimizationConfiguration configuration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator).scoreFunction(new BraninFunction.BraninScoreFunction()) .terminationConditions(new MaxCandidatesCondition(50)).build(); IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new BraninFunction.BraninTaskCreator()); runner.addListeners(new LoggingStatusListener()); runner.execute(); // System.out.println("----- Complete -----"); }
Example 2
Source File: TestGeneticSearch.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test public void GeneticSearchCandidateGenerator_getCandidate_ShouldGenerateCandidates() throws Exception { ScoreFunction scoreFunction = new BraninFunction.BraninScoreFunction(); //Define configuration: CandidateGenerator candidateGenerator = new GeneticSearchCandidateGenerator.Builder(new BraninFunction.BraninSpace(), scoreFunction) .build(); TestTerminationCondition testTerminationCondition = new TestTerminationCondition(); OptimizationConfiguration configuration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator).scoreFunction(scoreFunction) .terminationConditions(new MaxCandidatesCondition(50), testTerminationCondition).build(); IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new BraninFunction.BraninTaskCreator()); runner.addListeners(new LoggingStatusListener()); runner.execute(); Assert.assertFalse(testTerminationCondition.hasAFailedCandidate); }
Example 3
Source File: TestGeneticSearch.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test public void GeneticSearchCandidateGenerator_getCandidate_GeneticExceptionShouldMarkCandidateAsFailed() { ScoreFunction scoreFunction = new BraninFunction.BraninScoreFunction(); //Define configuration: CandidateGenerator candidateGenerator = new GeneticSearchCandidateGenerator.Builder(new BraninFunction.BraninSpace(), scoreFunction) .selectionOperator(new TestSelectionOperator()).build(); TestTerminationCondition testTerminationCondition = new TestTerminationCondition(); OptimizationConfiguration configuration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator).scoreFunction(scoreFunction) .terminationConditions(testTerminationCondition).build(); IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new BraninFunction.BraninTaskCreator()); runner.addListeners(new LoggingStatusListener()); runner.execute(); Assert.assertTrue(testTerminationCondition.hasAFailedCandidate); }
Example 4
Source File: HyperParameterTuningArbiterUiExample.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) { ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01); ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11); MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder() .updater(new AdamSpace(learningRateParam)) // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(11) .nOut(layerSizeParam) .build()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(layerSizeParam) .nOut(layerSizeParam) .build()) .addLayer(new OutputLayerSpace.Builder() .activation(Activation.SIGMOID) .lossFunction(LossFunctions.LossFunction.XENT) .nOut(1) .build()) .build(); Map<String,Object> dataParams = new HashMap<>(); dataParams.put("batchSize",new Integer(10)); Map<String,Object> commands = new HashMap<>(); commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName()); CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParamaterSpace,dataParams); Properties dataSourceProperties = new Properties(); dataSourceProperties.setProperty("minibatchSize", "64"); ResultSaver modelSaver = new FileModelSaver("resources/"); ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY); TerminationCondition[] conditions = { new MaxTimeCondition(120, TimeUnit.MINUTES), new MaxCandidatesCondition(30) }; OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator) .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class,dataSourceProperties) .modelSaver(modelSaver) .scoreFunction(scoreFunction) .terminationConditions(conditions) .build(); IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator()); 
//Uncomment this if you want to store the model. StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j")); runner.addListeners(new ArbiterStatusListener(ss)); UIServer.getInstance().attach(ss); //runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss) runner.execute(); //Print the best hyper params double bestScore = runner.bestScore(); int bestCandidateIndex = runner.bestScoreCandidateIndex(); int numberOfConfigsEvaluated = runner.numCandidatesCompleted(); String s = "Best score: " + bestScore + "\n" + "Index of model with best score: " + bestCandidateIndex + "\n" + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n"; System.out.println(s); }
Example 5
Source File: HyperParameterTuning.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) { ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01); ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11); MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder() .updater(new AdamSpace(learningRateParam)) // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(11) .nOut(layerSizeParam) .build()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(layerSizeParam) .nOut(layerSizeParam) .build()) .addLayer(new OutputLayerSpace.Builder() .activation(Activation.SIGMOID) .lossFunction(LossFunctions.LossFunction.XENT) .nOut(1) .build()) .build(); Map<String,Object> dataParams = new HashMap<>(); dataParams.put("batchSize",new Integer(10)); Map<String,Object> commands = new HashMap<>(); commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY,ExampleDataSource.class.getCanonicalName()); CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParamaterSpace,dataParams); Properties dataSourceProperties = new Properties(); dataSourceProperties.setProperty("minibatchSize", "64"); ResultSaver modelSaver = new FileModelSaver("resources/"); ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY); TerminationCondition[] conditions = { new MaxTimeCondition(120, TimeUnit.MINUTES), new MaxCandidatesCondition(30) }; OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator) .dataSource(ExampleDataSource.class,dataSourceProperties) .modelSaver(modelSaver) .scoreFunction(scoreFunction) .terminationConditions(conditions) .build(); IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator()); //Uncomment this if you want to store the model. 
//StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j")); //runner.addListeners(new ArbiterStatusListener(ss)); //UIServer.getInstance().attach(ss); runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss) runner.execute(); //Print the best hyper params double bestScore = runner.bestScore(); int bestCandidateIndex = runner.bestScoreCandidateIndex(); int numberOfConfigsEvaluated = runner.numCandidatesCompleted(); String s = "Best score: " + bestScore + "\n" + "Index of model with best score: " + bestCandidateIndex + "\n" + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n"; System.out.println(s); }
Example 6
Source File: HyperParameterTuningArbiterUiExample.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) { ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01); ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11); MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder() .updater(new AdamSpace(learningRateParam)) // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(11) .nOut(layerSizeParam) .build()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(layerSizeParam) .nOut(layerSizeParam) .build()) .addLayer(new OutputLayerSpace.Builder() .activation(Activation.SIGMOID) .lossFunction(LossFunctions.LossFunction.XENT) .nOut(1) .build()) .build(); Map<String,Object> dataParams = new HashMap<>(); dataParams.put("batchSize",new Integer(10)); Map<String,Object> commands = new HashMap<>(); commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName()); CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParamaterSpace,dataParams); Properties dataSourceProperties = new Properties(); dataSourceProperties.setProperty("minibatchSize", "64"); ResultSaver modelSaver = new FileModelSaver("resources/"); ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY); TerminationCondition[] conditions = { new MaxTimeCondition(120, TimeUnit.MINUTES), new MaxCandidatesCondition(30) }; OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator) .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class,dataSourceProperties) .modelSaver(modelSaver) .scoreFunction(scoreFunction) .terminationConditions(conditions) .build(); IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator()); 
//Uncomment this if you want to store the model. StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j")); runner.addListeners(new ArbiterStatusListener(ss)); UIServer.getInstance().attach(ss); //runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss) runner.execute(); //Print the best hyper params double bestScore = runner.bestScore(); int bestCandidateIndex = runner.bestScoreCandidateIndex(); int numberOfConfigsEvaluated = runner.numCandidatesCompleted(); String s = "Best score: " + bestScore + "\n" + "Index of model with best score: " + bestCandidateIndex + "\n" + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n"; System.out.println(s); }
Example 7
Source File: HyperParameterTuning.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) { ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01); ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11); MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder() .updater(new AdamSpace(learningRateParam)) // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(11) .nOut(layerSizeParam) .build()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(layerSizeParam) .nOut(layerSizeParam) .build()) .addLayer(new OutputLayerSpace.Builder() .activation(Activation.SIGMOID) .lossFunction(LossFunctions.LossFunction.XENT) .nOut(1) .build()) .build(); Map<String,Object> dataParams = new HashMap<>(); dataParams.put("batchSize",new Integer(10)); Map<String,Object> commands = new HashMap<>(); commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY,ExampleDataSource.class.getCanonicalName()); CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParamaterSpace,dataParams); Properties dataSourceProperties = new Properties(); dataSourceProperties.setProperty("minibatchSize", "64"); ResultSaver modelSaver = new FileModelSaver("resources/"); ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY); TerminationCondition[] conditions = { new MaxTimeCondition(120, TimeUnit.MINUTES), new MaxCandidatesCondition(30) }; OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator) .dataSource(ExampleDataSource.class,dataSourceProperties) .modelSaver(modelSaver) .scoreFunction(scoreFunction) .terminationConditions(conditions) .build(); IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator()); //Uncomment this if you want to store the model. 
//StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j")); //runner.addListeners(new ArbiterStatusListener(ss)); //UIServer.getInstance().attach(ss); runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss) runner.execute(); //Print the best hyper params double bestScore = runner.bestScore(); int bestCandidateIndex = runner.bestScoreCandidateIndex(); int numberOfConfigsEvaluated = runner.numCandidatesCompleted(); String s = "Best score: " + bestScore + "\n" + "Index of model with best score: " + bestCandidateIndex + "\n" + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n"; System.out.println(s); }
Example 8
Source File: TestBasic.java From deeplearning4j with Apache License 2.0 | 4 votes |
/**
 * Random-search MNIST hyperparameter optimization using the DataSource API,
 * with results streamed to the Arbiter UI from in-memory stats storage.
 *
 * <p>Fixes vs. original: the model-save directory name contained a hard-coded
 * Windows separator ({@code "ArbiterUiTestBasicMnist\\"}), which is
 * non-portable; the trailing separator is dropped and {@code mkdirs()} is used
 * so intermediate directories are also created. The bare RuntimeException now
 * carries a message.
 */
@Test
@Ignore
public void testBasicMnistDataSource() throws InterruptedException {
    // Hyperparameters searched: learning rate and hidden layer size.
    ParameterSpace<Double> learningRateHyperparam = new ContinuousParameterSpace(0.0001, 0.1);
    ParameterSpace<Integer> layerSizeHyperparam = new IntegerParameterSpace(16, 256);

    MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder()
            .weightInit(WeightInit.XAVIER)
            .l2(0.0001)
            .updater(new SgdSpace(learningRateHyperparam))
            .addLayer(new DenseLayerSpace.Builder()
                    .nIn(784)                     // 28x28 flattened MNIST input
                    .activation(Activation.LEAKYRELU)
                    .nOut(layerSizeHyperparam)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .nOut(10)                     // 10 digit classes
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .build())
            .build();

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperparameterSpace, null);
    ScoreFunction scoreFunction = new EvaluationScoreFunction(Evaluation.Metric.ACCURACY);

    // Short run: at most 5 minutes or 2 candidates.
    TerminationCondition[] terminationConditions = {
            new MaxTimeCondition(5, TimeUnit.MINUTES),
            new MaxCandidatesCondition(2)
    };

    // Portable temp directory for saved candidate models (no hard-coded '\\').
    String modelSavePath =
            new File(System.getProperty("java.io.tmpdir"), "ArbiterUiTestBasicMnist").getAbsolutePath();
    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdirs();
    if (!f.exists())
        throw new RuntimeException("Could not create model save directory: " + modelSavePath);

    Class<? extends DataSource> ds = MnistDataSource.class;
    Properties dsp = new Properties();
    dsp.setProperty("minibatch", "8");

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataSource(ds, dsp)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(scoreFunction)
            .terminationConditions(terminationConditions)
            .build();

    IOptimizationRunner runner =
            new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());

    // Stream run statistics to the Arbiter UI.
    StatsStorage ss = new InMemoryStatsStorage();
    StatusListener sl = new ArbiterStatusListener(ss);
    runner.addListeners(sl);
    UIServer.getInstance().attach(ss);

    runner.execute();
    Thread.sleep(90000);   // keep the UI alive long enough to inspect results
}
Example 9
Source File: TestBasic.java From deeplearning4j with Apache License 2.0 | 4 votes |
/**
 * Random-search optimization of a ComputationGraph (conv + dense layers) on
 * MNIST, with results streamed to the Arbiter UI from in-memory stats storage.
 *
 * <p>Fixes vs. original: drops the hard-coded Windows separator
 * ({@code "ArbiterUiTestBasicMnistCG\\"}) from the model-save directory and
 * uses {@code mkdirs()} for portability; the bare RuntimeException now carries
 * a message.
 */
@Test
@Ignore
public void testBasicMnistCompGraph() throws Exception {
    // Search space: SGD learning rate, L2, conv kernel/stride/activation,
    // dense layer width/activation.
    ComputationGraphSpace cgs = new ComputationGraphSpace.Builder()
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
            .l2(new ContinuousParameterSpace(0.0001, 0.05))
            .addInputs("in")
            .addLayer("0", new ConvolutionLayerSpace.Builder().nIn(1)
                    .nOut(new IntegerParameterSpace(5, 30))
                    .kernelSize(new DiscreteParameterSpace<>(new int[]{3, 3}, new int[]{4, 4}, new int[]{5, 5}))
                    .stride(new DiscreteParameterSpace<>(new int[]{1, 1}, new int[]{2, 2}))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.SOFTPLUS, Activation.LEAKYRELU))
                    .build(), "in")
            .addLayer("1", new DenseLayerSpace.Builder().nOut(new IntegerParameterSpace(32, 128))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                    .build(), "0")
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "1")
            .setOutputs("out")
            .setInputTypes(InputType.convolutionalFlat(28, 28, 1))
            .build();

    // Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(cgs);
    DataProvider dataProvider = new MnistDataSetProvider();

    // Portable temp directory for saved candidate models (no hard-coded '\\').
    String modelSavePath =
            new File(System.getProperty("java.io.tmpdir"), "ArbiterUiTestBasicMnistCG").getAbsolutePath();
    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdirs();
    if (!f.exists())
        throw new RuntimeException("Could not create model save directory: " + modelSavePath);

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(dataProvider)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(new TestSetLossScoreFunction(true))
            .terminationConditions(new MaxTimeCondition(120, TimeUnit.MINUTES),
                    new MaxCandidatesCondition(100))
            .build();

    IOptimizationRunner runner =
            new LocalOptimizationRunner(configuration, new ComputationGraphTaskCreator());

    // Stream run statistics to the Arbiter UI.
    StatsStorage ss = new InMemoryStatsStorage();
    StatusListener sl = new ArbiterStatusListener(ss);
    runner.addListeners(sl);
    UIServer.getInstance().attach(ss);

    runner.execute();
    Thread.sleep(100000);   // keep the UI alive long enough to inspect results
}