org.deeplearning4j.arbiter.optimize.api.saving.ResultSaver Java Examples
The following examples show how to use
org.deeplearning4j.arbiter.optimize.api.saving.ResultSaver.
You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: HyperParameterTuningArbiterUiExample.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) { ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01); ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11); MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder() .updater(new AdamSpace(learningRateParam)) // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(11) .nOut(layerSizeParam) .build()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(layerSizeParam) .nOut(layerSizeParam) .build()) .addLayer(new OutputLayerSpace.Builder() .activation(Activation.SIGMOID) .lossFunction(LossFunctions.LossFunction.XENT) .nOut(1) .build()) .build(); Map<String,Object> dataParams = new HashMap<>(); dataParams.put("batchSize",new Integer(10)); Map<String,Object> commands = new HashMap<>(); commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName()); CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParamaterSpace,dataParams); Properties dataSourceProperties = new Properties(); dataSourceProperties.setProperty("minibatchSize", "64"); ResultSaver modelSaver = new FileModelSaver("resources/"); ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY); TerminationCondition[] conditions = { new MaxTimeCondition(120, TimeUnit.MINUTES), new MaxCandidatesCondition(30) }; OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator) .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class,dataSourceProperties) .modelSaver(modelSaver) .scoreFunction(scoreFunction) .terminationConditions(conditions) .build(); IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator()); 
//Uncomment this if you want to store the model. StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j")); runner.addListeners(new ArbiterStatusListener(ss)); UIServer.getInstance().attach(ss); //runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss) runner.execute(); //Print the best hyper params double bestScore = runner.bestScore(); int bestCandidateIndex = runner.bestScoreCandidateIndex(); int numberOfConfigsEvaluated = runner.numCandidatesCompleted(); String s = "Best score: " + bestScore + "\n" + "Index of model with best score: " + bestCandidateIndex + "\n" + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n"; System.out.println(s); }
Example #2
Source File: HyperParameterTuning.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) { ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01); ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11); MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder() .updater(new AdamSpace(learningRateParam)) // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(11) .nOut(layerSizeParam) .build()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(layerSizeParam) .nOut(layerSizeParam) .build()) .addLayer(new OutputLayerSpace.Builder() .activation(Activation.SIGMOID) .lossFunction(LossFunctions.LossFunction.XENT) .nOut(1) .build()) .build(); Map<String,Object> dataParams = new HashMap<>(); dataParams.put("batchSize",new Integer(10)); Map<String,Object> commands = new HashMap<>(); commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY,ExampleDataSource.class.getCanonicalName()); CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParamaterSpace,dataParams); Properties dataSourceProperties = new Properties(); dataSourceProperties.setProperty("minibatchSize", "64"); ResultSaver modelSaver = new FileModelSaver("resources/"); ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY); TerminationCondition[] conditions = { new MaxTimeCondition(120, TimeUnit.MINUTES), new MaxCandidatesCondition(30) }; OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator) .dataSource(ExampleDataSource.class,dataSourceProperties) .modelSaver(modelSaver) .scoreFunction(scoreFunction) .terminationConditions(conditions) .build(); IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator()); //Uncomment this if you want to store the model. 
//StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j")); //runner.addListeners(new ArbiterStatusListener(ss)); //UIServer.getInstance().attach(ss); runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss) runner.execute(); //Print the best hyper params double bestScore = runner.bestScore(); int bestCandidateIndex = runner.bestScoreCandidateIndex(); int numberOfConfigsEvaluated = runner.numCandidatesCompleted(); String s = "Best score: " + bestScore + "\n" + "Index of model with best score: " + bestCandidateIndex + "\n" + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n"; System.out.println(s); }
Example #3
Source File: HyperParameterTuningArbiterUiExample.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) { ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01); ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11); MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder() .updater(new AdamSpace(learningRateParam)) // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(11) .nOut(layerSizeParam) .build()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(layerSizeParam) .nOut(layerSizeParam) .build()) .addLayer(new OutputLayerSpace.Builder() .activation(Activation.SIGMOID) .lossFunction(LossFunctions.LossFunction.XENT) .nOut(1) .build()) .build(); Map<String,Object> dataParams = new HashMap<>(); dataParams.put("batchSize",new Integer(10)); Map<String,Object> commands = new HashMap<>(); commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName()); CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParamaterSpace,dataParams); Properties dataSourceProperties = new Properties(); dataSourceProperties.setProperty("minibatchSize", "64"); ResultSaver modelSaver = new FileModelSaver("resources/"); ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY); TerminationCondition[] conditions = { new MaxTimeCondition(120, TimeUnit.MINUTES), new MaxCandidatesCondition(30) }; OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator) .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class,dataSourceProperties) .modelSaver(modelSaver) .scoreFunction(scoreFunction) .terminationConditions(conditions) .build(); IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator()); 
//Uncomment this if you want to store the model. StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j")); runner.addListeners(new ArbiterStatusListener(ss)); UIServer.getInstance().attach(ss); //runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss) runner.execute(); //Print the best hyper params double bestScore = runner.bestScore(); int bestCandidateIndex = runner.bestScoreCandidateIndex(); int numberOfConfigsEvaluated = runner.numCandidatesCompleted(); String s = "Best score: " + bestScore + "\n" + "Index of model with best score: " + bestCandidateIndex + "\n" + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n"; System.out.println(s); }
Example #4
Source File: HyperParameterTuning.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) { ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01); ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11); MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder() .updater(new AdamSpace(learningRateParam)) // .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(11) .nOut(layerSizeParam) .build()) .addLayer(new DenseLayerSpace.Builder() .activation(Activation.RELU) .nIn(layerSizeParam) .nOut(layerSizeParam) .build()) .addLayer(new OutputLayerSpace.Builder() .activation(Activation.SIGMOID) .lossFunction(LossFunctions.LossFunction.XENT) .nOut(1) .build()) .build(); Map<String,Object> dataParams = new HashMap<>(); dataParams.put("batchSize",new Integer(10)); Map<String,Object> commands = new HashMap<>(); commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY,ExampleDataSource.class.getCanonicalName()); CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParamaterSpace,dataParams); Properties dataSourceProperties = new Properties(); dataSourceProperties.setProperty("minibatchSize", "64"); ResultSaver modelSaver = new FileModelSaver("resources/"); ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY); TerminationCondition[] conditions = { new MaxTimeCondition(120, TimeUnit.MINUTES), new MaxCandidatesCondition(30) }; OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder() .candidateGenerator(candidateGenerator) .dataSource(ExampleDataSource.class,dataSourceProperties) .modelSaver(modelSaver) .scoreFunction(scoreFunction) .terminationConditions(conditions) .build(); IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator()); //Uncomment this if you want to store the model. 
//StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j")); //runner.addListeners(new ArbiterStatusListener(ss)); //UIServer.getInstance().attach(ss); runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss) runner.execute(); //Print the best hyper params double bestScore = runner.bestScore(); int bestCandidateIndex = runner.bestScoreCandidateIndex(); int numberOfConfigsEvaluated = runner.numCandidatesCompleted(); String s = "Best score: " + bestScore + "\n" + "Index of model with best score: " + bestCandidateIndex + "\n" + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n"; System.out.println(s); }
Example #5
Source File: TestMultiLayerSpace.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test public void testInputTypeBasic() throws Exception { ParameterSpace<Integer> layerSizeHyperparam = new IntegerParameterSpace(20, 60); MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder().l2(0.0001) .weightInit(WeightInit.XAVIER).updater(new Nesterovs()) .addLayer(new ConvolutionLayerSpace.Builder().kernelSize(5, 5).nIn(1).stride(1, 1) .nOut(layerSizeHyperparam).activation(Activation.IDENTITY).build()) .addLayer(new SubsamplingLayerSpace.Builder().poolingType(SubsamplingLayer.PoolingType.MAX) .kernelSize(2, 2).stride(2, 2).build()) .addLayer(new ConvolutionLayerSpace.Builder().kernelSize(5, 5) //Note that nIn need not be specified in later layers .stride(1, 1).nOut(50).activation(Activation.IDENTITY).build()) .addLayer(new SubsamplingLayerSpace.Builder().poolingType(SubsamplingLayer.PoolingType.MAX) .kernelSize(2, 2).stride(2, 2).build()) .addLayer(new DenseLayerSpace.Builder().activation(Activation.RELU).nOut(500).build()) .addLayer(new OutputLayerSpace.Builder() .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10) .activation(Activation.SOFTMAX).build()) .setInputType(InputType.convolutionalFlat(28, 28, 1)).build(); DataProvider dataProvider = new TestDataSetProvider(); File f = testDir.newFolder(); if (f.exists()) f.delete(); f.mkdir(); ResultSaver modelSaver = new FileModelSaver(f.getAbsolutePath()); ScoreFunction scoreFunction = new TestSetAccuracyScoreFunction(); int maxCandidates = 4; TerminationCondition[] terminationConditions; terminationConditions = new TerminationCondition[] {new MaxCandidatesCondition(maxCandidates)}; //Given these configuration options, let's put them all together: OptimizationConfiguration configuration = new OptimizationConfiguration.Builder() .candidateGenerator(new RandomSearchGenerator(hyperparameterSpace, null)) .dataProvider(dataProvider).modelSaver(modelSaver).scoreFunction(scoreFunction) .terminationConditions(terminationConditions).build(); IOptimizationRunner runner = new 
LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator()); runner.execute(); assertEquals(maxCandidates, runner.getResults().size()); }
Example #6
Source File: TestScoreFunctions.java From deeplearning4j with Apache License 2.0 | 4 votes |
/**
 * For every combination of AUC/AUPRC metric and ROC type (ROC, BINARY, MULTICLASS),
 * runs a small Arbiter optimization on MNIST and checks that each candidate's
 * reported score matches the score recomputed directly from the saved model.
 */
@Test
public void testROCScoreFunctions() throws Exception {
    for (boolean auc : new boolean[]{true, false}) {
        for (ROCScoreFunction.ROCType rocType : ROCScoreFunction.ROCType.values()) {
            String msg = (auc ? "AUC" : "AUPRC") + " - " + rocType;
            log.info("Starting: " + msg);

            ParameterSpace<Double> lr = new ContinuousParameterSpace(1e-5, 1e-3);

            // Output layer shape/activation/loss depend on the ROC variant under test.
            int nOut = (rocType == ROCScoreFunction.ROCType.ROC ? 2 : 10);
            LossFunctions.LossFunction lf = (rocType == ROCScoreFunction.ROCType.BINARY ?
                    LossFunctions.LossFunction.XENT : LossFunctions.LossFunction.MCXENT);
            Activation a = (rocType == ROCScoreFunction.ROCType.BINARY ? Activation.SIGMOID : Activation.SOFTMAX);

            MultiLayerSpace mls = new MultiLayerSpace.Builder()
                    .trainingWorkspaceMode(WorkspaceMode.NONE)
                    .inferenceWorkspaceMode(WorkspaceMode.NONE)
                    .updater(new AdamSpace(lr))
                    .weightInit(WeightInit.XAVIER)
                    .layer(new OutputLayerSpace.Builder().nIn(784).nOut(nOut)
                            .activation(a)
                            .lossFunction(lf).build())
                    .build();

            CandidateGenerator cg = new RandomSearchGenerator(mls);
            ResultSaver rs = new InMemoryResultSaver();
            ScoreFunction sf = new ROCScoreFunction(rocType,
                    (auc ? ROCScoreFunction.Metric.AUC : ROCScoreFunction.Metric.AUPRC));

            OptimizationConfiguration oc = new OptimizationConfiguration.Builder()
                    .candidateGenerator(cg)
                    .dataProvider(new DP(rocType))
                    .modelSaver(rs)
                    .scoreFunction(sf)
                    .terminationConditions(new MaxCandidatesCondition(3))
                    .rngSeed(12345)
                    .build();

            IOptimizationRunner runner = new LocalOptimizationRunner(oc, new MultiLayerNetworkTaskCreator());
            runner.execute();

            List<ResultReference> list = runner.getResults();
            for (ResultReference rr : list) {
                DataSetIterator testIter = new MnistDataSetIterator(4, 16, false, false, false, 12345);
                testIter.setPreProcessor(new PreProc(rocType));

                OptimizationResult or = rr.getResult();
                MultiLayerNetwork net = (MultiLayerNetwork) or.getResultReference().getResultModel();

                // Recompute the expected score directly with the matching ROC evaluation class.
                double expScore;
                switch (rocType) {
                    case ROC:
                        if (auc) {
                            expScore = net.doEvaluation(testIter, new ROC())[0].calculateAUC();
                        } else {
                            expScore = net.doEvaluation(testIter, new ROC())[0].calculateAUCPR();
                        }
                        break;
                    case BINARY:
                        if (auc) {
                            expScore = net.doEvaluation(testIter, new ROCBinary())[0].calculateAverageAuc();
                        } else {
                            expScore = net.doEvaluation(testIter, new ROCBinary())[0].calculateAverageAUCPR();
                        }
                        break;
                    case MULTICLASS:
                        if (auc) {
                            expScore = net.doEvaluation(testIter, new ROCMultiClass())[0].calculateAverageAUC();
                        } else {
                            expScore = net.doEvaluation(testIter, new ROCMultiClass())[0].calculateAverageAUCPR();
                        }
                        break;
                    default:
                        throw new RuntimeException();
                }

                // Removed a dead second MnistDataSetIterator that was built here but never used.
                assertEquals(msg, expScore, or.getScore(), 1e-4);
            }
        }
    }
}
Example #7
Source File: OptimizationConfiguration.java From deeplearning4j with Apache License 2.0 | 4 votes |
public Builder modelSaver(ResultSaver resultSaver) { this.resultSaver = resultSaver; return this; }