Java Code Examples for org.deeplearning4j.arbiter.optimize.runner.IOptimizationRunner#bestScoreCandidateIndex()
The following examples show how to use
org.deeplearning4j.arbiter.optimize.runner.IOptimizationRunner#bestScoreCandidateIndex().
The method returns the index of the candidate (i.e. the hyperparameter configuration) that achieved the best score during an Arbiter optimization run; that index can then be used to look up the corresponding result and model. Both examples below come from the Java-Deep-Learning-Cookbook project; the source file is noted above each example.
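In the examples below, bestScoreCandidateIndex() is read only for reporting. The same index also identifies the best candidate in the list returned by runner.getResults(), which lets you reload the best model after execute() finishes. The fragment below is a minimal sketch of that pattern, not part of the examples on this page; it assumes Arbiter's ResultReference/OptimizationResult accessors (getResult(), getResultReference(), getResultModel()), reuses the runner variable from the examples, and belongs in a method that declares throws IOException:

// Minimal sketch (assumed API): reload the model of the best-scoring candidate.
// bestScoreCandidateIndex() indexes into the list returned by getResults().
int indexOfBestResult = runner.bestScoreCandidateIndex();
List<ResultReference> allResults = runner.getResults();
OptimizationResult bestResult = allResults.get(indexOfBestResult).getResult();
MultiLayerNetwork bestModel = (MultiLayerNetwork) bestResult.getResultReference().getResultModel();
System.out.println(bestModel.getLayerWiseConfigurations().toJson());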
Example 1
Source File: HyperParameterTuningArbiterUiExample.java From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) {
    ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001, 0.01);
    ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5, 11);
    MultiLayerSpace hyperParameterSpace = new MultiLayerSpace.Builder()
            .updater(new AdamSpace(learningRateParam))
            //.weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
            .addLayer(new DenseLayerSpace.Builder()
                    .activation(Activation.RELU)
                    .nIn(11)
                    .nOut(layerSizeParam)
                    .build())
            .addLayer(new DenseLayerSpace.Builder()
                    .activation(Activation.RELU)
                    .nIn(layerSizeParam)
                    .nOut(layerSizeParam)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .activation(Activation.SIGMOID)
                    .lossFunction(LossFunctions.LossFunction.XENT)
                    .nOut(1)
                    .build())
            .build();

    Map<String, Object> dataParams = new HashMap<>();
    dataParams.put("batchSize", 10);

    // Note: this map is built but never passed to the runner in this example.
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY,
            HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName());

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

    Properties dataSourceProperties = new Properties();
    dataSourceProperties.setProperty("minibatchSize", "64");

    ResultSaver modelSaver = new FileModelSaver("resources/");
    ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);

    TerminationCondition[] conditions = {
            new MaxTimeCondition(120, TimeUnit.MINUTES),
            new MaxCandidatesCondition(30)
    };

    OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class, dataSourceProperties)
            .modelSaver(modelSaver)
            .scoreFunction(scoreFunction)
            .terminationConditions(conditions)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration, new MultiLayerNetworkTaskCreator());

    // Persist optimization stats to disk and attach the Arbiter UI so progress can be monitored in a browser.
    StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
    runner.addListeners(new ArbiterStatusListener(ss));
    UIServer.getInstance().attach(ss);
    //runner.addListeners(new LoggingStatusListener()); // console-logging alternative to the UI listener

    runner.execute();

    // Print the best hyperparameters
    double bestScore = runner.bestScore();
    int bestCandidateIndex = runner.bestScoreCandidateIndex();
    int numberOfConfigsEvaluated = runner.numCandidatesCompleted();
    String s = "Best score: " + bestScore + "\n"
            + "Index of model with best score: " + bestCandidateIndex + "\n"
            + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";
    System.out.println(s);
}
Example 2
Source File: HyperParameterTuning.java From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) {
    ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001, 0.01);
    ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5, 11);
    MultiLayerSpace hyperParameterSpace = new MultiLayerSpace.Builder()
            .updater(new AdamSpace(learningRateParam))
            //.weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
            .addLayer(new DenseLayerSpace.Builder()
                    .activation(Activation.RELU)
                    .nIn(11)
                    .nOut(layerSizeParam)
                    .build())
            .addLayer(new DenseLayerSpace.Builder()
                    .activation(Activation.RELU)
                    .nIn(layerSizeParam)
                    .nOut(layerSizeParam)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .activation(Activation.SIGMOID)
                    .lossFunction(LossFunctions.LossFunction.XENT)
                    .nOut(1)
                    .build())
            .build();

    Map<String, Object> dataParams = new HashMap<>();
    dataParams.put("batchSize", 10);

    // Note: this map is built but never passed to the runner in this example.
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, ExampleDataSource.class.getCanonicalName());

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

    Properties dataSourceProperties = new Properties();
    dataSourceProperties.setProperty("minibatchSize", "64");

    ResultSaver modelSaver = new FileModelSaver("resources/");
    ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);

    TerminationCondition[] conditions = {
            new MaxTimeCondition(120, TimeUnit.MINUTES),
            new MaxCandidatesCondition(30)
    };

    OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataSource(ExampleDataSource.class, dataSourceProperties)
            .modelSaver(modelSaver)
            .scoreFunction(scoreFunction)
            .terminationConditions(conditions)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration, new MultiLayerNetworkTaskCreator());

    // Uncomment the following three lines to persist stats and monitor progress in the Arbiter UI:
    //StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
    //runner.addListeners(new ArbiterStatusListener(ss));
    //UIServer.getInstance().attach(ss);
    runner.addListeners(new LoggingStatusListener());

    runner.execute();

    // Print the best hyperparameters
    double bestScore = runner.bestScore();
    int bestCandidateIndex = runner.bestScoreCandidateIndex();
    int numberOfConfigsEvaluated = runner.numCandidatesCompleted();
    String s = "Best score: " + bestScore + "\n"
            + "Index of model with best score: " + bestCandidateIndex + "\n"
            + "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";
    System.out.println(s);
}
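Both examples register an ExampleDataSource class with the OptimizationConfiguration, but its body is not shown on this page. Below is a minimal, hypothetical sketch of what such a class looks like, assuming Arbiter's DataSource interface (configure(Properties), trainData(), testData(), getDataType()) and substituting DL4J's built-in IrisDataSetIterator as a placeholder dataset. The cookbook's real ExampleDataSource loads its own data, and the layer sizes above (nIn(11), a single sigmoid output) expect a different dataset:

import java.util.Properties;
import org.deeplearning4j.arbiter.optimize.api.data.DataSource;
import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;

// Hypothetical sketch of the DataSource implementation referenced above.
// Arbiter instantiates it reflectively through the no-arg constructor, then
// calls configure(...) with the Properties passed to dataSource(...).
public class ExampleDataSource implements DataSource {
    private int minibatchSize;

    public ExampleDataSource() { } // no-arg constructor is required

    @Override
    public void configure(Properties properties) {
        // Reads the "minibatchSize" property set in the examples above ("64")
        this.minibatchSize = Integer.parseInt(properties.getProperty("minibatchSize", "16"));
    }

    @Override
    public Object trainData() {
        return new IrisDataSetIterator(minibatchSize, 150); // placeholder data
    }

    @Override
    public Object testData() {
        return new IrisDataSetIterator(minibatchSize, 150); // placeholder data
    }

    @Override
    public Class<?> getDataType() {
        return DataSetIterator.class;
    }
}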