Java Code Examples for org.deeplearning4j.ui.api.UIServer#attach()
The following examples show how to use
org.deeplearning4j.ui.api.UIServer#attach().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
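All of the examples below share the same basic pattern: obtain the singleton UIServer, create a StatsStorage (typically an InMemoryStatsStorage), attach it to the server, and register a StatsListener backed by the same storage on the network being trained. The following minimal sketch illustrates that pattern before the full examples; the class name UiAttachSketch, the monitor method, and the exact import paths are assumptions for illustration only (the packages moved between DL4J releases), not code taken from any of the listed projects.

// Minimal sketch of the attach pattern used throughout the examples below.
// Import paths are an assumption for recent DL4J releases; older releases use
// org.deeplearning4j.ui.storage / org.deeplearning4j.ui.stats / org.deeplearning4j.api.storage instead.
import org.deeplearning4j.core.storage.StatsStorage;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.ui.api.UIServer;
import org.deeplearning4j.ui.model.stats.StatsListener;
import org.deeplearning4j.ui.model.storage.InMemoryStatsStorage;

public class UiAttachSketch {

    // 'net' is assumed to be an already-configured MultiLayerNetwork.
    public static void monitor(MultiLayerNetwork net) {
        UIServer uiServer = UIServer.getInstance();              // singleton UI server (serves http://localhost:9000 by default)
        StatsStorage statsStorage = new InMemoryStatsStorage();  // holds the collected training statistics in memory
        uiServer.attach(statsStorage);                           // make the storage contents visible in the UI
        net.setListeners(new StatsListener(statsStorage));       // stream stats from the network into the storage during training

        // ... train the network; once finished, the storage can be detached again:
        // uiServer.detach(statsStorage);
    }
}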
Example 1
Source File: TestVertxUI.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test
public void testUICompGraph() {
    StatsStorage ss = new InMemoryStatsStorage();

    UIServer uiServer = UIServer.getInstance();
    uiServer.attach(ss);

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in")
            .addLayer("L0", new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build(), "in")
            .addLayer("L1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(4).nOut(3).build(), "L0")
            .setOutputs("L1").build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    for (int i = 0; i < 100; i++) {
        net.fit(iter);
    }
}
Example 2
Source File: Main.java From twse-captcha-solver-dl4j with MIT License | 5 votes |
public static void main(String[] args) throws Exception {
    long startTime = System.currentTimeMillis();
    logger.info("start up time: " + startTime);

    File modelDir = new File(modelDirPath);

    // create dir
    boolean hasDir = modelDir.exists() || modelDir.mkdirs();
    logger.info(modelPath);

    // create model
    ComputationGraph model = createModel();

    // monitor the model score
    UIServer uiServer = UIServer.getInstance();
    StatsStorage statsStorage = new InMemoryStatsStorage();
    uiServer.attach(statsStorage);
    model.setListeners(new ScoreIterationListener(36), new StatsListener(statsStorage));

    // construct the iterator
    MultiDataSetIterator trainMulIterator = new CaptchaSetIterator(batchSize, "train");
    MultiDataSetIterator testMulIterator = new CaptchaSetIterator(batchSize, "test");
    MultiDataSetIterator validateMulIterator = new CaptchaSetIterator(batchSize, "validate");

    // fit
    for (int i = 0; i < epochs; i++) {
        System.out.println("Epoch=====================" + i);
        model.fit(trainMulIterator);
    }
    ModelSerializer.writeModel(model, modelPath, true);

    long endTime = System.currentTimeMillis();
    System.out.println("=============run time=====================" + (endTime - startTime));

    System.out.println("=====eval model=====test==================");
    modelPredict(model, testMulIterator);

    System.out.println("=====eval model=====validate==================");
    modelPredict(model, validateMulIterator);
}
Example 3
Source File: TestVertxUI.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Test
public void testUI_VAE() throws Exception {
    //Variational autoencoder - for unsupervised layerwise pretraining
    StatsStorage ss = new InMemoryStatsStorage();

    UIServer uiServer = UIServer.getInstance();
    uiServer.attach(ss);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd(1e-5))
            .list()
            .layer(0, new VariationalAutoencoder.Builder().nIn(4).nOut(3).encoderLayerSizes(10, 11)
                    .decoderLayerSizes(12, 13).weightInit(WeightInit.XAVIER)
                    .pzxActivationFunction(Activation.IDENTITY)
                    .reconstructionDistribution(new GaussianReconstructionDistribution())
                    .activation(Activation.LEAKYRELU).build())
            .layer(1, new VariationalAutoencoder.Builder().nIn(3).nOut(3).encoderLayerSizes(7)
                    .decoderLayerSizes(8).weightInit(WeightInit.XAVIER)
                    .pzxActivationFunction(Activation.IDENTITY)
                    .reconstructionDistribution(new GaussianReconstructionDistribution())
                    .activation(Activation.LEAKYRELU).build())
            .layer(2, new OutputLayer.Builder().nIn(3).nOut(3).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    for (int i = 0; i < 50; i++) {
        net.fit(iter);
        Thread.sleep(100);
    }
}
Example 4
Source File: TestVertxUI.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Test
public void testUIMultipleSessions() throws Exception {

    for (int session = 0; session < 3; session++) {

        StatsStorage ss = new InMemoryStatsStorage();

        UIServer uiServer = UIServer.getInstance();
        uiServer.attach(ss);

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
                .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build())
                .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        net.setListeners(new StatsListener(ss, 1), new ScoreIterationListener(1));

        DataSetIterator iter = new IrisDataSetIterator(150, 150);

        for (int i = 0; i < 20; i++) {
            net.fit(iter);
            Thread.sleep(100);
        }
    }
}
Example 5
Source File: TestVertxUI.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Test
public void testUIAttachDetach() throws Exception {
    StatsStorage ss = new InMemoryStatsStorage();

    UIServer uiServer = UIServer.getInstance();
    uiServer.attach(ss);
    assertFalse(uiServer.getStatsStorageInstances().isEmpty());

    uiServer.detach(ss);
    assertTrue(uiServer.getStatsStorageInstances().isEmpty());
}
Example 6
Source File: TestParallelEarlyStoppingUI.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Test
@Ignore //To be run manually
public void testParallelStatsListenerCompatibility() throws Exception {
    UIServer uiServer = UIServer.getInstance();

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
            .layer(1, new OutputLayer.Builder().nIn(3).nOut(3)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);

    // it's important that the UI can report results from parallel training
    // there's potential for StatsListener to fail if certain properties aren't set in the model
    StatsStorage statsStorage = new InMemoryStatsStorage();
    net.setListeners(new StatsListener(statsStorage));
    uiServer.attach(statsStorage);

    DataSetIterator irisIter = new IrisDataSetIterator(50, 500);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
            new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(500))
                    .scoreCalculator(new DataSetLossCalculator(irisIter, true))
                    .evaluateEveryNEpochs(2).modelSaver(saver).build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer =
            new EarlyStoppingParallelTrainer<>(esConf, net, irisIter, null, 3, 6, 2);

    EarlyStoppingResult<MultiLayerNetwork> result = trainer.fit();
    System.out.println(result);

    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
}
Example 7
Source File: CustomerRetentionPredictionExample.java From Java-Deep-Learning-Cookbook with MIT License | 4 votes |
public static void main(String[] args) throws IOException, InterruptedException {

    final int labelIndex = 11;
    final int batchSize = 8;
    final int numClasses = 2;
    final INDArray weightsArray = Nd4j.create(new double[]{0.57, 0.75});

    final RecordReader recordReader = generateReader(new ClassPathResource("Churn_Modelling.csv").getFile());
    final DataSetIterator dataSetIterator = new RecordReaderDataSetIterator.Builder(recordReader, batchSize)
            .classification(labelIndex, numClasses)
            .build();
    final DataNormalization dataNormalization = new NormalizerStandardize();
    dataNormalization.fit(dataSetIterator);
    dataSetIterator.setPreProcessor(dataNormalization);
    final DataSetIteratorSplitter dataSetIteratorSplitter = new DataSetIteratorSplitter(dataSetIterator, 1250, 0.8);

    log.info("Building Model------------------->>>>>>>>>");

    final MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
            .weightInit(WeightInit.RELU_UNIFORM)
            .updater(new Adam(0.015D))
            .list()
            .layer(new DenseLayer.Builder().nIn(11).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
            .layer(new DenseLayer.Builder().nIn(6).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
            .layer(new DenseLayer.Builder().nIn(6).nOut(4).activation(Activation.RELU).dropOut(0.9).build())
            .layer(new OutputLayer.Builder(new LossMCXENT(weightsArray)).nIn(4).nOut(2).activation(Activation.SOFTMAX).build())
            .build();

    final UIServer uiServer = UIServer.getInstance();
    final StatsStorage statsStorage = new InMemoryStatsStorage();

    final MultiLayerNetwork multiLayerNetwork = new MultiLayerNetwork(configuration);
    multiLayerNetwork.init();
    multiLayerNetwork.setListeners(new ScoreIterationListener(100), new StatsListener(statsStorage));
    uiServer.attach(statsStorage);
    multiLayerNetwork.fit(dataSetIteratorSplitter.getTrainIterator(), 100);

    final Evaluation evaluation = multiLayerNetwork.evaluate(dataSetIteratorSplitter.getTestIterator(), Arrays.asList("0", "1"));
    System.out.println(evaluation.stats());

    final File file = new File("model.zip");
    ModelSerializer.writeModel(multiLayerNetwork, file, true);
    ModelSerializer.addNormalizerToModel(file, dataNormalization);
}
Example 8
Source File: MLPMnistUIExample.java From dl4j-tutorials with MIT License | 4 votes |
public static void main(String[] args) throws IOException {
    //number of rows and columns in the input pictures
    final int numRows = 28;
    final int numColumns = 28;
    int outputNum = 10; // number of output classes
    int batchSize = 128; // batch size for each epoch
    int rngSeed = 123; // random number seed for reproducibility
    int numEpochs = 15; // number of epochs to perform
    int listenerFrequency = 1;

    //Get the DataSetIterators:
    DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, rngSeed);
    DataSetIterator mnistTest = new MnistDataSetIterator(batchSize, false, rngSeed);

    log.info("Build model....");
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(rngSeed) //include a random seed for reproducibility
            // use stochastic gradient descent as an optimization algorithm
            .updater(new Nesterovs(0.006, 0.9))
            .l2(1e-4)
            .list()
            .layer(0, new DenseLayer.Builder() //create the first, input layer with xavier initialization
                    // batchSize, features
                    .nIn(numRows * numColumns)
                    .nOut(1000)
                    .activation(Activation.RELU)
                    .weightInit(WeightInit.XAVIER)
                    .build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD) //create the output layer
                    .nIn(1000)
                    .nOut(outputNum)
                    .activation(Activation.SOFTMAX)
                    .weightInit(WeightInit.XAVIER)
                    .build())
            .pretrain(false).backprop(true) //use backpropagation to adjust weights
            .build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);

    //Initialize the user interface backend: obtain a UI server instance
    UIServer uiServer = UIServer.getInstance();

    //Configure where the network information (gradients, activations, score vs. time etc) is to be stored
    //Then add the StatsListener to collect this information from the network, as it trains
    //Here the training statistics are stored in memory
    StatsStorage statsStorage = new InMemoryStatsStorage(); //Alternative: new FileStatsStorage(File) - see UIStorageExample

    //Attach the StatsStorage instance to the UI: this allows the contents of the StatsStorage to be visualized
    uiServer.attach(statsStorage);

    model.init();
    //print the score with every 1 iteration
    model.setListeners(new StatsListener(statsStorage, listenerFrequency), new ScoreIterationListener(1));

    log.info("Train model....");
    for (int i = 0; i < numEpochs; i++) {
        model.fit(mnistTrain);
    }

    log.info("Evaluate model....");
    Evaluation eval = new Evaluation(outputNum); //create an evaluation object with 10 possible classes
    while (mnistTest.hasNext()) {
        DataSet next = mnistTest.next();
        INDArray output = model.output(next.getFeatures(), false); //get the network's prediction
        eval.eval(next.getLabels(), output); //check the prediction against the true class
    }

    log.info(eval.stats());
    log.info("****************Example finished********************");
}
Example 9
Source File: TestVertxUIMultiSession.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test
public void testUIAutoAttach() throws Exception {
    HashMap<String, StatsStorage> statsStorageForSession = new HashMap<>();

    Function<String, StatsStorage> statsStorageProvider = statsStorageForSession::get;
    UIServer uIServer = UIServer.getInstance(true, statsStorageProvider);

    for (int session = 0; session < 3; session++) {
        int layerSize = session + 4;

        InMemoryStatsStorage ss = new InMemoryStatsStorage();
        String sessionId = Integer.toString(session);
        statsStorageForSession.put(sessionId, ss);

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
                .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(layerSize).build())
                .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(layerSize).nOut(3).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        StatsListener statsListener = new StatsListener(ss, 1);
        statsListener.setSessionID(sessionId);
        net.setListeners(statsListener, new ScoreIterationListener(1));
        uIServer.attach(ss);

        DataSetIterator iter = new IrisDataSetIterator(150, 150);

        for (int i = 0; i < 20; i++) {
            net.fit(iter);
        }

        assertTrue(uIServer.isAttached(statsStorageForSession.get(sessionId)));

        uIServer.detach(ss);
        assertFalse(uIServer.isAttached(statsStorageForSession.get(sessionId)));

        /*
         * Visiting /train/:sessionId to auto-attach StatsStorage
         */
        String sessionUrl = trainingSessionUrl(uIServer.getAddress(), sessionId);
        HttpURLConnection conn = (HttpURLConnection) new URL(sessionUrl).openConnection();
        conn.connect();

        assertEquals(HttpResponseStatus.OK.code(), conn.getResponseCode());
        assertTrue(uIServer.isAttached(statsStorageForSession.get(sessionId)));
    }
}