org.neuroph.util.ConnectionFactory Java Examples

The following examples show how to use org.neuroph.util.ConnectionFactory. They are drawn from open-source projects; the original project and source file are noted above each example.
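
Before diving into the examples, a quick orientation: ConnectionFactory is Neuroph's static helper for wiring neurons and layers together. The short sketch below is not taken from any of the projects listed here; it simply combines the two calls that appear throughout the examples, fullConnect(Layer, Layer) and createConnection(Neuron, Neuron, Weight), using Layer, Neuron and Weight from org.neuroph.core.

Layer fromLayer = new Layer();
fromLayer.addNeuron(new Neuron());
fromLayer.addNeuron(new Neuron());

Layer toLayer = new Layer();
toLayer.addNeuron(new Neuron());

// connect every neuron in fromLayer to every neuron in toLayer
ConnectionFactory.fullConnect(fromLayer, toLayer);

// or create a single connection with an explicit (possibly shared) weight
Weight sharedWeight = new Weight(0.5);
ConnectionFactory.createConnection(fromLayer.getNeuronAt(0), toLayer.getNeuronAt(0), sharedWeight);
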
Example #1
Source File: PoolingLayer.java    From NeurophFramework with Apache License 2.0
/**
 * Creates connections with shared weights between two feature maps.
 * Assumes that toMap belongs to a pooling layer.
 * <p/>
 * In this implementation, there is no overlapping between kernel positions.
 *
 * @param fromMap source feature map
 * @param toMap   destination feature map
 */
@Override
public void connectMaps(FeatureMapLayer fromMap, FeatureMapLayer toMap) {
    int kernelWidth = kernel.getWidth();
    int kernelHeight = kernel.getHeight();
    Weight weight = new Weight(1);
    for (int x = 0; x < fromMap.getWidth() - kernelWidth + 1; x += kernelWidth) { // should the step be the kernel width?
        for (int y = 0; y < fromMap.getHeight() - kernelHeight + 1; y += kernelHeight) {

            Neuron toNeuron = toMap.getNeuronAt(x / kernelWidth, y / kernelHeight);
            for (int dy = 0; dy < kernelHeight; dy++) {
                for (int dx = 0; dx < kernelWidth; dx++) {
                    int fromX = x + dx;
                    int fromY = y + dy;
                    Neuron fromNeuron = fromMap.getNeuronAt(fromX, fromY);
                    ConnectionFactory.createConnection(fromNeuron, toNeuron, weight);
                }
            }
        }
    }
}
 
Example #2
Source File: Hopfield.java    From NeurophFramework with Apache License 2.0
/**
 * Creates Hopfield network architecture
 * 
 * @param neuronsCount
 *            number of neurons in the Hopfield network
 * @param neuronProperties
 *            neuron properties
 */
private void createNetwork(int neuronsCount, NeuronProperties neuronProperties) {

	// set network type
	this.setNetworkType(NeuralNetworkType.HOPFIELD);

	// create layer of neurons
	Layer layer = LayerFactory.createLayer(neuronsCount, neuronProperties);

	// create full connectivity within the layer
	ConnectionFactory.fullConnect(layer, 0.1);

	// add layer to network
	this.addLayer(layer);

	// set input and output cells for this network
	NeuralNetworkFactory.setDefaultIO(this);

	// set Hopfield learning rule for this network
	//this.setLearningRule(new HopfieldLearning(this));	
	this.setLearningRule(new BinaryHebbianLearning());			
}
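
A minimal usage sketch (not from the source file), assuming the public Hopfield(int) constructor delegates to the createNetwork(...) method above and using the standard Neuroph data API (DataSet and DataSetRow from org.neuroph.core.data). The stored pattern is used as both input and desired output, so the row is valid whether the configured learning rule is supervised or unsupervised.

Hopfield hopfield = new Hopfield(4);

DataSet patterns = new DataSet(4, 4);
patterns.addRow(new DataSetRow(new double[]{1, 0, 1, 0}, new double[]{1, 0, 1, 0}));
hopfield.learn(patterns);                 // trains with the BinaryHebbianLearning rule set above

hopfield.setInput(1, 0, 1, 1);            // distorted version of the stored pattern
hopfield.calculate();                     // recurrent recall; may need a few passes to settle
double[] recalled = hopfield.getOutput();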
 
Example #3
Source File: Instar.java    From NeurophFramework with Apache License 2.0
/**
 * Creates Instar architecture with specified number of input neurons
 * 
 * @param inputNeuronsCount
 *            number of neurons in input layer
 */
private void createNetwork(int inputNeuronsCount ) {

	// set network type
	this.setNetworkType(NeuralNetworkType.INSTAR);

	// init neuron settings for this type of network
	NeuronProperties neuronProperties = new NeuronProperties();
	neuronProperties.setProperty("transferFunction", TransferFunctionType.STEP);
	
	// create input layer
	Layer inputLayer = LayerFactory.createLayer(inputNeuronsCount, neuronProperties);
	this.addLayer(inputLayer);

	// create output layer
	neuronProperties.setProperty("transferFunction", TransferFunctionType.STEP);
	Layer outputLayer = LayerFactory.createLayer(1,	neuronProperties);
	this.addLayer(outputLayer);

	// create full connectivity between input and output layer
	ConnectionFactory.fullConnect(inputLayer, outputLayer);

	// set input and output cells for this network
	NeuralNetworkFactory.setDefaultIO(this);

	// set appropriate learning rule for this network
	this.setLearningRule(new InstarLearning());
}
 
Example #4
Source File: NeurophXOR.java    From tutorials with MIT License
public static NeuralNetwork assembleNeuralNetwork() {

        Layer inputLayer = new Layer();
        inputLayer.addNeuron(new Neuron());
        inputLayer.addNeuron(new Neuron());

        Layer hiddenLayerOne = new Layer();
        hiddenLayerOne.addNeuron(new Neuron());
        hiddenLayerOne.addNeuron(new Neuron());
        hiddenLayerOne.addNeuron(new Neuron());
        hiddenLayerOne.addNeuron(new Neuron());

        Layer hiddenLayerTwo = new Layer();
        hiddenLayerTwo.addNeuron(new Neuron());
        hiddenLayerTwo.addNeuron(new Neuron());
        hiddenLayerTwo.addNeuron(new Neuron());
        hiddenLayerTwo.addNeuron(new Neuron());

        Layer outputLayer = new Layer();
        outputLayer.addNeuron(new Neuron());

        NeuralNetwork ann = new NeuralNetwork();

        ann.addLayer(0, inputLayer);
        ann.addLayer(1, hiddenLayerOne);
        ConnectionFactory.fullConnect(ann.getLayerAt(0), ann.getLayerAt(1));
        ann.addLayer(2, hiddenLayerTwo);
        ConnectionFactory.fullConnect(ann.getLayerAt(1), ann.getLayerAt(2));
        ann.addLayer(3, outputLayer);
        ConnectionFactory.fullConnect(ann.getLayerAt(2), ann.getLayerAt(3));
        ConnectionFactory.fullConnect(ann.getLayerAt(0), ann.getLayerAt(ann.getLayersCount() - 1), false);

        ann.setInputNeurons(inputLayer.getNeurons());
        ann.setOutputNeurons(outputLayer.getNeurons());

        ann.setNetworkType(NeuralNetworkType.MULTI_LAYER_PERCEPTRON);
        return ann;
    }
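
A minimal training and evaluation sketch to go with the assembled network. It is not part of the tutorial source and assumes the standard Neuroph training API: DataSet and DataSetRow from org.neuroph.core.data, BackPropagation from org.neuroph.nnet.learning, and NeuralNetwork.learn().

NeuralNetwork ann = assembleNeuralNetwork();
ann.randomizeWeights();
ann.setLearningRule(new BackPropagation()); // the assembled network has no learning rule yet

DataSet xorSet = new DataSet(2, 1);         // 2 inputs, 1 output
xorSet.addRow(new DataSetRow(new double[]{0, 0}, new double[]{0}));
xorSet.addRow(new DataSetRow(new double[]{0, 1}, new double[]{1}));
xorSet.addRow(new DataSetRow(new double[]{1, 0}, new double[]{1}));
xorSet.addRow(new DataSetRow(new double[]{1, 1}, new double[]{0}));

ann.learn(xorSet);                          // blocks until the learning rule's stop condition is met

ann.setInput(1, 0);                         // query the trained network
ann.calculate();
double[] output = ann.getOutput();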
 
Example #5
Source File: Model.java    From o2oa with GNU Affero General Public License v3.0
public NeuralNetwork<MomentumBackpropagation> createNeuralNetwork(Integer inValueCount, Integer outValueCount,
		Integer hiddenLayerCount) {
	NeuronProperties inputNeuronProperties = new NeuronProperties(InputNeuron.class, Linear.class);
	NeuronProperties hiddenNeuronProperties = new NeuronProperties(InputOutputNeuron.class, WeightedSum.class,
			Sigmoid.class);
	NeuronProperties outputNeuronProperties = new NeuronProperties(InputOutputNeuron.class, WeightedSum.class,
			Sigmoid.class);
	NeuralNetwork<MomentumBackpropagation> neuralNetwork = new NeuralNetwork<>();
	neuralNetwork.setNetworkType(NeuralNetworkType.MULTI_LAYER_PERCEPTRON);
	Layer inputLayer = LayerFactory.createLayer(inValueCount, inputNeuronProperties);
	inputLayer.addNeuron(new BiasNeuron());
	neuralNetwork.addLayer(inputLayer);
	List<Integer> hiddenNeurons = this.hiddenNeurons(inValueCount, outValueCount, hiddenLayerCount);
	for (Integer count : hiddenNeurons) {
		Layer layer = LayerFactory.createLayer(count, hiddenNeuronProperties);
		layer.addNeuron(new BiasNeuron());
		neuralNetwork.addLayer(layer);
	}
	Layer outputLayer = LayerFactory.createLayer(outValueCount, outputNeuronProperties);
	neuralNetwork.addLayer(outputLayer);
	for (int i = 0; i < (neuralNetwork.getLayersCount() - 1); i++) {
		Layer prevLayer = neuralNetwork.getLayers().get(i);
		Layer nextLayer = neuralNetwork.getLayers().get(i + 1);
		ConnectionFactory.fullConnect(prevLayer, nextLayer);
	}
	neuralNetwork.setLearningRule(this.createMomentumBackpropagation(
			MapTools.getDouble(this.getPropertyMap(), PROPERTY_MLP_MAXERROR, DEFAULT_MLP_MAXERROR),
			MapTools.getInteger(this.getPropertyMap(), PROPERTY_MLP_MAXITERATION, DEFAULT_MLP_MAXITERATION),
			MapTools.getDouble(this.getPropertyMap(), PROPERTY_MLP_LEARNINGRATE, DEFAULT_MLP_LEARNINGRATE),
			MapTools.getDouble(this.getPropertyMap(), PROPERTY_MLP_MOMENTUM, DEFAULT_MLP_MOMENTUM)));
	NeuralNetworkFactory.setDefaultIO(neuralNetwork);
	neuralNetwork.randomizeWeights();
	return neuralNetwork;
}
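
The hiddenNeurons(...) and createMomentumBackpropagation(...) helpers and the MapTools property lookups are specific to the o2oa project and are not shown here. Purely as an illustration of what the learning-rule helper might do, the hypothetical sketch below wires the four values into the standard MomentumBackpropagation setters; it is not the project's actual implementation.

// hypothetical sketch only -- not the o2oa helper itself
private MomentumBackpropagation createMomentumBackpropagation(Double maxError, Integer maxIterations,
		Double learningRate, Double momentum) {
	MomentumBackpropagation learningRule = new MomentumBackpropagation();
	learningRule.setMaxError(maxError);           // stop once total network error drops below this
	learningRule.setMaxIterations(maxIterations); // hard cap on training iterations
	learningRule.setLearningRate(learningRate);   // step size for weight updates
	learningRule.setMomentum(momentum);           // fraction of the previous weight change to carry over
	return learningRule;
}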
 
Example #6
Source File: MaxNet.java    From NeurophFramework with Apache License 2.0
/**
 * Creates MaxNet network architecture
 * 
 * @param neuronsCount
 *            number of neurons in the network
 */
private void createNetwork(int neuronsCount) {

	// set network type
	this.setNetworkType(NeuralNetworkType.MAXNET);

	// create input layer
	Layer inputLayer = LayerFactory.createLayer(neuronsCount,
			new NeuronProperties());
	this.addLayer(inputLayer);

	// create properties for neurons in output layer
	NeuronProperties neuronProperties = new NeuronProperties();
	neuronProperties.setProperty("neuronType", CompetitiveNeuron.class);
	neuronProperties.setProperty("transferFunction", TransferFunctionType.RAMP);

	// create competitive layer
	CompetitiveLayer competitiveLayer = new CompetitiveLayer(neuronsCount, neuronProperties);

	// add competitive layer to network
	this.addLayer(competitiveLayer);

	double competitiveWeight = -(1 / (double) neuronsCount);
	// create full connectivity within competitive layer
	ConnectionFactory.fullConnect(competitiveLayer, competitiveWeight, 1);

	// create forward connectivity from input to competitive layer
	ConnectionFactory.forwardConnect(inputLayer, competitiveLayer, 1);

	// set input and output cells for this network
	NeuralNetworkFactory.setDefaultIO(this);
}
 
Example #7
Source File: CompetitiveNetwork.java    From NeurophFramework with Apache License 2.0
/**
 * Creates Competitive network architecture
 * 
 * @param inputNeuronsCount
 *            number of neurons in the input layer
 * @param outputNeuronsCount
 *            number of neurons in the output layer
 */
private void createNetwork(int inputNeuronsCount, int outputNeuronsCount) {
	// set network type
	this.setNetworkType(NeuralNetworkType.COMPETITIVE);

	// create input layer
	Layer inputLayer = LayerFactory.createLayer(inputNeuronsCount, new NeuronProperties());
	this.addLayer(inputLayer);

	// create properties for neurons in output layer
	NeuronProperties neuronProperties = new NeuronProperties();
	neuronProperties.setProperty("neuronType", CompetitiveNeuron.class);
	neuronProperties.setProperty("inputFunction",	WeightedSum.class);
	neuronProperties.setProperty("transferFunction",TransferFunctionType.RAMP);

	// create competitive layer
	CompetitiveLayer competitiveLayer = new CompetitiveLayer(outputNeuronsCount, neuronProperties);

	// add competitive layer to network
	this.addLayer(competitiveLayer);

	double competitiveWeight = -(1 / (double) outputNeuronsCount);
	// create full connectivity within competitive layer
	ConnectionFactory.fullConnect(competitiveLayer, competitiveWeight, 1);

	// create full connectivity from input to competitive layer
	ConnectionFactory.fullConnect(inputLayer, competitiveLayer);

	// set input and output cells for this network
	NeuralNetworkFactory.setDefaultIO(this);

	this.setLearningRule(new CompetitiveLearning());
}
 
Example #8
Source File: RectifierNeuralNetwork.java    From NeurophFramework with Apache License 2.0
public RectifierNeuralNetwork(List<Integer> neuronsInLayers) {
	//this.setNetworkType(NeuralNetworkType.RECTIFIER);

	NeuronProperties inputNeuronProperties = new NeuronProperties(InputNeuron.class, Linear.class);
       Layer layer = LayerFactory.createLayer(neuronsInLayers.get(0), inputNeuronProperties);

       this.addLayer(layer);

       // create layers
       Layer prevLayer = layer;

       for (int layerIdx = 1; layerIdx < neuronsInLayers.size()-1; layerIdx++) {
           Integer neuronsNum = neuronsInLayers.get(layerIdx);
           layer = LayerFactory.createLayer(neuronsNum, RectifiedLinear.class);

           this.addLayer(layer);
           ConnectionFactory.fullConnect(prevLayer, layer);

           prevLayer = layer;
       }

       int numberOfOutputNeurons = neuronsInLayers.get(neuronsInLayers.size() - 1);
       Layer outputLayer = LayerFactory.createLayer(numberOfOutputNeurons, Sigmoid.class);
       this.addLayer(outputLayer);
       ConnectionFactory.fullConnect(prevLayer, outputLayer);

       NeuralNetworkFactory.setDefaultIO(this); // set input and output cells for network
       this.setLearningRule(new MomentumBackpropagation());
       this.randomizeWeights(new HeZhangRenSunUniformWeightsRandomizer());
}
 
Example #9
Source File: Outstar.java    From NeurophFramework with Apache License 2.0
/**
 * Creates Outstar architecture with specified number of neurons in 
 * output layer
 * 
 * @param outputNeuronsCount
 *            number of neurons in output layer
 */
private void createNetwork(int outputNeuronsCount ) {

	// set network type
	this.setNetworkType(NeuralNetworkType.OUTSTAR);

	// init neuron settings for this type of network
	NeuronProperties neuronProperties = new NeuronProperties();
	neuronProperties.setProperty("transferFunction", TransferFunctionType.STEP);
	
	// create input layer
	Layer inputLayer = LayerFactory.createLayer(1, neuronProperties);
	this.addLayer(inputLayer);

	// create output layer
	neuronProperties.setProperty("transferFunction", TransferFunctionType.RAMP);
	Layer outputLayer = LayerFactory.createLayer(outputNeuronsCount, neuronProperties);
	this.addLayer(outputLayer);

	// create full connectivity between input and output layer
	ConnectionFactory.fullConnect(inputLayer, outputLayer);

	// set input and output cells for this network
	NeuralNetworkFactory.setDefaultIO(this);

	// set outstar learning rule for this network
	this.setLearningRule(new OutstarLearning());
}
 
Example #10
Source File: SupervisedHebbianNetwork.java    From NeurophFramework with Apache License 2.0
/**
 * Creates an instance of Supervised Hebbian Network with the specified number
 * of neurons in the input and output layers, and the given transfer function
 * 
 * @param inputNeuronsNum
 *            number of neurons in input layer
 * @param outputNeuronsNum
 *            number of neurons in output layer
 * @param transferFunctionType
 *            transfer function type
 */
private void createNetwork(int inputNeuronsNum, int outputNeuronsNum,
	TransferFunctionType transferFunctionType) {

	// init neuron properties
	NeuronProperties neuronProperties = new NeuronProperties();
	neuronProperties.setProperty("transferFunction", transferFunctionType);
	neuronProperties.setProperty("transferFunction.slope", new Double(1));
	neuronProperties.setProperty("transferFunction.yHigh", new Double(1));
	neuronProperties.setProperty("transferFunction.xHigh", new Double(1));		
	neuronProperties.setProperty("transferFunction.yLow", new Double(-1));
	neuronProperties.setProperty("transferFunction.xLow", new Double(-1));
	
	// set network type code
	this.setNetworkType(NeuralNetworkType.SUPERVISED_HEBBIAN_NET);

	// create input layer
	Layer inputLayer = LayerFactory.createLayer(inputNeuronsNum,
		neuronProperties);
	this.addLayer(inputLayer);

	// create output layer
	Layer outputLayer = LayerFactory.createLayer(outputNeuronsNum,
		neuronProperties);
	this.addLayer(outputLayer);

	// create full connectivity between input and output layer
	ConnectionFactory.fullConnect(inputLayer, outputLayer);

	// set input and output cells for this network
	NeuralNetworkFactory.setDefaultIO(this);

	// set appropriate learning rule for this network
	this.setLearningRule(new SupervisedHebbianLearning());
}
 
Example #11
Source File: BAM.java    From NeurophFramework with Apache License 2.0
/**
 * Creates BAM network architecture
 * 
 * @param inputNeuronsCount
 *            number of neurons in input layer
 * @param outputNeuronsCount
 *            number of neurons in output layer
 * @param neuronProperties
 *            neuron properties
 */
private void createNetwork(int inputNeuronsCount, int outputNeuronsCount,  NeuronProperties neuronProperties) {

               // set network type
	this.setNetworkType(NeuralNetworkType.BAM);

	// create input layer
	Layer inputLayer = LayerFactory.createLayer(inputNeuronsCount, neuronProperties);
	// add input layer to network
	this.addLayer(inputLayer);

	// create output layer
	Layer outputLayer = LayerFactory.createLayer(outputNeuronsCount, neuronProperties);	
	// add output layer to network
	this.addLayer(outputLayer);
	
	// create full connectivity from in to out layer	
	ConnectionFactory.fullConnect(inputLayer, outputLayer);		
	// create full connectivity from out to in layer
	ConnectionFactory.fullConnect(outputLayer, inputLayer);
			
	// set input and output cells for this network
	NeuralNetworkFactory.setDefaultIO(this);

	// set Hebbian learning rule for this network
	this.setLearningRule(new BinaryHebbianLearning());			
}
 
Example #12
Source File: Perceptron.java    From NeurophFramework with Apache License 2.0
/**
	 * Creates perceptron architecture with specified number of neurons in input
	 * and output layer, specified transfer function
	 * 
	 * @param inputNeuronsCount
	 *            number of neurons in input layer
	 * @param outputNeuronsCount
	 *            number of neurons in output layer
	 * @param transferFunctionType
	 *            neuron transfer function type
	 */
	private void createNetwork(int inputNeuronsCount, int outputNeuronsCount, TransferFunctionType transferFunctionType) {
		// set network type
		this.setNetworkType(NeuralNetworkType.PERCEPTRON);

                Layer inputLayer = new InputLayer(inputNeuronsCount);
		this.addLayer(inputLayer);

		NeuronProperties outputNeuronProperties = new NeuronProperties();
		outputNeuronProperties.setProperty("neuronType", ThresholdNeuron.class);
		outputNeuronProperties.setProperty("thresh", new Double(Math.abs(Math.random())));
		outputNeuronProperties.setProperty("transferFunction", transferFunctionType);
		// for sigmoid and tanh transfer functions set slope property
		outputNeuronProperties.setProperty("transferFunction.slope", new Double(1));

		// create output layer
		Layer outputLayer = LayerFactory.createLayer(outputNeuronsCount, outputNeuronProperties);
		this.addLayer(outputLayer);

		// create full connectivity between input and output layer
		ConnectionFactory.fullConnect(inputLayer, outputLayer);

		// set input and output cells for this network
		NeuralNetworkFactory.setDefaultIO(this);
                
                this.setLearningRule(new BinaryDeltaRule());
		// set appropriate learning rule for this network
//		if (transferFunctionType == TransferFunctionType.STEP) {
//			this.setLearningRule(new BinaryDeltaRule(this));
//		} else if (transferFunctionType == TransferFunctionType.SIGMOID) {
//			this.setLearningRule(new SigmoidDeltaRule(this));
//		} else if (transferFunctionType == TransferFunctionType.TANH) {
//			this.setLearningRule(new SigmoidDeltaRule(this));
//		} else {
//			this.setLearningRule(new PerceptronLearning(this));
//		}
	}
 
Example #13
Source File: UnsupervisedHebbianNetwork.java    From NeurophFramework with Apache License 2.0
/**
	 * Creates an instance of Unsupervised Hebbian net with specified number
	 * of neurons in input layer and output layer, and transfer function
	 * 
	 * @param inputNeuronsNum
	 *            number of neurons in input layer
	 * @param outputNeuronsNum
	 *            number of neurons in output layer
	 * @param transferFunctionType
	 *            transfer function type
	 */
	private void createNetwork(int inputNeuronsNum, int outputNeuronsNum,
		TransferFunctionType transferFunctionType) {

		// init neuron properties
		NeuronProperties neuronProperties = new NeuronProperties();
//		neuronProperties.setProperty("bias", new Double(-Math
//				.abs(Math.random() - 0.5))); // Hebbian network cann not work
		// without bias
		neuronProperties.setProperty("transferFunction", transferFunctionType);
		neuronProperties.setProperty("transferFunction.slope", new Double(1));

		// set network type code
		this.setNetworkType(NeuralNetworkType.UNSUPERVISED_HEBBIAN_NET);

		// create input layer
		Layer inputLayer = LayerFactory.createLayer(inputNeuronsNum,
			neuronProperties);
		this.addLayer(inputLayer);

		// create output layer
		Layer outputLayer = LayerFactory.createLayer(outputNeuronsNum,
			neuronProperties);
		this.addLayer(outputLayer);

		// create full connectivity between input and output layer
		ConnectionFactory.fullConnect(inputLayer, outputLayer);

		// set input and output cells for this network
		NeuralNetworkFactory.setDefaultIO(this);

		// set appropriate learning rule for this network
		this.setLearningRule(new UnsupervisedHebbianLearning());
	//this.setLearningRule(new OjaLearning(this));
	}
 
Example #14
Source File: Adaline.java    From NeurophFramework with Apache License 2.0
/**
	 * Creates adaline network architecture with specified number of input neurons
	 *
	 * @param inputNeuronsCount
         *              number of neurons in input layer
	 */
	private void createNetwork(int inputNeuronsCount) {
		// set network type code
		this.setNetworkType(NeuralNetworkType.ADALINE);

                // create input layer neuron settings for this network
		NeuronProperties inNeuronProperties = new NeuronProperties();
		inNeuronProperties.setProperty("transferFunction", TransferFunctionType.LINEAR);

		// create input layer with specified number of neurons
		//Layer inputLayer = LayerFactory.createLayer(inputNeuronsCount, inNeuronProperties);
                Layer inputLayer = new InputLayer(inputNeuronsCount);
                inputLayer.addNeuron(new BiasNeuron()); // add bias neuron (always 1, and it will act as bias input for output neuron)
		this.addLayer(inputLayer);

               // create output layer neuron settings for this network
		NeuronProperties outNeuronProperties = new NeuronProperties();
		outNeuronProperties.setProperty("transferFunction", TransferFunctionType.LINEAR); // was RAMP
//		outNeuronProperties.setProperty("transferFunction.slope", new Double(1));
//		outNeuronProperties.setProperty("transferFunction.yHigh", new Double(1));
//		outNeuronProperties.setProperty("transferFunction.xHigh", new Double(1));
//		outNeuronProperties.setProperty("transferFunction.yLow", new Double(-1));
//		outNeuronProperties.setProperty("transferFunction.xLow", new Double(-1));

		// create output layer (only one neuron)
		Layer outputLayer = LayerFactory.createLayer(1, outNeuronProperties);
		this.addLayer(outputLayer);

		// create full connectivity between input and output layer
		ConnectionFactory.fullConnect(inputLayer, outputLayer);

		// set input and output cells for network
		NeuralNetworkFactory.setDefaultIO(this);

		// set LMS learning rule for this network
		this.setLearningRule(new LMS());
	}
 
Example #15
Source File: ConvolutionalNetwork.java    From NeurophFramework with Apache License 2.0
public Builder withFullConnectedLayer(Layer layer) {
    Layer lastLayer = getLastLayer();
    network.addLayer(layer);
    ConnectionFactory.fullConnect(lastLayer, layer);
    return this;
}
 
Example #16
Source File: MultiLayerPerceptron.java    From NeurophFramework with Apache License 2.0
/**
     * Creates MultiLayerPerceptron Network architecture - fully connected
     * feed forward with specified number of neurons in each layer
     *
     * @param neuronsInLayers  collection of neuron counts, one per layer
     * @param neuronProperties neuron properties
     */
    private void createNetwork(List<Integer> neuronsInLayers, NeuronProperties neuronProperties) {

        // set network type
        this.setNetworkType(NeuralNetworkType.MULTI_LAYER_PERCEPTRON);

        // create input layer
        NeuronProperties inputNeuronProperties = new NeuronProperties(InputNeuron.class, Linear.class);
        Layer layer = LayerFactory.createLayer(neuronsInLayers.get(0), inputNeuronProperties);

        boolean useBias = true; // use bias neurons by default
        if (neuronProperties.hasProperty("useBias")) {
            useBias = (Boolean) neuronProperties.getProperty("useBias");
        }

        if (useBias) {
            layer.addNeuron(new BiasNeuron());
        }

        this.addLayer(layer);

        // create layers
        Layer prevLayer = layer;

        //for(Integer neuronsNum : neuronsInLayers)
        for (int layerIdx = 1; layerIdx < neuronsInLayers.size(); layerIdx++) {
            Integer neuronsNum = neuronsInLayers.get(layerIdx);
            // create layer
            layer = LayerFactory.createLayer(neuronsNum, neuronProperties);

            if (useBias && (layerIdx < (neuronsInLayers.size() - 1))) {
                layer.addNeuron(new BiasNeuron());
            }

            // add created layer to network
            this.addLayer(layer);
            // create full connectivity between previous and this layer
            if (prevLayer != null) {
                ConnectionFactory.fullConnect(prevLayer, layer);
            }

            prevLayer = layer;
        }

        // set input and output cells for network
        NeuralNetworkFactory.setDefaultIO(this);

        // set learning rule
//        this.setLearningRule(new BackPropagation());
        this.setLearningRule(new MomentumBackpropagation());
        // this.setLearningRule(new DynamicBackPropagation());

        this.randomizeWeights(new RangeRandomizer(-0.7, 0.7));

    }
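
Callers normally do not invoke createNetwork() directly. A minimal usage sketch (not from the source file), assuming the public varargs constructor MultiLayerPerceptron(TransferFunctionType, int...) that delegates to it:

// 4 inputs, one hidden layer of 8 neurons, 3 outputs, sigmoid transfer function
MultiLayerPerceptron mlp = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 4, 8, 3);

DataSet trainingSet = new DataSet(4, 3);
trainingSet.addRow(new DataSetRow(new double[]{0.1, 0.2, 0.3, 0.4}, new double[]{1, 0, 0}));

mlp.getLearningRule().setMaxError(0.01);    // MomentumBackpropagation, as set in createNetwork()
mlp.learn(trainingSet);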
 
Example #17
Source File: MultiLayerPerceptron.java    From NeurophFramework with Apache License 2.0
public void connectInputsToOutputs() {
    // connect first and last layer
    ConnectionFactory.fullConnect(getLayerAt(0), getLayerAt(getLayersCount() - 1), false);
}
 
Example #18
Source File: ConvolutionalLayer.java    From NeurophFramework with Apache License 2.0
/**
     * Creates connections with shared weights between two feature maps. Assumes
     * that toMap belongs to a convolutional layer.
     * <p/>
     * Kernel is used as a sliding window, and kernel positions overlap. Kernel
     * is shifting right by one position at a time. Neurons at the same kernel
     * position share the same weights
     *
     * @param fromMap source feature map
     * @param toMap destination feature map
     */
    @Override
    public void connectMaps(FeatureMapLayer fromMap, FeatureMapLayer toMap) {

        Kernel kernel = toMap.getKernel();
        kernel.initWeights(-0.15, 0.15); // why these values ???
      //  int numberOfSharedWeights = kernel.getArea();
//        Weight[][] weights = new Weight[kernel.getHeight()][kernel.getWidth()];
//        //double coefficient = getWeightCoeficient(toMap);
//        // initialize kernel with random weights
//        // ovo prebaciti u kernel
//        // move this into the kernel
//            for (int j = 0; j < kernel.getWidth(); j++) {
//                Weight weight = new Weight();
//                weight.randomize(-0.15, 0.15); // why these values?
//                weights[i][j] = weight;
//            }
//        }
//        kernel.setWeights(weights); // in the end all kernels of all feature maps have the same weights, because they point to the same kernel instance of the superclass!!!!
//                                    // move the kernel into Layer2D, rename it to FeatureMapLayer and give it a kernel...
//                                    // besides the kernel, also give it a BiasNeuron...
        BiasNeuron biasNeuron = new BiasNeuron();
        fromMap.addNeuron(biasNeuron);
                                    
                                    
        // this is used only for connecting two convolutional layers !!!
        // add a step (stride) for the from map - it does not have to be just 1
        // the question remains how to apply it at the edges - it should be from the center - add padding around the border!!!!
        for (int y = 0; y < toMap.getHeight(); y++) { // iterate all neurons by height in toMap -- the order of x and y should probably be swapped here too!!!
            for (int x = 0; x < toMap.getWidth(); x++) { // iterate all neurons by width in toMap
                Neuron toNeuron = toMap.getNeuronAt(x, y); // get neuron at specified position in toMap
                for (int ky = 0; ky < kernel.getHeight(); ky++) { // iterate kernel positions by y
                    for (int kx = 0; kx < kernel.getWidth(); kx++) { // iterate kernel positions by x
                        int fromX = x + kx; // calculate the x position of from neuron
                        int fromY = y + ky; // calculate the y position of from neuron
                        //int currentWeightIndex = kx + ky * kernel.getHeight(); // find the idx of the shared weight
                        Weight[][] concreteKernel = kernel.getWeights();
                        Neuron fromNeuron = fromMap.getNeuronAt(fromX, fromY);
                        ConnectionFactory.createConnection(fromNeuron, toNeuron, concreteKernel[kx][ky]);  // - is this correct ???
                        // also create connection from bias
                        ConnectionFactory.createConnection(biasNeuron, toNeuron);
                    }
                }
            }
        }
    }
 
Example #19
Source File: JordanNetwork.java    From NeurophFramework with Apache License 2.0
private void createNetwork(int inputNeuronsCount, int hiddenNeuronsCount, int contextNeuronsCount, int outputNeuronsCount) {

                // create input layer
                InputLayer inputLayer = new InputLayer(inputNeuronsCount);
                inputLayer.addNeuron(new BiasNeuron());
                addLayer(inputLayer);
                
		NeuronProperties neuronProperties = new NeuronProperties();
               // neuronProperties.setProperty("useBias", true);
		neuronProperties.setProperty("transferFunction", TransferFunctionType.SIGMOID);      // use linear or logitic function! (TR-8604.pdf)          
            
                Layer hiddenLayer = new Layer(hiddenNeuronsCount, neuronProperties); 
                hiddenLayer.addNeuron(new BiasNeuron());
                addLayer(hiddenLayer);
                
                ConnectionFactory.fullConnect(inputLayer, hiddenLayer);
                
                Layer contextLayer = new Layer(contextNeuronsCount, neuronProperties); 
                addLayer(contextLayer); // we might also need bias for context neurons?
                                                                               
                Layer outputLayer = new Layer(outputNeuronsCount, neuronProperties); 
                addLayer(outputLayer);
                
                ConnectionFactory.fullConnect(hiddenLayer, outputLayer);
                
                ConnectionFactory.fullConnect(outputLayer, contextLayer);
                ConnectionFactory.fullConnect(contextLayer, hiddenLayer);
                
                                
		// set input and output cells for network
                  NeuralNetworkFactory.setDefaultIO(this);

                  // set learning rule
		this.setLearningRule(new BackPropagation());
				
	}
 
Example #20
Source File: ConvolutionalNetwork.java    From NeurophFramework with Apache License 2.0
public Builder withFullConnectedLayer(int numberOfNeurons) {
    Layer lastLayer = getLastLayer();

    Layer fullConnectedLayer = new Layer(numberOfNeurons, DEFAULT_FULL_CONNECTED_NEURON_PROPERTIES);
    network.addLayer(fullConnectedLayer);

    ConnectionFactory.fullConnect(lastLayer, fullConnectedLayer);

    return this;
}
 
Example #21
Source File: ElmanNetwork.java    From NeurophFramework with Apache License 2.0
private void createNetwork(int inputNeuronsCount, int hiddenNeuronsCount, int contextNeuronsCount, int outputNeuronsCount) {

                // create input layer
                InputLayer inputLayer = new InputLayer(inputNeuronsCount);
                inputLayer.addNeuron(new BiasNeuron());
                addLayer(inputLayer);
                
		NeuronProperties neuronProperties = new NeuronProperties();
               // neuronProperties.setProperty("useBias", true);
		neuronProperties.setProperty("transferFunction", TransferFunctionType.SIGMOID);                
            
                Layer hiddenLayer = new Layer(hiddenNeuronsCount, neuronProperties); 
                hiddenLayer.addNeuron(new BiasNeuron());
                addLayer(hiddenLayer);
                
                ConnectionFactory.fullConnect(inputLayer, hiddenLayer);
                
                Layer contextLayer = new Layer(contextNeuronsCount, neuronProperties); 
                addLayer(contextLayer); // we might also need bias for context neurons?
                                                                               
                Layer outputLayer = new Layer(outputNeuronsCount, neuronProperties); 
                addLayer(outputLayer);
                
                ConnectionFactory.fullConnect(hiddenLayer, outputLayer);
                
                ConnectionFactory.forwardConnect(hiddenLayer, contextLayer); // forward or full connectivity?
                ConnectionFactory.fullConnect(contextLayer, hiddenLayer);
                
                                
		// set input and output cells for network
                  NeuralNetworkFactory.setDefaultIO(this);

                  // set learning rule
		this.setLearningRule(new BackPropagation());
				
	}