package uk.ac.leeds.ccg.projects.MedAction.NeuralNetwork;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import uk.ac.leeds.ccg.andyt.grids.AbstractGrid2DSquareCellDouble;

/**
 * A class to hold references to all the neurons arranged in layers and to
 * organise their training and processing of data. This is a multi-layer
 * perceptron type arrangement.
 *
 * NOTE(review): this class is Serializable but declares no serialVersionUID,
 * so the UID is compiler-computed and fragile across edits. Pinning one would
 * break reading previously saved networks, so it is deliberately left as-is —
 * confirm whether any serialized instances must remain readable.
 */
public class NeuralNetwork implements java.io.Serializable {

    /**
     * For storing references to all the neurons, ordered by layer and then by
     * position within the layer.
     */
    private Neuron[][] neurons;

    /**
     * Constructs a NeuralNetwork from neurons.
     *
     * @param neurons the Neuron[][] of the network arranged in layers.
     */
    public NeuralNetwork(Neuron[][] neurons) {
        setNeurons(neurons);
    }

    /**
     * Constructs a new NeuralNetwork by deserialising a previously saved one
     * from fis.
     *
     * @param fis the FileInputStream for initialising this.
     * @throws IOException if reading from fis fails.
     * @throws ClassNotFoundException if the serialised class is unavailable.
     */
    public NeuralNetwork(FileInputStream fis)
            throws IOException, ClassNotFoundException {
        ObjectInputStream objectStream =
                new ObjectInputStream(new BufferedInputStream(fis));
        try {
            Object object = objectStream.readObject();
            NeuralNetwork dummy = (NeuralNetwork) object;
            setNeurons(dummy.getNeurons());
            // Relies on getWeights() returning a fully allocated array (this
            // previously threw a NullPointerException before getWeights() was
            // fixed to allocate its result).
            setWeights(dummy.getWeights());
        } finally {
            // Close even if readObject or the cast fails.
            objectStream.close();
        }
    }

    /*
    public static NeuroNetwork load( File nnParameterObjectFile )
            throws IOException, ClassNotFoundException {
        ObjectInputStream objectStream = new ObjectInputStream(
                new BufferedInputStream(
                        new FileInputStream( nnParameterObjectFile ) ) );
        Object nn = objectStream.readObject();
        objectStream.close();
        return new NeuroNetwork( ( Object[] ) nn );
    }
    */

    /**
     * Processes the inputs through the neurons, layer by layer: each layer's
     * firing values become the next layer's inputs. Results are left in the
     * neurons and read back via their getFire() methods.
     *
     * @param inputs double[] of inputs to process.
     */
    public void classify(double[] inputs) {
        Neuron[][] neurons = getNeurons();
        int numberOfLayers = neurons.length;
        // Handle input layer: every input-layer neuron sees the raw inputs.
        Neuron[] layer = neurons[0];
        int numberOfNeurons = layer.length;
        for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
            layer[neuronID].classify(inputs);
        }
        // Collect this layer's firing values as the next layer's input.
        double[] input = new double[numberOfNeurons];
        for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
            input[neuronID] = layer[neuronID].getFire();
        }
        // Handle non-input layers.
        for (int layerID = 1; layerID < numberOfLayers; layerID++) {
            layer = neurons[layerID];
            numberOfNeurons = layer.length;
            for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
                layer[neuronID].classify(input);
            }
            // Set input for next layer.
            input = new double[numberOfNeurons];
            for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
                input[neuronID] = layer[neuronID].getFire();
            }
        }
    }

    /**
     * Back-propagates the output-layer errors through the network, asking each
     * neuron to adjust its weights from the errors and weights of the layer
     * that follows it.
     *
     * @param y the double[] of target (expected) outputs.
     * @param fire the double[] of actual outputs from the output layer.
     */
    private void adjustWeightsInBackPropogationTraining(double[] y,
            double[] fire) {
        Neuron[][] neurons = getNeurons();
        int numberOfLayers = neurons.length;
        int numberOfNeurons = y.length;
        // Output-layer error is the difference between target and output.
        double[] lastLayerErrors = new double[numberOfNeurons];
        for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
            lastLayerErrors[neuronID] = y[neuronID] - fire[neuronID];
        }
        double[] lastLayerWeights;
        Neuron[] layer;
        // Walk backwards from the layer before the output layer.
        for (int layerID = numberOfLayers - 2; layerID > -1; layerID--) {
            layer = neurons[layerID];
            numberOfNeurons = layer.length;
            for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
                // NOTE(review): this indexes the NEXT layer with the CURRENT
                // layer's neuronID, which only works when adjacent layers are
                // the same size — confirm the intended connectivity against
                // Neuron.adjustWeightsInBackPropogationTraining.
                lastLayerWeights = neurons[layerID + 1][neuronID].getWeights();
                layer[neuronID].adjustWeightsInBackPropogationTraining(
                        lastLayerErrors, lastLayerWeights);
            }
            // Re-size the error buffer to this layer before harvesting its
            // error signals. (The original reused the output-layer-sized
            // array, which threw ArrayIndexOutOfBoundsException whenever a
            // hidden layer was larger than the output layer.)
            lastLayerErrors = new double[numberOfNeurons];
            for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
                lastLayerErrors[neuronID] = layer[neuronID].getErrorSignal();
            }
        }
    }

    /**
     * @TODO docs
    public double[] training( double[] x, double[] y ) {
        process( x );
        Neuron[][] neurons = getNeurons();
        int numberOfLayers = neurons.length;
        Neuron[] layer = neurons[ numberOfLayers - 1 ];
        int numberOfNeurons = layer.length;
        double[] fire = new double[ numberOfNeurons ];
        for ( int neuronID = 0; neuronID < numberOfNeurons; neuronID ++ ) {
            fire[ neuronID ] = layer[ neuronID ].getFire();
        }
        adjustWeights( y, fire );
        return fire;
    }
    */

    /**
     * Sets the weights in the Neurons to be those passed in. These are ordered
     * by layer, by location in layer and by location of connecting neuron in
     * the previous layer.
     *
     * @param weights a double[][][] of weights passed in.
     */
    public void setWeights(double[][][] weights) {
        Neuron[][] neurons = getNeurons();
        int numberOfLayers = neurons.length;
        int numberOfNeurons;
        Neuron[] layer;
        for (int layerID = 0; layerID < numberOfLayers; layerID++) {
            layer = neurons[layerID];
            numberOfNeurons = layer.length;
            for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
                layer[neuronID].setWeights(weights[layerID][neuronID]);
            }
        }
    }

    /**
     * Returns the weights in the Neurons as a double[][][]. These are ordered
     * by layer, by location in layer and by location of connecting neuron in
     * the previous layer.
     *
     * @return a newly allocated double[][][] of all neuron weights.
     */
    public double[][][] getWeights() {
        Neuron[][] neurons = getNeurons();
        int numberOfLayers = neurons.length;
        // Allocate the result. (The original left this null and then
        // dereferenced it, so every call threw a NullPointerException.)
        double[][][] weights = new double[numberOfLayers][][];
        int numberOfNeurons;
        Neuron[] layer;
        for (int layerID = 0; layerID < numberOfLayers; layerID++) {
            layer = neurons[layerID];
            numberOfNeurons = layer.length;
            weights[layerID] = new double[numberOfNeurons][];
            for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
                weights[layerID][neuronID] = layer[neuronID].getWeights();
            }
        }
        return weights;
    }

    /**
     * Returns a double[][] of outputs from processing. These are ordered by
     * the inputs processed (i.e. the cellIDs) and then the output variables.
     *
     * @param x the double[][] of inputs ordered by the cellID and then each
     *     input variable.
     * @return the output-layer firing values for each input row.
     */
    public double[][] runNN(double[][] x) {
        Neuron[][] neurons = getNeurons();
        int numberOfLayers = neurons.length;
        Neuron[] layer = neurons[numberOfLayers - 1];
        int numberOfNeurons = layer.length;
        double[][] fire = new double[x.length][numberOfNeurons];
        for (int cellID = 0; cellID < x.length; cellID++) {
            classify(x[cellID]);
            for (int neuronID = 0; neuronID < numberOfNeurons; neuronID++) {
                fire[cellID][neuronID] = layer[neuronID].getFire();
            }
        }
        return fire;
    }

    /**
     * Returns the neurons that make up this network.
     *
     * @return the Neuron[][] ordered by layer and position in layer.
     */
    public Neuron[][] getNeurons() {
        return this.neurons;
    }

    /**
     * Sets the neurons of this network to be those passed in.
     *
     * @param neurons a Neuron[][] that comprises this network ordered in
     *     layers and by position in layer.
     */
    public void setNeurons(Neuron[][] neurons) {
        this.neurons = neurons;
    }

    /**
     * Saves this NeuralNetwork to file.
     *
     * @param file the File to save to.
     * @throws IOException if writing fails.
     */
    public void save(File file) throws IOException {
        save(new FileOutputStream(file));
    }

    /**
     * Saves this NeuralNetwork to fos. The stream is closed on completion.
     *
     * @param fos the FileOutputStream for saving this to.
     * @throws IOException if writing fails.
     */
    public void save(FileOutputStream fos) throws IOException {
        ObjectOutputStream objectStream =
                new ObjectOutputStream(new BufferedOutputStream(fos));
        try {
            objectStream.writeObject(this);
        } finally {
            // Closing the ObjectOutputStream flushes the buffer and closes
            // the underlying FileOutputStream.
            objectStream.close();
        }
    }

    /*
    public AbstractGrid2DSquareCellDouble runNN(
            AbstractGrid2DSquareCellDouble[] xGrid,
            String tempGridFileDirectory )
            throws IllegalArgumentException {
        if( getInputNumber() != xGrid.length) {
            throw new IllegalArgumentException("Input number does not match:" +
                "InputNumber in Neural Network is " + getInputNumber() +
                " but now inputis " + xGrid.length);
        }
        Grid2DSquareCellDoubleFactory f;
        Grid2DSquareCellDoubleFileFactory ff;
        AbstractGrid2DSquareCellDouble fireGrid = null;
        int inputLength = xGrid[0].getNcols() * xGrid[0].getNrows();
        Utilities u = new Utilities();
        // initialising an empty fireGrid with the same extent of the input Grids
        try{
            System.out.println( "trying to initialising fireGrid in memory..." );
            f = new Grid2DSquareCellDoubleFactory();
            fireGrid = f.createGrid2DSquareCellDouble( xGrid[0] );
            System.out.println( "...done" );
        } catch ( java.lang.OutOfMemoryError e ) {
            System.out.println( e + " initialising fireGrid as file..." );
            ff = new Grid2DSquareCellDoubleFileFactory();
            ff.setFile( u.makeTemporaryFile( tempGridFileDirectory ) );
            fireGrid = ff.createGrid2DSquareCellDouble( xGrid[0] );
            System.out.println( "...done" );
        }
        double[] max = new double[xGrid.length];
        double[] min = new double[xGrid.length];
        double[] nodatavalue = new double[xGrid.length];
        for( int i = 0; i < xGrid.length; i++ ){
            max[i] = xGrid[i].getGridStatistics().getMax();
            min[i] = xGrid[i].getGridStatistics().getMin();
            nodatavalue[i] = xGrid[i].getNoDataValue();
        }
        double[] x = new double[xGrid.length];
        for( int i = 0; i < inputLength; i++ ){
            for( int m = 0; m < xGrid.length; m++){
                x[m] = xGrid[m].getCell( i );
                x[m] = NNUtility.getRerangedValue(
                        x[m] , max[m] , min[m] , nodatavalue[m] );
            }
            double[] y = getFire( x );
            fireGrid.setCell( i, y[0] );
        }
        return fireGrid;
    }

    // (second variant, writing one prediction grid per output)
    public AbstractGrid2DSquareCellDouble[] runNN(
            AbstractGrid2DSquareCellDouble[] xGrid,
            String predictionFileDir,
            int outputNumber )
            throws IllegalArgumentException{
        if( getInputNumber() != xGrid.length ) {
            throw new IllegalArgumentException( "Input number does not match:" +
                "InputNumber in Neural Network is " + getInputNumber() +
                " but now inputis " + xGrid.length );
        }
        Grid2DSquareCellDoubleFactory f;
        Grid2DSquareCellDoubleFileFactory ff;
        AbstractGrid2DSquareCellDouble[] fireGrid =
                new AbstractGrid2DSquareCellDouble[outputNumber];
        int inputLength = xGrid[0].getNcols() * xGrid[0].getNrows();
        // initialising an empty fireGrid with the same extent of the input Grids
        for(int i = 0 ; i < outputNumber ; i++ ){
            Utilities u = new Utilities();
            //System.out.println( e + " initialising fireGrid as file..." );
            System.out.println( i + " initialising fireGrid as file..." );
            ff = new Grid2DSquareCellDoubleFileFactory();
            // ff.setFile( u.makeTemporaryFile( tempGridFileDirectory, Integer.toString(i) ) );
            ff.setFile( new File( predictionFileDir + "Prediction" + i + ".grd" ) );
            fireGrid[i] = ff.createGrid2DSquareCellDouble( xGrid[0] );
            System.out.println( "...done" );
        }
        double[] max = new double[xGrid.length];
        double[] min = new double[xGrid.length];
        double[] nodatavalue = new double[xGrid.length];
        for( int i = 0; i < max.length; i++ ){
            max[ i ] = xGrid[ i ].getGridStatistics().getMax();
            min[ i ] = xGrid[ i ].getGridStatistics().getMin();
            nodatavalue[i] = xGrid[ i ].getNoDataValue();
        }
        double[] x = new double[xGrid.length];
        for( int i = 0; i < inputLength; i++ ){
            for( int m = 0; m < xGrid.length; m++){
                x[m] = xGrid[ m ].getCell( i );
                x[m] = NNUtility.getRerangedValue(
                        x[ m ] , max[ m ] , min[ m ] , nodatavalue[ m ] );
            }
            double[] y = getFire( x );
            for( int j = 0; j < y.length ; j++ ){
                fireGrid[j].setCell(i, y[j] );
            }
        }
        for( int i = 0; i < xGrid.length; i++ ) {
            xGrid[i].clear();
        }
        return fireGrid;
    }
    */

    /*
    public double[][] trainNN( double[][] x, double[] y ) throws Exception {
        double[][] newY = new double[ y.length ][ 1 ];
        for ( int i = 0; i < y.length; i ++ ) {
            newY[ i ][ 0 ] = y[ i ];
        }
        double[][] fire = trainNN( x, newY );
        setRMSE( NNUtility.getRMS( fire , newY ) );
        return fire;
    }
    */

    /*
    public double[][] trainNN( double[][] x, double[][] y ) throws Exception {
        double[][] fire = null ;
        //double[] oldWeight = getWeightForGeneticLearning();
        //double oldRMSE = NNUtility.getRMS( runNN( x ), y );
        for ( int i = 0; i < x.length; i ++ ) {
            training( x[ i ], y[ i ] );
        }
        fire = runNN( x );
        double newRMSE = NNUtility.getRMS( fire , y );
        //setLearningRate( newRMSE );
        setRMSE( newRMSE );
        return fire;
    }
    */

    /**
     * Returns a string like
     * "input=3&layernumber=2&layer0=3&layer1=5&outputnumber=2&rate=1.0&alfa=1.0&genetic=100&back=100".
     * It is a string used as a query parameter in a servlet request URL, but
     * does not include the "?" character.
     */
    /*
    public String toURLQueryParameterString( int geneticRound, int backRound ) {
        Layer[] layers = getLayer();
        Layer outputLayer = getOutputLayer();
        int inputNumber = getInputNumber();
        String parameter = "input=" + inputNumber;
        parameter = parameter + "&" + "layernumber=" + layers.length;
        for ( int i = 0; i < layers.length; i ++ ) {
            parameter = parameter + "&" + "layer" + i + "=" +
                    layers[ i ].getNeuroNumber();
        }
        parameter = parameter + "&" + "output=" + outputLayer.getNeuroNumber();
        parameter = parameter + "&" + "rate=" + getLearningRate();
        parameter = parameter + "&" + "alfa=" + getAlfaValue();
        parameter = parameter + "&" + "genetic=" + geneticRound;
        parameter = parameter + "&" + "back=" + backRound;
        return parameter;
    }
    */

    /**
     * Packs all the parameters into an Object[3]:
     * the first object is an int[layer][layer's parameter]: int[0][0] is the
     * first layer neuro number, int[0][1] is the input number, int[0][2] is
     * the next layer neuro number;
     * the second object is an Object[] holding the learning rate (Double) and
     * the alfa value (Double);
     * the third object is the weight[].
     *
    public Object[] getNNParameter() {
        Layer[] layers = this.getLayer();
        Layer outLayer = this.getOutputLayer();
        //OutNeuro outNeuron = this.getOutputNeuron();
        int[][] object1 = new int[layers.length + 1][2];
        for ( int i = 0; i < layers.length; i ++ ) {
            object1[i][0] = layers[i].getNeuroNumber();
            object1[i][1] = layers[i].getInputNumber();
        }
        object1[object1.length - 1][0] = outLayer.getNeuroNumber();
        object1[object1.length - 1][1] = outLayer.getInputNumber();
        Object[] object2 = { new Double(getLearningRate()),
                new Double(getAlfaValue()) };
        Object[] ob = new Object[3];
        ob[0] = object1;
        ob[1] = object2;
        ob[2] = this.getWeightForGeneticLearning();
        return ob;
    }
    */
}