package uk.ac.leeds.ccg.projects.MedAction.NeuralNetwork;

import java.io.File;
//import java.io.*;
import java.util.Hashtable;
import java.util.Vector;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.Iterator;
import java.util.Map;
//import uk.ac.leeds.ccg.andyt.grids.*;
import uk.ac.leeds.ccg.andyt.grids.AbstractGrid2DSquareCellDouble;
import uk.ac.leeds.ccg.andyt.grids.AbstractGrid2DSquareCellDoubleFactory;
import uk.ac.leeds.ccg.andyt.grids.Grid2DSquareCellDoubleFileFactory;
import uk.ac.leeds.ccg.andyt.grids.Grid2DSquareCellDoubleJAIFactory;
import uk.ac.leeds.ccg.andyt.grids.Grid2DSquareCellDoubleProcessor;

/**
 * Class for handling Neural Network data IO.
 */
public class IO {

    // Grids holding the training inputs and the expected training outputs.
    private AbstractGrid2DSquareCellDouble[] inputGrids;
    private AbstractGrid2DSquareCellDouble[] outputGrids;

    // Cell IDs for which every input and output grid holds a data value.
    private HashSet cellIDsOFInputAndOutputGridsNotNoDataValues;

    // Per-grid value ranges, used when rescaling values into network range.
    private double[] inputGridsMax;
    private double[] inputGridsMin;
    private double[] outputGridsMax;
    private double[] outputGridsMin;

    private HashSet trainingData;

    // Fields used by the partial training data methods below.
    private double[][] x = null;
    private double[][] y = null;
    private double[][] partX = null;
    private double[][] partY = null;
    private Hashtable partTrainingInput = new Hashtable();
    private Hashtable partTrainingOutput = new Hashtable();
    private AbstractGrid2DSquareCellDouble[] grids = null;
    private Hashtable xFile = new Hashtable();
    private Hashtable yFile = new Hashtable();
    private Hashtable idCluster = new Hashtable();
    private Hashtable inUsingIDCluster = null;
    private int[] trainingDataID = null;
    private int[] nRowAndNCol = new int[ 2 ];
    private String trainingInputFileDir = null;
    private String trainingOutputFileDir = null;
    private double size = 0.0;

    public IO() {}
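    /*
     * Typical workflow, inferred from the methods below (a sketch, not a
     * statement from the original author):
     * 1. preprocessData(...) loads the input/output grids and records which
     *    cell IDs have data in every grid.
     * 2. initPartTrainingDataReading(...) samples a proportion of cells,
     *    rescales their values and clusters them.
     * 3. getRandomSmallAmountPartialTrainingData(...) draws a stratified
     *    random subset from those clusters for a training pass.
     */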
    /**
     * Creates inputGrids and outputGrids using gridFactory from the files in
     * trainingInputDir and trainingOutputDir respectively. Then chains to
     * the next preprocessData method.
     * @param trainingInputDir
     * @param trainingOutputDir
     * @param gridFactory
     */
    public void preprocessData(
            File trainingInputDir,
            File trainingOutputDir,
            AbstractGrid2DSquareCellDoubleFactory gridFactory ) {
        File[] trainingInputFiles = trainingInputDir.listFiles();
        File[] trainingOutputFiles = trainingOutputDir.listFiles();
        AbstractGrid2DSquareCellDouble[] inputGrids =
                new AbstractGrid2DSquareCellDouble[ trainingInputFiles.length ];
        AbstractGrid2DSquareCellDouble[] outputGrids =
                new AbstractGrid2DSquareCellDouble[ trainingOutputFiles.length ];
        for ( int i = 0; i < trainingInputFiles.length; i ++ ) {
            inputGrids[ i ] = gridFactory.createGrid2DSquareCellDouble( trainingInputFiles[ i ] );
        }
        for ( int i = 0; i < trainingOutputFiles.length; i ++ ) {
            outputGrids[ i ] = gridFactory.createGrid2DSquareCellDouble( trainingOutputFiles[ i ] );
        }
        preprocessData( inputGrids, outputGrids );
    }

    public void preprocessData(
            AbstractGrid2DSquareCellDouble[] inputGrids,
            AbstractGrid2DSquareCellDouble[] outputGrids ) {
        setInputGrids( inputGrids );
        setOutputGrids( outputGrids );
        setCellIDsOFInputAndOutputGridsNotNoDataValues( inputGrids, outputGrids );
    }
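    /*
     * Illustrative usage (a sketch; the directory paths and the concrete
     * factory instance are assumptions, not part of this class):
     *
     *   IO io = new IO();
     *   io.preprocessData( new File( "trainingInput" ),
     *                      new File( "trainingOutput" ),
     *                      factory ); // any AbstractGrid2DSquareCellDoubleFactory
     */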
    /**
     * Sets the HashSet of cell IDs for which all input and output grids hold
     * values that are not noDataValues.
     * @param inputGrids
     * @param outputGrids
     */
    public void setCellIDsOFInputAndOutputGridsNotNoDataValues(
            AbstractGrid2DSquareCellDouble[] inputGrids,
            AbstractGrid2DSquareCellDouble[] outputGrids ) {
        HashSet withDataCellIDs = new HashSet();
        int nrows = inputGrids[ 0 ].getNrows();
        int ncols = inputGrids[ 0 ].getNcols();
        double[] inputGridsNoDataValue = new double[ inputGrids.length ];
        double[] outputGridsNoDataValue = new double[ outputGrids.length ];
        // Check all grids have the same number of rows and columns and initialise
        // inputGridsNoDataValues and outputGridsNoDataValues
        inputGridsNoDataValue[ 0 ] = inputGrids[ 0 ].getNoDataValue();
        for ( int grid = 1; grid < inputGrids.length; grid ++ ) {
            if ( inputGrids[ grid ].getNrows() != nrows || inputGrids[ grid ].getNcols() != ncols ) {
                System.out.println( "Warning: inputGrid " + inputGrids[ grid ].toString()
                        + " has different nrows or ncols!" );
            }
            inputGridsNoDataValue[ grid ] = inputGrids[ grid ].getNoDataValue();
        }
        for ( int grid = 0; grid < outputGrids.length; grid ++ ) {
            if ( outputGrids[ grid ].getNrows() != nrows || outputGrids[ grid ].getNcols() != ncols ) {
                System.out.println( "Warning: outputGrid " + outputGrids[ grid ].toString()
                        + " has different nrows or ncols!" );
            }
            outputGridsNoDataValue[ grid ] = outputGrids[ grid ].getNoDataValue();
        }
        boolean hasData = true;
        int row;
        int col;
        for ( row = 0; row < nrows; row ++ ) {
            for ( col = 0; col < ncols; col ++ ) {
                hasData = true;
                for ( int grid = 0; grid < inputGrids.length; grid ++ ) {
                    if ( inputGrids[ grid ].getCell( row, col ) == inputGridsNoDataValue[ grid ] ) {
                        hasData = false;
                    }
                }
                for ( int grid = 0; grid < outputGrids.length; grid ++ ) {
                    if ( outputGrids[ grid ].getCell( row, col ) == outputGridsNoDataValue[ grid ] ) {
                        hasData = false;
                    }
                }
                if ( hasData ) {
                    // Cell IDs encode ( row, col ) in row major order.
                    withDataCellIDs.add( new Integer( ( row * ncols ) + col ) );
                }
            }
        }
        System.out.println( "There are " + withDataCellIDs.size() + " cells with data." );
        this.cellIDsOFInputAndOutputGridsNotNoDataValues = withDataCellIDs;
        /*
        this.partX = new double[ withDataCellIDs.size() ][ trainingInputFile.length ];
        this.partY = new double[ withDataCellIDs.size() ][ trainingOutputFile.length ];
        int cellID;
        Iterator ite = withDataCellIDs.iterator();
        int i = 0;
        while ( ite.hasNext() ) {
            cellID = ( ( Integer ) ite.next() ).intValue();
            for ( int grid = 0; grid < trainingInputFile.length; grid ++ ) {
                this.partX[ i ][ grid ] = grids[ grid ].getCell( cellID );
            }
            for ( int grid = trainingInputFile.length; grid < trainingInputFile.length + trainingOutputFile.length; grid ++ ) {
                this.partY[ i ][ grid - trainingInputFile.length ] = grids[ grid ].getCell( cellID );
            }
            i ++;
        }
        Object[] allData = new Object[ 2 ];
        allData[ 0 ] = partX;
        allData[ 1 ] = partY;
        System.out.println( "Returning constructed data." );
        return allData;
        */
    }

    private void initPartTrainingDataReading(
            String trainingInputFileDir,
            String trainingOutputFileDir,
            AbstractGrid2DSquareCellDoubleFactory factory,
            double proportion ) {
        this.trainingInputFileDir = trainingInputFileDir;
        this.trainingOutputFileDir = trainingOutputFileDir;
        this.size = proportion;
        File trainingInputDir = new File( trainingInputFileDir );
        File trainingOutputDir = new File( trainingOutputFileDir );
        File[] trainingInputFile = trainingInputDir.listFiles();
        File[] trainingOutputFile = trainingOutputDir.listFiles();
        //double[][] partX = null;
        //double[][] partY = null;
        System.out.println( "Files listed" );
        AbstractGrid2DSquareCellDouble grid = factory.createGrid2DSquareCellDouble( trainingInputFile[ 0 ] );
        int dataLength = grid.getNrows() * grid.getNcols();
        int randomSize = new Double( dataLength * proportion ).intValue();
        System.out.println( "Training Data Size is " + randomSize );
        partX = new double[ randomSize ][ trainingInputFile.length ];
        partY = new double[ randomSize ][ trainingOutputFile.length ];
        int[] idArray = new int[ randomSize ];
        if ( proportion > 0.9 ) {
            // NB: randomStart is calculated but not used below; when nearly all
            // data is wanted the cell IDs are simply taken in order from 0.
            int randomStart = new Double( Math.random() * dataLength * ( 1.0d - proportion ) ).intValue();
            for ( int i = 0; i < idArray.length; i ++ ) {
                idArray[ i ] = i;
            }
        } else {
            for ( int i = 0; i < idArray.length; i ++ ) {
                idArray[ i ] = new Double( Math.random() * dataLength ).intValue();
            }
        }
        //doIDArraySorting( idArray );
        //System.out.println( " do id array sorting " );
        setTrainingDataID( idArray );
        System.out.println( " Training Set ID Array Constructed. " );
        //setXFile( trainingInputFile );
        //setYFile( trainingOutputFile );
        double max = 1.0;
        double min = 0.0;
        double noDataValue = -9999.0;
        // The loops below rescale each grid's cell values with
        // Utilities.getRerangedValue( value, max, min, noDataValue ), which
        // presumably maps [ min, max ] into the [ 0.0, 1.0 ] range used by the
        // network (compare constructTestingXAndY below).
        System.out.println( "training input grid number: " + trainingInputDir.listFiles().length );
        for ( int i = 0; i < trainingInputFile.length; i ++ ) {
            grid = factory.createGrid2DSquareCellDouble( trainingInputFile[ i ] );
            max = grid.getGridStatistics().getMax();
            min = grid.getGridStatistics().getMin();
            grid.setNoDataValue( -9999.0 );
            noDataValue = grid.getNoDataValue();
            System.out.println( "read input grid " + i + " " + trainingInputFile[ i ].getName() );
            for ( int j = 0; j < this.partX.length; j ++ ) {
                partX[ j ][ i ] = Utilities.getRerangedValue( grid.getCell( idArray[ j ] ), max, min, noDataValue );
            }
            grid.clear();
        }
        System.out.println( "read input grids done" );
        System.out.println( "training output grid number: " + trainingOutputDir.listFiles().length );
        for ( int i = 0; i < trainingOutputFile.length; i ++ ) {
            grid = factory.createGrid2DSquareCellDouble( trainingOutputFile[ i ] );
            max = grid.getGridStatistics().getMax();
            min = grid.getGridStatistics().getMin();
            grid.setNoDataValue( -9999.0 );
            noDataValue = grid.getNoDataValue();
            System.out.println( "read output grid " + i + " " + trainingOutputFile[ i ].getName() );
            for ( int j = 0; j < this.partY.length; j ++ ) {
                partY[ j ][ i ] = Utilities.getRerangedValue( grid.getCell( idArray[ j ] ), max, min, noDataValue );
            }
            grid.clear();
        }
        setPartTrainingInput( constructTrainingDataHashtable( idArray, partX ) );
        setPartTrainingOutput( constructTrainingDataHashtable( idArray, partY ) );
        partX = null;
        partY = null;
        System.gc();
        System.out.println( " do no data value filtering " );
        //doNoDataValueFiltering( getPartTrainingInput(), getPartTrainingOutput() );
        System.out.println( " do KMeansClustering " );
        // NB: the doKMeansClustering method is commented out further down this file.
        Hashtable idCluster = doKMeansClustering( 50 );
        setIDCluster( idCluster );
        //setInUsingIDCluster( new Hashtable( idCluster ) );
        System.out.println( "read output grids done" );
        System.gc();
    }

    private Hashtable constructTrainingDataHashtable( int[] idArray, double[][] data ) {
        Hashtable dataHt = new Hashtable();
        if ( idArray.length != data.length ) return null;
        for ( int i = 0; i < idArray.length; i ++ ) {
            dataHt.put( new Integer( idArray[ i ] ), ( Object ) data[ i ] );
        }
        return dataHt;
    }

    public void setPartTrainingInput( Hashtable inputHt ) {
        this.partTrainingInput = inputHt;
    }

    public void setPartTrainingOutput( Hashtable outputHt ) {
        this.partTrainingOutput = outputHt;
    }

    public Hashtable getPartTrainingInput() {
        return this.partTrainingInput;
    }

    public Hashtable getPartTrainingOutput() {
        return this.partTrainingOutput;
    }

    /**
     * Returns a small proportion of the training data selected, usually 40%,
     * so if the small proportion is 5%, the data for training will be 5% * 40% = 2%.
     * Returns Object[], where Object[0] is x[][] and Object[1] is y[][].
     *
    private Hashtable doKMeansClustering( int clusterNumber ) {
        Hashtable input = getPartTrainingInput(); // all input data
        Iterator keys = input.keySet().iterator();
        int dimension = ( (double[]) input.elements().nextElement() ).length;
        Hashtable oldMean = new Hashtable();   // using the cluster id to get its means
        Hashtable mean = new Hashtable();      // using the cluster id to get its mean
        Hashtable distance = new Hashtable();  // using the data id to get the distance array object of this data to all clusters means
        Hashtable idCluster = new Hashtable(); // using the cluster id to get the data ids Vector it has.
        double gridSize = nrowAndncol[ 0 ] * nrowAndncol[ 1 ];
        // initialise and randomise the oldMean, newMean:
        // get a pixel from input randomly as mean for initialisation
        for ( int i = 0; i < clusterNumber; i ++ ) {
            double[] temp = null;
            do {
                int randomIDForMeanInit = new Double( Math.random() * gridSize ).intValue();
                temp = (double[]) input.get( new Integer( randomIDForMeanInit ) );
            } while ( temp == null );
            mean.put( new Integer( i ), (Object) temp );
            oldMean.put( new Integer( i ), (Object) temp );
            idCluster.put( new Integer( i ), new Vector() );
        }
        System.out.println( "means initialised " );
        double difference = 0.0d;
        int round = 0;
        // do until mean changes less than 5% or round
        do {
            round ++;
            // estimate the distances for all inputs from those means
            while ( keys.hasNext() ) {
                Object key = keys.next();
                double[] pixelValue = (double[]) input.get( key );
                double[] distanceArray = new double[ clusterNumber ];
                for ( int i = 0; i < clusterNumber; i ++ ) {
                    // estimate one input's distance from one cluster mean
                    double dist = 0.0d;
                    double[] m = (double[]) mean.get( new Integer( i ) );
                    for ( int j = 0; j < dimension; j ++ ) {
                        dist = dist + ( pixelValue[ j ] - m[ j ] ) * ( pixelValue[ j ] - m[ j ] );
                    }
                    dist = Math.sqrt( dist / dimension );
                    distanceArray[ i ] = dist;
                }
                distance.put( key, distanceArray );
            }
            for ( int i = 0; i < clusterNumber; i ++ ) {
                ( (Vector) idCluster.get( new Integer( i ) ) ).removeAllElements();
            }
            // classify all input into clusters using those means
            keys = input.keySet().iterator(); // get new iterator
            while ( keys.hasNext() ) {
                Object key = keys.next();
                double[] distArray = (double[]) distance.get( key );
                int minimumDistClusterID = 0;
                for ( int i = 1; i < clusterNumber; i ++ ) {
                    if ( distArray[ i ] < distArray[ minimumDistClusterID ] ) {
                        minimumDistClusterID = i;
                    }
                }
                ( (Vector) idCluster.get( new Integer( minimumDistClusterID ) ) ).add( key );
            }
            // estimate new mean using those clusters
            Iterator clusterKey = idCluster.keySet().iterator();
            oldMean = new Hashtable( (Map) mean );
            int deadClusterCount = 0;
            while ( clusterKey.hasNext() ) {
                Object key = clusterKey.next();
                Vector idVector = (Vector) idCluster.get( key );
                // if the class is dead, no pixel belongs to it:
                // get a pixel from input randomly, assign it to a dead cluster as new mean
                if ( idVector.size() == 0 ) {
                    double[] temp = null;
                    deadClusterCount ++;
                    do {
                        int randomIDForMeanInit = new Double( Math.random() * gridSize ).intValue();
                        temp = (double[]) input.get( new Integer( randomIDForMeanInit ) );
                    } while ( temp == null );
                    mean.put( key, temp );
                } else {
                    Iterator idIterator = idVector.iterator();
                    double[] newMean = new double[ dimension ];
                    while ( idIterator.hasNext() ) {
                        double[] d1 = (double[]) input.get( idIterator.next() );
                        for ( int i = 0; i < dimension; i ++ ) {
                            newMean[ i ] = newMean[ i ] + d1[ i ];
                        }
                    }
                    for ( int i = 0; i < dimension; i ++ ) {
                        newMean[ i ] = newMean[ i ] / idVector.size();
                    }
                    mean.put( key, newMean );
                }
            }
            System.out.println( "KMeans Round: " + round );
            System.out.println( " Dead Cluster: " + deadClusterCount );
            difference = getDifferenceBetweenMeans( oldMean, mean );
            System.out.println( "difference: " + difference );
        } while ( difference > 0.01 ); // condition for stop running
        // while ( round < 50 );       // condition for stop running
        return idCluster;
    }
    */

    /*
    private double getDifferenceBetweenMeans( Hashtable oldM, Hashtable newM ) {
        Iterator oldMI = oldM.values().iterator();
        int dimension = ( (double[]) oldMI.next() ).length;
        oldMI = oldM.values().iterator();
        Iterator newMI = newM.values().iterator();
        double totalDifference = 0.0d;
        while ( oldMI.hasNext() ) {
            double[] oldMOne = (double[]) oldMI.next();
            double[] newMOne = (double[]) newMI.next();
            double difference = 0.0d;
            for ( int i = 0; i < dimension; i ++ ) {
                difference = difference + Math.abs( oldMOne[ i ] - newMOne[ i ] );
            }
            totalDifference = totalDifference + difference / dimension;
        }
        return totalDifference / oldM.size();
    }
    */
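    // The commented-out K-means above stops when getDifferenceBetweenMeans,
    // i.e. the mean absolute change per dimension averaged over all clusters,
    // drops below 0.01; assuming the inputs have been rescaled to [ 0, 1 ]
    // (see getRerangedValue above), that corresponds to roughly a 1% shift.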
    /**
     * Returns a proportion of the selected and KMeans-clustered training data,
     * usually a small amount for initial training. It randomly selects data
     * from each cluster with the same proportion and at the same time removes
     * it from the inUsingIDCluster, so the same data won't be returned next
     * time. Users who want all training data IDs and their clusters should
     * call getIDCluster(), which keeps a copy of the original IDCluster
     * Hashtable for further training use.
     */
    public Object[] getRandomSmallAmountPartialTrainingData(
            AbstractGrid2DSquareCellDoubleFactory factory,
            double proportion ) {
        Hashtable xInput = getPartTrainingInput();
        Hashtable yInput = getPartTrainingOutput();
        if ( ( xInput == null ) || ( yInput == null ) ) {
            System.out.println( "not first time, so need to reconstruct" );
            this.initPartTrainingDataReading( trainingInputFileDir, trainingOutputFileDir, factory, size );
            xInput = getPartTrainingInput();
            yInput = getPartTrainingOutput();
        }
        int size = new Double( xInput.size() * proportion ).intValue();
        int totalSize = xInput.size();
        Vector smallX = new Vector();
        Vector smallY = new Vector();
        // this is a copy of idCluster, but the ids used below are removed from it
        Hashtable idCluster = getInUsingIDCluster();
        Object[] idClusterKeyArray = idCluster.keySet().toArray();
        System.out.println( "Partial Data Size: " + totalSize );
        // get live id cluster vector
        /*
        for ( int i = 0; i < idClusterKeyArray.length; i ++ ) {
            Vector oneIDCluster = ( Vector ) idCluster.get( idClusterKeyArray[ i ] );
            int vectorSize = oneIDCluster.size();
            int share = new Double( vectorSize * proportion ).intValue();
            for ( int j = 0; j < share; j ++ ) {
                vectorSize = oneIDCluster.size();
                int randomPosition = new Double( Math.random() * ( vectorSize - 1 ) ).intValue();
                smallX.add( xInput.get( oneIDCluster.get( randomPosition ) ) );
                smallY.add( yInput.get( oneIDCluster.get( randomPosition ) ) );
                oneIDCluster.removeElementAt( randomPosition ); // remove it, so it cannot be selected next time
            }
            idCluster.put( idClusterKeyArray[ i ], oneIDCluster ); // replace the old id cluster with the used one
        }
        */
        Vector selectedId = new Vector();
        for ( int i = 0; i < idClusterKeyArray.length; i ++ ) {
            Vector oneIDCluster = ( Vector ) idCluster.get( idClusterKeyArray[ i ] );
            int vectorSize = oneIDCluster.size();
            int share = new Double( vectorSize * proportion ).intValue();
            for ( int j = 0; j < share; j ++ ) {
                vectorSize = oneIDCluster.size(); // since the vector removes an element every round, its size changes
                int randomPosition = new Double( Math.random() * ( vectorSize - 1 ) ).intValue();
                selectedId.add( oneIDCluster.get( randomPosition ) );
                oneIDCluster.removeElementAt( randomPosition ); // remove it, so it cannot be selected next time
            }
            idCluster.put( idClusterKeyArray[ i ], oneIDCluster ); // replace the old id cluster with the used one
        }
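        // For example (illustrative numbers only): with proportion 0.1, a
        // cluster holding 200 cell ids contributes share = 20 randomly chosen
        // ids, so every cluster is represented in the small training sample.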
        Object[] selectedIdTemp = selectedId.toArray();
        int[] selectedIdArray = new int[ selectedIdTemp.length ];
        for ( int i = 0; i < selectedIdArray.length; i ++ ) {
            selectedIdArray[ i ] = ( ( Integer ) selectedIdTemp[ i ] ).intValue();
        }
        doIDArraySorting( selectedIdArray );
        for ( int i = 0; i < selectedIdArray.length; i ++ ) {
            //System.out.println( selectedIdArray[ i ] );
            smallX.add( xInput.get( new Integer( selectedIdArray[ i ] ) ) );
            smallY.add( yInput.get( new Integer( selectedIdArray[ i ] ) ) );
        }
        double[][] tempX = new double[ smallX.size() ][];
        double[][] tempY = new double[ smallY.size() ][];
        Object[] tempAX = smallX.toArray();
        Object[] tempAY = smallY.toArray();
        for ( int i = 0; i < tempX.length; i ++ ) {
            tempX[ i ] = (double[]) tempAX[ i ];
            tempY[ i ] = (double[]) tempAY[ i ];
        }
        Object[] temp = new Object[ 2 ];
        temp[ 0 ] = tempX;
        temp[ 1 ] = tempY;
        System.out.println( size );
        System.out.println( "Small Amount Data Size: " + smallX.size() );
        System.out.println( "Small Amount Data Size: " + smallY.size() );
        // release memory for processing;
        // the part data will be reconstructed when more small amount data reselection is required
        xInput = null;
        yInput = null;
        setInUsingIDCluster( null );
        setPartTrainingInput( null );
        setPartTrainingOutput( null );
        System.gc();
        return temp;
    }

    /*
     * return whether a nodata value exists in this pair of x, y for just one pixel;
     * if it does have a no data value, return true
     * (this comment appears to describe a no-data-value filtering method that is
     * not present in this file, rather than getIDCluster below)
     */
    public Hashtable getIDCluster() {
        return this.idCluster;
    }

    public void setIDCluster( Hashtable idCluster ) {
        this.idCluster = idCluster;
    }

    public int[] getTrainingDataID() {
        return this.trainingDataID;
    }

    /**
     * Returns the pixel id in the grid for the given index in the partInput
     * and partOutput double[ index ][] arrays.
     * If the index is beyond the array bounds, returns -1.
     */
    public int getTrainingDataID( int index ) {
        int[] dataID = getTrainingDataID();
        if ( ( index < dataID.length ) && ( index >= 0 ) ) {
            return dataID[ index ];
        } else {
            return -1;
        }
    }

    public void setTrainingDataID( int[] trainingDataID ) {
        this.trainingDataID = trainingDataID;
    }

    public void setTrainingDataID( int index, int id ) {
        int[] dataID = getTrainingDataID();
        if ( ( index < dataID.length ) && ( index >= 0 ) ) {
            dataID[ index ] = id;
        }
        setTrainingDataID( dataID );
    }

    private void setXFile( File[] file ) {
        for ( int i = 0; i < file.length; i ++ ) {
            this.xFile.put( new Integer( i ), file[ i ] );
        }
    }

    private void setYFile( File[] file ) {
        for ( int i = 0; i < file.length; i ++ ) {
            this.yFile.put( new Integer( i ), file[ i ] );
        }
    }
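    /*
     * The methods below are minimal sketches: doIDArraySorting,
     * getInUsingIDCluster and setInUsingIDCluster are called above but are not
     * defined elsewhere in this file, so these assumed implementations
     * (ascending sort, plain accessors for the inUsingIDCluster field) are
     * provided to make the class self-contained.
     */
    private void doIDArraySorting( int[] idArray ) {
        // Assumed behaviour: sort the cell ids into ascending order in place.
        java.util.Arrays.sort( idArray );
    }

    public Hashtable getInUsingIDCluster() {
        // Assumed accessor for the working copy of the id cluster table.
        return this.inUsingIDCluster;
    }

    public void setInUsingIDCluster( Hashtable inUsingIDCluster ) {
        // Assumed accessor for the working copy of the id cluster table.
        this.inUsingIDCluster = inUsingIDCluster;
    }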
    /*
    public static AbstractGrid2DSquareCellDouble[] readDataForPrediction( File[] inputDataFile, File nnParameterFile ) {
        if ( ( inputDataFile == null ) || ( nnParameterFile == null ) ) {
            System.out.println( " set file directory please " );
            return null;
        }
        try {
            NeuralNetwork nn = NeuralNetwork.load( nnParameterFile );
            if ( nn.getInputNumber() != inputDataFile.length ) {
                System.out.println( "Input number does not match: "
                        + "InputNumber in Neural Network is " + nn.getInputNumber()
                        + " but now input is " + inputDataFile.length );
                return null;
            }
        } catch ( Exception e ) {
            e.printStackTrace();
        }
        Grid2DSquareCellDoubleFactory f;
        Grid2DSquareCellDoubleFileFactory ff;
        AbstractGrid2DSquareCellDouble[] inputGrids = new AbstractGrid2DSquareCellDouble[ inputDataFile.length ];
        for ( int i = 0; i < inputDataFile.length; i ++ ) {
            Utilities u = new Utilities();
            // initialising an empty fireGrid with the same extent as the input grids
            try {
                System.out.println( "trying to initialise inputGrid in memory..." );
                f = new Grid2DSquareCellDoubleFactory();
                inputGrids[ i ] = f.createGrid2DSquareCellDouble( inputDataFile[ i ] );
                inputGrids[ i ].setNoDataValue( -9999.0 );
                System.out.println( i + "...done" );
            } catch ( java.lang.OutOfMemoryError e ) {
                System.out.println( e + " initialising inputGrid as file..." );
                ff = new Grid2DSquareCellDoubleFileFactory();
                ff.setFile( u.makeTemporaryFile( System.getProperty( "java.io.tmpdir" ), Integer.toString( i ) ) );
                inputGrids[ i ] = ff.createGrid2DSquareCellDouble( inputDataFile[ i ] );
                inputGrids[ i ].setNoDataValue( -9999.0 );
                System.out.println( i + "...done" );
            }
        }
        return inputGrids;
    }
    */

    /**
     * Generates training data for training a network to learn to represent
     * the sine and the sine * cosine of an angle.
     */
    public static Object[] constructTestingXAndY() {
        double[][] x = new double[ 100 ][ 1 ];
        double[][] y = new double[ 100 ][ 2 ];
        for ( int i = 0; i < x.length; i ++ ) {
            x[ i ][ 0 ] = ( double ) i / ( double ) x.length;
            //this.partX[ i ][ 1 ] = ( Math.cos( 2.0d * Math.PI * ( ( double ) i / ( double ) this.partY.length ) ) + 1.0d ) / 2.0d;
            y[ i ][ 0 ] = ( Math.cos( 2.0d * Math.PI * ( ( double ) i / ( double ) x.length ) ) + 1.0d ) / 2.0d
                    * ( Math.sin( 2 * Math.PI * ( ( double ) i / ( double ) x.length ) ) + 1.0d ) / 2.0d;
            y[ i ][ 1 ] = ( Math.sin( 2.0d * Math.PI * ( ( double ) i / ( double ) x.length ) ) + 1.0d ) / 2.0d;
            //this.partY[ i ] = ( double ) i / ( double ) partY.length;
        }
        return new Object[] { x, y };
    }

    public double[][] getPartInput() {
        return this.partX;
    }

    public double[][] getPartOutput() {
        return this.partY;
    }

    public void setPartInput( double[][] newPartInput ) {
        this.partX = newPartInput;
    }

    public void setPartOutput( double[][] newPartOutput ) {
        this.partY = newPartOutput;
    }

    public double[] getInputGridsMax() {
        return this.inputGridsMax;
    }

    public void setInputGridsMax( double[] inputGridsMax ) {
        this.inputGridsMax = inputGridsMax;
    }

    public double[] getInputGridsMin() {
        return this.inputGridsMin;
    }

    public void setInputGridsMin( double[] inputGridsMin ) {
        this.inputGridsMin = inputGridsMin;
    }

    public AbstractGrid2DSquareCellDouble[] getInputGrids() {
        return this.inputGrids;
    }

    public void setInputGrids( AbstractGrid2DSquareCellDouble[] inputGrids ) {
        this.inputGrids = inputGrids;
    }

    public AbstractGrid2DSquareCellDouble[] getOutputGrids() {
        return this.outputGrids;
    }

    public void setOutputGrids( AbstractGrid2DSquareCellDouble[] outputGrids ) {
        this.outputGrids = outputGrids;
    }
}
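/*
 * Illustrative use of the test data generator (a sketch; variable names are
 * assumptions, not part of the original source):
 *
 *   Object[] xy = IO.constructTestingXAndY();
 *   double[][] x = (double[][]) xy[ 0 ]; // angle as a fraction of a full turn
 *   double[][] y = (double[][]) xy[ 1 ]; // y[ i ][ 0 ] = scaled sine times cosine, y[ i ][ 1 ] = scaled sine
 */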