/*
 * NeuroNetInputAndOutput.java
 *
 * Created on July 22, 2002, 8:47 PM
 */
package uk.ac.leeds.ccg.medaction.neural;

import java.io.*;
import java.util.Hashtable;
import java.util.Vector;
import java.util.Iterator;
import java.util.Map;

import uk.ac.leeds.ccg.andyt.grids.AbstractGrid2DSquareCellDouble;
import uk.ac.leeds.ccg.andyt.grids.AbstractGrid2DSquareCellDoubleFactory;
import uk.ac.leeds.ccg.andyt.grids.Grid2DSquareCellDoubleFactory;
import uk.ac.leeds.ccg.andyt.grids.Grid2DSquareCellDoubleFileFactory;
import uk.ac.leeds.ccg.andyt.grids.Grid2DSquareCellDoubleJAIFactory;
import uk.ac.leeds.ccg.andyt.grids.Grid2DSquareCellDoubleProcessor;
import uk.ac.leeds.ccg.andyt.grids.Utilities;

/**
 * Class for reading input data and writing output data using the grids package.
 * @author ma0101
 */
public class NeuroNetInputAndOutput {

    private double[][] x = null;
    private double[][] y = null;
    private double[][] partX = null;
    private double[][] partY = null;
    private Hashtable partTrainingInput = new Hashtable();
    private Hashtable partTrainingOutput = new Hashtable();
    private AbstractGrid2DSquareCellDouble[] grids = null;
    private Hashtable xFile = new Hashtable();
    private Hashtable yFile = new Hashtable();
    private AbstractGrid2DSquareCellDouble[] inputGrids = null;
    private AbstractGrid2DSquareCellDouble[] outputGrids = null;
    private Hashtable idCluster = new Hashtable();
    private Hashtable inUsingIDCluster = null;
    private int[] trainingDataID = null;
    private int[] nRowAndNCol = new int[ 2 ];
    private String trainingInputFileDir = null, trainingOutputFileDir = null;
    private double size = 0.0;

    /** Creates a new instance of NeuroNetInputAndOutput */
    public NeuroNetInputAndOutput() {
    }

    public void constructDataUsingKMean( String trainingInputFileDir,
            String trainingOutputFileDir, double size ) {
        initPartTrainingDataReading( trainingInputFileDir, trainingOutputFileDir, size );
    }
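    /*
     * A minimal usage sketch of the training-data pipeline, assuming
     * "trainingInputDir" and "trainingOutputDir" are hypothetical directories
     * of grid files readable by Grid2DSquareCellDoubleFactory:
     *
     *   NeuroNetInputAndOutput io = new NeuroNetInputAndOutput();
     *   io.constructDataUsingKMean( "trainingInputDir", "trainingOutputDir", 0.4 );
     *   Object[] data = io.getRandomSmallAmountPartialTrainingData( 0.05 );
     *   double[][] x = (double[][]) data[ 0 ]; // reranged inputs, one row per pixel
     *   double[][] y = (double[][]) data[ 1 ]; // reranged outputs, one row per pixel
     */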
    /*
    public Object[] constructAllData( String trainingInputFileDir, String trainingOutputFileDir ) {
        File trainingInputDir = new File( trainingInputFileDir );
        File trainingOutputDir = new File( trainingOutputFileDir );
        File[] trainingInputFile = trainingInputDir.listFiles();
        File[] trainingOutputFile = trainingOutputDir.listFiles();
        AbstractGrid2DSquareCellDoubleFactory factory = new Grid2DSquareCellDoubleFactory();
        System.out.println( "File Listed" );
        AbstractGrid2DSquareCellDouble grid = factory.createGrid2DSquareCellDouble( trainingInputFile[ 0 ] );
        int dataLength = grid.getNrows() * grid.getNcols();
        setNRowAndNCol( grid.getNrows(), grid.getNcols() );
        System.out.println( "Training Data Size is " + dataLength );
        double[][] allX = new double[ dataLength ][ trainingInputFile.length ];
        double[][] allY = new double[ dataLength ][ trainingOutputFile.length ];
        double max = 1.0;
        double min = 0.0;
        double noDataValue = -9999.0;
        System.out.println( "training input grid number: " + trainingInputDir.listFiles().length );
        for ( int i = 0; i < trainingInputFile.length; i ++ ) {
            grid = factory.createGrid2DSquareCellDouble( trainingInputFile[ i ] );
            max = grid.getGridStatistics().getMax();
            min = grid.getGridStatistics().getMin();
            grid.setNoDataValue( noDataValue );
            noDataValue = grid.getNoDataValue();
            System.out.println( "read input grid " + i + " done: " + trainingInputFile[ i ].getName() );
            for ( int j = 0; j < allX.length; j ++ ) {
                allX[ j ][ i ] = NNUtility.getRerangedValue( grid.getCell( j ), max, min, noDataValue );
            }
            grid.clear();
        }
        System.out.println( "read input grids done" );
        System.out.println( "training output grid number: " + trainingOutputDir.listFiles().length );
        for ( int i = 0; i < trainingOutputFile.length; i ++ ) {
            grid = factory.createGrid2DSquareCellDouble( trainingOutputFile[ i ] );
            max = grid.getGridStatistics().getMax();
            min = grid.getGridStatistics().getMin();
            grid.setNoDataValue( noDataValue );
            noDataValue = grid.getNoDataValue();
            System.out.println( "read output grid " + i + " done: " + trainingOutputFile[ i ].getName() );
            for ( int j = 0; j < allY.length; j ++ ) {
                allY[ j ][ i ] = NNUtility.getRerangedValue( grid.getCell( j ), max, min, noDataValue );
            }
            grid.clear();
        }
        System.out.println( "read output grids done" );
        System.gc();
        Vector hasDataId = new Vector();
        boolean hasData = true;
        for ( int i = 0; i < allX.length; i ++ ) {
            hasData = true;
            for ( int j = 0; j < trainingInputFile.length; j ++ ) {
                if ( allX[ i ][ j ] == noDataValue ) {
                    hasData = false;
                    break;
                }
            }
            if ( !hasData ) {
                continue;
            }
            for ( int j = 0; j < trainingOutputFile.length; j ++ ) {
                if ( allY[ i ][ j ] == noDataValue ) {
                    hasData = false;
                    break;
                }
            }
            if ( hasData ) {
                hasDataId.add( new Integer( i ) );
            }
        }
        System.out.println( "Check Data Done, has Data Size: " + hasDataId.size() + ". Start ReAssignData" );
        this.partX = new double[ hasDataId.size() ][ trainingInputFile.length ];
        this.partY = new double[ hasDataId.size() ][ trainingOutputFile.length ];
        for ( int i = 0; i < this.partX.length; i ++ ) {
            int oneId = ( ( Integer ) hasDataId.get( i ) ).intValue();
            for ( int j = 0; j < trainingInputFile.length; j ++ ) {
                this.partX[ i ][ j ] = allX[ oneId ][ j ];
            }
            for ( int j = 0; j < trainingOutputFile.length; j ++ ) {
                this.partY[ i ][ j ] = allY[ oneId ][ j ];
            }
        }
        System.out.println( "Data ReAssign Done" );
        allX = null;
        allY = null;
        System.gc();
        Object[] allData = new Object[ 2 ];
        allData[ 0 ] = partX;
        allData[ 1 ] = partY;
        return allData;
    }
    */

    private void initPartTrainingDataReading( String trainingInputFileDir,
            String trainingOutputFileDir, double proportion ) {
        this.trainingInputFileDir = trainingInputFileDir;
        this.trainingOutputFileDir = trainingOutputFileDir;
        this.size = proportion;
        File trainingInputDir = new File( trainingInputFileDir );
        File trainingOutputDir = new File( trainingOutputFileDir );
        File[] trainingInputFile = trainingInputDir.listFiles();
        File[] trainingOutputFile = trainingOutputDir.listFiles();
        AbstractGrid2DSquareCellDoubleFactory factory = new Grid2DSquareCellDoubleFactory();
        System.out.println( "File Listed" );
        AbstractGrid2DSquareCellDouble grid = factory.createGrid2DSquareCellDouble( trainingInputFile[ 0 ] );
        int dataLength = grid.getNrows() * grid.getNcols();
        setNRowAndNCol( grid.getNrows(), grid.getNcols() );
        int randomSize = new Double( dataLength * proportion ).intValue();
        System.out.println( "Training Data Size is " + randomSize );
        partX = new double[ randomSize ][ trainingInputFile.length ];
        partY = new double[ randomSize ][ trainingOutputFile.length ];
        int[] idArray = new int[ randomSize ];
        if ( proportion > 0.9 ) {
            // For very large proportions take a contiguous block starting at a
            // random offset.  (The original computed randomStart but never used
            // it; applying the offset here matches the apparent intent.)
            int randomStart = new Double( Math.random() * dataLength * ( 1.0d - proportion ) ).intValue();
            for ( int i = 0; i < idArray.length; i ++ ) {
                idArray[ i ] = randomStart + i;
            }
        } else {
            for ( int i = 0; i < idArray.length; i ++ ) {
                idArray[ i ] = new Double( Math.random() * dataLength ).intValue();
            }
        }
        //doIDArraySorting( idArray );
        //System.out.println( " do id array sorting " );
        setTrainingDataID( idArray );
        System.out.println( " Training Set ID Array Constructed. " );
        //setXFile( trainingInputFile );
        //setYFile( trainingOutputFile );
        double max = 1.0;
        double min = 0.0;
        double noDataValue = -9999.0;
        System.out.println( "training input grid number: " + trainingInputDir.listFiles().length );
        for ( int i = 0; i < trainingInputFile.length; i ++ ) {
            grid = factory.createGrid2DSquareCellDouble( trainingInputFile[ i ] );
            max = grid.getGridStatistics().getMax();
            min = grid.getGridStatistics().getMin();
            grid.setNoDataValue( -9999.0 );
            noDataValue = grid.getNoDataValue();
            System.out.println( "read input grid " + i + " done: " + trainingInputFile[ i ].getName() );
            for ( int j = 0; j < this.partX.length; j ++ ) {
                partX[ j ][ i ] = NNUtility.getRerangedValue( grid.getCell( idArray[ j ] ), max, min, noDataValue );
            }
            grid.clear();
        }
        System.out.println( "read input grids done" );
        System.out.println( "training output grid number: " + trainingOutputDir.listFiles().length );
        for ( int i = 0; i < trainingOutputFile.length; i ++ ) {
            grid = factory.createGrid2DSquareCellDouble( trainingOutputFile[ i ] );
            max = grid.getGridStatistics().getMax();
            min = grid.getGridStatistics().getMin();
            grid.setNoDataValue( -9999.0 );
            noDataValue = grid.getNoDataValue();
            System.out.println( "read output grid " + i + " done: " + trainingOutputFile[ i ].getName() );
            for ( int j = 0; j < this.partY.length; j ++ ) {
                partY[ j ][ i ] = NNUtility.getRerangedValue( grid.getCell( idArray[ j ] ), max, min, noDataValue );
            }
            grid.clear();
        }
        setPartTrainingInput( constructTrainingDataHashtable( idArray, partX ) );
        setPartTrainingOutput( constructTrainingDataHashtable( idArray, partY ) );
        partX = null;
        partY = null;
        System.gc();
        System.out.println( " do no data value filtering " );
        doNoDataValueFiltering( getPartTrainingInput(), getPartTrainingOutput() );
        System.out.println( " do KMeansClustering " );
        Hashtable idCluster = doKMeansClustering( 50 );
        setIDCluster( idCluster );
        setInUsingIDCluster( new Hashtable( idCluster ) );
        System.out.println( "read output grids done" );
        System.gc();
    }

    private void setInUsingIDCluster( Hashtable inUsing ) {
        inUsingIDCluster = inUsing;
    }

    private Hashtable getInUsingIDCluster() {
        return this.inUsingIDCluster;
    }

    private Hashtable constructTrainingDataHashtable( int[] idArray, double[][] data ) {
        Hashtable dataHt = new Hashtable();
        if ( idArray.length != data.length ) return null;
        for ( int i = 0; i < idArray.length; i ++ ) {
            dataHt.put( new Integer( idArray[ i ] ), ( Object ) data[ i ] );
        }
        return dataHt;
    }

    private void doIDArraySorting( int[] idArray ) {
        System.out.println( "start sorting....." );
        java.util.Arrays.sort( idArray );
        System.out.println( "sorting done....." );
    }
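    /*
     * The grid-reading loops above map each raw cell value into [0, 1] via
     * NNUtility.getRerangedValue( value, max, min, noDataValue ).  NNUtility is
     * not part of this file; a minimal sketch of the assumed min-max rescaling,
     * with the nodata value passed through unchanged:
     *
     *   static double getRerangedValue( double value, double max, double min, double noDataValue ) {
     *       if ( value == noDataValue || max == min ) { return noDataValue; }
     *       return ( value - min ) / ( max - min );
     *   }
     */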
    private void doNoDataValueFiltering( Hashtable x, Hashtable y ) {
        Object[] keys = x.keySet().toArray();
        for ( int i = 0; i < keys.length; i ++ ) {
            Object id = keys[ i ];
            double[] xArray = (double[]) x.get( id );
            double[] yArray = (double[]) y.get( id );
            if ( checkNoData( xArray, yArray ) ) {
                x.remove( id );
                y.remove( id );
            }
        }
        System.gc();
    }

    /**
     * Returns whether a nodata value (-9999.0) exists in this pair of x, y for
     * just one pixel; returns true if it does.
     */
    private boolean checkNoData( double[] x, double[] y ) {
        for ( int i = 0; i < x.length; i ++ ) {
            if ( x[ i ] == -9999.0 ) {
                return true;
            }
        }
        for ( int i = 0; i < y.length; i ++ ) {
            if ( y[ i ] == -9999.0 ) {
                return true;
            }
        }
        return false;
    }

    public void setNRowAndNCol( int nrow, int ncol ) {
        this.nRowAndNCol[ 0 ] = nrow;
        this.nRowAndNCol[ 1 ] = ncol;
    }

    public int[] getNRowAndNCol() {
        return this.nRowAndNCol;
    }

    public void setPartTrainingInput( Hashtable inputHt ) {
        this.partTrainingInput = inputHt;
    }

    public void setPartTrainingOutput( Hashtable outputHt ) {
        this.partTrainingOutput = outputHt;
    }

    public Hashtable getPartTrainingInput() {
        return this.partTrainingInput;
    }

    public Hashtable getPartTrainingOutput() {
        return this.partTrainingOutput;
    }
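    /*
     * doKMeansClustering below assigns each selected pixel to the nearest of
     * clusterNumber means using a dimension-normalised Euclidean distance:
     *
     *   d( p, m ) = sqrt( (1/D) * sum_{j=0}^{D-1} ( p[j] - m[j] )^2 )
     *
     * where D is the number of input grids.  Means are then re-estimated as the
     * per-dimension average of their members, and the loop repeats until the
     * means move by less than the 0.01 threshold measured by
     * getDifferenceBetweenMeans.
     */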
    /**
     * Clusters the selected training inputs into clusterNumber clusters using
     * k-means.  Returns a Hashtable mapping each cluster id (Integer) to a
     * Vector of the data (pixel) ids belonging to that cluster.
     */
    private Hashtable doKMeansClustering( int clusterNumber ) {
        Hashtable input = getPartTrainingInput(); // all input data
        Iterator keys;
        int dimension = ( (double[]) input.elements().nextElement() ).length;
        Hashtable oldMean = new Hashtable(); // cluster id -> previous mean
        Hashtable mean = new Hashtable(); // cluster id -> current mean
        Hashtable distance = new Hashtable(); // data id -> array of distances to all cluster means
        Hashtable idCluster = new Hashtable(); // cluster id -> Vector of the data ids it holds
        int[] nrowAndncol = getNRowAndNCol();
        double gridSize = nrowAndncol[ 0 ] * nrowAndncol[ 1 ];
        // initialise oldMean and mean: draw a pixel from input at random as each initial mean
        for ( int i = 0; i < clusterNumber; i ++ ) {
            double[] temp = null;
            do {
                int randomIDForMeanInit = new Double( Math.random() * gridSize ).intValue();
                temp = (double[]) input.get( new Integer( randomIDForMeanInit ) );
            } while ( temp == null );
            mean.put( new Integer( i ), (Object) temp );
            oldMean.put( new Integer( i ), (Object) temp );
            idCluster.put( new Integer( i ), new Vector() );
        }
        System.out.println( "means initialised" );
        double difference = 0.0d;
        int round = 0;
        // iterate until the means change by less than the threshold
        do {
            round ++;
            // compute the distance of every input from every cluster mean
            // (a fresh iterator each round; the original reused an iterator
            // already exhausted after the first round)
            keys = input.keySet().iterator();
            while ( keys.hasNext() ) {
                Object key = keys.next();
                double[] pixelValue = (double[]) input.get( key );
                double[] distanceArray = new double[ clusterNumber ];
                for ( int i = 0; i < clusterNumber; i ++ ) {
                    // dimension-normalised Euclidean distance to cluster mean i
                    double dist = 0.0d;
                    double[] m = (double[]) mean.get( new Integer( i ) );
                    for ( int j = 0; j < dimension; j ++ ) {
                        dist = dist + ( pixelValue[ j ] - m[ j ] ) * ( pixelValue[ j ] - m[ j ] );
                    }
                    dist = Math.sqrt( dist / dimension );
                    distanceArray[ i ] = dist;
                }
                distance.put( key, distanceArray );
            }
            for ( int i = 0; i < clusterNumber; i ++ ) {
                ( (Vector) idCluster.get( new Integer( i ) ) ).removeAllElements();
            }
            // classify each input into the cluster with the nearest mean
            keys = input.keySet().iterator(); // get new iterator
            while ( keys.hasNext() ) {
                Object key = keys.next();
                double[] distArray = (double[]) distance.get( key );
                int minimumDistClusterID = 0;
                for ( int i = 1; i < clusterNumber; i ++ ) {
                    if ( distArray[ i ] < distArray[ minimumDistClusterID ] ) {
                        minimumDistClusterID = i;
                    }
                }
                ( (Vector) idCluster.get( new Integer( minimumDistClusterID ) ) ).add( key );
            }
            // estimate new means from the clusters
            Iterator clusterKey = idCluster.keySet().iterator();
            oldMean = new Hashtable( (Map) mean );
            int deadClusterCount = 0;
            while ( clusterKey.hasNext() ) {
                Object key = clusterKey.next();
                Vector idVector = (Vector) idCluster.get( key );
                if ( idVector.size() == 0 ) {
                    // the cluster is dead: no pixel belongs to it, so re-seed
                    // its mean with a randomly drawn input pixel.  (The original
                    // nested a duplicate size check here, which made the
                    // mean-averaging branch unreachable.)
                    double[] temp = null;
                    deadClusterCount ++;
                    do {
                        int randomIDForMeanInit = new Double( Math.random() * gridSize ).intValue();
                        temp = (double[]) input.get( new Integer( randomIDForMeanInit ) );
                    } while ( temp == null );
                    mean.put( key, temp );
                } else {
                    Iterator idIterator = idVector.iterator();
                    double[] newMean = new double[ dimension ];
                    while ( idIterator.hasNext() ) {
                        double[] d1 = (double[]) input.get( idIterator.next() );
                        for ( int i = 0; i < dimension; i ++ ) {
                            newMean[ i ] = newMean[ i ] + d1[ i ];
                        }
                    }
                    for ( int i = 0; i < dimension; i ++ ) {
                        newMean[ i ] = newMean[ i ] / idVector.size();
                    }
                    mean.put( key, newMean );
                }
            }
            System.out.println( "KMeans Round: " + round );
            System.out.println( " Dead Cluster: " + deadClusterCount );
            difference = getDifferenceBetweenMeans( oldMean, mean );
            System.out.println( "difference: " + difference );
        } while ( difference > 0.01 ); // stopping condition
        //} while ( round < 50 ); // alternative stopping condition
        return idCluster;
    }

    private double getDifferenceBetweenMeans( Hashtable oldM, Hashtable newM ) {
        Iterator oldMI = oldM.values().iterator();
        int dimension = ( (double[]) oldMI.next() ).length;
        oldMI = oldM.values().iterator();
        Iterator newMI = newM.values().iterator();
        double totalDifference = 0.0d;
        while ( oldMI.hasNext() ) {
            double[] oldMOne = (double[]) oldMI.next();
            double[] newMOne = (double[]) newMI.next();
            double difference = 0.0d;
            for ( int i = 0; i < dimension; i ++ ) {
                difference = difference + Math.abs( oldMOne[ i ] - newMOne[ i ] );
            }
            totalDifference = totalDifference + difference / dimension;
        }
        return totalDifference / oldM.size();
    }
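    /*
     * Worked example of the sampling proportions described in the comments: if
     * initPartTrainingDataReading sampled 40% of the grid, then calling the
     * method below with proportion 0.05 draws floor( 0.05 * clusterSize )
     * pixels from each cluster, so roughly 0.05 * 0.40 = 2% of the whole grid
     * is returned per call.
     */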
    /**
     * Returns a proportion of the selected and k-means-clustered training data,
     * usually a small amount for initial training.  Data are selected at random
     * from each cluster with the same proportion and are removed from the
     * inUsingIDCluster at the same time, so the same data cannot be selected
     * next time.  Callers who want all training data ids and their clusters
     * should call getIDCluster(), which keeps a copy of the original idCluster
     * Hashtable for further training use.
     */
    public Object[] getRandomSmallAmountPartialTrainingData( double proportion ) {
        Hashtable xInput = getPartTrainingInput();
        Hashtable yInput = getPartTrainingOutput();
        if ( ( xInput == null ) || ( yInput == null ) ) {
            System.out.println( "not first time, so need to reconstruct" );
            this.initPartTrainingDataReading( trainingInputFileDir, trainingOutputFileDir, size );
            xInput = getPartTrainingInput();
            yInput = getPartTrainingOutput();
        }
        // local name chosen to avoid shadowing the field "size"
        int selectedSize = new Double( xInput.size() * proportion ).intValue();
        int totalSize = xInput.size();
        Vector smallX = new Vector();
        Vector smallY = new Vector();
        // a copy of idCluster; the ids used below are removed from it
        Hashtable idCluster = getInUsingIDCluster();
        Object[] idClusterKeyArray = idCluster.keySet().toArray();
        System.out.println( "Partial Data Size: " + totalSize );
        // get live id cluster vector
        /*
        for ( int i = 0; i < idClusterKeyArray.length; i ++ ) {
            Vector oneIDCluster = ( Vector ) idCluster.get( idClusterKeyArray[ i ] );
            int vectorSize = oneIDCluster.size();
            int share = new Double( vectorSize * proportion ).intValue();
            for ( int j = 0; j < share; j ++ ) {
                vectorSize = oneIDCluster.size();
                int randomPosition = new Double( Math.random() * ( vectorSize - 1 ) ).intValue();
                smallX.add( xInput.get( oneIDCluster.get( randomPosition ) ) );
                smallY.add( yInput.get( oneIDCluster.get( randomPosition ) ) );
                oneIDCluster.removeElementAt( randomPosition ); // remove it, so it cannot be selected next time
            }
            idCluster.put( idClusterKeyArray[ i ], oneIDCluster ); // replace the old id cluster with the used one
        }
        */
        Vector selectedId = new Vector();
        for ( int i = 0; i < idClusterKeyArray.length; i ++ ) {
            Vector oneIDCluster = ( Vector ) idCluster.get( idClusterKeyArray[ i ] );
            int vectorSize = oneIDCluster.size();
            int share = new Double( vectorSize * proportion ).intValue();
            for ( int j = 0; j < share; j ++ ) {
                vectorSize = oneIDCluster.size(); // the vector shrinks every round as elements are removed
                int randomPosition = new Double( Math.random() * ( vectorSize - 1 ) ).intValue();
                selectedId.add( oneIDCluster.get( randomPosition ) );
                oneIDCluster.removeElementAt( randomPosition ); // remove it, so it cannot be selected next time
            }
            idCluster.put( idClusterKeyArray[ i ], oneIDCluster ); // replace the old id cluster with the used one
        }
        Object[] selectedIdTemp = selectedId.toArray();
        int[] selectedIdArray = new int[ selectedIdTemp.length ];
        for ( int i = 0; i < selectedIdArray.length; i ++ ) {
            selectedIdArray[ i ] = ( ( Integer ) selectedIdTemp[ i ] ).intValue();
        }
        doIDArraySorting( selectedIdArray );
        for ( int i = 0; i < selectedIdArray.length; i ++ ) {
            //System.out.println( selectedIdArray[ i ] );
            smallX.add( xInput.get( new Integer( selectedIdArray[ i ] ) ) );
            smallY.add( yInput.get( new Integer( selectedIdArray[ i ] ) ) );
        }
        double[][] tempX = new double[ smallX.size() ][];
        double[][] tempY = new double[ smallY.size() ][];
        Object[] tempAX = smallX.toArray();
        Object[] tempAY = smallY.toArray();
        for ( int i = 0; i < tempX.length; i ++ ) {
            tempX[ i ] = (double[]) tempAX[ i ];
            tempY[ i ] = (double[]) tempAY[ i ];
        }
        Object[] temp = new Object[ 2 ];
        temp[ 0 ] = tempX;
        temp[ 1 ] = tempY;
        System.out.println( "Expected Small Amount Data Size: " + selectedSize );
        System.out.println( "Small Amount Data Size: " + smallX.size() );
        System.out.println( "Small Amount Data Size: " + smallY.size() );
        // release memory for processing; the part data will be reconstructed
        // when more small amount data reselection is required
        xInput = null;
        yInput = null;
        setInUsingIDCluster( null );
        setPartTrainingInput( null );
        setPartTrainingOutput( null );
        System.gc();
        return temp;
    }

    public Hashtable getIDCluster() {
        return this.idCluster;
    }

    public void setIDCluster( Hashtable idCluster ) {
        this.idCluster = idCluster;
    }

    public int[] getTrainingDataID() {
        return this.trainingDataID;
    }

    /**
     * Returns the pixel id in the grid for the given index into the partInput
     * and partOutput double[ index ][] arrays; returns -1 if the index is
     * beyond the index boundary.
     */
    public int getTrainingDataID( int index ) {
        int[] dataID = getTrainingDataID();
        if ( ( index < dataID.length ) && ( index >= 0 ) ) {
            return dataID[ index ];
        } else {
            return -1;
        }
    }

    public void setTrainingDataID( int[] trainingDataID ) {
        this.trainingDataID = trainingDataID;
    }

    public void setTrainingDataID( int index, int id ) {
        int[] dataID = getTrainingDataID();
        if ( ( index < dataID.length ) && ( index >= 0 ) ) {
            dataID[ index ] = id;
        }
        setTrainingDataID( dataID );
    }

    private void setXFile( File[] file ) {
        for ( int i = 0; i < file.length; i ++ ) {
            this.xFile.put( new Integer( i ), file[ i ] );
        }
    }

    private void setYFile( File[] file ) {
        for ( int i = 0; i < file.length; i ++ ) {
            this.yFile.put( new Integer( i ), file[ i ] );
        }
    }

    public AbstractGrid2DSquareCellDouble[] getGrid() {
        return this.grids;
    }

    public void setGrid( AbstractGrid2DSquareCellDouble[] grids ) {
        this.grids = grids;
    }
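    /*
     * A minimal usage sketch for readDataForPrediction below, assuming a
     * trained network has been saved via NeuroNetwork and that the paths
     * (illustrative only) exist:
     *
     *   File[] inputFiles = new File( "predictionInputDir" ).listFiles();
     *   File nnFile = new File( "trainedNet.nn" );
     *   AbstractGrid2DSquareCellDouble[] grids =
     *           NeuroNetInputAndOutput.readDataForPrediction( inputFiles, nnFile );
     *
     * Grids are initialised in memory where possible, falling back to
     * file-backed grids in java.io.tmpdir on OutOfMemoryError.
     */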
    public static AbstractGrid2DSquareCellDouble[] readDataForPrediction( File[] inputDataFile, File nnParameterFile ) {
        if ( ( inputDataFile == null ) || ( nnParameterFile == null ) ) {
            System.out.println( " set file directory please " );
            return null;
        }
        try {
            NeuroNetwork nn = NeuroNetwork.load( nnParameterFile );
            if ( nn.getInputNumber() != inputDataFile.length ) {
                System.out.println( "Input number does not match: "
                        + "InputNumber in Neural Network is " + nn.getInputNumber()
                        + " but now input is " + inputDataFile.length );
                return null;
            }
        } catch ( Exception e ) {
            e.printStackTrace();
        }
        Grid2DSquareCellDoubleFactory f;
        Grid2DSquareCellDoubleFileFactory ff;
        AbstractGrid2DSquareCellDouble[] inputGrids = new AbstractGrid2DSquareCellDouble[ inputDataFile.length ];
        for ( int i = 0; i < inputDataFile.length; i ++ ) {
            //Utilities u = new Utilities();
            // initialising an empty grid with the same extent as the input grids
            try {
                System.out.println( "trying to initialise inputGrid in memory..." );
                f = new Grid2DSquareCellDoubleFactory();
                inputGrids[ i ] = f.createGrid2DSquareCellDouble( inputDataFile[ i ] );
                inputGrids[ i ].setNoDataValue( -9999.0 );
                System.out.println( i + "...done" );
            } catch ( java.lang.OutOfMemoryError e ) {
                System.out.println( e + " initialising inputGrid as file..." );
                ff = new Grid2DSquareCellDoubleFileFactory();
                ff.setFile( Utilities.makeTemporaryFile( System.getProperty( "java.io.tmpdir" ), Integer.toString( i ) ) );
                inputGrids[ i ] = ff.createGrid2DSquareCellDouble( inputDataFile[ i ] );
                inputGrids[ i ].setNoDataValue( -9999.0 );
                System.out.println( i + "...done" );
            }
        }
        return inputGrids;
    }

    public static Object[] constructTestingXAndY() {
        double[][] x = new double[ 100 ][ 1 ];
        double[][] y = new double[ 100 ][ 2 ];
        for ( int i = 0; i < x.length; i ++ ) {
            x[ i ][ 0 ] = ( double ) i / ( double ) x.length;
            y[ i ][ 0 ] = ( Math.cos( 2.0d * Math.PI * ( ( double ) i / ( double ) x.length ) ) + 1.0d ) / 2.0d
                    * ( Math.sin( 2.0d * Math.PI * ( ( double ) i / ( double ) x.length ) ) + 1.0d ) / 2.0d;
            y[ i ][ 1 ] = ( Math.sin( 2.0d * Math.PI * ( ( double ) i / ( double ) x.length ) ) + 1.0d ) / 2.0d;
        }
        return new Object[] { x, y };
    }

    public double[][] getPartInput() {
        return this.partX;
    }

    public double[][] getPartOutput() {
        return this.partY;
    }

    public void setPartInput( double[][] newPartInput ) {
        this.partX = newPartInput;
    }

    public void setPartOutput( double[][] newPartOutput ) {
        this.partY = newPartOutput;
    }
}
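/*
 * constructTestingXAndY above provides a synthetic check of the training
 * pipeline when no grids are available: for x = i/100, i = 0..99, it returns
 * y[0] = ((cos(2*PI*x) + 1) / 2) * ((sin(2*PI*x) + 1) / 2) and
 * y[1] = (sin(2*PI*x) + 1) / 2, both already reranged into [0, 1].  A minimal
 * usage sketch:
 *
 *   Object[] xy = NeuroNetInputAndOutput.constructTestingXAndY();
 *   double[][] x = (double[][]) xy[ 0 ]; // 100 x 1
 *   double[][] y = (double[][]) xy[ 1 ]; // 100 x 2
 */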