[netcdf-java] Aggregate netcdf files programmatically

Hi all,

I have n files containing wave height data. Each file contains the wave data for a single forecast time, and I would like to join the files to obtain one file covering the whole range of dates.
So, for example,
file_1 contains wave height forecast for 19/03/2013 at 00:00
file_2 contains wave height forecast for 19/03/2013 at 06:00
file_3 contains wave height forecast for 19/03/2013 at 12:00
...
file_n-1 contains wave height forecast for 21/03/2013 at 00:00
file_n contains wave height forecast for 21/03/2013 at 06:00

I want to obtain a single file with the wave height forecast over the range 19/03/2013 00:00 - 21/03/2013 06:00.
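For reference, I believe the same joinExisting aggregation can also be expressed declaratively in NcML and opened with NetcdfDataset.openDataset. This is only a sketch: the file names (file_1.nc, file_2.nc) and the agg.ncml path are placeholders, not my real GRIB files.

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import ucar.nc2.dataset.NetcdfDataset;

public class NcmlJoinSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder NcML describing a joinExisting aggregation along the "time" dimension.
        String ncml =
            "<netcdf xmlns='http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2'>\n" +
            "  <aggregation dimName='time' type='joinExisting'>\n" +
            "    <netcdf location='file_1.nc'/>\n" +
            "    <netcdf location='file_2.nc'/>\n" +
            "  </aggregation>\n" +
            "</netcdf>";
        File ncmlFile = new File("agg.ncml");
        Files.write(ncmlFile.toPath(), ncml.getBytes(StandardCharsets.UTF_8));

        // NetcdfDataset recognizes the .ncml file and performs the aggregation when opening it.
        NetcdfDataset agg = NetcdfDataset.openDataset(ncmlFile.getAbsolutePath());
        try {
            System.out.println(agg);
        } finally {
            agg.close(); // also closes the aggregated component files
        }
    }
}

I would prefer to do the aggregation purely in code, though, which is why I went looking for the utility class below.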

I searched the old posts and Google and found this utility class:


// Must live in ucar.nc2.ncml: the aggregation classes it extends are package-private.
// (Import locations are those of the netcdf-java 4.3 line.)
package ucar.nc2.ncml;

import java.io.File;
import java.io.IOException;
import java.util.List;

import thredds.inventory.MFileOS;
import ucar.nc2.NetcdfFile;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.util.CancelTask;

public class AggregationUtil {

    public static NetcdfDataset joinExisting(String aggDimName, List<NetcdfFile> ncFiles) throws IOException {
        NetcdfDataset aggDataset = new NetcdfDataset();

        AggregationExistingOpenDataset joinExisting = new AggregationExistingOpenDataset(aggDataset, aggDimName);
        for (NetcdfFile ncFile : ncFiles) {
            joinExisting.addOpenNcFile(ncFile);
        }

        joinExisting.finish(null);
        aggDataset.setAggregation(joinExisting); // Makes sure that ncFiles get closed when aggDataset gets closed.
        aggDataset.finish();
        // No enhancement done here!

        return aggDataset;
    }

    public static class AggregationExistingOpenDataset extends AggregationExisting {

        public AggregationExistingOpenDataset(NetcdfDataset joinExistingDataset, String aggDimName) {
            super(joinExistingDataset, aggDimName, null);
        }

        public void addOpenNcFile(NetcdfFile openNcFile) {
            addDataset(new ExistingOpenDataset(openNcFile));
        }

        @Override
        protected void closeDatasets() throws IOException {
            for (Dataset dataset : getDatasets()) {
                ExistingOpenDataset eod = (ExistingOpenDataset) dataset;
                eod.openNcFile.close();
            }
        }

        // The class being extended is *package private*. That's the reason that AggregationUtil must be in
        // ucar.nc2.ncml in the first place.
        public class ExistingOpenDataset extends AggregationOuterDimension.DatasetOuterDimension {
            private NetcdfFile openNcFile;

            public ExistingOpenDataset(NetcdfFile openNcFile) {
                super(new MFileOS(new File(openNcFile.getLocation())));
                this.cacheLocation = openNcFile.getLocation(); // Silences a warning in Aggregation.makeDataset().
                this.openNcFile = openNcFile;
            }

            @Override
            public NetcdfFile acquireFile(CancelTask cancelTask) throws IOException {
                return openNcFile;
            }

            @Override
            protected void close(NetcdfFile ncfile) throws IOException {
                // DON'T ACTUALLY CLOSE ncfile HERE!
                // We let it stay open until the user invokes Aggregation.close() (which, in turn,
                // calls AggregationExistingOpenDataset.closeDatasets()).
            }
        }
    }
}



and to test it I tried this:

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import ucar.ma2.InvalidRangeException;
import ucar.nc2.NetcdfFile;
import ucar.nc2.NetcdfFileWriter;
import ucar.nc2.VariableSimpleIF;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.dt.grid.GridDataset;
import ucar.nc2.dt.grid.NetcdfCFWriter;
import ucar.nc2.ncml.AggregationUtil;

public class Test {

    public static void main(String[] args) {
        // Test the merge (joinExisting aggregation)
        try {
            joinExisting();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void joinExisting() throws IOException, InvalidRangeException {
        File janFile = new File("C:/Users/Test/Desktop/union/US058GOCN-GR1mdl.0110_0240_00000F0RL2013031800_0001_000000-000000sig_wav_ht");
        File febFile = new File("C:/Users/Test/Desktop/union/US058GOCN-GR1mdl.0110_0240_09000F0RL2013031800_0001_000000-000000sig_wav_ht");
        NetcdfFile janNcFile = NetcdfFile.open(janFile.getAbsolutePath());
        NetcdfFile febNcFile = NetcdfFile.open(febFile.getAbsolutePath());

        NetcdfDataset joinExistingDataset = AggregationUtil.joinExisting("time", Arrays.asList(janNcFile, febNcFile));
        try {
            System.out.println(joinExistingDataset);
            System.out.println("Time levels:" + joinExistingDataset.readSection("time"));

            GridDataset gds = new GridDataset(joinExistingDataset);
            NetcdfCFWriter writer = new NetcdfCFWriter();

            List<String> wantedVars = new ArrayList<String>();
            for (VariableSimpleIF var : gds.getDataVariables()) {
                System.out.println("Variable:" + var.getFullName());
                wantedVars.add(var.getFullName());
            }
            try {
                System.out.println("gds=" + gds);
                writer.makeFile("C:/Users/Crisma/Desktop/union/out", gds, wantedVars, gds.getBoundingBox(),
                        1, null, null, 1, false, NetcdfFileWriter.Version.netcdf3);
            } catch (Exception e) {
                e.printStackTrace();
            }
        } finally {
            joinExistingDataset.close(); // Closes janNcFile and febNcFile.
        }
    }
}
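As a side check (not part of the run below), I was also thinking of decoding the aggregated time values into calendar dates using the variable's own units string, to confirm the joined axis really covers 19/03 - 21/03. A sketch only: TimeAxisCheck is just a name I made up, and I am assuming DateUnit accepts the "Hour since ..." units string that appears in the dump below.

import ucar.ma2.Array;
import ucar.nc2.Variable;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.units.DateUnit;

public class TimeAxisCheck {
    // Prints each value of the "time" coordinate as a calendar date, using the
    // variable's units attribute (e.g. "Hour since 2013-03-18T00:00:00Z").
    public static void printTimes(NetcdfDataset ds) throws Exception {
        Variable timeVar = ds.findVariable("time");
        Array timeVals = timeVar.read();
        DateUnit du = new DateUnit(timeVar.getUnitsString());
        for (int i = 0; i < timeVals.getSize(); i++) {
            System.out.println(du.makeDate(timeVals.getDouble(i)));
        }
    }
}

It would be called as TimeAxisCheck.printTimes(joinExistingDataset) right after the aggregation.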


If I run the Test class I obtain:

netcdf null {
 dimensions:
   lon = 360;
   lat = 181;
   time = 2;
 variables:
   float lat(lat=181);
     :units = "degrees_north";
   float lon(lon=360);
     :units = "degrees_east";
   int time(time=2);
     :units = "Hour since 2013-03-18T00:00:00Z";
     :standard_name = "time";
:long_name = "Uninitialized analysis / image product / forecast product valid for RT + P1";
   float sig_wav_ht_surface(time=2, lat=181, lon=360);
:long_name = "ocean wave forecasting A fictitious wave with a height and period equal to the average height of the highest third of the actual waves that pass a fixed point @ Ground or water surface";
     :units = "m";
     :missing_value = NaNf; // float
     :grid_mapping = "LatLon_Projection";
     :Grib_Variable_Id = "VAR_58-0-3-100_L1";
     :Grib1_Center = 58; // int
     :Grib1_Subcenter = 0; // int
     :Grib1_TableVersion = 3; // int
     :Grib1_Parameter = 100; // int
     :Grib1_Parameter_Name = "sig_wav_ht";
     :Grib1_Level_Type = 1; // int

:Originating_or_generating_Center = "Fleet Numerical Meteorology and Oceanography Center, Monterey, CA, USA";
 :Originating_or_generating_Subcenter = "0";
 :Generating_process_or_model = "Wave Watch 3rd Revision Global";
 :Conventions = "CF-1.6";
 :history = "Read using CDM IOSP Grib1Collection";
 :featureType = "GRID";
 :file_format = "GRIB-1";
}

Time levels:0 90
Variable:sig_wav_ht_surface
gds=ucar.nc2.dt.grid.GridDataset@177f409c

java.io.IOException: spi is null, perhaps file has been closed. Trying to read variable sig_wav_ht_surface
    at ucar.nc2.NetcdfFile.readData(NetcdfFile.java:1939)
    at ucar.nc2.Variable.reallyRead(Variable.java:859)
    at ucar.nc2.Variable._read(Variable.java:831)
    at ucar.nc2.Variable.read(Variable.java:709)
    at ucar.nc2.ncml.Aggregation$Dataset.read(Aggregation.java:683)
at ucar.nc2.ncml.AggregationOuterDimension.reallyRead(AggregationOuterDimension.java:374)
    at ucar.nc2.dataset.VariableDS._read(VariableDS.java:502)
    at ucar.nc2.Variable.read(Variable.java:709)
    at ucar.nc2.FileWriter.copyAll(FileWriter.java:325)
    at ucar.nc2.FileWriter.copyVarData(FileWriter.java:286)
    at ucar.nc2.FileWriter.writeToFile(FileWriter.java:241)
    at ucar.nc2.FileWriter.writeToFile(FileWriter.java:103)
    at Test.joinExisting(Test.java:115)
    at Test.main(Test.java:57)

Looking at the output, it seems that the join was OK (I have 2 time levels), but when I try to write the result to a file I get an exception.
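To narrow it down, I could read the aggregated variable directly, bypassing NetcdfCFWriter: if that read fails with the same "spi is null" error, the component files are being closed before the write ever starts; if it succeeds, the problem is somewhere in the writer path. A sketch (ReadCheck is just a placeholder name; the variable name comes from the dump above):

import java.io.IOException;
import ucar.ma2.Array;
import ucar.nc2.Variable;
import ucar.nc2.dataset.NetcdfDataset;

public class ReadCheck {
    // Reads all values of one variable straight from the aggregated dataset.
    public static void readAll(NetcdfDataset ds, String varName) throws IOException {
        Variable v = ds.findVariable(varName);
        Array data = v.read();
        System.out.println("Read " + data.getSize() + " values of " + varName);
    }
}

It would be called as ReadCheck.readAll(joinExistingDataset, "sig_wav_ht_surface") just before writer.makeFile(...).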

Does anyone have any suggestions?

Best regards
Giovanni



