diff --git a/src/bufr/BufrParser/Exports/Variables/AircraftAltitudeVariable.cpp b/src/bufr/BufrParser/Exports/Variables/AircraftAltitudeVariable.cpp index 06849a24d..5b8a70429 100644 --- a/src/bufr/BufrParser/Exports/Variables/AircraftAltitudeVariable.cpp +++ b/src/bufr/BufrParser/Exports/Variables/AircraftAltitudeVariable.cpp @@ -88,7 +88,6 @@ namespace Ingester for (size_t idx = 0; idx < referenceObj->size(); idx++) { - std::cout << "idx = " << idx << std::endl; for (auto nameIt = includedFields.rbegin(); nameIt != includedFields.rend(); ++nameIt) { const auto& fieldName = *nameIt; @@ -102,14 +101,12 @@ namespace Ingester { aircraftAlts[idx] = 11000.0f - (std::log1p(value / 22630.0f) / 0.0001576106f); - std::cout << "section 1" << std::endl; } else { aircraftAlts[idx] = (1.0f - powf((value / 101325.0f), (1.0f / 5.256f))) * (288.15f / 0.0065f); - std::cout << "section 2" << std::endl; } } else if (includedFieldMap.find(ConfKeys::AircraftIndicatedAltitude) @@ -119,7 +116,6 @@ namespace Ingester { aircraftAlts[idx] = includedFieldMap[ConfKeys::AircraftIndicatedAltitude]->getAsFloat(idx); - std::cout << "section 3" << std::endl; } } } @@ -127,16 +123,13 @@ namespace Ingester { // This variable is only used in conjunction with pressure. 
continue; - std::cout << "section 4" << std::endl; } else if (!fieldValues->isMissing(idx)) { std::cout << " fieldValues = " << fieldValues << std::endl; aircraftAlts[idx] = fieldValues->getAsFloat(idx); - std::cout << "section 5" << std::endl; } } - std::cout << "AA = " << aircraftAlts[idx] << std::endl; } return std::make_shared>(aircraftAlts, diff --git a/src/compo/aeronet_aaod2ioda.py b/src/compo/aeronet_aaod2ioda.py index 0dd9b3bf9..6261362fe 100755 --- a/src/compo/aeronet_aaod2ioda.py +++ b/src/compo/aeronet_aaod2ioda.py @@ -19,6 +19,7 @@ # -o: output IODA file import numpy as np +import netCDF4 as nc import inspect, sys, os, argparse import pandas as pd from datetime import datetime, timedelta @@ -123,6 +124,8 @@ def add_data(infile): print(aeronetinv_chan) print(frequency) + long_missing_value = nc.default_fillvals['i8'] + nlocs, columns = f3.shape nchans = len(aeronetinv_chan) if nlocs == 0: @@ -134,93 +137,123 @@ def add_data(infile): outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - obsvars = {'aerosol_optical_depth': ['aod_coincident_input[440nm]', 'aod_coincident_input[675nm]', - 'aod_coincident_input[870nm]', 'aod_coincident_input[1020nm]'], - 'absorption_aerosol_optical_depth': ['absorption_aod[440nm]', 'absorption_aod[675nm]', - 'absorption_aod[870nm]', 'absorption_aod[1020nm]']} + obsvars = {'aerosolOpticalDepth': ['aod_coincident_input[440nm]', 'aod_coincident_input[675nm]', + 'aod_coincident_input[870nm]', 'aod_coincident_input[1020nm]'], + 'absorptionAerosolOpticalDepth': ['absorption_aod[440nm]', 'absorption_aod[675nm]', + 'absorption_aod[870nm]', 'absorption_aod[1020nm]']} - AttrData = { - 'converter': os.path.basename(__file__) - } + # A dictionary of global attributes. More filled in further down. + AttrData = {} + AttrData['ioda_object_type'] = 'absorptionAOD' + AttrData['sensor'] = 'aeronet' - DimDict = { - } + # A dictionary of variable dimensions. 
+ DimDict = {} + # A dictionary of variable names and their dimensions. VarDims = { - 'aerosol_optical_depth': ['nlocs', 'nchans'], - 'absorption_aerosol_optical_depth': ['nlocs', 'nchans'], - 'frequency': ['nchans'], - 'sensor_channel': ['nchans'] + 'aerosolOpticalDepth': ['Location', 'Channel'], + 'absorptionAerosolOpticalDepth': ['Location', 'Channel'], + 'sensorCentralFrequency': ['Channel'], + 'sensorChannelNumber': ['Channel'] } + # Get the group names we use the most. + metaDataName = iconv.MetaDataName() + obsValName = iconv.OvalName() + obsErrName = iconv.OerrName() + qcName = iconv.OqcName() + # Define varDict variables for key, value in obsvars.items(): - varDict[key]['valKey'] = key, iconv.OvalName() - varAttrs[key, iconv.OvalName()]['_FillValue'] = -999. - varAttrs[key, iconv.OvalName()]['coordinates'] = 'longitude latitude station_elevation' - varAttrs[key, iconv.OvalName()]['units'] = '1' - varDict[key]['errKey'] = key, iconv.OerrName() - varAttrs[key, iconv.OerrName()]['_FillValue'] = -999. - varAttrs[key, iconv.OerrName()]['coordinates'] = 'longitude latitude station_elevation' - varAttrs[key, iconv.OerrName()]['units'] = '1' - varDict[key]['qcKey'] = key, iconv.OqcName() - varAttrs[key, iconv.OqcName()]['_FillValue'] = -999 - varAttrs[key, iconv.OqcName()]['coordinates'] = 'longitude latitude station_elevation' - varAttrs[key, iconv.OqcName()]['units'] = 'unitless' + varDict[key]['valKey'] = key, obsValName + varDict[key]['errKey'] = key, obsErrName + varDict[key]['qcKey'] = key, qcName + varAttrs[key, obsValName]['coordinates'] = 'longitude latitude stationElevation' + varAttrs[key, obsErrName]['coordinates'] = 'longitude latitude stationElevation' + varAttrs[key, qcName]['coordinates'] = 'longitude latitude stationElevation' + varAttrs[key, obsValName]['_FillValue'] = -9999. + varAttrs[key, obsErrName]['_FillValue'] = -9999. 
+ varAttrs[key, qcName]['_FillValue'] = -9999 + varAttrs[key, obsValName]['units'] = '1' + varAttrs[key, obsErrName]['units'] = '1' for key, value in obsvars.items(): - outdata[varDict[key]['valKey']] = np.array(np.float32(f3[value].fillna(np.float32(-999.)))) - outdata[varDict[key]['qcKey']] = np.where(outdata[varDict[key]['valKey']] == np.float32(-999.), + outdata[varDict[key]['valKey']] = np.array(np.float32(f3[value].fillna(np.float32(-9999.)))) + outdata[varDict[key]['qcKey']] = np.where(outdata[varDict[key]['valKey']] == np.float32(-9999.), 1, 0) - if key in ["aerosol_optical_depth"]: - outdata[varDict[key]['errKey']] = np.where(outdata[varDict[key]['valKey']] == np.float32(-999.), - np.float32(-999.), np.float32(0.02)) + if key in ["aerosolOpticalDepth"]: + outdata[varDict[key]['errKey']] = np.where(outdata[varDict[key]['valKey']] == np.float32(-9999.), + np.float32(-9999.), np.float32(0.02)) else: - outdata[varDict[key]['errKey']] = np.full((nlocs, nchans), np.float32(-999.)) + outdata[varDict[key]['errKey']] = np.full((nlocs, nchans), np.float32(-9999.)) - outdata[('latitude', 'MetaData')] = np.array(np.float32(f3['latitude'])) - outdata[('longitude', 'MetaData')] = np.array(np.float32(f3['longitude'])) - outdata[('station_elevation', 'MetaData')] = np.array(np.float32(f3['elevation'])) - varAttrs[('station_elevation', 'MetaData')]['units'] = 'm' - outdata[('surface_type', 'MetaData')] = np.full((nlocs), 1) - varAttrs[('surface_type', 'MetaData')]['units'] = 'unitless' - - # Whether aaod reaches Level 2.0 without the threshold of aod440 >= 0.4 (0: yes, 1: no) - outdata[('aaod_l2_qc_without_aod440_le_0.4_threshold', 'MetaData')] = np.where(f3['if_retrieval_is_l2(without_l2_0.4_aod_440_threshold)'] == 1, 0, 1) - varAttrs[('aaod_l2_qc_without_aod440_le_0.4_threshold', 'MetaData')]['units'] = 'unitless' + outdata[('latitude', metaDataName)] = np.array(np.float32(f3['latitude'])) + outdata[('longitude', metaDataName)] = np.array(np.float32(f3['longitude'])) + 
outdata[('stationElevation', metaDataName)] = np.array(np.float32(f3['elevation'])) + varAttrs[('stationElevation', metaDataName)]['units'] = 'm' # Whether Coincident_AOD440nm in aeronet_cad.txt reaches Level 2.0 (0: yes, 1: no) - outdata[('aod_l2_qc', 'MetaData')] = np.where(f3['if_aod_is_l2'] == 1, 0, 1) - varAttrs[('aod_l2_qc', 'MetaData')]['units'] = 'unitless' + qcL2Aod = np.where(f3['if_aod_is_l2'] == 1, 0, 1) # aaod inversion type: 0 for ALM20 and 1 for ALM15 - outdata[('aaod_l2_qc', 'MetaData')] = np.where(f3['inversion_data_quality_level'] == 'lev20', 0, 1) - varAttrs[('aaod_l2_qc', 'MetaData')]['units'] = 'unitless' + qcL2Aaod = np.where(f3['inversion_data_quality_level'] == 'lev20', 0, 1) + # Whether aaod reaches Level 2.0 without the threshold of aod440 >= 0.4 (0: yes, 1: no) + qcL2Aaod2 = np.where(f3['if_retrieval_is_l2(without_l2_0.4_aod_440_threshold)'] == 1, 0, 1) + + qcAll = np.full(len(qcL2Aod), long_missing_value, dtype=np.int32) + for i in range(len(qcL2Aod)): + qc1 = qcL2Aod[i] + qc2 = qcL2Aaod[i] + qc3 = qcL2Aaod2[i] + if qc1 == 0 and qc2 == 0 and qc3 == 0: + # Both AOD and AAOD w/and w/o aod440>0.4 threshold reach L2. + qcAll[i] = 0 + elif qc1 == 0 and qc2 == 0 and qc3 == 1: + # Both AOD and AAOD w/ aod440>0.4 threshold reaches L2, + # but AAOD w/o aod440>0.4 threshold does not. + qcAll[i] = 1 + elif qc1 == 0 and qc2 == 1 and qc3 == 0: + # Both AOD and AAOD w/o aod440>0.4 threshold reaches L2, + # but AAOD w/ aod440>0.4 threshold does not. + qcAll[i] = 2 + elif qc1 == 0 and qc2 == 1 and qc3 == 1: + # Only AOD reaches L2. + qcAll[i] = 3 + elif qc1 == 1 and qc2 == 0 and qc3 == 0: + # AAOD w/ and w/o aod440>0.4 threshold reaches L2, + # but AOD does not. + qcAll[i] = 4 + elif qc1 == 1 and qc2 == 0 and qc3 == 1: + # AAOD w/ aod440>0.4 threshold reaches L2, + # but AOD and AAOD w/o aod440>0.4 threshold do not. 
+ qcAll[i] = 5 + elif qc1 == 1 and qc2 == 1 and qc3 == 0: + # AAOD w/o aod440>0.4 threshold reaches L2, + # but AOD and AAOD w/ aod440>0.4 threshold do not. + qcAll[i] = 6 + elif qc1 == 1 and qc2 == 1 and qc3 == 1: + # Neither AOD or AAOD w/ and w/o aod440>0.4 threshold reaches L2. + qcAll[i] = 7 + + outdata[('qualityFlags', metaDataName)] = qcAll c = np.empty([nlocs], dtype=object) c[:] = np.array(f3.siteid) - outdata[('station_id', 'MetaData')] = c - varAttrs[('station_id', 'MetaData')]['units'] = '' + outdata[('stationIdentification', metaDataName)] = c d = np.empty([nlocs], dtype=object) for i in range(nlocs): d[i] = f3.time[i].strftime('%Y-%m-%dT%H:%M:%SZ') - outdata[('datetime', 'MetaData')] = d - varAttrs[('datetime', 'MetaData')]['units'] = '' + outdata[('dateTime', metaDataName)] = d - outdata[('frequency', 'MetaData')] = np.float32(frequency) - varAttrs[('frequency', 'MetaData')]['units'] = 'Hz' - outdata[('sensor_channel', 'MetaData')] = np.int32(aeronetinv_chan) - varAttrs[('sensor_channel', 'MetaData')]['units'] = 'unitless' + outdata[('sensorCentralFrequency', metaDataName)] = np.float32(frequency) + varAttrs[('sensorCentralFrequency', metaDataName)]['units'] = 'Hz' + outdata[('sensorChannelNumber', metaDataName)] = np.int32(aeronetinv_chan) # Add global atrributes - DimDict['nlocs'] = nlocs - DimDict['nchans'] = aeronetinv_chan - AttrData['nlocs'] = np.int32(DimDict['nlocs']) - AttrData['nchans'] = np.int32(nchans) - AttrData['observation_type'] = 'AAOD' - AttrData['sensor'] = 'aeronet' - AttrData['surface_type'] = 'ocean=0,land=1,costal=2' + DimDict['Location'] = nlocs + DimDict['Channel'] = aeronetinv_chan # Setup the IODA writer writer = iconv.IodaWriter(outfile, locationKeyList, DimDict) diff --git a/src/compo/aeronet_aod2ioda.py b/src/compo/aeronet_aod2ioda.py index dfc0a6bc3..5749a896f 100644 --- a/src/compo/aeronet_aod2ioda.py +++ b/src/compo/aeronet_aod2ioda.py @@ -103,82 +103,79 @@ def add_data(infile): print('Zero AERONET AOD is available 
in file: ' + infile + ' and exit.') exit(0) - locationKeyList = [("latitude", "float"), ("longitude", "float"), ("datetime", "string")] + locationKeyList = [("latitude", "float"), ("longitude", "float"), ("dateTime", "string")] varDict = defaultdict(lambda: defaultdict(dict)) outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - obsvars = {'aerosol_optical_depth': ['aod_340nm', 'aod_380nm', - 'aod_440nm', 'aod_675nm', - 'aod_500nm', 'aod_870nm', - 'aod_1020nm', 'aod_1640nm']} + obsvars = {'aerosolOpticalDepth': ['aod_340nm', 'aod_380nm', + 'aod_440nm', 'aod_675nm', + 'aod_500nm', 'aod_870nm', + 'aod_1020nm', 'aod_1640nm']} - AttrData = { - 'converter': os.path.basename(__file__), - } + # A dictionary of global attributes. More filled in further down. + AttrData = {} + AttrData['ioda_object_type'] = 'AOD' + AttrData['sensor'] = 'aeronet' - DimDict = { - } + # A dictionary of variable dimensions. + DimDict = {} + # A dictionary of variable names and their dimensions. VarDims = { - 'aerosol_optical_depth': ['nlocs', 'nchans'], - 'frequency': ['nchans'], - 'sensor_channel': ['nchans'] + 'aerosolOpticalDepth': ["Location", "Channel"], + 'sensorCentralFrequency': ['Channel'], + 'sensorChannelNumber': ['Channel'] } + # Get the group names we use the most. + metaDataName = iconv.MetaDataName() + obsValName = iconv.OvalName() + obsErrName = iconv.OerrName() + qcName = iconv.OqcName() + for key, value in obsvars.items(): - varDict[key]['valKey'] = key, iconv.OvalName() - varAttrs[key, iconv.OvalName()]['_FillValue'] = -999. - varAttrs[key, iconv.OvalName()]['coordinates'] = 'longitude latitude station_elevation' - varAttrs[key, iconv.OvalName()]['units'] = '1' - varDict[key]['errKey'] = key, iconv.OerrName() - varAttrs[key, iconv.OerrName()]['_FillValue'] = -999. 
- varAttrs[key, iconv.OerrName()]['units'] = '1' - varAttrs[key, iconv.OerrName()]['coordinates'] = 'longitude latitude station_elevation' - varDict[key]['qcKey'] = key, iconv.OqcName() - varAttrs[key, iconv.OqcName()]['_FillValue'] = -999 - varAttrs[key, iconv.OqcName()]['coordinates'] = 'longitude latitude station_elevation' - varAttrs[key, iconv.OqcName()]['units'] = 'unitless' + varDict[key]['valKey'] = key, obsValName + varDict[key]['errKey'] = key, obsErrName + varDict[key]['qcKey'] = key, qcName + varAttrs[key, obsValName]['coordinates'] = 'longitude latitude stationElevation' + varAttrs[key, obsErrName]['coordinates'] = 'longitude latitude stationElevation' + varAttrs[key, qcName]['coordinates'] = 'longitude latitude stationElevation' + varAttrs[key, obsValName]['_FillValue'] = -9999. + varAttrs[key, obsErrName]['_FillValue'] = -9999. + varAttrs[key, qcName]['_FillValue'] = -9999 + varAttrs[key, obsValName]['units'] = '1' + varAttrs[key, obsErrName]['units'] = '1' for key, value in obsvars.items(): - outdata[varDict[key]['valKey']] = np.array(np.float32(f3[value].fillna(np.float32(-999.)))) - outdata[varDict[key]['qcKey']] = np.where(outdata[varDict[key]['valKey']] == np.float32(-999.), + outdata[varDict[key]['valKey']] = np.array(np.float32(f3[value].fillna(np.float32(-9999.)))) + outdata[varDict[key]['qcKey']] = np.where(outdata[varDict[key]['valKey']] == np.float32(-9999.), 1, 0) - outdata[varDict[key]['errKey']] = np.where(outdata[varDict[key]['valKey']] == np.float32(-999.), - np.float32(-999.), np.float32(0.02)) + outdata[varDict[key]['errKey']] = np.where(outdata[varDict[key]['valKey']] == np.float32(-9999.), + np.float32(-9999.), np.float32(0.02)) # Add metadata variables - outdata[('latitude', 'MetaData')] = np.array(np.float32(f3['latitude'])) - outdata[('longitude', 'MetaData')] = np.array(np.float32(f3['longitude'])) - outdata[('station_elevation', 'MetaData')] = np.array(np.float32(f3['elevation'])) - varAttrs[('station_elevation', 
'MetaData')]['units'] = 'm' - outdata[('surface_type', 'MetaData')] = np.full((nlocs), 1) - varAttrs[('surface_type', 'MetaData')]['units'] = 'unitless' + outdata[('latitude', metaDataName)] = np.array(np.float32(f3['latitude'])) + outdata[('longitude', metaDataName)] = np.array(np.float32(f3['longitude'])) + outdata[('stationElevation', metaDataName)] = np.array(np.float32(f3['elevation'])) + varAttrs[('stationElevation', metaDataName)]['units'] = 'm' c = np.empty([nlocs], dtype=object) c[:] = np.array(f3.siteid) - outdata[('station_id', 'MetaData')] = c - varAttrs[('station_id', 'MetaData')]['units'] = '' + outdata[('stationIdentification', metaDataName)] = c d = np.empty([nlocs], dtype=object) for i in range(nlocs): d[i] = f3.time[i].strftime('%Y-%m-%dT%H:%M:%SZ') - outdata[('datetime', 'MetaData')] = d - varAttrs[('datetime', 'MetaData')]['units'] = '' + outdata[('dateTime', metaDataName)] = d - outdata[('frequency', 'MetaData')] = np.float32(frequency) - varAttrs[('frequency', 'MetaData')]['units'] = 'Hz' - outdata[('sensor_channel', 'MetaData')] = np.int32(aod_chan) - varAttrs[('sensor_channel', 'MetaData')]['units'] = 'unitless' + outdata[('sensorCentralFrequency', metaDataName)] = np.float32(frequency) + varAttrs[('sensorCentralFrequency', metaDataName)]['units'] = 'Hz' + outdata[('sensorChannelNumber', metaDataName)] = np.int32(aod_chan) # Add global atrributes - DimDict['nlocs'] = nlocs - DimDict['nchans'] = aod_chan - AttrData['nlocs'] = np.int32(DimDict['nlocs']) - AttrData['nchans'] = np.int32(nchans) - AttrData['observation_type'] = 'AOD' - AttrData['sensor'] = 'aeronet' - AttrData['surface_type'] = 'ocean=0,land=1,costal=2' + DimDict['Location'] = nlocs + DimDict['Channel'] = aod_chan # Setup the IODA writer writer = iconv.IodaWriter(outfile, locationKeyList, DimDict) diff --git a/src/compo/airnow2ioda-nc.py b/src/compo/airnow2ioda-nc.py index c149c315f..d2c7766b2 100755 --- a/src/compo/airnow2ioda-nc.py +++ b/src/compo/airnow2ioda-nc.py @@ -1,20 
+1,66 @@ #!/usr/bin/env python3 -# read airnow data and convert to netcdf +# Read airnow text data file and convert to IODA netcdf +import os, sys +from datetime import datetime +from pathlib import Path import netCDF4 as nc import numpy as np -import inspect, os, sys, argparse import pandas as pd -from datetime import datetime -from pathlib import Path IODA_CONV_PATH = Path(__file__).parent/"@SCRIPT_LIB_PATH@" if not IODA_CONV_PATH.is_dir(): IODA_CONV_PATH = Path(__file__).parent/'..'/'lib-python' sys.path.append(str(IODA_CONV_PATH.resolve())) -import meteo_utils -import ioda_conv_ncio as iconv + from collections import defaultdict, OrderedDict from orddicts import DefaultOrderedDict +import ioda_conv_engines as iconv + +os.environ["TZ"] = "UTC" + +# Dictionary of output variables (ObsVal, ObsError, and PreQC). +# First is the incoming variable name followed by list of IODA outgoing name and units. + +varDict = {'PM2.5': ['particulatematter2p5Surface', 'mg m-3'], + 'OZONE': ['ozoneSurface', 'ppmV']} + +locationKeyList = [("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z"), + ("stationElevation", "float", "m"), + ("height", "float", "m"), + ("stationIdentification", "string", "")] +meta_keys = [m_item[0] for m_item in locationKeyList] + +GlobalAttrs = {'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'description': 'AIRNow data (converted from text/csv to IODA', + 'source': 'Unknown (ftp)'} + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = nc.default_fillvals['f4'] +int_missing_value = nc.default_fillvals['i4'] +double_missing_value = nc.default_fillvals['f8'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + 
+missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} def read_monitor_file(sitefile=None): @@ -86,6 +132,8 @@ def add_data(infile, sitefile): if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser( description=( 'Reads single AIRNow text file ' @@ -113,46 +161,76 @@ def add_data(infile, sitefile): f3 = f.dropna(subset=['PM2.5'], how='any').reset_index() nlocs, columns = f3.shape - obsvars = {'pm25': 'pm25', 'o3': 'o3', } - AttrData = {'converter': os.path.basename(__file__), } - - locationKeyList = [("latitude", "float"), ("longitude", "float"), - ("station_elevation", "float"), ("height", "float"), ("station_id", "string"), - ("datetime", "string")] - - writer = iconv.NcWriter(args.output, locationKeyList) - - varDict = defaultdict(lambda: defaultdict(dict)) - outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) - loc_mdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) - var_mdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) - units = {} - units['pm25'] = 'microgram/m3' - units['o3'] = 'ppmV' - - for i in ['pm25', 'o3']: - varDict[i]['valKey'] = i, writer.OvalName() - varDict[i]['errKey'] = i, writer.OerrName() - varDict[i]['qcKey'] = i, writer.OqcName() - - d = np.empty([nlocs], 'S20') - d[:] = np.array(f3.time[1].strftime('%Y-%m-%dT%H:%M:%SZ')) - loc_mdata['datetime'] = writer.FillNcVector(d, 'datetime') - loc_mdata['latitude'] = np.array(f3['latitude']) - loc_mdata['longitude'] = np.array(f3['longitude']) - loc_mdata['height'] = np.full((nlocs), 10.) 
- loc_mdata['station_elevation'] = np.array(f3['elevation']) - - c = np.empty([nlocs], dtype='S20') - c[:] = np.array(f3.siteid) - loc_mdata['station_id'] = writer.FillNcVector(c, 'string') - - outdata[varDict['pm25']['valKey']] = np.array(f3['PM2.5'].fillna(nc.default_fillvals['f4'])) - outdata[varDict['o3']['valKey']] = np.array((f3['OZONE']/1000).fillna(nc.default_fillvals['f4'])) - for i in ['pm25', 'o3']: - outdata[varDict[i]['errKey']] = np.full((nlocs), 0.1) - outdata[varDict[i]['qcKey']] = np.full((nlocs), 0) - - writer._nvars = 2 - writer._nlocs = nlocs - writer.BuildNetcdf(outdata, loc_mdata, var_mdata, AttrData, units) + dt = f3.time[1].to_pydatetime() + time_offset = round((dt - epoch).total_seconds()) + + ioda_data = {} # The final outputs. + data = {} # Before assigning the output types into the above. + for key in varDict.keys(): + data[key] = [] + for key in meta_keys: + data[key] = [] + + # Fill the temporary data arrays from input file column data + data['stationIdentification'] = np.full(nlocs, f3.siteid, dtype='S20') + data['dateTime'] = np.full(nlocs, np.int64(time_offset)) + data['latitude'] = np.array(f3['latitude']) + data['longitude'] = np.array(f3['longitude']) + data['stationElevation'] = np.array(f3['elevation']) + data['height'] = np.array(f3['elevation']) + for n in range(nlocs): + data['height'][n] = data['height'][n] + 10.0 # 10 meters above stationElevation + + for n, key in enumerate(varDict.keys()): + if n == 0: + key1 = key + var1 = varDict[key][0] + elif n == 1: + key2 = key + var2 = varDict[key][0] + + data[var1] = np.array(f3[key1].fillna(float_missing_value)) + data[var2] = np.array((f3[key2]/1000).fillna(float_missing_value)) + + DimDict = {'Location': nlocs} + + varDims = {} + for key in varDict.keys(): + variable = varDict[key][0] + varDims[variable] = ['Location'] + + # Set units of the MetaData variables and all _FillValues. 
+ varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if locationKeyList[meta_keys.index(key)][2]: + varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + + # Set units and FillValue attributes for groups associated with observed variable. + for key in varDict.keys(): + variable = varDict[key][0] + units = varDict[key][1] + varAttrs[(variable, obsValName)]['units'] = units + varAttrs[(variable, obsErrName)]['units'] = units + varAttrs[(variable, obsValName)]['coordinates'] = 'longitude latitude' + varAttrs[(variable, obsErrName)]['coordinates'] = 'longitude latitude' + varAttrs[(variable, qcName)]['coordinates'] = 'longitude latitude' + varAttrs[(variable, obsValName)]['_FillValue'] = float_missing_value + varAttrs[(variable, obsErrName)]['_FillValue'] = float_missing_value + varAttrs[(variable, qcName)]['_FillValue'] = int_missing_value + + # Fill the final IODA data: MetaData then ObsValues, ObsErrors, and QC + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + ioda_data[(key, metaDataName)] = np.array(data[key], dtype=dtypes[dtypestr]) + + for key in varDict.keys(): + variable = varDict[key][0] + ioda_data[(variable, obsValName)] = np.array(data[variable], dtype=np.float32) + ioda_data[(variable, obsErrName)] = np.full(nlocs, 0.1, dtype=np.float32) + ioda_data[(variable, qcName)] = np.full(nlocs, 2, dtype=np.int32) + + # setup the IODA writer and write everything out. 
+ writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) + writer.BuildIoda(ioda_data, varDims, varAttrs, GlobalAttrs) diff --git a/src/compo/mls_o3_nc2ioda.py b/src/compo/mls_o3_nc2ioda.py index e3a67056d..5bfd6c1df 100755 --- a/src/compo/mls_o3_nc2ioda.py +++ b/src/compo/mls_o3_nc2ioda.py @@ -29,38 +29,44 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("air_pressure", "float"), - ("dateTime", "integer"), + ("pressure", "float"), + ("dateTime", "long"), ] +varname_ozone = 'ozoneProfile' + ioda2nc = {} ioda2nc['latitude'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Latitude' ioda2nc['longitude'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Longitude' ioda2nc['dateTime'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Time' -ioda2nc['air_pressure'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Pressure' +ioda2nc['pressure'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Pressure' ioda2nc['valKey'] = 'HDFEOS/SWATHS/O3/Data Fields/O3' ioda2nc['precision'] = 'HDFEOS/SWATHS/O3/Data Fields/O3Precision' ioda2nc['convergence'] = 'HDFEOS/SWATHS/O3/Data Fields/Convergence' ioda2nc['status'] = 'HDFEOS/SWATHS/O3/Data Fields/Status' ioda2nc['quality'] = 'HDFEOS/SWATHS/O3/Data Fields/Quality' -ioda2nc['solar_zenith_angle'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/SolarZenithAngle' +ioda2nc['solarZenithAngle'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/SolarZenithAngle' obsvars = { - 'mole_fraction_of_ozone_in_air': 'mole_fraction_of_ozone_in_air', + 'mole_fraction_of_ozone_in_air': varname_ozone, } AttrData = { - 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), + 'converter': os.path.basename(__file__) } DimDict = { } VarDims = { - 'mole_fraction_of_ozone_in_air': ['nlocs'], + varname_ozone: ['Location'], } +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + class mls(object): def __init__(self, filenames, lvmin, lvmax, sTAI, eTAI, nrt, qcOn, errorOn): @@ -76,33 +82,34 @@ def 
__init__(self, filenames, lvmin, lvmax, sTAI, eTAI, nrt, qcOn, errorOn): self.endTAI = eTAI self.nrt = nrt for v in list(ioda2nc.keys()): - if(v != 'valKey' and v != 'errKey'): + if(v == 'status' or v == 'precision' or v == 'convergence' or v == 'quality'): + pass + elif(v != 'valKey' and v != 'errKey'): self.outdata[(v, 'MetaData')] = [] - self.outdata[('level', 'MetaData')] = [] - self._setVarDict('mole_fraction_of_ozone_in_air') - self.outdata[self.varDict['mole_fraction_of_ozone_in_air']['valKey']] = [] + self.outdata[('referenceLevel', 'MetaData')] = [] + self._setVarDict(varname_ozone) + self.outdata[self.varDict[varname_ozone]['valKey']] = [] if(self.qcOn): - self.outdata[self.varDict['mole_fraction_of_ozone_in_air']['errKey']] = [] + self.outdata[self.varDict[varname_ozone]['errKey']] = [] self._read() # set ioda variable keys def _setVarDict(self, iodavar): - self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() + self.varDict[iodavar]['valKey'] = iodavar, obsValName if(self.qcOn): - self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() - self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() + self.varDict[iodavar]['errKey'] = iodavar, obsErrName + self.varDict[iodavar]['qcKey'] = iodavar, qcName # set variable attributes for IODA def _setVarAttr(self, iodavar): - self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OerrName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'ppmv' - self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'ppmv' + self.varAttrs[iodavar, obsValName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, obsErrName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, qcName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, obsValName]['units'] = 'ppmv' + self.varAttrs[iodavar, obsErrName]['units'] = 
'ppmv' varsToAddUnits = list(ioda2nc.keys()) - varsToAddUnits.append('level') for v in varsToAddUnits: if(v != 'valKey' and v != 'errKey'): vkey = (v, 'MetaData') @@ -110,14 +117,14 @@ def _setVarAttr(self, iodavar): self.varAttrs[vkey]['units'] = 'Pa' elif(v == 'dateTime'): self.varAttrs[vkey]['units'] = 'seconds since 1993-01-01T00:00:00Z' - elif('angle' in v.lower() or 'latitude' in v.lower() or 'longitude' in v.lower()): - self.varAttrs[vkey]['units'] = 'degrees' - elif('flag' in v.lower()): - self.varAttrs[vkey]['units'] = 'unitless' + elif('latitude' in v.lower()): + self.varAttrs[vkey]['units'] = 'degree_north' + elif('longitude' in v.lower()): + self.varAttrs[vkey]['units'] = 'degree_east' + elif('angle' in v.lower()): + self.varAttrs[vkey]['units'] = 'degree' elif('prior' in v.lower()): self.varAttrs[vkey]['units'] = 'ppmv' - else: - self.varAttrs[vkey]['units'] = 'unitless' # Read data needed from raw MLS file. def _read_nc(self, filename, ifile, maxfile): @@ -139,7 +146,7 @@ def _read_nc(self, filename, ifile, maxfile): d = {} for k in list(ioda2nc.keys()): - if (k == 'air_pressure'): + if (k == 'pressure'): d[k] = ncd[ioda2nc[k]][...]*100. 
# convert to Pa d[k].mask = False else: @@ -179,13 +186,13 @@ def _just_flatten(self, d): dd['precision'] = d['precision'][idx, self.lmin:self.lmax+1] lvec = np.arange(self.lmin+1, self.lmax+2) dd['level'], dd['status'] = np.meshgrid(np.arange(self.lmin+1, self.lmax+2), d['status'][idx]) - dd['air_pressure'], dd['dateTime'] = np.meshgrid(d['air_pressure'][self.lmin:self.lmax+1], d['dateTime'][idx]) + dd['pressure'], dd['dateTime'] = np.meshgrid(d['pressure'][self.lmin:self.lmax+1], d['dateTime'][idx]) dd['quality'] = np.tile(d['quality'][idx], (lvec.shape[0], 1)).T dd['convergence'] = np.tile(d['convergence'][idx], (lvec.shape[0], 1)).T dd['status'] = np.tile(d['status'][idx], (lvec.shape[0], 1)).T dd['latitude'] = np.tile(d['latitude'][idx], (lvec.shape[0], 1)).T dd['longitude'] = np.tile(d['longitude'][idx], (lvec.shape[0], 1)).T - dd['solar_zenith_angle'] = np.tile(d['solar_zenith_angle'][idx], (lvec.shape[0], 1)).T + dd['solarZenithAngle'] = np.tile(d['solarZenithAngle'][idx], (lvec.shape[0], 1)).T for k in list(dd.keys()): dd[k] = np.asarray(dd[k]) dd[k] = dd[k].flatten().tolist() @@ -209,9 +216,9 @@ def _do_qc(self, d): if(d['dateTime'][irec] < self.startTAI or d['dateTime'][irec] > self.endTAI): continue for k in list(d.keys()): - if (len(d[k].shape) == 1 and k != 'air_pressure'): + if (len(d[k].shape) == 1 and k != 'pressure'): dd[k].append(d[k][irec]) - elif (k == 'air_pressure'): + elif (k == 'pressure'): dd[k].append(d[k][ilev]) elif k != 'errKey': dd[k].append(d[k][irec, ilev]) @@ -220,7 +227,7 @@ def _do_qc(self, d): def _read(self): # set up variable names for IODA - self._setVarAttr('mole_fraction_of_ozone_in_air') + self._setVarAttr(varname_ozone) # loop through input filenames for i, f in enumerate(self.filenames): @@ -238,7 +245,11 @@ def _read(self): d['errKey'].append(self._calc_error( val, d['precision'][ival], d['level'][ival]-1)) for v in list(d.keys()): - if(v != 'valKey' and v != 'errKey'): + if(v == 'status' or v == 'precision' or v == 
'convergence' or v == 'quality'): + pass + elif(v == 'level'): + self.outdata[('referenceLevel', 'MetaData')].extend(d[v]) + elif(v != 'valKey' and v != 'errKey'): self.outdata[(v, 'MetaData')].extend(d[v]) for ncvar, iodavar in obsvars.items(): self.outdata[self.varDict[iodavar] @@ -246,8 +257,8 @@ def _read(self): if(self.errorOn): self.outdata[self.varDict[iodavar]['errKey']].extend(d['errKey']) - DimDict['nlocs'] = np.float32(len(self.outdata[('dateTime', 'MetaData')])) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + nlocs = len(self.outdata[('dateTime', 'MetaData')]) + DimDict['Location'] = nlocs for k in self.outdata.keys(): self.outdata[k] = np.asarray(self.outdata[k]) diff --git a/src/compo/modis_aod2ioda.py b/src/compo/modis_aod2ioda.py index 8f9487529..561946204 100755 --- a/src/compo/modis_aod2ioda.py +++ b/src/compo/modis_aod2ioda.py @@ -28,26 +28,30 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] +obsvars = ["aerosolOpticalDepth"] -obsvars = { - 'A': "aerosol_optical_depth_4", -} - +# A dictionary of global attributes. More filled in further down. AttrData = { 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), + 'description': 'AOD at 550nm' } +# A dictionary of variable dimensions. +DimDict = {} -DimDict = { -} +# A dictionary of variable names and their dimensions. +VarDims = {'aerosolOpticalDepth': ['Location']} -VarDims = { - 'aerosol_optical_depth': ['nlocs'] -} +# Get the group names we use the most. 
+metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +long_missing_value = nc.default_fillvals['i8'] class AOD(object): @@ -58,47 +62,55 @@ def __init__(self, filenames, obs_time, pltfrm): self.varDict = defaultdict(lambda: defaultdict(dict)) self.outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + # there's absolutely no difference in the hdf4 files attributes + # between Terra and Aqua files. So it is user specified + AttrData['platform'] = pltfrm + # sensor would be always MODIS for this converter + AttrData['sensor'] = 'MODIS' + AttrData['datetimeReference'] = obs_time self._read() def _read(self): # set up variable names for IODA - for ncvar, iodavar in obsvars.items(): - self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() - self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() - self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() - self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OerrName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OvalName()]['_FillValue'] = -9999. - self.varAttrs[iodavar, iconv.OerrName()]['_FillValue'] = -9999. 
- self.varAttrs[iodavar, iconv.OqcName()]['_FillValue'] = -9999 - self.varAttrs[iodavar, iconv.OvalName()]['units'] = '1' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' - self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'unitless' + for iodavar in obsvars: + self.varDict[iodavar]['valKey'] = iodavar, obsValName + self.varDict[iodavar]['errKey'] = iodavar, obsErrName + self.varDict[iodavar]['qcKey'] = iodavar, qcName + self.varAttrs[iodavar, obsValName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, obsErrName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, qcName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, obsValName]['_FillValue'] = -9999. + self.varAttrs[iodavar, obsErrName]['_FillValue'] = -9999. + self.varAttrs[iodavar, qcName]['_FillValue'] = -9999 + self.varAttrs[iodavar, obsValName]['units'] = '1' + self.varAttrs[iodavar, obsErrName]['units'] = '1' + + # All of MODIS AOD data have a singular reference time + self.varAttrs[('dateTime', metaDataName)]['units'] = 'seconds since 1993-01-01T00:00:00Z' + + # Make empty lists for the output vars + self.outdata[('latitude', metaDataName)] = np.array([], dtype=np.float32) + self.outdata[('longitude', metaDataName)] = np.array([], dtype=np.float32) + self.outdata[('dateTime', metaDataName)] = np.array([], dtype=np.int64) + for iodavar in obsvars: + self.outdata[self.varDict[iodavar]['valKey']] = np.array([], dtype=np.float32) + self.outdata[self.varDict[iodavar]['errKey']] = np.array([], dtype=np.float32) + self.outdata[self.varDict[iodavar]['qcKey']] = np.array([], dtype=np.int32) # loop through input filenames - first = True for f in self.filenames: hdf = SD(f, SDC.READ) - # there's absolutely no difference in the hdf4 files attributes - # between Terra and Aqua files. 
So it is user specified - AttrData['platform'] = self.pltfrm - # sensor would be always MODIS for this converter - AttrData['sensor'] = 'MODIS' - - obstime = self.obs_time - AttrData['date_time'] = self.obs_time - AttrData['observation_type'] = 'Aod' # Get variables modis_time = hdf.select('Scan_Start_Time')[:].ravel() - - # convert time to date_string + print(f"length of time var: {len(modis_time)}") + modis_time = modis_time.astype('float32') lats = hdf.select('Latitude')[:].ravel() lats = lats.astype('float32') lons = hdf.select('Longitude')[:].ravel() lons = lons.astype('float32') aod = hdf.select('AOD_550_Dark_Target_Deep_Blue_Combined')[:].ravel() + aod = aod.astype('float64') land_sea_flag = hdf.select('Land_sea_Flag')[:].ravel() QC_flag = hdf.select('Land_Ocean_Quality_Flag')[:].ravel() QC_flag = QC_flag.astype('int32') @@ -117,10 +129,9 @@ def _read(self): sen_zen = sen_zen[pos_index] unc_land = unc_land[pos_index] * 1E-3 # see scale factor modis_time = modis_time[pos_index] - obs_time = np.empty_like(QC_flag, dtype=object) - obs_time_2 = [datetime.fromisoformat('1993-01-01') + timedelta(seconds=x) for x in modis_time] - for t in range(len(obs_time_2)): - obs_time[t] = obs_time_2[t].strftime('%Y-%m-%dT%H:%M:%SZ') + obs_time = np.full(len(modis_time), long_missing_value, dtype=np.int64) + for n, t in enumerate(modis_time): + obs_time[n] = round(t) # uncertainty estimates: # From MODIS file (over ocean) and Levy, 2010 (over land) @@ -129,34 +140,17 @@ def _read(self): over_ocean = np.logical_not(land_sea_flag > 0) over_land = np.logical_not(land_sea_flag == 0) UNC = np.where(over_land, unc_land, np.add(0.05, np.multiply(0.15, aod))) - if first: - self.outdata[('latitude', 'MetaData')] = lats - self.outdata[('longitude', 'MetaData')] = lons - self.outdata[('datetime', 'MetaData')] = obs_time - else: - self.outdata[('latitude', 'MetaData')] = np.concatenate((self.outdata[('latitude', 'MetaData')], lats)) - self.outdata[('longitude', 'MetaData')] = 
np.concatenate((self.outdata[('longitude', 'MetaData')], lons)) - self.outdata[('datetime', 'MetaData')] = np.concatenate((self.outdata[('datetime', 'MetaData')], obs_time)) - - for ncvar, iodavar in obsvars.items(): - data = aod.astype('float32') - err = UNC.astype('float32') - if first: - self.outdata[self.varDict[iodavar]['valKey']] = data - self.outdata[self.varDict[iodavar]['errKey']] = err - self.outdata[self.varDict[iodavar]['qcKey']] = QC_flag - - else: - self.outdata[self.varDict[iodavar]['valKey']] = np.concatenate( - (self.outdata[self.varDict[iodavar]['valKey']], data)) - self.outdata[self.varDict[iodavar]['errKey']] = np.concatenate( - (self.outdata[self.varDict[iodavar]['errKey']], err)) - self.outdata[self.varDict[iodavar]['qcKey']] = np.concatenate( - (self.outdata[self.varDict[iodavar]['qcKey']], QC_flag)) - - first = False - DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + + self.outdata[('latitude', metaDataName)] = np.append(self.outdata[('latitude', metaDataName)], np.array(lats, dtype=np.float32)) + self.outdata[('longitude', metaDataName)] = np.append(self.outdata[('longitude', metaDataName)], np.array(lons, dtype=np.float32)) + self.outdata[('dateTime', metaDataName)] = np.append(self.outdata[('dateTime', metaDataName)], np.array(obs_time, dtype=np.int64)) + + for iodavar in obsvars: + self.outdata[self.varDict[iodavar]['valKey']] = np.append(self.outdata[self.varDict[iodavar]['valKey']], np.array(aod, dtype=np.float32)) + self.outdata[self.varDict[iodavar]['errKey']] = np.append(self.outdata[self.varDict[iodavar]['errKey']], np.array(UNC, dtype=np.float32)) + self.outdata[self.varDict[iodavar]['qcKey']] = np.append(self.outdata[self.varDict[iodavar]['qcKey']], np.array(QC_flag, dtype=np.int32)) + + DimDict['Location'] = len(self.outdata[('dateTime', metaDataName)]) def main(): diff --git a/src/compo/mopitt_co_nc2ioda.py b/src/compo/mopitt_co_nc2ioda.py index 
a88b7aed8..44af99c7c 100755 --- a/src/compo/mopitt_co_nc2ioda.py +++ b/src/compo/mopitt_co_nc2ioda.py @@ -27,11 +27,11 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] obsvars = { - 'carbonmonoxide_total_column': 'carbon_monoxide_in_total_column', + 'carbonmonoxide_total_column': 'carbonmonoxideTotal', } AttrData = { @@ -43,7 +43,7 @@ } VarDims = { - 'carbon_monoxide_in_total_column': ['nlocs'], + 'carbonmonoxideTotal': ['Location'], } # constants @@ -64,7 +64,7 @@ def __init__(self, filenames, time_range): def _read(self): # set up variable names for IODA - for iodavar in ['carbon_monoxide_in_total_column']: + for iodavar in ['carbonmonoxideTotal']: self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() @@ -164,7 +164,7 @@ def _read(self): if first: # add metadata variables - self.outdata[('datetime', 'MetaData')] = times[flg] + self.outdata[('dateTime', 'MetaData')] = times[flg] self.outdata[('latitude', 'MetaData')] = lats[flg] self.outdata[('longitude', 'MetaData')] = lons[flg] self.outdata[('apriori_term', 'RtrvlAncData')] = ap_tc[flg] @@ -182,8 +182,8 @@ def _read(self): self.outdata[self.varDict[iodavar]['qcKey']] = qa[flg] else: - self.outdata[('datetime', 'MetaData')] = np.concatenate(( - self.outdata[('datetime', 'MetaData')], times[flg])) + self.outdata[('dateTime', 'MetaData')] = np.concatenate(( + self.outdata[('dateTime', 'MetaData')], times[flg])) self.outdata[('latitude', 'MetaData')] = np.concatenate(( self.outdata[('latitude', 'MetaData')], lats[flg])) self.outdata[('longitude', 'MetaData')] = np.concatenate(( @@ -209,8 +209,8 @@ def _read(self): (self.outdata[self.varDict[iodavar]['qcKey']], qa[flg])) first = False - DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = 
len(self.outdata[('dateTime', 'MetaData')]) + AttrData['Location'] = np.int32(DimDict['Location']) for k in range(nlevs): varname = 'averaging_kernel_level_'+str(k+1) diff --git a/src/compo/omi_o3_nc2ioda.py b/src/compo/omi_o3_nc2ioda.py index 778001ce3..5decb1b3c 100755 --- a/src/compo/omi_o3_nc2ioda.py +++ b/src/compo/omi_o3_nc2ioda.py @@ -24,39 +24,36 @@ from orddicts import DefaultOrderedDict import ioda_conv_engines as iconv - -def is_bit_set(integer_value, bit_position): - return (integer_value & (1 << bit_position)) != 0 - - # Global Dictionaries. locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("dateTime", "integer"), + ("dateTime", "long"), ] +# Name to call the output ozone variable +varname_ozone = 'ozoneTotal' + +# Dictionary of variable name found in input file and corresponding output name +obsvars = { + 'integrated_layer_ozone_in_air': varname_ozone, +} + ioda2nc = {} ioda2nc['latitude'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/Latitude' ioda2nc['longitude'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/Longitude' ioda2nc['dateTime'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/Time' -ioda2nc['solar_zenith_angle'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/SolarZenithAngle' +ioda2nc['solarZenithAngle'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/SolarZenithAngle' ioda2nc['prior_o3'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/APrioriLayerO3' # ioda2nc['Layer_Efficiency'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/LayerEfficiency' ioda2nc['valKey'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/ColumnAmountO3' ioda2nc['quality_flag'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/QualityFlags' ioda2nc['algorithm_flag'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/AlgorithmFlags' - -obsvars = { - 'integrated_layer_ozone_in_air': 'integrated_layer_ozone_in_air', -} - AttrData = { 'converter': os.path.basename(__file__), - 'nvars': 
np.int32(len(obsvars)), - 'satellite': 'aura', + 'platformCommonName': 'aura', 'sensor': 'omi', } @@ -64,9 +61,17 @@ def is_bit_set(integer_value, bit_position): } VarDims = { - 'integrated_layer_ozone_in_air': ['nlocs'], + varname_ozone: ['Location'], } +missing_value = nc.default_fillvals['f4'] +int_missing_value = nc.default_fillvals['i4'] +long_missing_value = nc.default_fillvals['i8'] + + +def is_bit_set(integer_value, bit_position): + return (integer_value & (1 << bit_position)) != 0 + class omi(object): def __init__(self, filenames, sTAI, eTAI, qcOn): @@ -77,15 +82,17 @@ def __init__(self, filenames, sTAI, eTAI, qcOn): self.startTAI = sTAI self.endTAI = eTAI self.qcOn = qcOn - self._setVarDict('integrated_layer_ozone_in_air') + self._setVarDict(varname_ozone) # initialize dictionary with empty list for MetaData variable to populate later. vars2output = list(ioda2nc.keys()) - vars2output.append('scan_position') + vars2output.append('sensorScanPosition') for v in vars2output: - if(v != 'valKey'): + if(v == 'quality_flag' or v == 'algorithm_flag' or v == 'prior_o3'): + pass + elif(v != 'valKey'): self.outdata[(v, 'MetaData')] = [] - self.outdata[self.varDict['integrated_layer_ozone_in_air']['valKey']] = [] + self.outdata[self.varDict[varname_ozone]['valKey']] = [] self._read() # set ioda variable keys @@ -95,11 +102,9 @@ def _setVarDict(self, iodavar): # set variable attributes for IODA def _setVarAttr(self, iodavar): self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' - missing_value = 9.96921e+36 - int_missing_value = -2147483647 self.varAttrs[iodavar, iconv.OvalName()]['_FillValue'] = missing_value varsToAddUnits = list(ioda2nc.keys()) - varsToAddUnits.append('scan_position') + varsToAddUnits.append('sensorScanPosition') for v in varsToAddUnits: if(v != 'valKey'): vkey = (v, 'MetaData') @@ -107,10 +112,13 @@ def _setVarAttr(self, iodavar): self.varAttrs[vkey]['units'] = 'Pa' elif(v == 'dateTime'): self.varAttrs[vkey]['units'] = 
'seconds since 1993-01-01T00:00:00Z' + self.varAttrs[vkey]['_FillValue'] = long_missing_value + elif('latitude' in v.lower()): + self.varAttrs[vkey]['units'] = 'degree_north' + elif('longitude' in v.lower()): + self.varAttrs[vkey]['units'] = 'degree_east' elif('angle' in v.lower()): - self.varAttrs[vkey]['units'] = 'degrees' - elif('flag' in v.lower()): - self.varAttrs[vkey]['units'] = 'unitless' + self.varAttrs[vkey]['units'] = 'degree' elif('prior' in v.lower()): self.varAttrs[vkey]['units'] = 'ppmv' self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'DU' @@ -122,6 +130,8 @@ def _read_nc(self, filename): # use dictionary above to just read fields we want out of the netcdf. for k in list(ioda2nc.keys()): d[k] = ncd[ioda2nc[k]][...] + if k == 'dateTime': + d[k] = np.round(d[k]) d[k].mask = False ncd.close() return d @@ -137,8 +147,8 @@ def _just_flatten(self, d): scn_tmp = scn_tmp.astype('float32') tmp = tmp.astype(np.int64) dd[k] = tmp.flatten().tolist() - dd['scan_position'] = scn_tmp.flatten().tolist() - elif(k == 'Prior_O3'): + dd['sensorScanPosition'] = scn_tmp.flatten().tolist() + elif(k.lower() == 'prior_o3'): dd[k] = d[k][:, :, 0].flatten().tolist() else: dd[k] = d[k].flatten().tolist() @@ -153,7 +163,7 @@ def _do_qc(self, d): # intialize dictonary of qc'd variables dd = {} flatVars = list(ioda2nc.keys()) - flatVars.append('scan_position') + flatVars.append('sensorScanPosition') for v in flatVars: dd[v] = [] @@ -200,20 +210,20 @@ def _do_qc(self, d): if (six_set or eight_set or nine_set): continue # could simply this further with one if statement possibly more clever use of a bit masking. 
- dd['scan_position'].append(float(iscan+1)) + dd['sensorScanPosition'].append(float(iscan+1)) for v in flatVars: if(v == 'dateTime'): dd[v].append(d[v][itime]) elif(v == 'prior_o3'): dd[v].append(d[v][itime, iscan, 0]) - elif(v != 'scan_position'): + elif(v != 'sensorScanPosition'): dd[v].append(d[v][itime, iscan]) return dd def _read(self): # set up variable names for IODA - for iodavar in ['integrated_layer_ozone_in_air', ]: + for iodavar in [varname_ozone, ]: # self._setVarDict(var) self._setVarAttr(iodavar) # loop through input filenames @@ -227,18 +237,21 @@ def _read(self): d = self._just_flatten(nc_data) # add MetaData variables. for v in list(d.keys()): - if(v != 'valKey'): + if(v == 'quality_flag' or v == 'algorithm_flag' or v == 'prior_o3'): + pass + elif(v != 'valKey'): self.outdata[(v, 'MetaData')].extend(d[v]) for ncvar, iodavar in obsvars.items(): self.outdata[self.varDict[iodavar] ['valKey']].extend(d['valKey']) - DimDict['nlocs'] = len(self.outdata[('longitude', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + + nlocs = len(self.outdata[('longitude', 'MetaData')]) + DimDict['Location'] = np.int64(nlocs) # add dummy air_pressure so UFO will know this is a total column ob, and not partial. 
- self.outdata[('air_pressure', 'MetaData')] = np.zeros( - DimDict['nlocs']).tolist() - self.varAttrs[('air_pressure', 'MetaData')]['units'] = 'Pa' + self.outdata[('pressure', 'MetaData')] = np.zeros( + DimDict['Location']).tolist() + self.varAttrs[('pressure', 'MetaData')]['units'] = 'Pa' for k in self.outdata.keys(): self.outdata[k] = np.asarray(self.outdata[k]) @@ -247,7 +260,7 @@ def _read(self): elif(self.outdata[k].dtype == 'int64' and k != ('dateTime', 'MetaData')): self.outdata[k] = self.outdata[k].astype('int32') elif(self.outdata[k].dtype == 'uint16' or self.outdata[k].dtype == 'uint8'): - self.outdata[k] = self.outdata[k].astype(int) + self.outdata[k] = self.outdata[k].astype('int32') self.outdata[('dateTime', 'MetaData')] = self.outdata[('dateTime', 'MetaData')].astype('int64') # ensure lon is 0-360 self.outdata[('longitude', 'MetaData')] = self.outdata[('longitude', 'MetaData')] % 360 diff --git a/src/compo/ompsnm_o3_nc2ioda.py b/src/compo/ompsnm_o3_nc2ioda.py index ca72c0785..0502c4b59 100755 --- a/src/compo/ompsnm_o3_nc2ioda.py +++ b/src/compo/ompsnm_o3_nc2ioda.py @@ -25,21 +25,27 @@ from orddicts import DefaultOrderedDict import ioda_conv_engines as iconv - -# Global Dictionaries. 
+# Dictionary of essential MetaData locationKeyList = [ ("latitude", "float"), ("longitude", "float"), ("dateTime", "long"), ] +# Name to call the output ozone variable +varname_ozone = 'ozoneTotal' + +# Dictionary of variable name found in input file and corresponding output name +obsvars = { + 'integrated_layer_ozone_in_air': varname_ozone, +} # dictionary to map things we're putting into ioda and taking out of instrument native format ioda2nc = {} ioda2nc['latitude'] = 'GeolocationData/Latitude' ioda2nc['longitude'] = 'GeolocationData/Longitude' ioda2nc['dateTime'] = 'GeolocationData/Time' -ioda2nc['solar_zenith_angle'] = 'GeolocationData/SolarZenithAngle' +ioda2nc['solarZenithAngle'] = 'GeolocationData/SolarZenithAngle' ioda2nc['valKey'] = 'ScienceData/ColumnAmountO3' ioda2nc['ground_pixel_quality'] = 'GeolocationData/GroundPixelQualityFlags' ioda2nc['quality_flags'] = 'ScienceData/QualityFlags' @@ -47,15 +53,9 @@ ioda2nc['measurement_quality_flags'] = 'ScienceData/MeasurementQualityFlags' ioda2nc['instrument_quality_flags'] = 'GeolocationData/InstrumentQualityFlags' - -obsvars = { - 'integrated_layer_ozone_in_air': 'integrated_layer_ozone_in_air', -} - AttrData = { 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), - 'satellite': 'npp', + 'platformCommonName': 'npp', 'sensor': 'ompsnm', } @@ -63,7 +63,7 @@ } VarDims = { - 'integrated_layer_ozone_in_air': ['nlocs'], + varname_ozone: ['Location'], } @@ -75,17 +75,19 @@ def __init__(self, filenames, sTAI, eTAI): self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) self.startTAI = sTAI self.endTAI = eTAI - self._setVarDict('integrated_layer_ozone_in_air') + self._setVarDict(varname_ozone) vars2output = list(ioda2nc.keys()) - vars2output.append('scan_position') + vars2output.append('sensorScanPosition') for v in vars2output: - if(v != 'valKey'): + if('quality' in v or 'flags' in v): + pass + elif(v != 'valKey'): self.outdata[(v, 'MetaData')] = [] - 
self.outdata[self.varDict['integrated_layer_ozone_in_air']['valKey']] = [] + self.outdata[self.varDict[varname_ozone]['valKey']] = [] - self._setVarDict('integrated_layer_ozone_in_air') - self.outdata[self.varDict['integrated_layer_ozone_in_air']['valKey']] = [] + self._setVarDict(varname_ozone) + self.outdata[self.varDict[varname_ozone]['valKey']] = [] self._read() @@ -98,7 +100,7 @@ def _setVarAttr(self, iodavar): self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' varsToAddUnits = list(ioda2nc.keys()) - varsToAddUnits.append('scan_position') + varsToAddUnits.append('sensorScanPosition') for v in varsToAddUnits: if(v != 'valKey'): vkey = (v, 'MetaData') @@ -108,15 +110,11 @@ def _setVarAttr(self, iodavar): self.varAttrs[vkey]['units'] = 'seconds since 1993-01-01T00:00:00Z' elif('angle' in v.lower()): self.varAttrs[vkey]['units'] = 'degrees' - elif('flag' in v.lower()): - self.varAttrs[vkey]['units'] = 'unitless' elif('prior' in v.lower()): self.varAttrs[vkey]['units'] = 'ppmv' - else: - self.varAttrs[vkey]['units'] = 'unitless' self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'DU' - vkey = ('air_pressure', 'MetaData') + vkey = ('pressure', 'MetaData') self.varAttrs[vkey]['units'] = 'Pa' # Read data needed from raw OMPSNM file. 
@@ -132,8 +130,8 @@ def _read_nc(self, filename): # mesh time and scan_position to get flattened array instead of using loops time_vec = d['dateTime'] scan_position_vec = np.arange(1, d['valKey'].shape[1]+1) - d['scan_position'], d['dateTime'] = np.meshgrid(scan_position_vec, time_vec) - d['scan_position'] = d['scan_position'].astype('float32') + d['sensorScanPosition'], d['dateTime'] = np.meshgrid(scan_position_vec, time_vec) + d['sensorScanPosition'] = d['sensorScanPosition'].astype('float32') d['measurement_quality_flags'].mask = False d['instrument_quality_flags'].mask = False d['measurement_quality_flags'] = np.tile(d['measurement_quality_flags'], (scan_position_vec.shape[0], 1)).T @@ -144,7 +142,7 @@ def _read_nc(self, filename): def _read(self): # set up variable names for IODA - for iodavar in ['integrated_layer_ozone_in_air', ]: + for iodavar in [varname_ozone, ]: # self._setVarDict(var) self._setVarAttr(iodavar) # loop through input filenames @@ -152,15 +150,17 @@ def _read(self): fileData, idx = self._read_nc(f) # add metadata variables for v in list(fileData.keys()): - if(v != 'valKey' and v != 'ozone_Apriori' and v != 'layer_efficiency'): + if('quality' in v or 'flags' in v): + pass + elif(v != 'valKey' and v != 'ozone_Apriori' and v != 'layer_efficiency'): # add metadata variables self.outdata[(v, 'MetaData')].extend(fileData[v][idx].flatten().tolist()) for ncvar, iodavar in obsvars.items(): self.outdata[self.varDict[iodavar]['valKey']].extend(fileData['valKey'][idx].flatten().tolist()) # add dummy air_pressure so UFO will know this is a total column ob, and not partial. 
- nloc = len(self.outdata[('dateTime', 'MetaData')]) - self.outdata[('air_pressure', 'MetaData')] = np.zeros(nloc).tolist() + nlocs = len(self.outdata[('dateTime', 'MetaData')]) + self.outdata[('pressure', 'MetaData')] = np.zeros(nlocs).tolist() for k in self.outdata.keys(): self.outdata[k] = np.asarray(self.outdata[k]) @@ -168,8 +168,7 @@ def _read(self): self.outdata[k] = self.outdata[k].astype('float32') elif(self.outdata[k].dtype == 'int64' and k != ('dateTime', 'MetaData')): self.outdata[k] = self.outdata[k].astype('int32') - DimDict['nlocs'] = self.outdata[('dateTime', 'MetaData')].shape[0] - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = nlocs self.outdata[('dateTime', 'MetaData')] = self.outdata[('dateTime', 'MetaData')].astype(np.int64) self.outdata[('longitude', 'MetaData')] = self.outdata[('longitude', 'MetaData')] % 360 # end ompsnm object. diff --git a/src/compo/tropomi_no2_co_nc2ioda.py b/src/compo/tropomi_no2_co_nc2ioda.py index 2e28d367c..34ac03c7d 100755 --- a/src/compo/tropomi_no2_co_nc2ioda.py +++ b/src/compo/tropomi_no2_co_nc2ioda.py @@ -27,7 +27,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "string"), ] AttrData = { @@ -58,10 +58,6 @@ def _read(self): varname_str = list(self.obsVar.keys())[0] print('Processing variable: %s' % (varname_str), flush=1) iodavar = self.obsVar[varname_str] - # if self.columnType == 'total': - # iodavar = self.obsVar['nitrogendioxide_total_column'] - # elif self.columnType == 'tropo': - # iodavar = self.obsVar['nitrogendioxide_tropospheric_column'] self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() @@ -124,11 +120,7 @@ def _read(self): AttrData['averaging_kernel_levels'] = np.int32(nlevs) # scale the avk using AMF ratio and tropopause level for tropo column - if self.varname == 'no2': - nlocf = 
len(trop_layer[flg]) - elif self.varname == 'co': - nlocf = len(lats[flg]) - + nlocf = len(lats[flg]) scaleAK = np.ones((nlocf, nlevs), dtype=np.float32) if self.varname == 'no2' and self.columnType == 'tropo': # do not loop over nlocs here this makes the execution very slow @@ -138,7 +130,7 @@ def _read(self): if first: # add metadata variables - self.outdata[('datetime', 'MetaData')] = times[flg] + self.outdata[('dateTime', 'MetaData')] = times[flg] self.outdata[('latitude', 'MetaData')] = lats[flg] self.outdata[('longitude', 'MetaData')] = lons[flg] self.outdata[('quality_assurance_value', 'MetaData')] = qa_value[flg] @@ -157,11 +149,11 @@ def _read(self): if self.varname == 'no2': self.outdata[varname_pr] = ak[nlevs-1, 1] + bk[nlevs-1, 1]*ps[...].ravel() elif self.varname == 'co': - self.outdata[varname_pr] = np.zeros((nlocf, nlevs), dtype=np.float32) + self.outdata[varname_pr] = np.zeros((nlocf), dtype=np.float32) else: - self.outdata[('datetime', 'MetaData')] = np.concatenate(( - self.outdata[('datetime', 'MetaData')], times[flg])) + self.outdata[('dateTime', 'MetaData')] = np.concatenate(( + self.outdata[('dateTime', 'MetaData')], times[flg])) self.outdata[('latitude', 'MetaData')] = np.concatenate(( self.outdata[('latitude', 'MetaData')], lats[flg])) self.outdata[('longitude', 'MetaData')] = np.concatenate(( @@ -184,7 +176,7 @@ def _read(self): (self.outdata[varname_pr], ak[nlevs-1, 1] + bk[nlevs-1, 1]*ps[...].ravel()[flg])) elif self.varname == 'co': self.outdata[varname_pr] = np.concatenate( - (self.outdata[varname_pr], np.zeros((nlocf, nlevs), dtype=np.float32))) + (self.outdata[varname_pr], np.zeros((nlocf), dtype=np.float32))) for ncvar, iodavar in self.obsVar.items(): @@ -210,8 +202,8 @@ def _read(self): first = False - DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')]) + AttrData['Location'] = np.int32(DimDict['Location']) 
for k in range(nlevs): varname = 'averaging_kernel_level_'+str(k+1) @@ -267,33 +259,31 @@ def main(): args = parser.parse_args() if args.variable == "co": - var_in_name = 'carbonmonoxide' - var_out_name = 'carbon_monoxide' + var_name = 'carbonmonoxide' if args.column == "tropo": print('CO is only available for total column, reset column to total', flush=1) args.column = 'total' elif args.variable == "no2": - var_in_name = 'nitrogendioxide' - var_out_name = 'nitrogen_dioxide' + var_name = 'nitrogendioxide' if args.column == "tropo": obsVar = { - var_in_name+'_tropospheric_column': var_out_name+'_in_tropospheric_column' + var_name+'_tropospheric_column': var_name+'Column' } varDims = { - var_out_name+'_in_tropospheric_column': ['nlocs'] + var_name+'Column': ['Location'] } elif args.column == "total": obsVar = { - var_in_name+'_total_column': var_out_name+'_in_total_column' + var_name+'_total_column': var_name+'Total' } varDims = { - var_out_name+'_in_total_column': ['nlocs'] + var_name+'Total': ['Location'] } # Read in the NO2 data diff --git a/src/compo/viirs_aod2ioda.py b/src/compo/viirs_aod2ioda.py index bec5d4e73..a8fe8a343 100644 --- a/src/compo/viirs_aod2ioda.py +++ b/src/compo/viirs_aod2ioda.py @@ -26,26 +26,28 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "integer") ] +obsvars = ["aerosolOpticalDepth"] -obsvars = { - 'A': "aerosol_optical_depth", -} +# A dictionary of global attributes. More filled in further down. +AttrData = {} +AttrData['ioda_object_type'] = 'AOD at 550nm' -AttrData = { - 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), -} +# A dictionary of variable dimensions. +DimDict = {} +# A dictionary of variable names and their dimensions. +VarDims = {'aerosolOpticalDepth': ['Location']} -DimDict = { -} +# Get the group names we use the most. 
+metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() -VarDims = { - 'aerosol_optical_depth': ['nlocs', 'nchans'], -} +long_missing_value = nc.default_fillvals['i8'] class AOD(object): @@ -61,35 +63,42 @@ def __init__(self, filenames, method, mask, thin): def _read(self): # set up variable names for IODA - for ncvar, iodavar in obsvars.items(): - self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() - self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() - self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() - - self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OerrName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OvalName()]['units'] = '1' - self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'unitless' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' + for iodavar in obsvars: + self.varDict[iodavar]['valKey'] = iodavar, obsValName + self.varDict[iodavar]['errKey'] = iodavar, obsErrName + self.varDict[iodavar]['qcKey'] = iodavar, qcName + self.varAttrs[iodavar, obsValName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, obsErrName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, qcName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, obsValName]['_FillValue'] = -9999. + self.varAttrs[iodavar, obsErrName]['_FillValue'] = -9999. 
+ self.varAttrs[iodavar, qcName]['_FillValue'] = -9999 + self.varAttrs[iodavar, obsValName]['units'] = '1' + self.varAttrs[iodavar, obsErrName]['units'] = '1' + + # Make empty lists for the output vars + self.outdata[('latitude', metaDataName)] = [] + self.outdata[('longitude', metaDataName)] = [] + self.outdata[('dateTime', metaDataName)] = np.array([], dtype=object) + for iodavar in obsvars: + self.outdata[self.varDict[iodavar]['valKey']] = [] + self.outdata[self.varDict[iodavar]['errKey']] = [] + self.outdata[self.varDict[iodavar]['qcKey']] = [] # loop through input filenamess - first = True for f in self.filenames: - ncd = nc.Dataset(f) + ncd = nc.Dataset(f, 'r') gatts = {attr: getattr(ncd, attr) for attr in ncd.ncattrs()} - base_datetime = gatts["time_coverage_end"] + base_datetime = datetime.strptime(gatts["time_coverage_end"], '%Y-%m-%dT%H:%M:%SZ') self.satellite = gatts["satellite_name"] self.sensor = gatts["instrument_name"] - AttrData["observation_type"] = "AOD" - AttrData["satellite"] = self.satellite + AttrData["platform"] = self.satellite AttrData["sensor"] = self.sensor if AttrData['sensor'] == 'VIIRS': AttrData['sensor'] = "v.viirs-m_npp" - if AttrData['satellite'] == 'NPP': - AttrData['satellite'] = "suomi_npp" - AttrData['date_time_string'] = base_datetime + if AttrData['platform'] == 'NPP': + AttrData['platform'] = "suomi_npp" lons = ncd.variables['Longitude'][:].ravel() lats = ncd.variables['Latitude'][:].ravel() @@ -130,36 +139,19 @@ def _read(self): errs[qcpath != 1] = 0.111431 + 0.128699*vals[qcpath != 1] # Write out data - - # Values - - if first: - self.outdata[self.varDict[iodavar]['valKey']] = vals - self.outdata[self.varDict[iodavar]['errKey']] = errs - self.outdata[self.varDict[iodavar]['qcKey']] = qcall - else: - self.outdata[self.varDict[iodavar]['valKey']] = np.concatenate( - (self.outdata[self.varDict[iodavar]['valKey']], vals)) - self.outdata[self.varDict[iodavar]['errKey']] = np.concatenate( - 
(self.outdata[self.varDict[iodavar]['errKey']], errs)) - self.outdata[self.varDict[iodavar]['qcKey']] = np.concatenate( - (self.outdata[self.varDict[iodavar]['qcKey']], qcall)) - - # Add Meta data - - if first: - self.outdata[('latitude', 'MetaData')] = lats - self.outdata[('longitude', 'MetaData')] = lons - self.outdata[('datetime', 'MetaData')] = obs_time - else: - self.outdata[('latitude', 'MetaData')] = np.concatenate((self.outdata[('latitude', 'MetaData')], lats)) - self.outdata[('longitude', 'MetaData')] = np.concatenate((self.outdata[('longitude', 'MetaData')], lons)) - self.outdata[('datetime', 'MetaData')] = np.concatenate((self.outdata[('datetime', 'MetaData')], obs_time)) - - first = False - - DimDict['nlocs'] = len(self.outdata[('latitude', 'MetaData')]) - DimDict['nchans'] = np.array([4]) + self.outdata[('latitude', metaDataName)] = np.append(self.outdata[('latitude', metaDataName)], np.array(lats, dtype=np.float32)) + self.outdata[('longitude', metaDataName)] = np.append(self.outdata[('longitude', metaDataName)], np.array(lons, dtype=np.float32)) + self.outdata[('dateTime', metaDataName)] = np.append(self.outdata[('dateTime', metaDataName)], np.array(obs_time, dtype=object)) + + for iodavar in obsvars: + self.outdata[self.varDict[iodavar]['valKey']] = np.append( + self.outdata[self.varDict[iodavar]['valKey']], np.array(vals, dtype=np.float32)) + self.outdata[self.varDict[iodavar]['errKey']] = np.append( + self.outdata[self.varDict[iodavar]['errKey']], np.array(errs, dtype=np.float32)) + self.outdata[self.varDict[iodavar]['qcKey']] = np.append( + self.outdata[self.varDict[iodavar]['qcKey']], np.array(qcall, dtype=np.int32)) + + DimDict['Location'] = len(self.outdata[('latitude', metaDataName)]) def main(): diff --git a/src/conventional/CMakeLists.txt b/src/conventional/CMakeLists.txt index 6737e7638..55f4c925c 100644 --- a/src/conventional/CMakeLists.txt +++ b/src/conventional/CMakeLists.txt @@ -1,9 +1,11 @@ list(APPEND programs amv_ssec_ascii2ioda.py 
amdar_bufr2ioda.py + amv_ssec_ascii2ioda.py buoy_bufr2ioda.py metar_csv2ioda.py - decode_bufr_LDM_raob.py + sonde_bufr2ioda.py + sonde_tac2ioda.py ship_bufr2ioda.py sonde_tac2ioda.py synop_bufr2ioda.py diff --git a/src/conventional/amdar_bufr2ioda.py b/src/conventional/amdar_bufr2ioda.py index b648438a7..d49a4118f 100644 --- a/src/conventional/amdar_bufr2ioda.py +++ b/src/conventional/amdar_bufr2ioda.py @@ -28,17 +28,15 @@ os.environ["TZ"] = "UTC" locationKeyList = [ - ("aircraft_id", "string", ""), - ("aircraft_flightNum", "string", ""), - ("aircraft_tailNum", "string", ""), - ("obs_sequenceNum", "integer", ""), - ("originationAirport", "string", ""), - ("destinationAirport", "string", ""), - ("flight_phase", "integer", ""), - ("roll_angle", "float", "degrees"), - ("roll_angle_quality", "integer", ""), - ("aircraft_speed", "float", "m s-1"), - ("aircraft_heading", "integer", "degrees"), + ("aircraftIdentifier", "string", ""), + ("aircraftFlightNumber", "string", ""), + ("aircraftTailNumber", "string", ""), + ("observationSequenceNum", "integer", ""), + ("aircraftFlightPhase", "integer", ""), + ("aircraftRollAngle", "float", "degrees"), + ("aircraftRollAngleQuality", "integer", ""), + ("aircraftVelocity", "float", "m s-1"), + ("aircraftHeading", "integer", "degrees"), ("latitude", "float", "degrees_north"), ("longitude", "float", "degrees_east"), ("height", "float", "m"), @@ -47,17 +45,15 @@ meta_keys = [m_item[0] for m_item in locationKeyList] metaDataKeyList = { - 'aircraft_id': ['aircraftRegistrationNumberOrOtherIdentification'], - 'aircraft_flightNum': ['aircraftFlightNumber'], - 'aircraft_tailNum': ['aircraftTailNumber'], - 'obs_sequenceNum': ['observationSequenceNumber'], - 'originationAirport': ['originationAirport'], - 'destinationAirport': ['destinationAirport'], - 'flight_phase': ['detailedPhaseOfFlight'], - 'roll_angle': ['aircraftRollAngle'], - 'roll_angle_quality': ['aircraftRollAngleQuality'], - 'aircraft_speed': ['aircraftTrueAirspeed'], - 
'aircraft_heading': ['aircraftTrueHeading'], + 'aircraftIdentifier': ['aircraftRegistrationNumberOrOtherIdentification'], + 'aircraftFlightNumber': ['aircraftFlightNumber'], + 'aircraftTailNumber': ['aircraftTailNumber'], + 'observationSequenceNum': ['observationSequenceNumber'], + 'aircraftFlightPhase': ['detailedPhaseOfFlight'], + 'aircraftRollAngle': ['aircraftRollAngle'], + 'aircraftRollAngleQuality': ['aircraftRollAngleQuality'], + 'aircraftVelocity': ['aircraftTrueAirspeed'], + 'aircraftHeading': ['aircraftTrueHeading'], 'latitude': ['latitude'], 'longitude': ['longitude'], 'height': ['Constructed', 'globalNavigationSatelliteSystemAltitude', 'height', 'flightLevel'], @@ -70,15 +66,15 @@ raw_obsvars = ['airTemperature', 'mixingRatio', 'windDirection', 'windSpeed'] # The outgoing IODA variables (ObsValues), their units, and assigned constant ObsError. -obsvars = ['air_temperature', 'specific_humidity', 'eastward_wind', 'northward_wind'] +obsvars = ['airTemperature', 'specificHumidity', 'windEastward', 'windNorthward'] obsvars_units = ['K', 'kg kg-1', 'm s-1', 'm s-1'] obserrlist = [1.2, 0.75E-3, 1.7, 1.7] VarDims = { - 'air_temperature': ['nlocs'], - 'specific_humidity': ['nlocs'], - 'eastward_wind': ['nlocs'], - 'northward_wind': ['nlocs'], + 'airTemperature': ['Location'], + 'specificHumidity': ['Location'], + 'windEastward': ['Location'], + 'windNorthward': ['Location'], } metaDataName = iconv.MetaDataName() @@ -91,7 +87,7 @@ 'ioda_version': 2, 'description': 'Aircraft observations converted from BUFR', 'source': 'LDM at NCAR-RAL', - 'source_files': '' + 'sourceFiles': '' } DimDict = { @@ -138,12 +134,12 @@ def main(file_names, output_file): for fname in file_names: logging.debug("Reading file: " + fname) - AttrData['source_files'] += ", " + fname + AttrData['sourceFiles'] += ", " + fname data, count, start_pos = read_file(fname, count, start_pos, data) - AttrData['source_files'] = AttrData['source_files'][2:] - logging.debug("All source files: " + 
AttrData['source_files']) + AttrData['sourceFiles'] = AttrData['sourceFiles'][2:] + logging.debug("All source files: " + AttrData['sourceFiles']) if not data: logging.critical("ABORT: no message data was captured, stopping execution.") @@ -151,8 +147,7 @@ def main(file_names, output_file): logging.info("--- {:9.4f} BUFR read seconds ---".format(time.time() - start_time)) nlocs = len(data['dateTime']) - DimDict = {'nlocs': nlocs} - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict = {'Location': nlocs} # Set coordinates and units of the ObsValues. for n, iodavar in enumerate(obsvars): @@ -164,12 +159,12 @@ def main(file_names, output_file): varAttrs[iodavar, qcName]['coordinates'] = 'longitude latitude' varAttrs[iodavar, obsValName]['units'] = obsvars_units[n] varAttrs[iodavar, obsErrName]['units'] = obsvars_units[n] - varAttrs[iodavar, qcName]['units'] = 'unitless' # Set units of the MetaData variables and all _FillValues. for key in meta_keys: dtypestr = locationKeyList[meta_keys.index(key)][1] - varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + if locationKeyList[meta_keys.index(key)][2]: + varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] obs_data[(key, metaDataName)] = np.array(data[key], dtype=dtypes[dtypestr]) @@ -481,10 +476,10 @@ def read_bufr_message(f, count, start_pos, data): spfh[n] = mixing_ratio / (1.0 + mixing_ratio) # Move everything into the final data dictionary, including metadata. 
- data['eastward_wind'] = np.append(data['eastward_wind'], uwnd) - data['northward_wind'] = np.append(data['northward_wind'], vwnd) - data['specific_humidity'] = np.append(data['specific_humidity'], spfh) - data['air_temperature'] = np.append(data['air_temperature'], vals['airTemperature']) + data['windEastward'] = np.append(data['windEastward'], uwnd) + data['windNorthward'] = np.append(data['windNorthward'], vwnd) + data['specificHumidity'] = np.append(data['specificHumidity'], spfh) + data['airTemperature'] = np.append(data['airTemperature'], vals['airTemperature']) for key in meta_keys: data[key] = np.append(data[key], meta_data[key]) diff --git a/src/conventional/amv_ssec_ascii2ioda.py b/src/conventional/amv_ssec_ascii2ioda.py index 70a1722b6..87c21658a 100644 --- a/src/conventional/amv_ssec_ascii2ioda.py +++ b/src/conventional/amv_ssec_ascii2ioda.py @@ -23,21 +23,28 @@ os.environ["TZ"] = "UTC" -varDict = {'eastward_wind': ['eastward_wind', 'm s-1'], - 'northward_wind': ['northward_wind', 'm s-1']} +varDict = {'windEastward': ['windEastward', 'm s-1'], + 'windNorthward': ['windNorthward', 'm s-1']} locationKeyList = [("latitude", "float", "degrees_north"), ("longitude", "float", "degrees_east"), ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z"), - ("air_pressure", "float", "Pa"), + ("pressure", "float", "Pa"), ("sensorCentralFrequency", "float", "Hz"), ("sensorZenithAngle", "float", "degrees"), ("windTrackingCorrelation", "float", "1"), ("windHeightAssignMethod", "integer", ""), - ("satelliteID", "integer", "")] + ("satelliteIdentifier", "integer", "")] meta_keys = [m_item[0] for m_item in locationKeyList] +GlobalAttrs = { + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'description': 'Satellite atmospheric motion vectors (AMV)', + 'source': 'SSEC (ftp)' +} + iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] epoch = datetime.fromisoformat(iso8601_string[14:-1]) @@ -118,12 +125,12 @@ def main(file_names, output_file, 
datetimeRef): nlocs = len(data['dateTime']) logging.info(f" found a total of {nlocs} observations") - DimDict = {'nlocs': nlocs} + DimDict = {'Location': nlocs} varDims = {} for key in varDict.keys(): variable = varDict[key][0] - varDims[variable] = ['nlocs'] + varDims[variable] = ['Location'] varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) @@ -184,20 +191,15 @@ def read_file(file_name, data): for row in reader: try: - year = int(row['day'][0:4]) - month = int(row['day'][4:6]) - day = int(row['day'][6:]) - hour = int(row['hms'][0:2]) - minute = int(row['hms'][2:]) - second = 0 - dtg = datetime(year, month, day, hour, minute, second) + dtg = datetime.strptime(f"{row['day']} {row['hms']}", '%Y%m%d %H%M') time_offset = np.int64(round((dtg - epoch).total_seconds())) local_data['dateTime'] = np.append(local_data['dateTime'], time_offset) local_data['longitude'] = np.append(local_data['longitude'], float(row['lon'])) local_data['latitude'] = np.append(local_data['latitude'], float(row['lat'])) pres = float(row['pre'])*100. 
- local_data['air_pressure'] = np.append(local_data['air_pressure'], pres) + local_data['pressure'] = np.append(local_data['pressure'], pres) + wdir = float(row['dir'])*1.0 wspd = float(row['spd'])*1.0 if (wdir >= 0 and wdir <= 360 and wspd >= 0 and wspd < 300): @@ -206,8 +208,8 @@ def read_file(file_name, data): uwnd = float_missing_value vwnd = float_missing_value - local_data['eastward_wind'] = np.append(local_data['eastward_wind'], uwnd) - local_data['northward_wind'] = np.append(local_data['northward_wind'], vwnd) + local_data['windEastward'] = np.append(local_data['windEastward'], uwnd) + local_data['windNorthward'] = np.append(local_data['windNorthward'], vwnd) if row['type'] in known_freq.keys(): freq = known_freq[row['type']] @@ -221,8 +223,8 @@ def read_file(file_name, data): else: satid = int_missing_value unk_sat.append(row['sat']) - local_data['satelliteID'] = np.append(local_data['satelliteID'], satid) + local_data['satelliteIdentifier'] = np.append(local_data['satelliteIdentifier'], satid) local_data['sensorZenithAngle'] = np.append(local_data['sensorZenithAngle'], float(row['rff'])) local_data['windTrackingCorrelation'] = np.append(local_data['windTrackingCorrelation'], float(row['qi'])) local_data['windHeightAssignMethod'] = np.append(local_data['windHeightAssignMethod'], int(row['int'])) @@ -233,8 +235,8 @@ def read_file(file_name, data): keyerr = True if (e.args[0] == 'dir') or (e.args[0] == 'spd'): - local_data['eastward_wind'] = np.append(local_data['eastward_wind'], float_missing_value) - local_data['northward_wind'] = np.append(local_data['northward_wind'], float_missing_value) + local_data['windEastward'] = np.append(local_data['windEastward'], float_missing_value) + local_data['windNorthward'] = np.append(local_data['windNorthward'], float_missing_value) else: local_data[known_var[e.args[0]][0]] = np.append(local_data[known_var[e.args[0]][0]], known_var[e.args[0]][1]) @@ -279,7 +281,6 @@ def read_file(file_name, data): 
parser.set_defaults(debug=False) parser.set_defaults(verbose=False) - parser.set_defaults(datetimeReference=" ") optional = parser.add_argument_group(title='optional arguments') optional.add_argument('--debug', action='store_true', help='enable debug messages') diff --git a/src/conventional/buoy_bufr2ioda.py b/src/conventional/buoy_bufr2ioda.py index 1d51f4786..85c777729 100644 --- a/src/conventional/buoy_bufr2ioda.py +++ b/src/conventional/buoy_bufr2ioda.py @@ -28,22 +28,22 @@ os.environ["TZ"] = "UTC" locationKeyList = [ - ("station_id", "integer", ""), - ("station_name", "string", ""), + ("stationIdentification", "integer", ""), + ("stationLongName", "string", ""), ("latitude", "float", "degrees_north"), ("longitude", "float", "degrees_east"), - ("station_elevation", "float", "m"), + ("stationElevation", "float", "m"), ("height", "float", "m"), ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") ] meta_keys = [m_item[0] for m_item in locationKeyList] metaDataKeyList = { - 'station_id': ['marineObservingPlatformIdentifier'], - 'station_name': ['stationOrSiteName'], + 'stationIdentification': ['marineObservingPlatformIdentifier'], + 'stationLongName': ['stationOrSiteName'], 'latitude': ['latitude'], 'longitude': ['longitude'], - 'station_elevation': ['heightOfStationGroundAboveMeanSeaLevel'], + 'stationElevation': ['heightOfStationGroundAboveMeanSeaLevel'], 'height': ['heightOfSensorAboveWaterSurface', 'heightOfBarometerAboveMeanSeaLevel'], 'dateTime': ['Constructed'] @@ -61,22 +61,22 @@ 'pressureReducedToMeanSeaLevel'] # The outgoing IODA variables (ObsValues), their units, and assigned constant ObsError. 
-obsvars = ['air_temperature', - 'specific_humidity', - 'sea_surface_temperature', - 'eastward_wind', - 'northward_wind', - 'surface_pressure'] +obsvars = ['airTemperature', + 'specificHumidity', + 'seaSurfaceTemperature', + 'windEastward', + 'windNorthward', + 'stationPressure'] obsvars_units = ['K', 'kg kg-1', 'K', 'm s-1', 'm s-1', 'Pa'] obserrlist = [1.2, 0.75E-3, 2.2, 1.7, 1.7, 120.0] VarDims = { - 'air_temperature': ['nlocs'], - 'specific_humidity': ['nlocs'], - 'sea_surface_temperature': ['nlocs'], - 'eastward_wind': ['nlocs'], - 'northward_wind': ['nlocs'], - 'surface_pressure': ['nlocs'] + 'airTemperature': ['Location'], + 'specificHumidity': ['Location'], + 'seaSurfaceTemperature': ['Location'], + 'windEastward': ['Location'], + 'windNorthward': ['Location'], + 'stationPressure': ['Location'] } metaDataName = iconv.MetaDataName() @@ -89,7 +89,7 @@ 'ioda_version': 2, 'description': 'Surface (Ship) observations converted from BUFR', 'source': 'LDM at NCAR-RAL', - 'source_files': '' + 'sourceFiles': '' } DimDict = { @@ -136,12 +136,12 @@ def main(file_names, output_file): for fname in file_names: logging.debug("Reading file: " + fname) - AttrData['source_files'] += ", " + fname + AttrData['sourceFiles'] += ", " + fname data, count, start_pos = read_file(fname, count, start_pos, data) - AttrData['source_files'] = AttrData['source_files'][2:] - logging.debug("All source files: " + AttrData['source_files']) + AttrData['sourceFiles'] = AttrData['sourceFiles'][2:] + logging.debug("All source files: " + AttrData['sourceFiles']) if not data: logging.critical("ABORT: no message data was captured, stopping execution.") @@ -149,8 +149,7 @@ def main(file_names, output_file): logging.info("--- {:9.4f} BUFR read seconds ---".format(time.time() - start_time)) nlocs = len(data['dateTime']) - DimDict = {'nlocs': nlocs} - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict = {'Location': nlocs} # Set coordinates and units of the ObsValues. 
for n, iodavar in enumerate(obsvars): @@ -162,12 +161,12 @@ def main(file_names, output_file): varAttrs[iodavar, qcName]['coordinates'] = 'longitude latitude' varAttrs[iodavar, obsValName]['units'] = obsvars_units[n] varAttrs[iodavar, obsErrName]['units'] = obsvars_units[n] - varAttrs[iodavar, qcName]['units'] = 'unitless' # Set units of the MetaData variables and all _FillValues. for key in meta_keys: dtypestr = locationKeyList[meta_keys.index(key)][1] - varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + if locationKeyList[meta_keys.index(key)][2]: + varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] obs_data[(key, metaDataName)] = np.array(data[key], dtype=dtypes[dtypestr]) @@ -436,12 +435,12 @@ def read_bufr_message(f, count, start_pos, data): mask_height = np.logical_or(meta_data['height'] < -425, meta_data['height'] > 800) meta_data['height'][mask_height] = float_missing_value - # If the height of the observation (sensor) is missing, try to fill it with station_elevation. - for n, elev in enumerate(meta_data['station_elevation']): + # If the height of the observation (sensor) is missing, try to fill it with stationElevation. + for n, elev in enumerate(meta_data['stationElevation']): if (elev > -425 and elev < 800 and np.abs(meta_data['height'][n]-elev) > 50): meta_data['height'][n] = elev + 2 else: - meta_data['station_elevation'][n] = 0.5 + meta_data['stationElevation'][n] = 0.5 meta_data['height'][n] = 2.0 # Next, get the raw observed weather variables we want. @@ -495,12 +494,12 @@ def read_bufr_message(f, count, start_pos, data): spfh[n] = met_utils.specific_humidity(dewpoint, psfc) # Move everything into the final data dictionary, including metadata. 
- data['eastward_wind'] = np.append(data['eastward_wind'], uwnd) - data['northward_wind'] = np.append(data['northward_wind'], vwnd) - data['specific_humidity'] = np.append(data['specific_humidity'], spfh) - data['air_temperature'] = np.append(data['air_temperature'], vals['airTemperature']) - data['surface_pressure'] = np.append(data['surface_pressure'], vals['nonCoordinatePressure']) - data['sea_surface_temperature'] = np.append(data['sea_surface_temperature'], vals['seaSurfaceTemperature']) + data['windEastward'] = np.append(data['windEastward'], uwnd) + data['windNorthward'] = np.append(data['windNorthward'], vwnd) + data['specificHumidity'] = np.append(data['specificHumidity'], spfh) + data['airTemperature'] = np.append(data['airTemperature'], vals['airTemperature']) + data['stationPressure'] = np.append(data['stationPressure'], vals['nonCoordinatePressure']) + data['seaSurfaceTemperature'] = np.append(data['seaSurfaceTemperature'], vals['seaSurfaceTemperature']) for key in meta_keys: data[key] = np.append(data[key], meta_data[key]) diff --git a/src/conventional/metar_csv2ioda.py b/src/conventional/metar_csv2ioda.py old mode 100644 new mode 100755 index b6228a97c..055ce1ad9 --- a/src/conventional/metar_csv2ioda.py +++ b/src/conventional/metar_csv2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2020, 2021 UCAR +# (C) Copyright 2020-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
@@ -20,6 +20,7 @@ from pathlib import Path import csv import netCDF4 +import logging IODA_CONV_PATH = Path(__file__).parent/"@SCRIPT_LIB_PATH@" if not IODA_CONV_PATH.is_dir(): @@ -30,46 +31,62 @@ from collections import defaultdict, OrderedDict from orddicts import DefaultOrderedDict import meteo_utils -from def_jedi_utils import concat_obs_dict os.environ["TZ"] = "UTC" -locationKeyList = [ - ("station_id", "string"), - ("latitude", "float"), - ("longitude", "float"), - ("station_elevation", "float"), - ("height", "float"), - ("dateTime", "integer"), -] - -obsvars = { - 'ob_temp': 'air_temperature', - 'ob_spfh': 'specific_humidity', - 'ob_psfc': 'surface_pressure', - 'ob_uwnd': 'eastward_wind', - 'ob_vwnd': 'northward_wind', -} - +locationKeyList = [("stationICAO", "string", ""), + ("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("stationElevation", "float", "m"), + ("height", "float", "m"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z")] +meta_keys = [m_item[0] for m_item in locationKeyList] + +obsvars = ['airTemperature', + 'specificHumidity', + 'stationPressure', + 'windEastward', + 'windNorthward'] obsvars_units = ['K', 'kg kg-1', 'Pa', 'm s-1', 'm s-1'] - -VarDims = { - 'ob_temp': ['nlocs'], - 'ob_spfh': ['nlocs'], - 'ob_psfc': ['nlocs'], - 'ob_uwnd': ['nlocs'], - 'ob_vwnd': ['nlocs'], -} - -AttrData = { - 'converter': os.path.basename(__file__), - 'ioda_version': 2, - 'description': 'METAR surface observation data converted from CSV', - 'source': 'NCAR-RAL METAR database (gthompsn)', -} - -DimDict = { -} +obserrlist = [1.2, 0.75E-3, 120.0, 1.7, 1.7] + +VarDims = {'airTemperature': ['Location'], + 'specificHumidity': ['Location'], + 'stationPressure': ['Location'], + 'windEastward': ['Location'], + 'windNorthward': ['Location']} + +AttrData = {'converter': os.path.basename(__file__), + 'ioda_object_version': 2, + 'description': 'METAR surface observation data converted from CSV', + 'source': 'NCAR-RAL METAR 
database (gthompsn)'} + +DimDict = {} + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = netCDF4.default_fillvals['f4'] +int_missing_value = netCDF4.default_fillvals['i4'] +double_missing_value = netCDF4.default_fillvals['f8'] +long_missing_value = netCDF4.default_fillvals['i8'] +string_missing_value = '_' + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} class reformatMetar(object): @@ -79,12 +96,11 @@ def __init__(self, filename, date): self.filename = filename self.date = date self.meteo_utils = meteo_utils.meteo_utils() - self.float_fill = netCDF4.default_fillvals['f4'] self.varDict = defaultdict(lambda: DefaultOrderedDict(dict)) self.outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - AttrData['datetime_reference'] = date.strftime("%Y-%m-%dT%H:%M:%SZ") + AttrData['datetimeReference'] = date.strftime("%Y-%m-%dT%H:%M:%SZ_PT1H") # Read in CSV-formatted file of METAR data self._rd_metars() @@ -93,51 +109,45 @@ def __init__(self, filename, date): def _rd_metars(self): - n = 0 - for iodavar in obsvars.values(): - self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() - self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() - self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() - self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OerrName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude 
latitude' - self.varAttrs[iodavar, iconv.OvalName()]['units'] = obsvars_units[n] - self.varAttrs[iodavar, iconv.OerrName()]['units'] = obsvars_units[n] - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' - n += 1 - - # Set units of some MetaData variables - self.varAttrs['station_elevation', 'MetaData']['units'] = 'm' - self.varAttrs['height', 'MetaData']['units'] = 'm' - self.varAttrs['dateTime', 'MetaData']['units'] = 'seconds since 1970-01-01T00:00:00Z' + # Set units of the MetaData variables and all _FillValues. + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + self.varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + this_units = locationKeyList[meta_keys.index(key)][2] + if this_units: + self.varAttrs[(key, metaDataName)]['units'] = this_units + + # Set coordinates and units of the ObsValues. + for n, iodavar in enumerate(obsvars): + self.varDict[iodavar]['valKey'] = iodavar, obsValName + self.varDict[iodavar]['errKey'] = iodavar, obsErrName + self.varDict[iodavar]['qcKey'] = iodavar, qcName + self.varAttrs[iodavar, obsValName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, obsErrName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, qcName]['coordinates'] = 'longitude latitude' + self.varAttrs[iodavar, obsValName]['units'] = obsvars_units[n] + self.varAttrs[iodavar, obsErrName]['units'] = obsvars_units[n] # data is the dictionary of incoming observation (METAR) data data = {} - - data['ob_icao'] = [] - data['ob_lat'] = [] - data['ob_lon'] = [] - data['ob_time'] = [] - data['ob_datetime'] = [] - data['ob_elev'] = [] - data['ob_hght'] = [] - data['ob_psfc'] = [] - data['ob_temp'] = [] - data['ob_spfh'] = [] - data['ob_uwnd'] = [] - data['ob_vwnd'] = [] + for key in meta_keys: + data[key] = [] + for key in obsvars: + data[key] = [] ''' Read in the METARs data - Header contains: Unix_time,DateString,ICAO,Latitude,Longitude,Elev,Temp,Dewp,Wdir,Wspd,Wgst,Vis,\ - 
Pcp,Pcp3h,Pcp6h,Pcp24h,QcFlag,WxString,WxCode,Altimeter,Cvg1,Bas1,Cvg2,Bas2,Cvg3,Bas3,Length,Raw + Header contains: Unix_time,DateString,ICAO,Latitude,Longitude,Elev,Temp,Dewp,Wdir,Wspd,Wgst,Vis, # noqa + Pcp,Pcp3h,Pcp6h,Pcp24h,QcFlag,WxString,WxCode,Altimeter,Cvg1,Bas1,Cvg2,Bas2,Cvg3,Bas3,Length,Raw # noqa ''' # open file in read mode with open(self.filename, 'r') as fh: + missing = float_missing_value # pass the file object to reader() to get the reader object csv_dict_reader = csv.DictReader(fh) column_names = csv_dict_reader.fieldnames + logging.info("Header, columns = " + ", ".join(column_names)) # Iterate over each row in the csv using reader object for row in csv_dict_reader: # row variable is a list that represents a row in csv @@ -152,99 +162,87 @@ def _rd_metars(self): lon = float(row['Longitude']) elev = float(row['Elev']) if (elev < -999 or elev > 8450): - elev = self.float_fill - hght = self.float_fill - else: - hght = elev + 2.0 # Height of observation assumed 2 meters above station elevation + elev = missing + hght = missing + else: # Height of observation assumed 2 meters above station elevation + hght = elev + 2.0 except (csv.Error, ValueError): continue try: temp = float(row['Temp']) + self.meteo_utils.C_2_K except (csv.Error, ValueError): - temp = self.float_fill + temp = missing try: dewp = float(row['Dewp']) + self.meteo_utils.C_2_K except (csv.Error, ValueError): - dewp = self.float_fill + dewp = missing try: wdir = float(row['Wdir']) except (csv.Error, ValueError): - wdir = self.float_fill + wdir = missing try: wspd = float(row['Wspd']) * self.meteo_utils.KTS_2_MS except (csv.Error, ValueError): - wspd = self.float_fill + wspd = missing - if ((wdir is not self.float_fill) and (wspd is not self.float_fill)): + if ((wdir != missing) and (wspd != missing)): if (wdir == 0 and wspd == 0): uwnd = 0.0 vwnd = 0.0 elif (wdir > 0 and wdir <= 360 and wspd > 0): uwnd, vwnd = self.meteo_utils.dir_speed_2_uv(wdir, wspd) else: - uwnd = self.float_fill - 
vwnd = self.float_fill + uwnd = missing + vwnd = missing else: - uwnd = self.float_fill - vwnd = self.float_fill + uwnd = missing + vwnd = missing try: altim = float(row['Altimeter']) psfc = self.meteo_utils.altim_2_sfcPressure(altim, elev) except (csv.Error, ValueError): - altim = self.float_fill - psfc = self.float_fill + altim = missing + psfc = missing - if ((psfc is not self.float_fill) and (temp is not self.float_fill) and (dewp is not self.float_fill)): + if ((psfc != missing) and (temp != missing) and (dewp != missing)): spfh = self.meteo_utils.specific_humidity(dewp, psfc) else: - spfh = self.float_fill - - data['ob_icao'].append(icao) - data['ob_datetime'].append(utime) - data['ob_lat'].append(lat) - data['ob_lon'].append(lon) - data['ob_elev'].append(elev) - data['ob_hght'].append(hght) - data['ob_psfc'].append(psfc) - data['ob_temp'].append(temp) - data['ob_spfh'].append(spfh) - data['ob_uwnd'].append(uwnd) - data['ob_vwnd'].append(vwnd) + spfh = missing + + data['stationICAO'].append(icao) + data['dateTime'].append(utime) + data['latitude'].append(lat) + data['longitude'].append(lon) + data['stationElevation'].append(elev) + data['height'].append(hght) + data['stationPressure'].append(psfc) + data['airTemperature'].append(temp) + data['specificHumidity'].append(spfh) + data['windEastward'].append(uwnd) + data['windNorthward'].append(vwnd) fh.close() - nlocs = len(data['ob_datetime']) - - self.outdata[('station_id', 'MetaData')] = np.array(data['ob_icao'], dtype=object) - self.outdata[('dateTime', 'MetaData')] = np.array(data['ob_datetime'], dtype=np.int64) - self.outdata[('latitude', 'MetaData')] = np.array(data['ob_lat'], dtype=np.float32) - self.outdata[('longitude', 'MetaData')] = np.array(data['ob_lon'], dtype=np.float32) - self.outdata[('station_elevation', 'MetaData')] = np.array(data['ob_elev'], dtype=np.float32) - self.outdata[('height', 'MetaData')] = np.array(data['ob_hght'], dtype=np.float32) - iodavar = 'surface_pressure' - 
self.outdata[(iodavar, iconv.OvalName())] = np.array(data['ob_psfc'], dtype=np.float32) - self.outdata[(iodavar, iconv.OerrName())] = np.full((nlocs), 200.0, dtype=np.float32) - self.outdata[(iodavar, iconv.OqcName())] = np.full((nlocs), 2, dtype=np.int32) - iodavar = 'air_temperature' - self.outdata[(iodavar, iconv.OvalName())] = np.array(data['ob_temp'], dtype=np.float32) - self.outdata[(iodavar, iconv.OerrName())] = np.full((nlocs), 0.2, dtype=np.float32) - self.outdata[(iodavar, iconv.OqcName())] = np.full((nlocs), 2, dtype=np.int32) - iodavar = 'specific_humidity' - self.outdata[(iodavar, iconv.OvalName())] = np.array(data['ob_spfh'], dtype=np.float32) - self.outdata[(iodavar, iconv.OerrName())] = np.full((nlocs), 0.75E-3, dtype=np.float32) - self.outdata[(iodavar, iconv.OqcName())] = np.full((nlocs), 2, dtype=np.int32) - iodavar = 'eastward_wind' - self.outdata[(iodavar, iconv.OvalName())] = np.array(data['ob_uwnd'], dtype=np.float32) - self.outdata[(iodavar, iconv.OerrName())] = np.full((nlocs), 0.7, dtype=np.float32) - self.outdata[(iodavar, iconv.OqcName())] = np.full((nlocs), 2, dtype=np.int32) - iodavar = 'northward_wind' - self.outdata[(iodavar, iconv.OvalName())] = np.array(data['ob_vwnd'], dtype=np.float32) - self.outdata[(iodavar, iconv.OerrName())] = np.full((nlocs), 0.7, dtype=np.float32) - self.outdata[(iodavar, iconv.OqcName())] = np.full((nlocs), 2, dtype=np.int32) - - DimDict['nlocs'] = nlocs - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + nlocs = len(data['dateTime']) + DimDict['Location'] = nlocs + + # Set units of the MetaData variables and all _FillValues. 
+ for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + self.varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + this_units = locationKeyList[meta_keys.index(key)][2] + if this_units: + self.varAttrs[(key, metaDataName)]['units'] = this_units + self.outdata[(key, metaDataName)] = np.array(data[key], dtype=dtypes[dtypestr]) + + # Transfer from the 1-D data vectors and ensure output data (obs_data) types using numpy. + # The value of 2 for the preQC is NCEP-EMC prepBUFR code table 7 meaning not-checked QC. + # per source: https://www.emc.ncep.noaa.gov/mmb/data_processing/prepbufr.doc/table_7.htm + for n, iodavar in enumerate(obsvars): + self.outdata[(iodavar, obsValName)] = np.array(data[iodavar], dtype=np.float32) + self.outdata[(iodavar, obsErrName)] = np.full(nlocs, obserrlist[n], dtype=np.float32) + self.outdata[(iodavar, qcName)] = np.full(nlocs, 2, dtype=np.int32) return @@ -256,7 +254,7 @@ def main(): description=desc, formatter_class=ArgumentDefaultsHelpFormatter) parser.add_argument( - '-i', '--input', nargs='+', help='name of the input METARs CSV-formatted file', + '-i', '--input', help='name of the input METARs CSV-formatted file', type=str, required=True, default=None) parser.add_argument( '-o', '--output', help='name of the output netCDF IODA-ready file', @@ -269,21 +267,13 @@ def main(): fdate = datetime.strptime(args.date, '%Y%m%d%H') - obs_data = {} - for afile in args.input: - metar = None - metar = reformatMetar(afile, fdate) - file_obs_data = metar.outdata - if obs_data: - concat_obs_dict(obs_data, file_obs_data) - else: - obs_data = file_obs_data + obs = reformatMetar(args.input, fdate) # setup the IODA writer writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) # write everything out - writer.BuildIoda(obs_data, VarDims, metar.varAttrs, AttrData) + writer.BuildIoda(obs.outdata, VarDims, obs.varAttrs, AttrData) if __name__ == '__main__': diff --git a/src/conventional/ship_bufr2ioda.py 
b/src/conventional/ship_bufr2ioda.py index 6660b878f..64bc156cc 100644 --- a/src/conventional/ship_bufr2ioda.py +++ b/src/conventional/ship_bufr2ioda.py @@ -28,26 +28,26 @@ os.environ["TZ"] = "UTC" locationKeyList = [ - ("station_id", "string", ""), - ("ship_heading", "integer", ""), - ("ship_speed", "float", ""), + ("stationIdentification", "string", ""), + ("shipHeading", "integer", ""), + ("shipVelocity", "float", ""), ("latitude", "float", "degrees_north"), ("longitude", "float", "degrees_east"), - ("station_elevation", "float", "m"), + ("stationElevation", "float", "m"), ("height", "float", "m"), ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") ] meta_keys = [m_item[0] for m_item in locationKeyList] metaDataKeyList = { - 'station_id': ['shipOrMobileLandStationIdentifier'], - 'ship_heading': ['directionOfMotionOfMovingObservingPlatform'], - 'ship_speed': ['movingObservingPlatformSpeed'], + 'stationIdentification': ['shipOrMobileLandStationIdentifier'], + 'shipHeading': ['directionOfMotionOfMovingObservingPlatform'], + 'shipVelocity': ['movingObservingPlatformSpeed'], 'latitude': ['latitude'], 'longitude': ['longitude'], - 'station_elevation': ['Constructed', - 'heightOfStationGroundAboveMeanSeaLevel', - 'heightOfSensorAboveWaterSurface'], + 'stationElevation': ['Constructed', + 'heightOfStationGroundAboveMeanSeaLevel', + 'heightOfSensorAboveWaterSurface'], 'height': ['Constructed', 'heightOfBarometerAboveMeanSeaLevel', 'heightOfStationGroundAboveMeanSeaLevel'], @@ -66,22 +66,22 @@ 'pressureReducedToMeanSeaLevel'] # The outgoing IODA variables (ObsValues), their units, and assigned constant ObsError. 
-obsvars = ['air_temperature', - 'specific_humidity', - 'sea_surface_temperature', - 'eastward_wind', - 'northward_wind', - 'surface_pressure'] +obsvars = ['airTemperature', + 'specificHumidity', + 'seaSurfaceTemperature', + 'windEastward', + 'windNorthward', + 'stationPressure'] obsvars_units = ['K', 'kg kg-1', 'K', 'm s-1', 'm s-1', 'Pa'] obserrlist = [1.2, 0.75E-3, 2.2, 1.7, 1.7, 120.0] VarDims = { - 'air_temperature': ['nlocs'], - 'specific_humidity': ['nlocs'], - 'sea_surface_temperature': ['nlocs'], - 'eastward_wind': ['nlocs'], - 'northward_wind': ['nlocs'], - 'surface_pressure': ['nlocs'] + 'airTemperature': ['Location'], + 'specificHumidity': ['Location'], + 'seaSurfaceTemperature': ['Location'], + 'windEastward': ['Location'], + 'windNorthward': ['Location'], + 'stationPressure': ['Location'] } metaDataName = iconv.MetaDataName() @@ -94,7 +94,7 @@ 'ioda_version': 2, 'description': 'Surface (Ship) observations converted from BUFR', 'source': 'LDM at NCAR-RAL', - 'source_files': '' + 'sourceFiles': '' } DimDict = { @@ -141,12 +141,12 @@ def main(file_names, output_file): for fname in file_names: logging.debug("Reading file: " + fname) - AttrData['source_files'] += ", " + fname + AttrData['sourceFiles'] += ", " + fname data, count, start_pos = read_file(fname, count, start_pos, data) - AttrData['source_files'] = AttrData['source_files'][2:] - logging.debug("All source files: " + AttrData['source_files']) + AttrData['sourceFiles'] = AttrData['sourceFiles'][2:] + logging.debug("All source files: " + AttrData['sourceFiles']) if not data: logging.critical("ABORT: no message data was captured, stopping execution.") @@ -154,8 +154,7 @@ def main(file_names, output_file): logging.info("--- {:9.4f} BUFR read seconds ---".format(time.time() - start_time)) nlocs = len(data['dateTime']) - DimDict = {'nlocs': nlocs} - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict = {'Location': nlocs} # Set coordinates and units of the ObsValues. 
for n, iodavar in enumerate(obsvars): @@ -167,7 +166,6 @@ def main(file_names, output_file): varAttrs[iodavar, qcName]['coordinates'] = 'longitude latitude' varAttrs[iodavar, obsValName]['units'] = obsvars_units[n] varAttrs[iodavar, obsErrName]['units'] = obsvars_units[n] - varAttrs[iodavar, qcName]['units'] = 'unitless' # Set units of the MetaData variables and all _FillValues. for key in meta_keys: @@ -441,12 +439,12 @@ def read_bufr_message(f, count, start_pos, data): mask_height = np.logical_or(meta_data['height'] < -425, meta_data['height'] > 800) meta_data['height'][mask_height] = float_missing_value - # If the height of the observation (sensor) is missing, try to fill it with station_elevation. - for n, elev in enumerate(meta_data['station_elevation']): + # If the height of the observation (sensor) is missing, try to fill it with stationElevation. + for n, elev in enumerate(meta_data['stationElevation']): if (elev > -425 and elev < 800 and np.abs(meta_data['height'][n]-elev) > 50): meta_data['height'][n] = elev + 2 else: - meta_data['station_elevation'][n] = 1.0 + meta_data['stationElevation'][n] = 1.0 meta_data['height'][n] = 10.0 # Next, get the raw observed weather variables we want. @@ -500,12 +498,12 @@ def read_bufr_message(f, count, start_pos, data): spfh[n] = met_utils.specific_humidity(dewpoint, psfc) # Move everything into the final data dictionary, including metadata. 
- data['eastward_wind'] = np.append(data['eastward_wind'], uwnd) - data['northward_wind'] = np.append(data['northward_wind'], vwnd) - data['specific_humidity'] = np.append(data['specific_humidity'], spfh) - data['air_temperature'] = np.append(data['air_temperature'], vals['airTemperature']) - data['surface_pressure'] = np.append(data['surface_pressure'], vals['nonCoordinatePressure']) - data['sea_surface_temperature'] = np.append(data['sea_surface_temperature'], vals['oceanographicWaterTemperature']) + data['windEastward'] = np.append(data['windEastward'], uwnd) + data['windNorthward'] = np.append(data['windNorthward'], vwnd) + data['specificHumidity'] = np.append(data['specificHumidity'], spfh) + data['airTemperature'] = np.append(data['airTemperature'], vals['airTemperature']) + data['stationPressure'] = np.append(data['stationPressure'], vals['nonCoordinatePressure']) + data['seaSurfaceTemperature'] = np.append(data['seaSurfaceTemperature'], vals['oceanographicWaterTemperature']) for key in meta_keys: data[key] = np.append(data[key], meta_data[key]) diff --git a/src/conventional/decode_bufr_LDM_raob.py b/src/conventional/sonde_bufr2ioda.py similarity index 91% rename from src/conventional/decode_bufr_LDM_raob.py rename to src/conventional/sonde_bufr2ioda.py index 2c0810b6f..298cca0c8 100644 --- a/src/conventional/decode_bufr_LDM_raob.py +++ b/src/conventional/sonde_bufr2ioda.py @@ -30,23 +30,23 @@ locationKeyList = [ ("latitude", "float", "degrees_north", "keep"), ("longitude", "float", "degrees_east", "keep"), - ("station_elevation", "float", "m", "keep"), + ("stationElevation", "float", "m", "keep"), ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z", "keep"), - ("LaunchTime", "long", "seconds since 1970-01-01T00:00:00Z", "keep"), - ("air_pressure", "float", "Pa", "keep"), - ("geopotential_height", "float", "m", "keep"), + ("releaseTime", "long", "seconds since 1970-01-01T00:00:00Z", "keep"), + ("pressure", "float", "Pa", "keep"), + 
("geopotentialHeight", "float", "m", "keep"), ("vertSignificance", "integer", "", "toss"), ("latDisplacement", "float", "degrees", "toss"), ("lonDisplacement", "float", "degrees", "toss"), ("timeDisplacement", "float", "s", "toss"), ("wmoBlockNumber", "integer", "", "toss"), ("wmoStationNumber", "integer", "", "toss"), - ("station_id", "string", "", "keep"), - ("instrumentType", "integer", "", "keep"), - ("instrumentRadiationCorrectionInfo", "integer", "", "keep"), - ("instrumentHumidityCorrectionInfo", "integer", "", "keep"), - ("temperatureSensorType", "integer", "", "keep"), - ("humiditySensorType", "integer", "", "keep"), + ("stationIdentification", "string", "", "keep"), + ("instrumentIdentifier", "integer", "", "keep"), + # ("instrumentRadiationCorrectionInfo", "integer", "", "keep"), + # ("instrumentHumidityCorrectionInfo", "integer", "", "keep"), + # ("temperatureSensorType", "integer", "", "keep"), + # ("humiditySensorType", "integer", "", "keep"), ("year", "integer", "", "toss"), ("month", "integer", "", "toss"), ("day", "integer", "", "toss"), @@ -59,25 +59,25 @@ metaDataKeyList = { 'latitude': ['latitude'], 'longitude': ['longitude'], - 'station_elevation': ['Constructed', 'heightOfBarometerAboveMeanSeaLevel', - 'heightOfStationGroundAboveMeanSeaLevel', 'heightOfStation', 'height'], + 'stationElevation': ['Constructed', 'heightOfBarometerAboveMeanSeaLevel', + 'heightOfStationGroundAboveMeanSeaLevel', 'heightOfStation', 'height'], 'dateTime': ['Constructed'], - 'LaunchTime': ['Constructed'], - 'air_pressure': ['pressure', 'nonCoordinatePressure'], - 'geopotential_height': ['nonCoordinateGeopotentialHeight', 'geopotentialHeight'], + 'releaseTime': ['Constructed'], + 'pressure': ['pressure', 'nonCoordinatePressure'], + 'geopotentialHeight': ['nonCoordinateGeopotentialHeight', 'geopotentialHeight'], 'vertSignificance': ['extendedVerticalSoundingSignificance', 'verticalSoundingSignificance'], 'latDisplacement': ['latitudeDisplacement'], 'lonDisplacement': 
['longitudeDisplacement'], 'timeDisplacement': ['timePeriod'], 'wmoBlockNumber': ['blockNumber'], 'wmoStationNumber': ['stationNumber'], - 'station_id': ['Constructed'], + 'stationIdentification': ['Constructed'], # "stationLongName": 'shipOrMobileLandStationIdentifier', - "instrumentType": ['radiosondeType'], - "instrumentRadiationCorrectionInfo": ['solarAndInfraredRadiationCorrection'], - "instrumentHumidityCorrectionInfo": ['correctionAlgorithmsForHumidityMeasurements'], - "temperatureSensorType": ['temperatureSensorType'], - "humiditySensorType": ['humiditySensorType'], + "instrumentIdentifier": ['radiosondeType'], + # "instrumentRadiationCorrectionInfo": ['solarAndInfraredRadiationCorrection'], + # "instrumentHumidityCorrectionInfo": ['correctionAlgorithmsForHumidityMeasurements'], + # "temperatureSensorType": ['temperatureSensorType'], + # "humiditySensorType": ['humiditySensorType'], # "instrumentSerialNum": 'radiosondeSerialNumber', # "instrumentSoftwareVersion": 'softwareVersionNumber', 'year': ['year'], @@ -92,16 +92,16 @@ raw_obsvars = ['airTemperature', 'dewpointTemperature', 'windDirection', 'windSpeed'] # The outgoing IODA variables (ObsValues), their units, and assigned constant ObsError. 
-obsvars = ['air_temperature', 'virtual_temperature', 'specific_humidity', 'eastward_wind', 'northward_wind'] +obsvars = ['airTemperature', 'virtualTemperature', 'specificHumidity', 'windEastward', 'windNorthward'] obsvars_units = ['K', 'K', 'kg kg-1', 'm s-1', 'm s-1'] obserrlist = [1.2, 1.2, 0.75E-3, 1.7, 1.7] VarDims = { - 'air_temperature': ['nlocs'], - 'virtual_temperature': ['nlocs'], - 'specific_humidity': ['nlocs'], - 'eastward_wind': ['nlocs'], - 'northward_wind': ['nlocs'] + 'airTemperature': ['Location'], + 'virtualTemperature': ['Location'], + 'specificHumidity': ['Location'], + 'windEastward': ['Location'], + 'windNorthward': ['Location'] } AttrData = { @@ -175,7 +175,7 @@ def main(file_names, output_file, datetimeRef): logging.info("--- {:9.4f} BUFR read seconds ---".format(time.time() - start_time)) nlocs = count[1] - DimDict = {'nlocs': nlocs} + DimDict = {'Location': nlocs} # Set coordinates and units of the ObsValues. for n, iodavar in enumerate(obsvars): @@ -589,7 +589,7 @@ def read_bufr_message(f, count, start_pos, data): target_number = len(temp_data['timeDisplacement']) elif temp_data['latDisplacement'] is not None: target_number = len(temp_data['latDisplacement']) - elif temp_data['air_pressure'] is not None: + elif temp_data['pressure'] is not None: - target_number = len(temp_data['air_pressure']) + target_number = len(temp_data['pressure']) elif temp_data['airTemperature'] is not None: target_number = len(temp_data['airTemperature']) @@ -626,13 +626,13 @@ def read_bufr_message(f, count, start_pos, data): meta_data['dateTime'] = specialty_time(temp_data['timeDisplacement'][b:e], meta_data['year'][0], meta_data['month'][0], meta_data['day'][0], # noqa meta_data['hour'][0], meta_data['minute'][0], meta_data['second'][0]) # noqa - meta_data['LaunchTime'] = np.full(target_number, meta_data['dateTime'][0]) + meta_data['releaseTime'] = np.full(target_number, meta_data['dateTime'][0]) else: meta_data['dateTime'][0] = specialty_time([0], meta_data['year'][0], meta_data['month'][0],
meta_data['day'][0], # noqa meta_data['hour'][0], meta_data['minute'][0], meta_data['second'][0]) # noqa meta_data['dateTime'] = np.full(target_number, meta_data['dateTime'][0]) - meta_data['LaunchTime'] = np.full(target_number, meta_data['dateTime'][0]) + meta_data['releaseTime'] = np.full(target_number, meta_data['dateTime'][0]) # Sondes also have lat/lon displacement from launch/release location. if temp_data['latDisplacement'] is not None and temp_data['lonDisplacement'] is not None: @@ -663,20 +663,20 @@ def read_bufr_message(f, count, start_pos, data): if (lat < -90 or lat > 90): meta_data['latitude'][n] = meta_data['latitude'][n-1] - # Forcably create station_id 5-char string from WMO block+station number. - meta_data['station_id'] = np.full(target_number, string_missing_value, dtype=' 0 and block < 100 and number > 0 and number < 1000): - meta_data['station_id'][n] = "{:02d}".format(block) + "{:03d}".format(number) + meta_data['stationIdentification'][n] = "{:02d}".format(block) + "{:03d}".format(number) if n == 0: count[3] += 1 - logging.info(f"Processing sonde for station: {meta_data['station_id'][n]}") + logging.info(f"Processing sonde for station: {meta_data['stationIdentification'][n]}") # Very odd, sometimes the first level of data has some variables set to zero. Reset to missing. - if (meta_data['geopotential_height'][0] == 0 or meta_data['air_pressure'][0] == 0): - meta_data['geopotential_height'][0] = float_missing_value - meta_data['air_pressure'][0] = float_missing_value + if (meta_data['geopotentialHeight'][0] == 0 or meta_data['pressure'][0] == 0): + meta_data['geopotentialHeight'][0] = float_missing_value + meta_data['pressure'][0] = float_missing_value # And now processing the observed variables we care about. 
nbad = 0 @@ -723,7 +723,7 @@ def read_bufr_message(f, count, start_pos, data): spfh = np.full(target_number, float_missing_value) for n, dewpoint in enumerate(vals['dewpointTemperature']): - pres = meta_data['air_pressure'][n] + pres = meta_data['pressure'][n] if dewpoint and pres: if (dewpoint > 50 and dewpoint < 325 and pres > 100 and pres < 109900): spfh[n] = met_utils.specific_humidity(dewpoint, pres) @@ -736,17 +736,17 @@ def read_bufr_message(f, count, start_pos, data): tvirt = np.full(target_number, float_missing_value) for n, temp in enumerate(airt): - pres = meta_data['air_pressure'][n] + pres = meta_data['pressure'][n] if (temp != float_missing_value and spfh[n] and pres < 108000 and pres > 10000): qvapor = max(1.0e-12, spfh[n]/(1.0-spfh[n])) tvirt[n] = temp*(1.0 + 0.61*qvapor) # Finally fill up the output data dictionary with observed variables. - data['eastward_wind'] = np.append(data['eastward_wind'], uwnd) - data['northward_wind'] = np.append(data['northward_wind'], vwnd) - data['specific_humidity'] = np.append(data['specific_humidity'], spfh) - data['air_temperature'] = np.append(data['air_temperature'], airt) - data['virtual_temperature'] = np.append(data['virtual_temperature'], tvirt) + data['windEastward'] = np.append(data['windEastward'], uwnd) + data['windNorthward'] = np.append(data['windNorthward'], vwnd) + data['specificHumidity'] = np.append(data['specificHumidity'], spfh) + data['airTemperature'] = np.append(data['airTemperature'], airt) + data['virtualTemperature'] = np.append(data['virtualTemperature'], tvirt) obnum += 1 diff --git a/src/conventional/sonde_tac2ioda.py b/src/conventional/sonde_tac2ioda.py index e8221fb0a..def658933 100644 --- a/src/conventional/sonde_tac2ioda.py +++ b/src/conventional/sonde_tac2ioda.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 ########################################################################### # These functions decode WMO format soundings which contain at least the # mandatory levels (TTAA, TTCC), 
and also can include significant temperature @@ -63,32 +64,32 @@ # The outgoing IODA MetaData variables, their data type, and units. MetaDataKeyList = [ - ("station_id", "string", ""), + ("stationIdentification", "string", ""), ("latitude", "float", "degrees_north"), ("longitude", "float", "degrees_east"), - ("station_elevation", "float", "m"), + ("stationElevation", "float", "m"), ("height", "float", "m"), - ("air_pressure", "float", "Pa"), - ("launch_time", "string", ""), - ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z"), + ("pressure", "float", "Pa"), + ("releaseTime", "long", "seconds since 1970-01-01T00:00:00Z"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") ] meta_keys = [m_item[0] for m_item in MetaDataKeyList] # The outgoing IODA variables (ObsValues), their units, and assigned constant ObsError. -obsvars = ['air_temperature', - 'specific_humidity', - 'virtual_temperature', - 'eastward_wind', - 'northward_wind'] +obsvars = ['airTemperature', + 'specificHumidity', + 'virtualTemperature', + 'windEastward', + 'windNorthward'] obsvars_units = ['K', 'kg kg-1', 'K', 'm s-1', 'm s-1'] obserrlist = [1.2, 0.75E-3, 1.5, 1.7, 1.7] VarDims = { - 'air_temperature': ['nlocs'], - 'specific_humidity': ['nlocs'], - 'virtual_temperature': ['nlocs'], - 'eastward_wind': ['nlocs'], - 'northward_wind': ['nlocs'] + 'airTemperature': ['Location'], + 'specificHumidity': ['Location'], + 'virtualTemperature': ['Location'], + 'windEastward': ['Location'], + 'windNorthward': ['Location'] } metaDataName = iconv.MetaDataName() @@ -815,9 +816,6 @@ def getPressureLevels(section, levels): section['levels'] = {} pressures = sorted(levels.keys(), reverse=True) - if len(pressures) < 1: - return - # loop through each height and try to find surrounding heights with defined # levels to interpolate or extrapolate pressure level from @@ -860,9 +858,6 @@ def getHeights(section, levels): """ pressures = sorted(levels.keys(), reverse=True) - if len(pressures) < 1: - return - 
for pressure in sorted(section['levels'].keys(), reverse=True): pressurelo = pressures[0] pressureup = pressures[-1] @@ -952,6 +947,7 @@ def change_vars(profile): # launch is usually initiated close to 11:05Z. this_datetime = datetime(profile['year'], profile['month'], profile['day'], profile['hour'], 0, 0) launch_time = this_datetime - timedelta(seconds=55*60) + time_offset1 = round((launch_time - epoch).total_seconds()) previous_time = launch_time heightKm1 = profile['elev'] @@ -994,19 +990,19 @@ def change_vars(profile): time_offset = round((this_datetime - epoch).total_seconds()) previous_time = this_datetime - new_profile['station_id'].append(profile['synop']) + new_profile['stationIdentification'].append(profile['synop']) new_profile['latitude'].append(profile['lat']) new_profile['longitude'].append(profile['lon']) - new_profile['station_elevation'].append(profile['elev']) - new_profile['launch_time'].append(launch_time.strftime("%Y-%m-%dT%H:%M:%SZ")) + new_profile['stationElevation'].append(profile['elev']) + new_profile['releaseTime'].append(time_offset1) new_profile['dateTime'].append(time_offset) - new_profile['air_pressure'].append(pres) + new_profile['pressure'].append(pres) new_profile['height'].append(height) - new_profile['air_temperature'].append(temp) - new_profile['virtual_temperature'].append(tvirt) - new_profile['specific_humidity'].append(spfh) - new_profile['eastward_wind'].append(u) - new_profile['northward_wind'].append(v) + new_profile['airTemperature'].append(temp) + new_profile['virtualTemperature'].append(tvirt) + new_profile['specificHumidity'].append(spfh) + new_profile['windEastward'].append(u) + new_profile['windNorthward'].append(v) """ Based on height and time and the wind componenents, predict the lat, lon positions @@ -1021,13 +1017,13 @@ def change_vars(profile): for idx in range(1, len(delta_t)): - if (new_profile['eastward_wind'][idx-1] != float_missing_value and new_profile['northward_wind'][idx-1] != float_missing_value): + 
if (new_profile['windEastward'][idx-1] != float_missing_value and new_profile['windNorthward'][idx-1] != float_missing_value): # move north-south - d_north = new_profile['northward_wind'][idx-1] * delta_t[idx-1] + d_north = new_profile['windNorthward'][idx-1] * delta_t[idx-1] location = geod.direct(points=previous_loc[:2], azimuths=0., distances=d_north)[0] new_profile['latitude'][idx] = location[1] # move east-west - d_east = new_profile['eastward_wind'][idx-1] * delta_t[idx-1] + d_east = new_profile['windEastward'][idx-1] * delta_t[idx-1] location = geod.direct(points=location[:2], azimuths=90., distances=d_east)[0] new_profile['longitude'][idx] = location[0] else: @@ -1164,7 +1160,7 @@ def append_ioda_data(in_profile, obs_data): if args.netcdf: ioda_data = {} - DimDict = {'nlocs': ntotal} + DimDict = {'Location': ntotal} AttrData['sourceFiles'] = AttrData['sourceFiles'][2:] # Set coordinates and units of the ObsValues. for n, iodavar in enumerate(obsvars): diff --git a/src/conventional/synop_bufr2ioda.py b/src/conventional/synop_bufr2ioda.py index ff1cb9c5d..5bc2b614b 100644 --- a/src/conventional/synop_bufr2ioda.py +++ b/src/conventional/synop_bufr2ioda.py @@ -28,13 +28,13 @@ os.environ["TZ"] = "UTC" locationKeyList = [ - ("station_id", "string", "", "keep"), + ("stationIdentification", "string", "", "keep"), # ("station_name", "string", "", "keep"), ("wmoBlockNumber", "integer", "", "toss"), ("wmoStationNumber", "integer", "", "toss"), ("latitude", "float", "degrees_north", "keep"), ("longitude", "float", "degrees_east", "keep"), - ("station_elevation", "float", "m", "keep"), + ("stationElevation", "float", "m", "keep"), ("height", "float", "m", "keep"), ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z", "keep"), ("year", "integer", "", "toss"), @@ -52,11 +52,11 @@ # 'station_name': ['stationOrSiteName'], This fails due to unicode characters 'latitude': ['latitude'], 'longitude': ['longitude'], - 'station_elevation': 
['heightOfStationGroundAboveMeanSeaLevel'], + 'stationElevation': ['heightOfStationGroundAboveMeanSeaLevel'], 'height': ['Constructed', 'heightOfBarometerAboveMeanSeaLevel', 'heightOfStationGroundAboveMeanSeaLevel'], - 'station_id': ['Constructed'], + 'stationIdentification': ['Constructed'], 'dateTime': ['Constructed'], 'year': ['year'], 'month': ['month'], @@ -76,22 +76,22 @@ 'nonCoordinatePressure'] # The outgoing IODA variables (ObsValues), their units, and assigned constant ObsError. -obsvars = ['air_temperature', - 'specific_humidity', - 'virtual_temperature', - 'eastward_wind', - 'northward_wind', - 'surface_pressure'] +obsvars = ['airTemperature', + 'specificHumidity', + 'virtualTemperature', + 'windEastward', + 'windNorthward', + 'stationPressure'] obsvars_units = ['K', 'kg kg-1', 'K', 'm s-1', 'm s-1', 'Pa'] obserrlist = [1.2, 0.75E-3, 1.5, 1.7, 1.7, 120.0] VarDims = { - 'air_temperature': ['nlocs'], - 'specific_humidity': ['nlocs'], - 'virtual_temperature': ['nlocs'], - 'eastward_wind': ['nlocs'], - 'northward_wind': ['nlocs'], - 'surface_pressure': ['nlocs'] + 'airTemperature': ['Location'], + 'specificHumidity': ['Location'], + 'virtualTemperature': ['Location'], + 'windEastward': ['Location'], + 'windNorthward': ['Location'], + 'stationPressure': ['Location'] } metaDataName = iconv.MetaDataName() @@ -104,7 +104,7 @@ 'ioda_version': 2, 'description': 'Surface (SYNOP) observations converted from BUFR', 'source': 'LDM at NCAR-RAL', - 'source_files': '' + 'sourceFiles': '' } DimDict = { @@ -151,12 +151,12 @@ def main(file_names, output_file): for fname in file_names: logging.debug("Reading file: " + fname) - AttrData['source_files'] += ", " + fname + AttrData['sourceFiles'] += ", " + fname data, count, start_pos = read_file(fname, count, start_pos, data) - AttrData['source_files'] = AttrData['source_files'][2:] - logging.debug("All source files: " + AttrData['source_files']) + AttrData['sourceFiles'] = AttrData['sourceFiles'][2:] + logging.debug("All 
source files: " + AttrData['sourceFiles']) if not data: logging.critical("ABORT: no message data was captured, stopping execution.") @@ -164,8 +164,7 @@ def main(file_names, output_file): logging.info("--- {:9.4f} BUFR read seconds ---".format(time.time() - start_time)) nlocs = len(data['dateTime']) - DimDict = {'nlocs': nlocs} - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict = {'Location': nlocs} # Set coordinates and units of the ObsValues. for n, iodavar in enumerate(obsvars): @@ -177,7 +176,6 @@ def main(file_names, output_file): varAttrs[iodavar, qcName]['coordinates'] = 'longitude latitude' varAttrs[iodavar, obsValName]['units'] = obsvars_units[n] varAttrs[iodavar, obsErrName]['units'] = obsvars_units[n] - varAttrs[iodavar, qcName]['units'] = 'unitless' # Set units of the MetaData variables and all _FillValues. for key in meta_keys: @@ -499,7 +497,7 @@ def read_bufr_message(f, count, start_pos, data): meta_data['height'][mask_height] = float_missing_value # If the height of the observation (sensor) is missing, try to fill it with station_elevation. - for n, elev in enumerate(meta_data['station_elevation']): + for n, elev in enumerate(meta_data['stationElevation']): if (elev > -425 and elev < 8500): meta_data['height'][n] = elev + 2 @@ -579,12 +577,12 @@ def read_bufr_message(f, count, start_pos, data): tvirt[n] = airt[n]*(1.0 + 0.61*qvapor) # Finally fill up the output data dictionary with observed variables. 
- data['eastward_wind'] = np.append(data['eastward_wind'], uwnd) - data['northward_wind'] = np.append(data['northward_wind'], vwnd) - data['specific_humidity'] = np.append(data['specific_humidity'], spfh) - data['air_temperature'] = np.append(data['air_temperature'], airt) - data['virtual_temperature'] = np.append(data['virtual_temperature'], tvirt) - data['surface_pressure'] = np.append(data['surface_pressure'], psfc) + data['windEastward'] = np.append(data['windEastward'], uwnd) + data['windNorthward'] = np.append(data['windNorthward'], vwnd) + data['specificHumidity'] = np.append(data['specificHumidity'], spfh) + data['airTemperature'] = np.append(data['airTemperature'], airt) + data['virtualTemperature'] = np.append(data['virtualTemperature'], tvirt) + data['stationPressure'] = np.append(data['stationPressure'], psfc) logging.info(f"number of observations so far: {count[1]} from {count[0]} BUFR msgs.") logging.info(f"number of invalid or useless observations: {count[2]}") diff --git a/src/gnssro/gnssro_bufr2ioda.py b/src/gnssro/gnssro_bufr2ioda.py index 3b681430c..a740bc04d 100644 --- a/src/gnssro/gnssro_bufr2ioda.py +++ b/src/gnssro/gnssro_bufr2ioda.py @@ -17,6 +17,7 @@ import os from pathlib import Path from itertools import repeat +import netCDF4 as nc IODA_CONV_PATH = Path(__file__).parent/"@SCRIPT_LIB_PATH@" if not IODA_CONV_PATH.is_dir(): @@ -30,13 +31,18 @@ # globals ioda_float_type = 'float32' ioda_int_type = 'int32' -float_missing_value = -1.0e+37 -int_missing_value = -2147483647 +float_missing_value = nc.default_fillvals['f4'] +int_missing_value = nc.default_fillvals['i4'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] @@ -68,42 +74,45 @@ def main(args): # prepare global attributes we want to 
output in the file, # in addition to the ones already loaded in from the input file GlobalAttrs = {} - GlobalAttrs['date_time_string'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") - date_time_int32 = np.array(int(args.date.strftime("%Y%m%d%H")), dtype='int32') - GlobalAttrs['date_time'] = date_time_int32.item() + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") GlobalAttrs['converter'] = os.path.basename(__file__) # pass parameters to the IODA writer VarDims = { - 'bending_angle': ['nlocs'], - 'refractivity': ['nlocs'] + 'bendingAngle': ['Location'], + 'atmosphericRefractivity': ['Location'] } # write them out - nlocs = obs_data[('bending_angle', 'ObsValue')].shape[0] - DimDict = {'nlocs': nlocs} + nlocs = obs_data[('bendingAngle', 'ObsValue')].shape[0] + DimDict = {'Location': nlocs} meta_data_types = def_meta_types() for k, v in meta_data_types.items(): locationKeyList.append((k, v)) writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('bending_angle', 'ObsValue')]['units'] = 'Radians' - VarAttrs[('bending_angle', 'ObsError')]['units'] = 'Radians' - VarAttrs[('bending_angle', 'PreQC')]['units'] = 'unitless' - VarAttrs[('refractivity', 'ObsValue')]['units'] = 'N' - VarAttrs[('refractivity', 'ObsError')]['units'] = 'N' - VarAttrs[('refractivity', 'PreQC')]['units'] = 'unitless' - - VarAttrs[('bending_angle', 'ObsValue')]['_FillValue'] = float_missing_value - VarAttrs[('bending_angle', 'ObsError')]['_FillValue'] = float_missing_value - VarAttrs[('bending_angle', 'PreQC')]['_FillValue'] = int_missing_value - VarAttrs[('refractivity', 'ObsValue')]['_FillValue'] = float_missing_value - VarAttrs[('refractivity', 'ObsError')]['_FillValue'] = float_missing_value - VarAttrs[('refractivity', 'PreQC')]['_FillValue'] = int_missing_value + VarAttrs[('bendingAngle', 'ObsValue')]['units'] = 'Radians' + VarAttrs[('bendingAngle', 'ObsError')]['units'] = 'Radians' + 
VarAttrs[('atmosphericRefractivity', 'ObsValue')]['units'] = 'N units' + VarAttrs[('atmosphericRefractivity', 'ObsError')]['units'] = 'N units' + VarAttrs[('height', 'MetaData')]['units'] = 'm' + VarAttrs[('latitude', 'MetaData')]['units'] = 'degree_north' + VarAttrs[('longitude', 'MetaData')]['units'] = 'degree_east' + VarAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string + VarAttrs[('sensorAzimuthAngle', 'MetaData')]['units'] = 'degree' + VarAttrs[('geoidUndulation', 'MetaData')]['units'] = 'm' + VarAttrs[('earthRadiusCurvature', 'MetaData')]['units'] = 'm' + + VarAttrs[('bendingAngle', 'ObsValue')]['_FillValue'] = float_missing_value + VarAttrs[('bendingAngle', 'ObsError')]['_FillValue'] = float_missing_value + VarAttrs[('bendingAngle', 'PreQC')]['_FillValue'] = int_missing_value + VarAttrs[('atmosphericRefractivity', 'ObsValue')]['_FillValue'] = float_missing_value + VarAttrs[('atmosphericRefractivity', 'ObsError')]['_FillValue'] = float_missing_value + VarAttrs[('atmosphericRefractivity', 'PreQC')]['_FillValue'] = int_missing_value VarAttrs[('latitude', 'MetaData')]['_FillValue'] = float_missing_value VarAttrs[('longitude', 'MetaData')]['_FillValue'] = float_missing_value - VarAttrs[('altitude', 'MetaData')]['_FillValue'] = float_missing_value + VarAttrs[('height', 'MetaData')]['_FillValue'] = float_missing_value # final write to IODA file writer.BuildIoda(obs_data, VarDims, VarAttrs, GlobalAttrs) @@ -153,10 +162,13 @@ def get_meta_data(bufr): hour = codes_get(bufr, 'hour') minute = codes_get(bufr, 'minute') second = codes_get(bufr, 'second') # non-integer value + second = round(second) - # should really add seconds - dtg = ("%4i-%.2i-%.2iT%.2i:%.2i:00Z" % (year, month, day, hour, minute)) - profile_meta_data['datetime'] = datetime.strptime(dtg, "%Y-%m-%dT%H:%M:%SZ") + # get string date, translate to a datetime object, then offset from epoch + dtg = ("%4i-%.2i-%.2iT%.2i:%.2i:%.2iZ" % (year, month, day, hour, minute, second)) + this_datetime = 
datetime.strptime(dtg, "%Y-%m-%dT%H:%M:%SZ") + time_offset = round((this_datetime - epoch).total_seconds()) + profile_meta_data['dateTime'] = np.int64(time_offset) return profile_meta_data @@ -189,12 +201,12 @@ def get_obs_data(bufr, profile_meta_data, add_qc, record_number=None): # ! Bit 3=Rising Occulation (1=rising; 0=setting) # ! Bit 4=Excess Phase non-nominal # ! Bit 5=Bending Angle non-nominal - i_non_nominal = get_normalized_bit(profile_meta_data['qualityFlag'], bit_index=16-1) - i_phase_non_nominal = get_normalized_bit(profile_meta_data['qualityFlag'], bit_index=16-4) - i_bang_non_nominal = get_normalized_bit(profile_meta_data['qualityFlag'], bit_index=16-5) - iasc = get_normalized_bit(profile_meta_data['qualityFlag'], bit_index=16-3) + i_non_nominal = get_normalized_bit(profile_meta_data['qualityFlags'], bit_index=16-1) + i_phase_non_nominal = get_normalized_bit(profile_meta_data['qualityFlags'], bit_index=16-4) + i_bang_non_nominal = get_normalized_bit(profile_meta_data['qualityFlags'], bit_index=16-5) + iasc = get_normalized_bit(profile_meta_data['qualityFlags'], bit_index=16-3) # add rising/setting (ascending/descending) bit - obs_data[('ascending_flag', 'MetaData')] = np.array(np.repeat(iasc, krepfac[0]), dtype=ioda_int_type) + obs_data[('satelliteAscendingFlag', 'MetaData')] = np.array(np.repeat(iasc, krepfac[0]), dtype=ioda_int_type) # print( " ... 
RO QC flags: %i %i %i %i" % (i_non_nominal, i_phase_non_nominal, i_bang_non_nominal, iasc) ) @@ -203,9 +215,9 @@ def get_obs_data(bufr, profile_meta_data, add_qc, record_number=None): return {} # value, ob_error, qc - obs_data[('bending_angle', "ObsValue")] = assign_values(bang) - obs_data[('bending_angle', "ObsError")] = assign_values(bang_err) - obs_data[('bending_angle', "PreQC")] = np.full(krepfac[0], 0, dtype=ioda_int_type) + obs_data[('bendingAngle', "ObsValue")] = assign_values(bang) + obs_data[('bendingAngle', "ObsError")] = assign_values(bang_err) + obs_data[('bendingAngle', "PreQC")] = np.full(krepfac[0], 0, dtype=ioda_int_type) # (geometric) height is read as integer but expected as float in output height = codes_get_array(bufr, 'height', ktype=float) @@ -216,31 +228,32 @@ def get_obs_data(bufr, profile_meta_data, add_qc, record_number=None): refrac_conf = codes_get_array(bufr, 'percentConfidence')[sum(krepfac[:1])+1:sum(krepfac[:2])+1] # value, ob_error, qc - obs_data[('refractivity', "ObsValue")] = assign_values(refrac) - obs_data[('refractivity', "ObsError")] = assign_values(refrac_err) - obs_data[('refractivity', "PreQC")] = np.full(krepfac[0], 0, dtype=ioda_int_type) + obs_data[('atmosphericRefractivity', "ObsValue")] = assign_values(refrac) + obs_data[('atmosphericRefractivity', "ObsError")] = assign_values(refrac_err) + obs_data[('atmosphericRefractivity', "PreQC")] = np.full(krepfac[0], 0, dtype=ioda_int_type) meta_data_types = def_meta_types() obs_data[('latitude', 'MetaData')] = assign_values(lats) obs_data[('longitude', 'MetaData')] = assign_values(lons) - obs_data[('impact_parameter', 'MetaData')] = assign_values(impact) - obs_data[('altitude', 'MetaData')] = assign_values(height) + obs_data[('impactParameterRO', 'MetaData')] = assign_values(impact) + obs_data[('height', 'MetaData')] = assign_values(height) for k, v in profile_meta_data.items(): - if type(v) is int: + if type(v) is np.int64: + obs_data[(k, 'MetaData')] = np.array(np.repeat(v, 
krepfac[0]), dtype=np.int64) + elif type(v) is int: obs_data[(k, 'MetaData')] = np.array(np.repeat(v, krepfac[0]), dtype=ioda_int_type) elif type(v) is float: obs_data[(k, 'MetaData')] = np.array(np.repeat(v, krepfac[0]), dtype=ioda_float_type) - else: # something else (datetime for instance) - string_array = np.repeat(v.strftime("%Y-%m-%dT%H:%M:%SZ"), krepfac[0]) - obs_data[(k, 'MetaData')] = string_array.astype(object) + else: # something else (what do we do with it) + print(f"Found neither float nor int, type={type(v)}; skipping") # set record number (multi file procesing will change this) if record_number is None: nrec = 1 else: nrec = record_number - obs_data[('record_number', 'MetaData')] = np.array(np.repeat(nrec, krepfac[0]), dtype=ioda_int_type) + obs_data[('sequenceNumber', 'MetaData')] = np.array(np.repeat(nrec, krepfac[0]), dtype=ioda_int_type) # get derived profiles geop = codes_get_array(bufr, 'geopotentialHeight')[:-1] @@ -250,10 +263,10 @@ def get_obs_data(bufr, profile_meta_data, add_qc, record_number=None): prof_conf = codes_get_array(bufr, 'percentConfidence')[sum(krepfac[:2])+1:sum(krepfac)+1] # Compute impact height - obs_data[('impact_height', 'MetaData')] = \ - obs_data[('impact_parameter', 'MetaData')] - \ - obs_data[('geoid_height_above_reference_ellipsoid', 'MetaData')] - \ - obs_data[('earth_radius_of_curvature', 'MetaData')] + obs_data[('impactHeightRO', 'MetaData')] = \ + obs_data[('impactParameterRO', 'MetaData')] - \ + obs_data[('geoidUndulation', 'MetaData')] - \ + obs_data[('earthRadiusCurvature', 'MetaData')] if add_qc: good = quality_control(profile_meta_data, height, lats, lons) @@ -272,8 +285,8 @@ def quality_control(profile_meta_data, heights, lats, lons): # bad radius or # large geoid undulation - if (profile_meta_data['earth_radius_of_curvature'] > 6450000.) or (profile_meta_data['earth_radius_of_curvature'] < 6250000.)
or \ - (abs(profile_meta_data['geoid_height_above_reference_ellipsoid']) > 200): + if (profile_meta_data['earthRadiusCurvature'] > 6450000.) or (profile_meta_data['earthRadiusCurvature'] < 6250000.) or \ + (abs(profile_meta_data['geoidUndulation']) > 200): good = [] # bad profile return good @@ -282,16 +295,16 @@ def quality_control(profile_meta_data, heights, lats, lons): def def_meta_data(): meta_data_keys = { - "qualityFlag": 'radioOccultationDataQualityFlags', - "geoid_height_above_reference_ellipsoid": 'geoidUndulation', - "sensor_azimuth_angle": 'bearingOrAzimuth', - "time": 'timeIncrement', - "earth_radius_of_curvature": 'earthLocalRadiusOfCurvature', - "occulting_sat_id": 'satelliteIdentifier', - "occulting_sat_is": 'satelliteInstruments', - "process_center": 'centre', - "reference_sat_id": 'platformTransmitterIdNumber', - "gnss_sat_class": 'satelliteClassification', + "qualityFlags": 'radioOccultationDataQualityFlags', + "geoidUndulation": 'geoidUndulation', + "sensorAzimuthAngle": 'bearingOrAzimuth', + # "timeIncrement": 'timeIncrement', + "earthRadiusCurvature": 'earthLocalRadiusOfCurvature', + "satelliteIdentifier": 'satelliteIdentifier', + "satelliteInstrument": 'satelliteInstruments', + "dataProviderOrigin": 'centre', + "satelliteTransmitterId": 'platformTransmitterIdNumber', + "satelliteConstellationRO": 'satelliteClassification', } return meta_data_keys @@ -302,18 +315,18 @@ def def_meta_types(): meta_data_types = { "latitude": "float", "longitude": "float", - "datetime": "string", - 'impact_parameter': 'float', - 'impact_height': 'float', + "dateTime": "long", + 'impactParameterRO': 'float', + 'impactHeightRO': 'float', 'height': 'float', - "qualityFlag": 'integer', - "geoid_height_above_reference_ellipsoid": 'float', - "earth_radius_of_curvature": 'float', - "occulting_sat_id": 'integer', - "occulting_sat_is": 'integer', - "process_center": 'string', - "reference_sat_id": 'integer', - "gnss_sat_class": 'integer', + "qualityFlags": 'integer', + 
"geoidUndulation": 'float', + "earthRadiusCurvature": 'float', + "satelliteIdentifier": 'integer', + "satelliteInstrument": 'integer', + "dataProviderOrigin": 'string', + "satelliteTransmitterId": 'integer', + "satelliteConstellationRO": 'integer', } return meta_data_types @@ -347,11 +360,13 @@ def concat_obs_dict(obs_data, append_obs_data): else: if obs_data[gv_key].dtype == float: fill_data = np.repeat(float_missing_value, append_length, dtype=ioda_float_type) + elif obs_data[gv_key].dtype == np.int64: + fill_data = np.repeat(long_missing_value, append_length, dtype=np.int64) elif obs_data[gv_key].dtype == int: fill_data = np.repeat(int_missing_value, append_length, dtype=ioda_int_type) elif obs_data[gv_key].dtype == object: - # string type, extend with empty strings - fill_data = np.repeat("", append_length, dtype=object) + # string type, extend with string missing value + fill_data = np.repeat(string_missing_value, append_length, dtype=object) obs_data[gv_key] = np.append(obs_data[gv_key], fill_data) diff --git a/src/goes/goes.py b/src/goes/goes.py index b1095e901..cfe4ac1b0 100644 --- a/src/goes/goes.py +++ b/src/goes/goes.py @@ -2,8 +2,8 @@ # goes.py # # This class loads, calculates, filters, and makes accessible the variables and attributes required by the -# GoesConverter class for a single GOES-16 or GOES-17 LB1 ABI channel (1-16). The brightness temperature and reflectance -# factor calculations used in this class are derived from sections 3.4.1.2 and 3.4.1.3 in the +# GoesConverter class for a single GOES-16 or GOES-17 LB1 ABI channel (1-16). The brightness temperature and albedo +# calculations used in this class are derived from sections 3.4.1.2 and 3.4.1.3 in the # "GOES-R Advanced Baseline Imager (ABI) Algorithm Theoretical Basis Document For Cloud and Moisture Imagery Product # (CMIP)" Version 3.0 July 30, 2012 (https://www.star.nesdis.noaa.gov/goesr/docs/ATBD/Imagery.pdf). 
The calculations for # the propagation of standard error are from section 2.5.5 of the "NIST/SEMATECH e-Handbook of Statistical Methods" @@ -184,7 +184,7 @@ def _load_rad_data_array(self): def _create_obsvalue_rf_data_array(self): """ - Creates a local data array variable containing the calculated obsvalue reflectance factor data + Creates a local data array variable containing the calculated obsvalue albedo data after fill value filtering by the DQF flags. """ self._obsvalue_rf_data_array = self._rad_data_array * self._kappa0 @@ -199,7 +199,7 @@ def _create_obsvalue_bt_data_array(self): def _create_obserror_rf_data_array(self): """ - Creates a local data array variable containing the calculated obserror reflectance factor data + Creates a local data array variable containing the calculated obserror albedo data after fill value filtering by the DQF flags. """ sqrt_comp = np.power(self._kappa0, 2) * np.power(self._std_dev_radiance_value_of_valid_pixels, 2) @@ -225,7 +225,7 @@ def get_input_file_path(self): def get_obsvalue_rf_data_array(self): """ - Returns the obsvalue reflectance factor data array. + Returns the obsvalue albedo data array. """ return self._obsvalue_rf_data_array @@ -237,7 +237,7 @@ def get_obsvalue_bt_data_array(self): def get_obserror_rf_data_array(self): """ - Returns the obserror reflectance factor data array. + Returns the obserror albedo data array. 
""" return self._obserror_rf_data_array diff --git a/src/goes/goes_converter.py b/src/goes/goes_converter.py index ae30730f4..c858a6572 100755 --- a/src/goes/goes_converter.py +++ b/src/goes/goes_converter.py @@ -17,24 +17,24 @@ # /MetaData/dateTime # /MetaData/latitude -> units # /MetaData/longitude -> units -# /MetaData/elevation_angle -> units -# /MetaData/scan_angle -> units -# /MetaData/scan_position -# /MetaData/sensor_azimuth_angle -> units -# /MetaData/sensor_view_angle -> units -# /MetaData/sensor_zenith_angle -> units -# /MetaData/solar_azimuth_angle -> units -# /MetaData/solar_zenith_angle -> units -# /MetaData/sensor_channel -# /ObsError/reflectance_factor or /ObsError/brightness_temperature -# /ObsValue/reflectance_factor or /ObsValue/brightness_temperature -# /ObsError/brightness_temperature -> units -# /ObsValue/brightness_temperature -> units -# /PreQC/reflectance_factor or /PreQC/brightness_temperature -# /PreQC/reflectance_factor -> flag_values or /PreQC/brightness_temperature -> flag_values -# /PreQC/reflectance_factor -> flag_meanings or /PreQC/brightness_temperature -> flag_meanings -# /nchans -# /nlocs +# /MetaData/sensorElevationAngle -> units +# /MetaData/sensorScanAngle -> units +# /MetaData/sensorScanPosition +# /MetaData/sensorAzimuthAngle -> units +# /MetaData/sensorViewAngle -> units +# /MetaData/sensorZenithAngle -> units +# /MetaData/solarAzimuthAngle -> units +# /MetaData/solarZenithAngle -> units +# /MetaData/sensorChannelNumber +# /ObsError/albedo or /ObsError/brightnessTemperature +# /ObsValue/albedo or /ObsValue/brightnessTemperature +# /ObsError/brightnessTemperature -> units +# /ObsValue/brightnessTemperature -> units +# /PreQC/albedo or /PreQC/brightnessTemperature +# /PreQC/albedo -> flag_values or /PreQC/brightnessTemperature -> flag_values +# /PreQC/albedo -> flag_meanings or /PreQC/brightnessTemperature -> flag_meanings +# /Channel +# /Location # import os @@ -56,9 +56,9 @@ def __init__(self, input_file_paths, 
latlon_file_path, output_file_path_rf, outp Constructor input_file_paths - A list of the absolute paths to all 16 ABI channels from the same hour latlon_file_path - The path to an existing GoesLatLon file or if it does not exist the path to write the file - output_file_path_rf - The path to write the IODAv2 reflectance factor data file + output_file_path_rf - The path to write the IODAv2 albedo (reflectance factor) data file output_file_path_bt - The path to write the IODAv2 brightness temperature data file - include_rf - Boolean value indicating whether to create the reflectance factor output data file: False (default) + include_rf - Boolean value indicating whether to create the albedo output data file: False (default) resolution - The resolution in km: 8 (default), 4, 8, 16, 32, 64 """ self._input_file_paths = input_file_paths @@ -92,7 +92,7 @@ def _check_arguments(self): def _initialize(self): """ - Create two local dictionaries contained the Goes class instances for reflectance factor (ABI channels 1-6) + Create two local dictionaries containing the Goes class instances for albedo (ABI channels 1-6) and brightness temperature (ABI channels 7-16). This function also assigns the file path for a template GOES file from ABI channel 7.
""" @@ -161,7 +161,7 @@ def _create_metadata_latitude_variable(self, output_dataset): output_dataset - A netCDF Dataset object """ latitude_data_array = self._latlon_dataset['MetaData'].variables['latitude'][:].real - output_dataset.createVariable('/MetaData/latitude', 'f4', 'nlocs', fill_value=-999) + output_dataset.createVariable('/MetaData/latitude', 'f4', 'Location', fill_value=-999) output_dataset['/MetaData/latitude'][:] = latitude_data_array output_dataset['/MetaData/latitude'].setncattr('units', 'degrees_north') @@ -171,72 +171,72 @@ def _create_metadata_longitude_variable(self, output_dataset): output_dataset - A netCDF4 Dataset object """ longitude_data_array = self._latlon_dataset['MetaData'].variables['longitude'][:].real - output_dataset.createVariable('/MetaData/longitude', 'f4', 'nlocs', fill_value=-999) + output_dataset.createVariable('/MetaData/longitude', 'f4', 'Location', fill_value=-999) output_dataset['/MetaData/longitude'][:] = longitude_data_array output_dataset['/MetaData/longitude'].setncattr('units', 'degrees_east') def _create_metadata_scan_angle_variable(self, output_dataset): """ - Creates the /MetaData/scan_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorScanAngle variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ - scan_angle_data_array = self._latlon_dataset['MetaData'].variables['scan_angle'][:].real - output_dataset.createVariable('/MetaData/scan_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/scan_angle'][:] = scan_angle_data_array - output_dataset['/MetaData/scan_angle'].setncattr('units', 'degrees') + scan_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorScanAngle'][:].real + output_dataset.createVariable('/MetaData/sensorScanAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorScanAngle'][:] = scan_angle_data_array + output_dataset['/MetaData/sensorScanAngle'].setncattr('units', 'degrees') def _create_metadata_elevation_angle_variable(self, output_dataset): """ - Creates the /MetaData/elevation_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorElevationAngle variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ - elevation_angle_data_array = self._latlon_dataset['MetaData'].variables['elevation_angle'][:].real - output_dataset.createVariable('/MetaData/elevation_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/elevation_angle'][:] = elevation_angle_data_array - output_dataset['/MetaData/elevation_angle'].setncattr('units', 'degrees') + elevation_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorElevationAngle'][:].real + output_dataset.createVariable('/MetaData/sensorElevationAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorElevationAngle'][:] = elevation_angle_data_array + output_dataset['/MetaData/sensorElevationAngle'].setncattr('units', 'degrees') def _create_metadata_scan_position_variable(self, output_dataset): """ - Creates the /MetaData/scan_position variable in an output netCDF4 dataset. + Creates the /MetaData/sensorScanPosition variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ - scan_position_data_array = self._latlon_dataset['MetaData'].variables['scan_position'][:].real - output_dataset.createVariable('/MetaData/scan_position', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/scan_position'][:] = scan_position_data_array + scan_position_data_array = self._latlon_dataset['MetaData'].variables['sensorScanPosition'][:].real + output_dataset.createVariable('/MetaData/sensorScanPosition', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorScanPosition'][:] = scan_position_data_array def _create_metadata_sensor_zenith_angle_variable(self, output_dataset): """ - Creates the /MetaData/sensor_zenith_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorZenithAngle variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ - sensor_zenith_angle_data_array = self._latlon_dataset['MetaData'].variables['sensor_zenith_angle'][:].real - output_dataset.createVariable('/MetaData/sensor_zenith_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/sensor_zenith_angle'][:] = sensor_zenith_angle_data_array - output_dataset['/MetaData/sensor_zenith_angle'].setncattr('units', 'degrees') + sensor_zenith_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorZenithAngle'][:].real + output_dataset.createVariable('/MetaData/sensorZenithAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorZenithAngle'][:] = sensor_zenith_angle_data_array + output_dataset['/MetaData/sensorZenithAngle'].setncattr('units', 'degrees') def _create_metadata_sensor_azimuth_angle_variable(self, output_dataset): """ - Creates the /MetaData/sensor_azimuth_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorAzimuthAngle variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ - sensor_azimuth_angle_data_array = self._latlon_dataset['MetaData'].variables['sensor_azimuth_angle'][:].real - output_dataset.createVariable('/MetaData/sensor_azimuth_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/sensor_azimuth_angle'][:] = sensor_azimuth_angle_data_array - output_dataset['/MetaData/sensor_azimuth_angle'].setncattr('units', 'degrees') + sensor_azimuth_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorAzimuthAngle'][:].real + output_dataset.createVariable('/MetaData/sensorAzimuthAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorAzimuthAngle'][:] = sensor_azimuth_angle_data_array + output_dataset['/MetaData/sensorAzimuthAngle'].setncattr('units', 'degrees') def _create_metadata_sensor_view_angle_variable(self, output_dataset): """ - Creates the /MetaData/sensor_view_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorViewAngle variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ - sensor_view_angle_data_array = self._latlon_dataset['MetaData'].variables['sensor_view_angle'][:].real - output_dataset.createVariable('/MetaData/sensor_view_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/sensor_view_angle'][:] = sensor_view_angle_data_array - output_dataset['/MetaData/sensor_view_angle'].setncattr('units', 'degrees') + sensor_view_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorViewAngle'][:].real + output_dataset.createVariable('/MetaData/sensorViewAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorViewAngle'][:] = sensor_view_angle_data_array + output_dataset['/MetaData/sensorViewAngle'].setncattr('units', 'degrees') def _create_metadata_solar_zenith_angle_variable(self, output_dataset): """ - Creates the /MetaData/solar_zenith_angle variable in an output netCDF4 dataset. 
+ Creates the /MetaData/solarZenithAngle variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ dataset = Dataset(self._input_file_path_template, 'r') @@ -269,13 +269,13 @@ def _create_metadata_solar_zenith_angle_variable(self, output_dataset): dataset.close() dataset_latlon.close() solar_zenith_angle_data_array = np.nan_to_num(solar_zenith_angle_data_array, nan=-999) - output_dataset.createVariable('/MetaData/solar_zenith_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/solar_zenith_angle'][:] = solar_zenith_angle_data_array - output_dataset['/MetaData/solar_zenith_angle'].setncattr('units', 'degrees') + output_dataset.createVariable('/MetaData/solarZenithAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/solarZenithAngle'][:] = solar_zenith_angle_data_array + output_dataset['/MetaData/solarZenithAngle'].setncattr('units', 'degrees') def _create_metadata_solar_azimuth_angle_variable(self, output_dataset): """ - Creates the /MetaData/solar_azimuth_angle variable in an output netCDF4 dataset. + Creates the /MetaData/solarAzimuthAngle variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ dataset = Dataset(self._input_file_path_template, 'r') @@ -299,20 +299,20 @@ def _create_metadata_solar_azimuth_angle_variable(self, output_dataset): dataset.close() dataset_latlon.close() solar_azimuth_angle_data_array = np.nan_to_num(solar_azimuth_angle_data_array, nan=-999) - output_dataset.createVariable('/MetaData/solar_azimuth_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/solar_azimuth_angle'][:] = solar_azimuth_angle_data_array - output_dataset['/MetaData/solar_azimuth_angle'].setncattr('units', 'degrees') + output_dataset.createVariable('/MetaData/solarAzimuthAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/solarAzimuthAngle'][:] = solar_azimuth_angle_data_array + output_dataset['/MetaData/solarAzimuthAngle'].setncattr('units', 'degrees') - def _create_nlocs_dimension(self, output_dataset): + def _create_location_dimension(self, output_dataset): """ - Creates the nlocs dimension in an output netCDF4 dataset. + Creates the Location dimension in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ - nlocs = self._latlon_dataset.dimensions['nlocs'].size - output_dataset.createDimension('nlocs', nlocs) - output_dataset.createVariable('nlocs', 'i4', 'nlocs') - output_dataset.variables['nlocs'].setncattr('suggested_chunk_dim', nlocs) - output_dataset.variables['nlocs'][:] = np.arange(1, nlocs + 1, 1, dtype='int32') + Location = self._latlon_dataset.dimensions['Location'].size + output_dataset.createDimension('Location', Location) + output_dataset.createVariable('Location', 'i4', 'Location') + output_dataset.variables['Location'].setncattr('suggested_chunk_dim', round(Location*0.01)) + output_dataset.variables['Location'][:] = np.arange(1, Location + 1, 1, dtype='int32') @staticmethod def _create_groups(output_dataset): @@ -326,28 +326,28 @@ def _create_groups(output_dataset): output_dataset.createGroup('PreQC') @staticmethod - def _create_nchans_dimension(output_dataset, nchans): + def _create_channel_dimension(output_dataset, Channel): """ - Creates the nchans dimension in an output netCDF4 dataset. + Creates the Channel dimension in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object - nchans - An integer indicating the number of nchans: 6 (for reflectance factor) + Channel - An integer indicating the number of Channel: 6 (for albedo) or 10 (for brightness temperature) """ - output_dataset.createDimension('nchans', nchans) - output_dataset.createVariable('nchans', 'i4', 'nchans') - if nchans == 6: - output_dataset.variables['nchans'][:] = [1, 2, 3, 4, 5, 6] - elif nchans == 10: - output_dataset.variables['nchans'][:] = [7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + output_dataset.createDimension('Channel', Channel) + output_dataset.createVariable('Channel', 'i4', 'Channel') + if Channel == 6: + output_dataset.variables['Channel'][:] = [1, 2, 3, 4, 5, 6] + elif Channel == 10: + output_dataset.variables['Channel'][:] = [7, 8, 9, 10, 11, 12, 13, 14, 15, 16] @staticmethod def _create_metadata_sensor_channel_variable(output_dataset): """ - Creates the /MetaData/sensor_channel variable in an output netCDF4 dataset. + Creates the /MetaData/sensorChannelNumber variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ - output_dataset.createVariable('/MetaData/sensor_channel', 'i4', 'nchans') - output_dataset['/MetaData/sensor_channel'][:] = output_dataset['nchans'][:] + output_dataset.createVariable('/MetaData/sensorChannelNumber', 'i4', 'Channel') + output_dataset['/MetaData/sensorChannelNumber'][:] = output_dataset['Channel'][:] @staticmethod def _create_root_group_attributes(output_dataset, resolution, platform_id): @@ -361,16 +361,16 @@ def _create_root_group_attributes(output_dataset, resolution, platform_id): output_dataset.setncattr('platform_identifier', platform_id) @staticmethod - def _get_nlocs(dataset): + def _get_Location(dataset): """ - Returns the nlocs dimension size for the provided netCDF4 Dataset. - dataset - the dataset to extract the nlocs size + Returns the Location dimension size for the provided netCDF4 Dataset. 
+ dataset - the dataset to extract the Location size """ - return dataset.dimensions['nlocs'].size + return dataset.dimensions['Location'].size - def _create_preqc_reflectance_factor_variable(self, output_dataset): + def _create_preqc_albedo_variable(self, output_dataset): """ - Creates the /PreQC/reflectance_factor variable variable and associated attributes in an output netCDF4 dataset. + Creates the /PreQC/albedo variable and associated attributes in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -382,16 +382,16 @@ def _create_preqc_reflectance_factor_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/PreQC/reflectance_factor', 'i4', ('nlocs', 'nchans'), fill_value=-999) - output_dataset['/PreQC/reflectance_factor'][:] = data_array - output_dataset['/PreQC/reflectance_factor'].setncattr('flag_values', '0,1,2,3') - output_dataset['/PreQC/reflectance_factor'].setncattr('flag_meanings', - 'good_pixel_qf,conditionally_usable_pixel_qf,' - 'out_of_range_pixel_qf,no_value_pixel_qf') + output_dataset.createVariable('/PreQC/albedo', 'i4', ('Location', 'Channel'), fill_value=-999) + output_dataset['/PreQC/albedo'][:] = data_array + output_dataset['/PreQC/albedo'].setncattr('flag_values', '0,1,2,3') + output_dataset['/PreQC/albedo'].setncattr('flag_meanings', + 'good_pixel_qf,conditionally_usable_pixel_qf,' + 'out_of_range_pixel_qf,no_value_pixel_qf') def _create_preqc_brightness_temperature_variable(self, output_dataset): """ - Creates the /PreQC/brightness_temperature variable and associated attributes in an output netCDF4 dataset. + Creates the /PreQC/brightnessTemperature variable and associated attributes in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -403,17 +403,17 @@ def _create_preqc_brightness_temperature_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/PreQC/brightness_temperature', 'i4', ('nlocs', 'nchans'), + output_dataset.createVariable('/PreQC/brightnessTemperature', 'i4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/PreQC/brightness_temperature'][:] = data_array - output_dataset['/PreQC/brightness_temperature'].setncattr('flag_values', '0,1,2,3') - output_dataset['/PreQC/brightness_temperature'].setncattr('flag_meanings', - 'good_pixel_qf,conditionally_usable_pixel_qf,' - 'out_of_range_pixel_qf,no_value_pixel_qf') + output_dataset['/PreQC/brightnessTemperature'][:] = data_array + output_dataset['/PreQC/brightnessTemperature'].setncattr('flag_values', '0,1,2,3') + output_dataset['/PreQC/brightnessTemperature'].setncattr('flag_meanings', + 'good_pixel_qf,conditionally_usable_pixel_qf,' + 'out_of_range_pixel_qf,no_value_pixel_qf') - def _create_obsvalue_reflectance_factor_variable(self, output_dataset): + def _create_obsvalue_albedo_variable(self, output_dataset): """ - Creates the /ObsValue/reflectance_factor variable in an output netCDF4 dataset. + Creates the /ObsValue/albedo variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -425,13 +425,13 @@ def _create_obsvalue_reflectance_factor_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/ObsValue/reflectance_factor', 'f4', ('nlocs', 'nchans'), + output_dataset.createVariable('/ObsValue/albedo', 'f4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/ObsValue/reflectance_factor'][:] = data_array + output_dataset['/ObsValue/albedo'][:] = data_array def _create_obsvalue_brightness_temperature_variable(self, output_dataset): """ - Creates the /ObsValue/brightness_temperature variable in an output netCDF4 dataset. + Creates the /ObsValue/brightnessTemperature variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -443,14 +443,14 @@ def _create_obsvalue_brightness_temperature_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/ObsValue/brightness_temperature', 'f4', ('nlocs', 'nchans'), + output_dataset.createVariable('/ObsValue/brightnessTemperature', 'f4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/ObsValue/brightness_temperature'][:] = data_array - output_dataset['/ObsValue/brightness_temperature'].setncattr('units', 'K') + output_dataset['/ObsValue/brightnessTemperature'][:] = data_array + output_dataset['/ObsValue/brightnessTemperature'].setncattr('units', 'K') - def _create_obserror_reflectance_factor_variable(self, output_dataset): + def _create_obserror_albedo_variable(self, output_dataset): """ - Creates the /ObsError/reflectance_factor variable in an output netCDF4 dataset. + Creates the /ObsError/albedo variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -462,13 +462,13 @@ def _create_obserror_reflectance_factor_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/ObsError/reflectance_factor', 'f4', ('nlocs', 'nchans'), + output_dataset.createVariable('/ObsError/albedo', 'f4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/ObsError/reflectance_factor'][:] = data_array + output_dataset['/ObsError/albedo'][:] = data_array def _create_obserror_brightness_temperature_variable(self, output_dataset): """ - Creates the /ObsError/brightness_temperature variable in an output netCDF4 dataset. + Creates the /ObsError/brightnessTemperature variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -480,10 +480,10 @@ def _create_obserror_brightness_temperature_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/ObsError/brightness_temperature', 'f4', ('nlocs', 'nchans'), + output_dataset.createVariable('/ObsError/brightnessTemperature', 'f4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/ObsError/brightness_temperature'][:] = data_array - output_dataset['/ObsError/brightness_temperature'].setncattr('units', 'K') + output_dataset['/ObsError/brightnessTemperature'][:] = data_array + output_dataset['/ObsError/brightnessTemperature'].setncattr('units', 'K') def _create_metadata_time_variable(self, output_dataset): """ @@ -495,9 +495,9 @@ def _create_metadata_time_variable(self, output_dataset): time_offset = round((self._start_date - epoch).total_seconds()) # seconds since epoch. 
start_date = self._start_date.strftime('%Y-%m-%dT%H:%M:%SZ') output_dataset.setncattr("date_time", start_date) - datetime_array = np.full(self._get_nlocs(output_dataset), np.int64(time_offset)) + datetime_array = np.full(self._get_Location(output_dataset), np.int64(time_offset)) - output_dataset.createVariable('/MetaData/dateTime', 'i8', 'nlocs') + output_dataset.createVariable('/MetaData/dateTime', 'i8', 'Location') output_dataset['/MetaData/dateTime'][:] = datetime_array output_dataset['/MetaData/dateTime'].setncattr('units', iso8601_string) @@ -513,7 +513,7 @@ def _load_all_goes(self): def convert(self): """ - Creates the reflectance factor (if include_rf is True) and brightness temperature IODAv2 data files. + Creates the albedo (if include_rf is True) and brightness temperature IODAv2 data files. This functions also checks for the existence and nadir change of the GoesLatLon data file. """ self._initialize() @@ -538,8 +538,8 @@ def _convert_bt(self): dataset = Dataset(self._output_file_path_bt, 'w') GoesConverter._create_groups(dataset) GoesConverter._create_root_group_attributes(dataset, self._resolution, self._platform_id) - GoesConverter._create_nchans_dimension(dataset, 10) - self._create_nlocs_dimension(dataset) + GoesConverter._create_channel_dimension(dataset, 10) + self._create_location_dimension(dataset) GoesConverter._create_metadata_sensor_channel_variable(dataset) self._create_metadata_latitude_variable(dataset) self._create_metadata_longitude_variable(dataset) @@ -559,13 +559,13 @@ def _convert_bt(self): def _convert_rf(self): """ - Creates the reflectance factor IODAv2 data file. + Creates the albedo IODAv2 data file. 
""" dataset = Dataset(self._output_file_path_rf, 'w') GoesConverter._create_groups(dataset) GoesConverter._create_root_group_attributes(dataset, self._resolution, self._platform_id) - GoesConverter._create_nchans_dimension(dataset, 6) - self._create_nlocs_dimension(dataset) + GoesConverter._create_channel_dimension(dataset, 6) + self._create_location_dimension(dataset) GoesConverter._create_metadata_sensor_channel_variable(dataset) self._create_metadata_latitude_variable(dataset) self._create_metadata_longitude_variable(dataset) @@ -578,7 +578,7 @@ def _convert_rf(self): self._create_metadata_solar_zenith_angle_variable(dataset) self._create_metadata_solar_azimuth_angle_variable(dataset) self._create_metadata_time_variable(dataset) - self._create_obsvalue_reflectance_factor_variable(dataset) - self._create_obserror_reflectance_factor_variable(dataset) - self._create_preqc_reflectance_factor_variable(dataset) + self._create_obsvalue_albedo_variable(dataset) + self._create_obserror_albedo_variable(dataset) + self._create_preqc_albedo_variable(dataset) dataset.close() diff --git a/src/goes/goes_latlon.py b/src/goes/goes_latlon.py index 6309eecdb..781055c02 100644 --- a/src/goes/goes_latlon.py +++ b/src/goes/goes_latlon.py @@ -13,17 +13,17 @@ # # /GROUP/VARIABLE -> ATTRIBUTE # -# /MetaData/elevation_angle -# /MetaData/scan_angle -# /MetaData/scan_position -# /MetaData/sensor_azimuth_angle -> units -# /MetaData/sensor_view_angle -> units -# /MetaData/sensor_zenith_angle -> units +# /MetaData/sensorElevationAngle +# /MetaData/sensorScanAngle +# /MetaData/sensorScanPosition +# /MetaData/sensorAzimuthAngle +# /MetaData/sensorViewAngle +# /MetaData/sensorZenithAngle # /MetaData/latitude # /MetaData/latitude -> lat_nadir # /MetaData/longitude # /MetaData/longitude -> lon_nadir -# /nlocs +# /Location # from netCDF4 import Dataset from numpy import ma @@ -192,31 +192,31 @@ def create(self): sensor_zenith_angle, sensor_azimuth_angle, sensor_view_angle = \ 
self._calc_sensor_zenith_azimuth_view_angles(latitude, longitude) latlon_dataset = Dataset(self._latlon_file_path, 'w') - nlocs = len(latitude) - latlon_dataset.createDimension('nlocs', nlocs) - latlon_dataset.createVariable('nlocs', 'i4', ('nlocs',)) - latlon_dataset.variables['nlocs'][:] = np.arange(1, nlocs + 1, 1, dtype='int32') + Location = len(latitude) + latlon_dataset.createDimension('Location', Location) + latlon_dataset.createVariable('Location', 'i4', ('Location',)) + latlon_dataset.variables['Location'][:] = np.arange(1, Location + 1, 1, dtype='int32') nonexistent_indices = len(self._lat_fill_value_index_array[0]) latlon_dataset.createDimension('nonexistent_indices', nonexistent_indices) latlon_dataset.createVariable('nonexistent_indices', 'i4', ('nonexistent_indices',)) latlon_dataset.variables['nonexistent_indices'][:] = np.array(self._lat_fill_value_index_array[0]) latlon_dataset.createGroup('MetaData') - latlon_dataset.createVariable('/MetaData/latitude', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/longitude', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/scan_angle', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/scan_position', 'i4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/elevation_angle', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/sensor_zenith_angle', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/sensor_azimuth_angle', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/sensor_view_angle', 'f4', 'nlocs', fill_value=-999) + latlon_dataset.createVariable('/MetaData/latitude', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/longitude', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorScanAngle', 'f4', 'Location', fill_value=-999) + 
latlon_dataset.createVariable('/MetaData/sensorScanPosition', 'i4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorElevationAngle', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorZenithAngle', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorAzimuthAngle', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorViewAngle', 'f4', 'Location', fill_value=-999) latlon_dataset['/MetaData/latitude'][:] = latitude latlon_dataset['/MetaData/longitude'][:] = longitude - latlon_dataset['/MetaData/scan_angle'][:] = scan_angle - latlon_dataset['/MetaData/scan_position'][:] = scan_position - latlon_dataset['/MetaData/elevation_angle'][:] = elevation_angle - latlon_dataset['/MetaData/sensor_zenith_angle'][:] = sensor_zenith_angle - latlon_dataset['/MetaData/sensor_azimuth_angle'][:] = sensor_azimuth_angle - latlon_dataset['/MetaData/sensor_view_angle'][:] = sensor_view_angle + latlon_dataset['/MetaData/sensorScanAngle'][:] = scan_angle + latlon_dataset['/MetaData/sensorScanPosition'][:] = scan_position + latlon_dataset['/MetaData/sensorElevationAngle'][:] = elevation_angle + latlon_dataset['/MetaData/sensorZenithAngle'][:] = sensor_zenith_angle + latlon_dataset['/MetaData/sensorAzimuthAngle'][:] = sensor_azimuth_angle + latlon_dataset['/MetaData/sensorViewAngle'][:] = sensor_view_angle lat_nadir, lon_nadir = self._get_nadir_attributes() latlon_dataset['/MetaData/latitude'].setncattr('lat_nadir', lat_nadir) latlon_dataset['/MetaData/longitude'].setncattr('lon_nadir', lon_nadir) diff --git a/src/gsi-ncdiag/gsi_ncdiag.py b/src/gsi-ncdiag/gsi_ncdiag.py index a216d4c64..8f149aa12 100644 --- a/src/gsi-ncdiag/gsi_ncdiag.py +++ b/src/gsi-ncdiag/gsi_ncdiag.py @@ -80,38 +80,60 @@ # 132 are dropsondes } +# WMO satellite IDs +wmo_satid = { + 'aqua': 784, + 'metop-a': 4, + 'metop-b': 3, + 'metop-c': 5, + 'noaa-15': 206, + 'noaa-18': 209, + 'noaa-19': 223, + 
'noaa-20': 225, + 'npp': 224, +} + +wmo_instid = { + 'amsua': 570, + 'atms': 621, + 'iasi': 221, + 'abi': 617, + 'omps': 947, + 'sbuv': 956, +} + # LocKeyList = { 'gsiname':('IODAname','dtype')} all_LocKeyList = { - 'Station_ID': ('station_id', 'string'), - 'Time': ('datetime', 'string'), + 'Station_ID': ('stationIdentification', 'string'), + 'Time': ('dateTime', 'string'), 'time': ('time', 'string'), - 'ascending_flag': ('ascending_flag', 'string'), - 'earth_radius_of_curvature': ('earth_radius_of_curvature', 'string'), - 'reference_sat_id': ('reference_sat_id', 'string'), - 'occulting_sat_id': ('occulting_sat_id', 'string'), - 'record_number': ('record_number', 'string'), - 'geoid_height_above_reference_ellipsoid': ('geoid_height_above_reference_ellipsoid', 'string'), - 'gnss_sat_class': ('gnss_sat_class', 'string'), - 'impact_height': ('impact_height', 'string'), - 'impact_parameter': ('impact_parameter', 'string'), + 'ascending_flag': ('satelliteAscendingFlag', 'integer'), + 'earth_radius_of_curvature': ('earthRadiusCurvature', 'float'), + 'reference_sat_id': ('satelliteTransmitterId', 'integer'), + 'occulting_sat_id': ('satelliteIdentifier', 'integer'), + 'record_number': ('sequenceNumber', 'integer'), + 'geoid_height_above_reference_ellipsoid': ('geoidUndulation', 'float'), + 'gnss_sat_class': ('satelliteConstellationRO', 'string'), + 'impact_height': ('impactHeightRO', 'float'), + 'impact_parameter': ('impactParameterRO', 'float'), 'Latitude': ('latitude', 'float'), 'Longitude': ('longitude', 'float'), - 'Station_Elevation': ('station_elevation', 'float'), - 'Pressure': ('air_pressure', 'float'), + 'Station_Elevation': ('stationElevation', 'float'), + 'Pressure': ('pressure', 'float'), 'Height': ('height', 'float'), - 'Elevation': ('height_above_mean_sea_level', 'float'), - 'Obs_Time': ('datetime', 'string'), - 'Scan_Position': ('scan_position', 'float'), - 'Sat_Zenith_Angle': ('sensor_zenith_angle', 'float'), - 'Sat_Azimuth_Angle': ('sensor_azimuth_angle', 
'float'), - 'Sol_Zenith_Angle': ('solar_zenith_angle', 'float'), - 'Sol_Azimuth_Angle': ('solar_azimuth_angle', 'float'), - 'Scan_Angle': ('sensor_view_angle', 'float'), - 'Surface_type': ('surface_type', 'integer'), - 'Dominant_Sfc_Type': ('surface_type', 'integer'), + 'Elevation': ('heightOfSurface', 'float'), + 'Obs_Time': ('dateTime', 'string'), + 'Scan_Position': ('sensorScanPosition', 'float'), + 'Sat_Zenith_Angle': ('sensorZenithAngle', 'float'), + 'Sat_Azimuth_Angle': ('sensorAzimuthAngle', 'float'), + 'Sol_Zenith_Angle': ('solarZenithAngle', 'float'), + 'Sol_Azimuth_Angle': ('solarAzimuthAngle', 'float'), + 'Scan_Angle': ('sensorViewAngle', 'float'), + 'Surface_type': ('surfaceQualifier', 'integer'), + 'Dominant_Sfc_Type': ('surfaceQualifier', 'integer'), 'MODIS_deep_blue_flag': ('modis_deep_blue_flag', 'integer'), - 'Reference_Pressure': ('air_pressure', 'float'), - 'Solar_Zenith_Angle': ('solar_zenith_angle', 'float'), + 'Reference_Pressure': ('pressure', 'float'), + 'Solar_Zenith_Angle': ('solarZenithAngle', 'float'), 'Row_Anomaly_Index': ('row_anomaly_index', 'float'), 'TopLevelPressure': ('top_level_pressure', 'float'), 'BottomLevelPressure': ('bottom_level_pressure', 'float'), @@ -121,29 +143,29 @@ 'YoverR': ('radar_tilt', 'float'), 'ZoverR': ('radar_dir3', 'float'), 'Vterminal': ('vterminal', 'float'), - 'SWCM_spec_type': ('satwind_spectral_type', 'float'), - 'SAZA_sat_zen_angle': ('sensor_zenith_angle', 'float'), - 'SCCF_chan_wavelen': ('channel_wavelength', 'float'), - 'QI_with_FC': ('satwind_quality_ind_with_fc', 'float'), - 'QI_without_FC': ('satwind_quality_ind_no_fc', 'float'), - 'Data_Vertical_Velocity': ('data_vertical_velocity', 'float'), - 'LaunchTime': ('LaunchTime', 'float'), + 'SWCM_spec_type': ('windComputationMethod', 'integer'), + 'SAZA_sat_zen_angle': ('sensorZenithAngle', 'float'), + 'SCCF_chan_wavelen': ('sensorCentralFrequency', 'double'), + 'QI_with_FC': ('percentConfidenceWithForecast', 'float'), + 'QI_without_FC': 
('percentConfidenceWithoutForecast', 'float'), + 'Data_Vertical_Velocity': ('windUpward', 'float'), + 'LaunchTime': ('releaseTime', 'float'), } checkuv = { - "eastward_wind": "u", - "northward_wind": "v", + "windEastward": "u", + "windNorthward": "v", } conv_varnames = { - "tv": ["virtual_temperature"], - "tsen": ["air_temperature"], - "uv": ["eastward_wind", "northward_wind"], - "ps": ["surface_pressure"], - "q": ["specific_humidity"], - "bend": ["bending_angle"], - "refract": ["refractivity"], - "sst": ["sea_surface_temperature"], + "tv": ["virtualTemperature"], + "tsen": ["airTemperature"], + "uv": ["windEastward", "windNorthward"], + "ps": ["stationPressure"], + "q": ["specificHumidity"], + "bend": ["bendingAngle"], + "refract": ["atmosphericRefractivity"], + "sst": ["seaSurfaceTemperature"], } conv_gsivarnames = { @@ -159,7 +181,6 @@ gsi_add_vars_allsky = { 'Observation_Type': 'ObsType', - 'Observation_Subtype': 'ObsSubType', 'Prep_Use_Flag': 'PreUseFlag', 'Analysis_Use_Flag': 'GsiUseFlag', 'Nonlinear_QC_Rel_Wgt': 'GsiQCWeight', @@ -189,7 +210,6 @@ gsi_add_vars = { 'ObsBias': 'GsiObsBias', 'Observation_Type': 'ObsType', - 'Observation_Subtype': 'ObsSubType', 'Prep_Use_Flag': 'PreUseFlag', 'Analysis_Use_Flag': 'GsiUseFlag', 'Nonlinear_QC_Rel_Wgt': 'GsiQCWeight', @@ -222,7 +242,6 @@ gsi_add_vars_uv = { 'Observation_Type': 'ObsType', - 'Observation_Subtype': 'ObsSubType', 'Prep_Use_Flag': 'PreUseFlag', 'Analysis_Use_Flag': 'GsiUseFlag', 'Nonlinear_QC_Rel_Wgt': 'GsiQCWeight', @@ -254,7 +273,9 @@ 'PreUseFlag', 'GsiUseFlag', 'ObsType', - 'ObsSubType', + 'Observation_Subtype', + 'observationTypeNum', + 'observationSubTypeNum', 'Analysis_Use_Flag', ] @@ -298,14 +319,14 @@ ] chan_metadata_dict = { - 'sensor_chan': 'sensor_channel', - 'use_flag': 'gsi_use_flag', - 'frequency': 'sensor_band_central_radiation_frequency', - 'polarization': 'polarization', - 'wavenumber': 'sensor_band_central_radiation_wavenumber', - 'error_variance': 'ObsError', - 'mean_lapse_rate': 
'mean_lapse_rate', + 'sensor_chan': 'sensorChannelNumber', + 'frequency': 'sensorCentralFrequency', + 'polarization': 'sensorPolarizationDirection', + 'wavenumber': 'sensorCentralWavenumber', } +# 'use_flag': 'gsi_use_flag', +# 'error_variance': 'ObsError', +# 'mean_lapse_rate': 'lapseRate', chan_metadata_int = [ 'sensor_channel', @@ -423,35 +444,54 @@ 'ompslpnc', 'mls55', ] + # units # 'IODA/UFO_variable_name': 'Unit' units_values = { - 'virtual_temperature': 'K', + 'latitude': 'degree_north', + 'longitude': 'degree_east', + 'stationElevation': 'm', + 'height': 'm', + 'pressure': 'Pa', + 'airTemperature': 'K', + 'airTemperatureAt2M': 'K', + 'virtualTemperature': 'K', + 'virtualTemperatureAt2M': 'K', + 'specificHumidity': 'kg kg-1', + 'specificHumidityAt2M': 'kg kg-1', + 'waterVaporMixingRatio': 'kg kg-1', + 'relativeHumidity': '1', + 'windNorthward': 'm s-1', + 'windEastward': 'm s-1', + 'windSpeed': 'm s-1', + 'windDirection': 'degree', + 'windSpeedAt10M': 'm s-1', + 'windDirectionAt10M': 'degree', + 'geopotentialHeight': 'm', + 'height': 'm', + 'heightOfSurface': 'm', + 'stationPressure': 'Pa', + 'seaSurfaceTemperature': 'K', 'atmosphere_ln_pressure_coordinate': '1', - 'specific_humidity': '1', - 'northward_wind': 'm s-1', - 'eastward_wind': 'm s-1', - 'geopotential_height': 'm', + 'air_pressure_levels': 'Pa', 'geopotential_height_levels': 'm', - 'height_above_mean_sea_level': 'm', - 'surface_pressure': 'Pa', - 'sea_surface_temperature': 'K', 'surface_temperature': 'K', 'surface_roughness_length': 'm', 'surface_geometric_height': 'm', - 'surface_altitude': 'm', - 'geoid_height_above_reference_ellipsoid': 'Meters', - 'earth_radius_of_curvature': 'Meters', - 'impact_height': 'Meters', - 'impact_parameter': 'Meters', - 'land_area_fraction': '1', - 'air_temperature': 'K', - 'air_pressure': 'Pa', - 'air_pressure_levels': 'Pa', - 'humidity_mixing_ratio': '1', - 'mole_fraction_of_carbon_dioxide_in_air': '1', - 'mole_fraction_of_ozone_in_air': '1', - 
'integrated_layer_ozone_in_air': 'DU', + 'surface_geopotential_height': 'm', + 'geoidUndulation': 'm', + 'earthRadiusCurvature': 'm', + 'impactHeightRO': 'm', + 'impactParameterRO': 'm', + 'bendingAngle': 'radians', + 'landAreaFraction': '1', + 'waterAreaFraction': '1', + 'seaIceFraction': '1', + 'surface_snow_area_fraction': '1', + 'vegetation_area_fraction': '1', + 'ozoneLayer': 'DU', + 'ozoneTotal': 'DU', + 'carbondioxideLayer': '1', 'atmosphere_mass_content_of_cloud_liquid_water': 'kg m-2', 'effective_radius_of_cloud_liquid_water_particle': 'm', 'atmosphere_mass_content_of_cloud_ice': 'kg m-2', @@ -460,26 +500,17 @@ 'effective_radius_of_rain_particle': '1e-6 m', 'mass_content_of_snow_in_atmosphere_layer': 'kg m-2', 'effective_radius_of_snow_particle': '1e-6 m', - 'water_area_fraction': '1', - 'land_area_fraction': '1', - 'ice_area_fraction': '1', - 'surface_snow_area_fraction': '1', - 'vegetation_area_fraction': '1', 'surface_temperature_where_sea': 'K', 'surface_temperature_where_land': 'K', 'surface_temperature_where_ice': 'K', 'surface_temperature_where_snow': 'K', - 'surface_wind_speed': 'm s-1', - 'wind_speed': 'm s-1', - 'surface_wind_from_direction': 'degree', 'leaf_area_index': '1', 'volume_fraction_of_condensed_water_in_soil': '1', - 'soil_temperature': 'K', + 'soilTemperature': 'K', 'land_type_index_NPOESS': '1', 'vegetation_type_index': '1', 'soil_type': '1', 'surface_snow_thickness': 'm', - 'humidity_mixing_ratio': '1', 'wind_reduction_factor_at_10m': '1', 'sulf': '1', 'bc1': '1', @@ -495,33 +526,32 @@ 'seas2': '1', 'seas3': '1', 'seas4': '1', - 'latitude': 'degrees_north', - 'longitude': 'degrees_east', - 'station_elevation': 'm', - 'height': 'm', - 'height_above_mean_sea_level': 'm', - 'scan_position': '1', - 'sensor_zenith_angle': 'degree', - 'sensor_azimuth_angle': 'degree', - 'solar_zenith_angle': 'degree', - 'solar_azimuth_angle': 'degree', + 'sensorViewAngle': 'degree', + 'sensorZenithAngle': 'degree', + 'sensorAzimuthAngle': 'degree', + 
'solarZenithAngle': 'degree', + 'solarAzimuthAngle': 'degree', 'modis_deep_blue_flag': '1', 'row_anomaly_index': '1', 'total_ozone_error_flag': '1', 'profile_ozone_error_flag': '1', 'top_level_pressure': 'Pa', 'bottom_level_pressure': 'Pa', - 'tropopause_pressure': 'Pa', + 'tropopausePressure': 'Pa', 'brightness_temperature_jacobian_surface_temperature': '1', 'brightness_temperature_jacobian_surface_emissivity': 'K', 'brightness_temperature_jacobian_air_temperature': '1', - 'brightness_temperature_jacobian_humidity_mixing_ratio': 'K/g/Kg ', + 'brightness_temperature_jacobian_humidity_mixing_ratio': 'K/g/kg ', 'optical_thickness_of_atmosphere_layer': '1', - 'clw_retrieved_from_observation': 'kg/m/m', - 'clw_retrieved_from_background': 'kg/m/m', - 'scat_retrieved_from_observation': '1', - 'LaunchTime': 'hours', - 'bending_angle': 'radians', + 'cloudWaterRetrievedFromObservation': 'kg m-2', + 'cloudWaterRetrievedFromSimulatedObservation': 'kg m-2', + 'scatteringIndexRetrievedFromObservation': '1', + 'releaseTime': 'seconds since 1970-01-01T00:00:00Z', + 'sensorCentralWavenumber': 'm-1', + 'sensorCentralFrequency': 'Hz', + 'brightnessTemperature': 'K', + 'percentConfidenceWithForecast': 'percent', + 'percentConfidenceWithoutForecast': 'percent', } # @TestReference @@ -535,10 +565,10 @@ test_fields = {} test_fields_allsky = { - 'clwp_amsua': ('clw_retrieved_from_observation', 'float'), - 'clw_guess_retrieval': ('clw_retrieved_from_background', 'float'), + 'clwp_amsua': ('cloudWaterRetrievedFromObservation', 'float'), + 'clw_guess_retrieval': ('cloudWaterRetrievedFromSimulatedObservation', 'float'), 'clw_symmetric_amount': ('clw_symmetric_amount', 'float'), - 'scat_amsua': ('scat_retrieved_from_observation', 'float'), + 'scat_amsua': ('scatteringIndexRetrievedFromObservation', 'float'), } test_fields_with_channels_allsky = { 'Hydrometeor_Affected_Channels': ('Hydrometeor_Affected_Channels', 'float'), @@ -691,7 +721,6 @@ def toGeovals(self, OutDir, clobber=True): # for 
isort in range(len(record_number_new)): # print('isort, idx, record_number, record_number_new',isort,\ # record_number[isort], record_number_new[isort] ) - # set up output file ncout = nc.Dataset(outname, 'w', format='NETCDF4') ncout.setncattr( @@ -699,11 +728,11 @@ def toGeovals(self, OutDir, clobber=True): self.validtime.strftime("%Y%m%d%H"))) # get nlocs nlocs = np.sum(idx) - ncout.createDimension("nlocs", nlocs) + ncout.createDimension("Location", nlocs) # other dims if (v != "sst"): ncout.createDimension( - "nlevs", self.df.dimensions["atmosphere_pressure_coordinate_arr_dim"].size) + "Layer", self.df.dimensions["atmosphere_pressure_coordinate_arr_dim"].size) ncout.createDimension( "ninterfaces", self.df.dimensions["atmosphere_pressure_coordinate_interface_arr_dim"].size) dimname = "Station_ID_maxstrlen" @@ -719,7 +748,7 @@ def toGeovals(self, OutDir, clobber=True): vdata = np.frombuffer(vdata, dtype=var.dtype) vdata = np.reshape(vdata, dims) if vname in geovals_metadata_dict.keys(): - dims = ("nlocs",) + var.dimensions[1:] + dims = ("Location",) + var.dimensions[1:] var_out = ncout.createVariable(geovals_metadata_dict[vname], vdata.dtype, dims) if v == 'bend': var_out[...] = vdata[idx_sorted, ...] @@ -727,19 +756,18 @@ def toGeovals(self, OutDir, clobber=True): var_out[...] = vdata[idx, ...] if vname in geovals_vars.keys(): if (len(var.dimensions) == 1): - dims = ("nlocs",) + dims = ("Location",) else: if (vname == "atmosphere_pressure_coordinate_interface") or ( vname == "geopotential_height_levels"): - dims = ("nlocs", "ninterfaces") + dims = ("Location", "ninterfaces") else: - dims = ("nlocs", "nlevs") + dims = ("Location", "Layer") var_out = ncout.createVariable(geovals_vars[vname], vdata.dtype, dims) if v == 'bend': var_out[...] = vdata[idx_sorted, ...] else: var_out[...] = vdata[idx, ...] 
- ncout.close() def toIODAobs(self, OutDir, clobber=True, platforms=None): @@ -800,16 +828,16 @@ def toIODAobs(self, OutDir, clobber=True, platforms=None): varDict[value]['valKey'] = value, iconv.OvalName() varDict[value]['errKey'] = value, iconv.OerrName() varDict[value]['qcKey'] = value, iconv.OqcName() - VarDims[value] = ['nlocs'] + VarDims[value] = ['Location'] varAttrs[varDict[value]['valKey']]['units'] = units_values[value] varAttrs[varDict[value]['errKey']]['units'] = units_values[value] - varAttrs[varDict[value]['qcKey']]['units'] = 'unitless' varAttrs[varDict[value]['valKey']]['coordinates'] = 'longitude latitude' varAttrs[varDict[value]['errKey']]['coordinates'] = 'longitude latitude' varAttrs[varDict[value]['qcKey']]['coordinates'] = 'longitude latitude' varAttrs[varDict[value]['valKey']]['_FillValue'] = self.FLOAT_FILL varAttrs[varDict[value]['errKey']]['_FillValue'] = self.FLOAT_FILL varAttrs[varDict[value]['qcKey']]['_FillValue'] = self.INT_FILL + if v == 'bend': # sort record_number record_number = self.var('record_number')[idx] @@ -897,6 +925,8 @@ def toIODAobs(self, OutDir, clobber=True, platforms=None): else: tmp[tmp > 4e8] = self.FLOAT_FILL outdata[gvname] = tmp + if gvname[1] != 'PreUseFlag' and gvname[1] != 'ObsType' and gvname[1] != 'GsiUseFlag' and gvname[1] != 'GsiQCWeight': + varAttrs[gvname]['units'] = units_values[gvname[0]] # create a GSI effective QC variable gsiqcname = outvars[o], 'GsiEffectiveQC' errname = outvars[o], 'GsiFinalObsError' @@ -904,7 +934,6 @@ def toIODAobs(self, OutDir, clobber=True, platforms=None): gsiqc[outdata[errname] == 1e8] = 1 gsiqc[outdata[(outvars[o], "GsiUseFlag")] < 0] = 1 outdata[gsiqcname] = gsiqc.astype(np.int32) - varAttrs[gsiqcname]['units'] = 'unitless' varAttrs[gsiqcname]['_FillValue'] = self.INT_FILL # store values in output data dictionary outdata[varDict[outvars[o]]['valKey']] = obsdata @@ -913,6 +942,7 @@ def toIODAobs(self, OutDir, clobber=True, platforms=None): for lvar in LocVars: 
loc_mdata_name = all_LocKeyList[lvar][0] + dtype = all_LocKeyList[lvar][1] if lvar == 'Station_ID': tmp = self.var(lvar)[idx] StationIDs = [bytes((b''.join(tmp[a])).decode('iso-8859-1').encode('utf8')) for a in range(len(tmp))] @@ -922,7 +952,7 @@ def toIODAobs(self, OutDir, clobber=True, platforms=None): obstimes = [self.validtime + dt.timedelta(hours=float(tmp[a])) for a in range(len(tmp))] obstimes = [a.strftime("%Y-%m-%dT%H:%M:%SZ") for a in obstimes] outdata[(loc_mdata_name, 'MetaData')] = np.array(obstimes, dtype=object) - varAttrs[(loc_mdata_name, 'MetaData')]['units'] = 'UTC Time in YYYY-MM-DDTHH:MM:SSZ format' + # varAttrs[(loc_mdata_name, 'MetaData')]['units'] = 'seconds since 1970-01-01T00:00:00Z' # special logic for unit conversions depending on GSI version elif lvar == 'Pressure': tmpps = self.var(lvar)[idx] @@ -958,9 +988,15 @@ def toIODAobs(self, OutDir, clobber=True, platforms=None): outdata[(loc_mdata_name, 'MetaData')] = self.var(lvar)[idx] varAttrs[(loc_mdata_name, 'MetaData')]['units'] = 'm' else: - outdata[(loc_mdata_name, 'MetaData')] = self.var(lvar)[idx] + if dtype == 'integer': + outdata[(loc_mdata_name, 'MetaData')] = self.var(lvar)[idx].astype(np.int32) + elif dtype == 'long': + outdata[(loc_mdata_name, 'MetaData')] = self.var(lvar)[idx].astype(np.int64) + else: + outdata[(loc_mdata_name, 'MetaData')] = self.var(lvar)[idx] if loc_mdata_name in units_values.keys(): varAttrs[(loc_mdata_name, 'MetaData')]['units'] = units_values[loc_mdata_name] + # put the TestReference fields in the structure for writing out for tvar in TestVars: if tvar in test_fields_: @@ -970,12 +1006,12 @@ def toIODAobs(self, OutDir, clobber=True, platforms=None): outdata[(test_mdata_name, 'TestReference')] = tmp # writer metadata - DimDict['nlocs'] = len(StationIDs) + DimDict['Location'] = len(StationIDs) writer = iconv.IodaWriter(outname, LocKeyList, DimDict) writer.BuildIoda(outdata, VarDims, varAttrs, globalAttrs) - print("ProcessedL %d Conventional obs processed 
to: %s" % (len(obsdata), outname)) + print("Processed %d Conventional obs processed to: %s" % (len(obsdata), outname)) def grabobsidx(obsdata, platform, var): @@ -1083,28 +1119,30 @@ def toGeovals(self, OutDir, clobber=True): ncout.setncattr("date_time", np.int32(self.validtime.strftime("%Y%m%d%H"))) ncout.setncattr("satellite", self.satellite) ncout.setncattr("sensor", self.sensor) + # get nlocs - nlocs = self.nobs / self.nchans - ncout.createDimension("nlocs", nlocs) + nlocs = int(self.nobs / self.nchans) + ncout.createDimension("Location", nlocs) + # other dims - ncout.createDimension("nlevs", self.df.dimensions["air_temperature_arr_dim"].size) - ncout.createDimension("nlevsp1", self.df.dimensions["air_pressure_levels_arr_dim"].size) + ncout.createDimension("Layer", self.df.dimensions["air_temperature_arr_dim"].size) + ncout.createDimension("Level", self.df.dimensions["air_pressure_levels_arr_dim"].size) for var in self.df.variables.values(): vname = var.name if vname in geovals_metadata_dict.keys(): - dims = ("nlocs",) + dims = ("Location",) var_out = ncout.createVariable(geovals_metadata_dict[vname], var.dtype, dims) vdata = var[:] vdata = vdata[::self.nchans] var_out[:] = vdata elif vname in geovals_vars.keys(): if (len(var.dimensions) == 1): - dims = ("nlocs",) + dims = ("Location",) elif "_levels" in vname: - dims = ("nlocs", "nlevsp1") + dims = ("Location", "Level") else: - dims = ("nlocs", "nlevs") + dims = ("Location", "Level") var_out = ncout.createVariable(geovals_vars[vname], var.dtype, dims) vdata = var[...] vdata = vdata[::self.nchans, ...] 
@@ -1138,16 +1176,18 @@ def toObsdiag(self, OutDir, clobber=True): # set up output file ncout = nc.Dataset(outname, 'w', format='NETCDF4') ncout.setncattr("date_time", np.int32(self.validtime.strftime("%Y%m%d%H"))) - ncout.setncattr("satellite", self.satellite) + ncout.setncattr("platform", self.satellite) ncout.setncattr("sensor", self.sensor) + # get nlocs - nlocs = self.nobs / self.nchans - ncout.createDimension("nlocs", nlocs) + nlocs = int(self.nobs / self.nchans) + ncout.createDimension("Location", nlocs) + # other dims - nlevs = self.df.dimensions["air_pressure_arr_dim"].size - nlevsp1 = self.df.dimensions["air_pressure_levels_arr_dim"].size + Layer = self.df.dimensions["air_pressure_arr_dim"].size + Level = self.df.dimensions["air_pressure_levels_arr_dim"].size - ncout.createDimension("nlevs", self.df.dimensions["air_pressure_arr_dim"].size) + ncout.createDimension("Layer", self.df.dimensions["air_pressure_arr_dim"].size) # get channel info and list chan_number = self.darr('sensor_chan') @@ -1161,7 +1201,7 @@ def toObsdiag(self, OutDir, clobber=True): for var in self.df.variables.values(): vname = var.name if vname in obsdiag_metadata_dict.keys(): - dims = ("nlocs",) + dims = ("Location",) var_out = ncout.createVariable(obsdiag_metadata_dict[vname], var.dtype, dims) vdata = var[:] vdata = vdata[::self.nchans] @@ -1169,7 +1209,7 @@ def toObsdiag(self, OutDir, clobber=True): elif vname in obsdiag_vars.keys(): # print("toObsdiag: var.shape = ", var.shape) if (len(var.dimensions) == 1): - dims = ("nlocs",) + dims = ("Location",) for c in range(len(chanlist)): var_name = obsdiag_vars[vname]+"_"+"{:d}".format(chanlist[c]) idx = chan_indx == c+1 @@ -1181,9 +1221,9 @@ def toObsdiag(self, OutDir, clobber=True): vdata = vdata[idx] var_out[:] = vdata elif "_levels" in vname: - dims = ("nlocs", "nlevsp1") + dims = ("Location", "Level") else: - dims = ("nlocs", "nlevs") + dims = ("Location", "Level") for c in range(len(chanlist)): var_name = 
obsdiag_vars[vname]+"_"+"{:d}".format(chanlist[c]) idx = chan_indx == c+1 @@ -1251,17 +1291,17 @@ def toIODAobs(self, OutDir, ObsBias, QCVars, TestRefs, clobber=True): chanlist = chan_number - value = "brightness_temperature" + value = "brightnessTemperature" varDict[value]['valKey'] = value, iconv.OvalName() varDict[value]['errKey'] = value, iconv.OerrName() varDict[value]['qcKey'] = value, iconv.OqcName() - VarDims[value] = ['nlocs', 'nchans'] + VarDims[value] = ['Location', 'Channel'] varAttrs[varDict[value]['valKey']]['units'] = 'K' varAttrs[varDict[value]['errKey']]['units'] = 'K' - varAttrs[varDict[value]['qcKey']]['units'] = 'unitless' - varAttrs[varDict[value]['valKey']]['coordinates'] = 'longitude latitude' - varAttrs[varDict[value]['errKey']]['coordinates'] = 'longitude latitude' - varAttrs[varDict[value]['qcKey']]['coordinates'] = 'longitude latitude' +# varAttrs[varDict[value]['qcKey']]['units'] = 'unitless' +# varAttrs[varDict[value]['valKey']]['coordinates'] = 'longitude latitude' +# varAttrs[varDict[value]['errKey']]['coordinates'] = 'longitude latitude' +# varAttrs[varDict[value]['qcKey']]['coordinates'] = 'longitude latitude' varAttrs[varDict[value]['valKey']]['_FillValue'] = self.FLOAT_FILL varAttrs[varDict[value]['errKey']]['_FillValue'] = self.FLOAT_FILL varAttrs[varDict[value]['qcKey']]['_FillValue'] = self.INT_FILL @@ -1285,12 +1325,13 @@ def toIODAobs(self, OutDir, ObsBias, QCVars, TestRefs, clobber=True): for vbc in valuebc: varDict[vbc]['bctKey'] = vbc, iconv.ObiastermName() varDict[vbc]['bcpKey'] = vbc, iconv.ObiaspredName() - VarDims[(vbc, 'MetaData')] = ['nlocs'] + VarDims[(vbc, 'MetaData')] = ['Location'] ibc += 1 obsdata = self.var('Observation') try: obserr = self.var('Input_Observation_Error') except IndexError: + # obserr = 1./self.var('Inverse_Observation_Error') obserr = np.repeat(self.var('error_variance'), nlocs, axis=0) obsqc = self.var('QC_Flag').astype(np.int32) if (ObsBias): @@ -1340,7 +1381,7 @@ def toIODAobs(self, OutDir, 
ObsBias, QCVars, TestRefs, clobber=True): obstimes = [self.validtime + dt.timedelta(hours=float(tmp[a])) for a in range(len(tmp))] obstimes = [a.strftime("%Y-%m-%dT%H:%M:%SZ") for a in obstimes] outdata[(loc_mdata_name, 'MetaData')] = np.array(obstimes, dtype=object) - varAttrs[(loc_mdata_name, 'MetaData')]['units'] = 'UTC Time in YYYY-MM-DDTHH:MM:SSZ format' + # varAttrs[(loc_mdata_name, 'MetaData')]['units'] = 'seconds since 1970-01-01T00:00:00Z' elif self.sensor == "gmi" and lvar in gmi_chan_dep_loc_vars: # Channels 1-9 tmp = self.var(lvar)[::nchans] @@ -1354,6 +1395,11 @@ def toIODAobs(self, OutDir, ObsBias, QCVars, TestRefs, clobber=True): outdata[(loc_mdata_name+'1', 'MetaData')] = tmp if loc_mdata_name in units_values.keys(): varAttrs[(loc_mdata_name+'1', 'MetaData')]['units'] = units_values[loc_mdata_name] + # tmp = self.var(lvar)[::nchans] + # tmp[tmp > 4e8] = self.FLOAT_FILL + # outdata[(loc_mdata_name, 'MetaData')] = tmp + # if loc_mdata_name in units_values.keys(): + # varAttrs[(loc_mdata_name, 'MetaData')]['units'] = units_values[loc_mdata_name] else: tmp = self.var(lvar)[::nchans] tmp[tmp > 4e8] = self.FLOAT_FILL @@ -1368,7 +1414,7 @@ def toIODAobs(self, OutDir, ObsBias, QCVars, TestRefs, clobber=True): tmp = self.var(tvar)[:] tmp[tmp > 4e8] = self.FLOAT_FILL outdata[test_mdata_name] = np.reshape(tmp, (nlocs, nchans)) - VarDims[test_mdata_name] = ['nlocs', 'nchans'] + VarDims[test_mdata_name] = ['Location', 'Channel'] if test_fields_with_channels_[tvar][0] in units_values.keys(): varAttrs[test_mdata_name]['units'] = units_values[test_fields_with_channels_[tvar][0]] @@ -1377,9 +1423,10 @@ def toIODAobs(self, OutDir, ObsBias, QCVars, TestRefs, clobber=True): tmp = self.var(tvar)[::nchans] tmp[tmp > 4e8] = self.FLOAT_FILL outdata[test_mdata_name] = tmp - VarDims[test_mdata_name] = ['nlocs'] + VarDims[test_mdata_name] = ['Location'] if test_fields_[tvar][0] in units_values.keys(): - varAttrs[test_mdata_name]['units'] = units_values[test_fields_[tvar][0]] 
+ if tvar != 'scat_amsua': + varAttrs[test_mdata_name]['units'] = units_values[test_fields_[tvar][0]] gsi_add_radvars = gsi_add_vars if (QCVars): @@ -1415,12 +1462,12 @@ def toIODAobs(self, OutDir, ObsBias, QCVars, TestRefs, clobber=True): tmp = tmp.astype(np.int32) else: tmp[tmp > 4e8] = self.FLOAT_FILL - gvname = "brightness_temperature", iodavar + gvname = "brightnessTemperature", iodavar outdata[gvname] = np.reshape(tmp, (nlocs, nchans)) - VarDims[gvname] = ['nlocs', 'nchans'] + VarDims[gvname] = ['Location', 'Channel'] # brightness temperature variables - value = 'brightness_temperature' + value = 'brightnessTemperature' obsdata[obsdata > 9e5] = self.FLOAT_FILL obsqc[obsdata > 9e5] = self.INT_FILL @@ -1428,14 +1475,25 @@ def toIODAobs(self, OutDir, ObsBias, QCVars, TestRefs, clobber=True): outdata[varDict[value]['valKey']] = np.reshape(obsdata, (nlocs, nchans)) outdata[varDict[value]['errKey']] = np.reshape(obserr, (nlocs, nchans)) outdata[varDict[value]['qcKey']] = np.reshape(obsqc.astype(np.int32), (nlocs, nchans)) - # create a GSI effective QC variable + + # create a GSI effective QC variable (group) gsiqcname = value, 'GsiEffectiveQC' errname = value, 'GsiFinalObsError' gsiqc = np.zeros_like(outdata[varDict[value]['valKey']]) gsiqc[outdata[errname] > 1e8] = 1 gsiqc[np.reshape(self.var('QC_Flag'), (nlocs, nchans)) < 0] = 1 outdata[gsiqcname] = gsiqc.astype(np.int32) - varAttrs[gsiqcname]['units'] = 'unitless' + + # create a GSI related variable (group) + varAttrs[errname]['units'] = 'K' + grpname = value, 'GsiBc' + varAttrs[grpname]['units'] = 'K' + grpname = value, 'GsiHofXBc' + varAttrs[grpname]['units'] = 'K' + grpname = value, 'GsiHofX' + varAttrs[grpname]['units'] = 'K' + grpname = value, 'GsiHofXClr' + varAttrs[grpname]['units'] = 'K' if (ObsBias): valuebc = [ @@ -1473,20 +1531,26 @@ def toIODAobs(self, OutDir, ObsBias, QCVars, TestRefs, clobber=True): outdata[(value2, 'MetaData')] = self.var(key).astype(np.int32) else: outdata[(value2, 'MetaData')] = 
self.var(key).astype(np.float32) - VarDims[(value2, 'MetaData')] = ['nchans'] + # Frequency units is GHz in CRTM/GSI + if value2 == 'sensorCentralFrequency': + outdata[(value2, 'MetaData')] = outdata[(value2, 'MetaData')]*1.e9 + # Wavenumber unit is cm-1 in CRTM/GSI + if value2 == 'sensorCentralWavenumber': + outdata[(value2, 'MetaData')] = outdata[(value2, 'MetaData')]*1.e2 + VarDims[(value2, 'MetaData')] = ['Channel'] if value2 in units_values.keys(): varAttrs[(value2, 'MetaData')]['units'] = units_values[value2] except IndexError: pass # global attributes - globalAttrs["satellite"] = self.satellite - globalAttrs["sensor"] = self.sensor + globalAttrs["platform"] = np.array([wmo_satid[self.satellite]], dtype=np.int32) + globalAttrs["sensor"] = np.array([wmo_instid[self.sensor]], dtype=np.int32) # set dimension lengths in the writer since we are bypassing # ExtractObsData - DimDict['nlocs'] = nlocs - DimDict['nchans'] = chanlist + DimDict['Location'] = nlocs + DimDict['Channel'] = chanlist writer = iconv.IodaWriter(outname, LocKeyList, DimDict) writer.BuildIoda(outdata, VarDims, varAttrs, globalAttrs) @@ -1559,13 +1623,15 @@ def toGeovals(self, OutDir, clobber=True): ncout.setncattr("date_time", np.int32(self.validtime.strftime("%Y%m%d%H"))) ncout.setncattr("satellite", self.satellite) ncout.setncattr("sensor", self.sensor) + # get nlocs nlocs = self.nobs - ncout.createDimension("nlocs", nlocs) + ncout.createDimension("Location", nlocs) + # other dims - ncout.createDimension("nlevs", self.df.dimensions["mole_fraction_of_ozone_in_air_arr_dim"].size) + ncout.createDimension("Layer", self.df.dimensions["mole_fraction_of_ozone_in_air_arr_dim"].size) if (self.sensor in oz_lay_sensors): - ncout.createDimension("nlevsp1", self.df.dimensions["air_pressure_levels_arr_dim"].size) + ncout.createDimension("Level", self.df.dimensions["air_pressure_levels_arr_dim"].size) for var in self.df.variables.values(): vname = var.name if vname in geovals_metadata_dict.keys(): @@ -1575,11 
+1641,11 @@ def toGeovals(self, OutDir, clobber=True): var_out[:] = vdata elif vname in geovals_vars.keys(): if (len(var.dimensions) == 1): - dims = ("nlocs",) + dims = ("Location",) elif "_levels" in vname: - dims = ("nlocs", "nlevsp1") + dims = ("Location", "Level") else: - dims = ("nlocs", "nlevs") + dims = ("Location", "Layer") var_out = ncout.createVariable(geovals_vars[vname], var.dtype, dims) vdata = var[...] var_out[...] = vdata @@ -1612,20 +1678,19 @@ def toIODAobs(self, OutDir, clobber=True): nlocs = self.nobs vname = "integrated_layer_ozone_in_air" - if (self.sensor in oz_lev_sensors): + if (self.sensor in oz_lay_sensors): vname = "mole_fraction_of_ozone_in_air" varDict[vname]['valKey'] = vname, iconv.OvalName() varDict[vname]['errKey'] = vname, iconv.OerrName() varDict[vname]['qcKey'] = vname, iconv.OqcName() - VarDims[vname] = ['nlocs'] + VarDims[vname] = ['Location'] if (self.sensor in oz_lev_sensors): varAttrs[varDict[vname]['valKey']]['units'] = 'mol mol-1' varAttrs[varDict[vname]['errKey']]['units'] = 'mol mol-1' else: varAttrs[varDict[vname]['valKey']]['units'] = 'DU' varAttrs[varDict[vname]['errKey']]['units'] = 'DU' - - varAttrs[varDict[vname]['qcKey']]['units'] = 'unitless' + # varAttrs[varDict[vname]['qcKey']]['units'] = 'unitless' varAttrs[varDict[vname]['valKey']]['_FillValue'] = self.FLOAT_FILL varAttrs[varDict[vname]['errKey']]['_FillValue'] = self.FLOAT_FILL varAttrs[varDict[vname]['qcKey']]['_FillValue'] = self.INT_FILL @@ -1634,26 +1699,27 @@ def toIODAobs(self, OutDir, clobber=True): try: tmp = self.var('Input_Observation_Error') except IndexError: - tmp = 1./self.var('Inverse_Observation_Error') + # tmp = 1./self.var('Inverse_Observation_Error') + tmp = np.repeat(self.var('error_variance'), nlocs, axis=0) tmp[tmp < self.EPSILON] = 0 obserr = tmp obserr[np.isinf(obserr)] = self.FLOAT_FILL obsqc = self.var('Analysis_Use_Flag').astype(np.int32) for lvar in LocVars: loc_mdata_name = all_LocKeyList[lvar][0] - if lvar == 'Time': + if lvar 
== 'Time' or lvar == 'time': tmp = self.var(lvar) obstimes = [self.validtime+dt.timedelta(hours=float(tmp[a])) for a in range(len(tmp))] obstimes = [a.strftime("%Y-%m-%dT%H:%M:%SZ") for a in obstimes] outdata[(loc_mdata_name, 'MetaData')] = np.array(obstimes, dtype=object) - varAttrs[(loc_mdata_name, 'MetaData')]['units'] = 'UTC Time in YYYY-MM-DDTHH:MM:SSZ format' + # varAttrs[(loc_mdata_name, 'MetaData')]['units'] = 'seconds since 1970-01-01T00:00:00Z' else: tmp = self.var(lvar) tmp[tmp > 4e8] = self.FLOAT_FILL outdata[(loc_mdata_name, 'MetaData')] = tmp if loc_mdata_name in units_values.keys(): varAttrs[(loc_mdata_name, 'MetaData')]['units'] = units_values[loc_mdata_name] - VarDims[(loc_mdata_name, 'MetaData')] = ['nlocs'] + VarDims[(loc_mdata_name, 'MetaData')] = ['Location'] for gsivar, iodavar in gsi_add_vars.items(): # some special actions need to be taken depending on var name... @@ -1684,12 +1750,12 @@ def toIODAobs(self, OutDir, clobber=True): outdata[varDict[vname]['errKey']] = obserr outdata[varDict[vname]['qcKey']] = obsqc - globalAttrs["satellite"] = self.satellite - globalAttrs["sensor"] = self.sensor + globalAttrs["platform"] = np.array([wmo_satid[self.satellite]], dtype=np.int32) + globalAttrs["sensor"] = np.array([wmo_instid[self.sensor]], dtype=np.int32) # set dimension lengths in the writer since we are bypassing # ExtractObsData - DimDict['nlocs'] = nlocs + DimDict['Location'] = nlocs writer = iconv.IodaWriter(outname, LocKeyList, DimDict) writer.BuildIoda(outdata, VarDims, varAttrs, globalAttrs) @@ -1759,24 +1825,24 @@ def toGeovals(self, OutDir, clobber=True): ncout.setncattr("date_time", np.int32(self.validtime.strftime("%Y%m%d%H"))) # get nlocs nlocs = self.nobs - ncout.createDimension("nlocs", nlocs) + ncout.createDimension("Location", nlocs) # other dims - ncout.createDimension("nlevs", self.df.dimensions["nlevs"].size) + ncout.createDimension("Layer", self.df.dimensions["Layer"].size) # ncout.createDimension("nlevsp1", 
self.df.dimensions["air_pressure_levels_arr_dim"].size) for var in self.df.variables.values(): vname = var.name if vname in geovals_metadata_dict.keys(): - dims = ("nlocs",) + dims = ("Location",) var_out = ncout.createVariable(geovals_metadata_dict[vname], var.dtype, dims) vdata = var[:] var_out[:] = vdata elif vname in geovals_vars.keys(): if (len(var.dimensions) == 1): - dims = ("nlocs",) + dims = ("Location",) elif "_levels" in vname: - dims = ("nlocs", "nlevsp1") + dims = ("Location", "Level") else: - dims = ("nlocs", "nlevs") + dims = ("Location", "Layer") var_out = ncout.createVariable(geovals_vars[vname], var.dtype, dims) vdata = var[...] var_out[...] = vdata @@ -1823,7 +1889,7 @@ def toIODAobs(self, OutDir, clobber=True): varDict[value]['valKey'] = value, iconv.OvalName() varDict[value]['errKey'] = value, iconv.OerrName() varDict[value]['qcKey'] = value, iconv.OqcName() - VarDims[value] = ['nlocs'] + VarDims[value] = ['Location'] varAttrs[varDict[value]['valKey']]['units'] = myunits varAttrs[varDict[value]['errKey']]['units'] = myunits varAttrs[varDict[value]['qcKey']]['units'] = 'unitless' @@ -1866,7 +1932,7 @@ def toIODAobs(self, OutDir, clobber=True): obstimes = [self.validtime+dt.timedelta(hours=float(tmp[a])) for a in range(len(tmp))] obstimes = [a.strftime("%Y-%m-%dT%H:%M:%SZ") for a in obstimes] outdata[(loc_mdata_name, 'MetaData')] = np.array(obstimes, dtype=object) - varAttrs[(loc_mdata_name, 'MetaData')]['units'] = 'UTC Time in YYYY-MM-DDTHH:MM:SSZ format' + # varAttrs[(loc_mdata_name, 'MetaData')]['units'] = 'seconds since 1970-01-01T00:00:00Z' else: tmp = self.var(lvar)[:] tmp[tmp > 4e8] = self.FLOAT_FILL @@ -1878,7 +1944,7 @@ def toIODAobs(self, OutDir, clobber=True): # set dimension lengths in the writer since we are bypassing # ExtractObsData - DimDict['nlocs'] = nlocs + DimDict['Location'] = nlocs writer = iconv.IodaWriter(outname, LocKeyList, DimDict) writer.BuildIoda(outdata, VarDims, varAttrs, globalAttrs) diff --git 
a/src/hdf5/atms_netcdf_hdf5_2ioda.py b/src/hdf5/atms_netcdf_hdf5_2ioda.py index 149bc99be..f97d3f368 100755 --- a/src/hdf5/atms_netcdf_hdf5_2ioda.py +++ b/src/hdf5/atms_netcdf_hdf5_2ioda.py @@ -5,7 +5,7 @@ """ import argparse -from datetime import datetime, timedelta +from datetime import datetime import glob # from concurrent.futures import ProcessPoolExecutor from pathlib import Path @@ -27,8 +27,18 @@ SNPP_WMO_sat_ID = 224 NOAA20_WMO_sat_ID = 225 NOAA21_WMO_sat_ID = 226 +ATMS_WMO_sensor_ID = 621 + +float_missing_value = iconv.get_default_fill_val(np.float32) +int_missing_value = iconv.get_default_fill_val(np.int32) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() GlobalAttrs = { + "sensor": str(ATMS_WMO_sensor_ID), "platformCommonName": "ATMS", "platformLongDescription": "ATMS Brightness Temperature Data", "sensorCentralFrequency": [23.8, @@ -40,7 +50,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "object") ] @@ -63,8 +73,11 @@ def main(args): # else: # obs_data = file_obs_data + WMO_sat_ID = get_WMO_satellite_ID(input_files[0]) + GlobalAttrs['platform'] = np.int32(WMO_sat_ID) for afile in input_files: file_obs_data = get_data_from_files(afile) + WMO_sat_ID = get_WMO_satellite_ID(afile) if not file_obs_data: print("INFO: non-nominal file skipping") continue @@ -72,51 +85,39 @@ def main(args): concat_obs_dict(obs_data, file_obs_data) else: obs_data = file_obs_data + if WMO_sat_ID != GlobalAttrs['platform']: + print(' ERROR: IODA and subsequent UFO expect individual files to be a single satellite and sensor ') + print(' .... initial file satellite: ', GlobalAttrs['platform']) + print(' ...... 
final file satellite: ', WMO_sat_ID) + sys.exit() -# V2 nlocs_int32 = np.array(len(obs_data[('latitude', 'MetaData')]), dtype='int32') - nlocs_int32 = np.array(len(obs_data[('latitude', 'MetaData')]), dtype='float32') # this is float32 in old convention - nlocs = nlocs_int32.item() - nchans = len(obs_data[('channelNumber', 'MetaData')]) + nlocs_int = np.array(len(obs_data[('latitude', metaDataName)]), dtype='int64') + nlocs = nlocs_int.item() + nchans = len(obs_data[('sensorChannelNumber', metaDataName)]) # prepare global attributes we want to output in the file, # in addition to the ones already loaded in from the input file - GlobalAttrs['date_time_string'] = dtg.strftime("%Y-%m-%dT%H:%M:%SZ") - date_time_int32 = np.array(int(dtg.strftime("%Y%m%d%H")), dtype='int32') - GlobalAttrs['date_time'] = date_time_int32.item() - GlobalAttrs['converter'] = os.path.basename(__file__) + GlobalAttrs['datetimeRange'] = np.array([obs_data[('dateTime', metaDataName)][0].strftime("%Y-%m-%dT%H:%M:%SZ"), + obs_data[('dateTime', metaDataName)][-1].strftime("%Y-%m-%dT%H:%M:%SZ")], dtype=object) + GlobalAttrs['datetimeReference'] = dtg.strftime("%Y-%m-%dT%H:%M:%SZ") # pass parameters to the IODA writer -# V2 'brightnessTemperature': ['nlocs', 'nchans'] VarDims = { - 'brightness_temperature': ['nlocs', 'nchans'], - 'channelNumber': ['nchans'], + 'brightnessTemperature': ['Location', 'Channel'], + 'sensorChannelNumber': ['Channel'], } DimDict = { - 'nlocs': nlocs, - 'nchans': obs_data[('channelNumber', 'MetaData')], + 'Location': nlocs, + 'Channel': obs_data[('sensorChannelNumber', metaDataName)], } writer = iconv.IodaWriter(output_filename, locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) -# V2 VarAttrs[('brightnessTemperature', 'ObsValue')]['units'] = 'K' -# V2 VarAttrs[('brightnessTemperature', 'ObsError')]['units'] = 'K' -# V2 VarAttrs[('brightnessTemperature', 'PreQC')]['units'] = 'unitless' - VarAttrs[('brightness_temperature', 
'ObsValue')]['units'] = 'K' - VarAttrs[('brightness_temperature', 'ObsError')]['units'] = 'K' - VarAttrs[('brightness_temperature', 'PreQC')]['units'] = 'unitless' - - missing_value = 9.96921e+36 - int_missing_value = -2147483647 -# V2 VarAttrs[('brightnessTemperature', 'ObsValue')]['_FillValue'] = missing_value -# V2 VarAttrs[('brightnessTemperature', 'ObsError')]['_FillValue'] = missing_value -# V2 VarAttrs[('brightnessTemperature', 'PreQC')]['_FillValue'] = int_missing_value - VarAttrs[('brightness_temperature', 'ObsValue')]['_FillValue'] = missing_value - VarAttrs[('brightness_temperature', 'ObsError')]['_FillValue'] = missing_value - VarAttrs[('brightness_temperature', 'PreQC')]['_FillValue'] = int_missing_value - -# there can be more than one input file -# GlobalAttrs['converter'] = os.path.basename(__file__) + + set_obspace_attributes(VarAttrs) + set_metadata_attributes(VarAttrs) + # final write to IODA file writer.BuildIoda(obs_data, VarDims, VarAttrs, GlobalAttrs) @@ -188,54 +189,44 @@ def get_data(f, g, obs_data): # 'spatial_lbl_len', 'subsat_lat', 'subsat_lon', 'sun_glint_dist', 'sun_glint_lat', 'sun_glint_lon', # 'surf_alt', 'surf_alt_sdev', 'utc_tuple', 'utc_tuple_lbl', 'utc_tuple_lbl_len', 'view_ang', 'warm_nedt', 'xtrack' - WMO_sat_ID = get_WMO_satellite_ID(f.filename) - # example: dimension ( 180, 96 ) == dimension( nscan, nbeam_pos ) try: nscans = np.shape(g['lat'])[0] nbeam_pos = np.shape(g['lat'])[1] - obs_data[('latitude', 'MetaData')] = np.array(g['lat'][:, :].flatten(), dtype='float32') - obs_data[('longitude', 'MetaData')] = np.array(g['lon'][:, :].flatten(), dtype='float32') - obs_data[('channelNumber', 'MetaData')] = np.array(g['channel'][:], dtype='int32') -# V2 obs_data[('fieldOfViewNumber', 'MetaData')] = np.tile(np.arange(nbeam_pos, dtype='int32') + 1, (nscans, 1)).flatten() - obs_data[('scan_position', 'MetaData')] = np.tile(np.arange(nbeam_pos, dtype='float32') + 1, (nscans, 1)).flatten() -# V2 obs_data[('solarZenithAngle', 
'MetaData')] = np.array(g['sol_zen'][:, :].flatten(), dtype='float32') - obs_data[('solar_zenith_angle', 'MetaData')] = np.array(g['sol_zen'][:, :].flatten(), dtype='float32') -# V2 obs_data[('solarAzimuthAngle', 'MetaData')] = np.array(g['sol_azi'][:, :].flatten(), dtype='float32') - obs_data[('solar_azimuth_angle', 'MetaData')] = np.array(g['sol_azi'][:, :].flatten(), dtype='float32') -# V2 obs_data[('sensorZenithAngle', 'MetaData')] = np.array(g['sat_zen'][:, :].flatten(), dtype='float32') - obs_data[('sensor_zenith_angle', 'MetaData')] = np.array(g['sat_zen'][:, :].flatten(), dtype='float32') -# V2 obs_data[('sensorAzimuthAngle', 'MetaData')] = np.array(g['sat_azi'][:, :].flatten(), dtype='float32') - obs_data[('sensor_azimuth_angle', 'MetaData')] = np.array(g['sat_azi'][:, :].flatten(), dtype='float32') - obs_data[('sensor_view_angle', 'MetaData')] = np.array(g['view_ang'][:, :].flatten(), dtype='float32') - nlocs = len(obs_data[('latitude', 'MetaData')]) - obs_data[('satelliteId', 'MetaData')] = np.full((nlocs), WMO_sat_ID, dtype='int32') - obs_data[('datetime', 'MetaData')] = np.array(get_string_dtg(g['obs_time_utc'][:, :, :]), dtype=object) + obs_data[('latitude', metaDataName)] = np.array(g['lat'][:, :].flatten(), dtype='float32') + obs_data[('longitude', metaDataName)] = np.array(g['lon'][:, :].flatten(), dtype='float32') + obs_data[('sensorChannelNumber', metaDataName)] = np.array(g['channel'][:], dtype='int32') + obs_data[('fieldOfViewNumber', metaDataName)] = np.tile(np.arange(nbeam_pos, dtype='int32') + 1, (nscans, 1)).flatten() + obs_data[('sensorScanPosition', metaDataName)] = np.tile(np.arange(nbeam_pos, dtype='int32') + 1, (nscans, 1)).flatten() + obs_data[('solarZenithAngle', metaDataName)] = np.array(g['sol_zen'][:, :].flatten(), dtype='float32') + obs_data[('solarAzimuthAngle', metaDataName)] = np.array(g['sol_azi'][:, :].flatten(), dtype='float32') + obs_data[('sensorZenithAngle', metaDataName)] = np.array(g['sat_zen'][:, :].flatten(), 
dtype='float32') + obs_data[('sensorAzimuthAngle', metaDataName)] = np.array(g['sat_azi'][:, :].flatten(), dtype='float32') + obs_data[('sensorViewAngle', metaDataName)] = np.array(g['view_ang'][:, :].flatten(), dtype='float32') + nlocs = len(obs_data[('latitude', metaDataName)]) + obs_data[('dateTime', metaDataName)] = np.array(get_observation_time(g['obs_time_utc'][:, :, :]), dtype='object') # BaseException is a catch-all mechamism except BaseException: # this section is for the NOAA CLASS files and need to be tested - obs_data[('latitude', 'MetaData')] = np.array(g['All_Data']['ATMS-SDR-GEO_All']['Latitude'][:, :].flatten(), dtype='float32') - obs_data[('longitude', 'MetaData')] = np.array(g['All_Data']['ATMS-SDR-GEO_All']['Longitude'][:, :].flatten(), dtype='float32') + obs_data[('latitude', metaDataName)] = np.array(g['All_Data']['ATMS-SDR-GEO_All']['Latitude'][:, :].flatten(), dtype='float32') + obs_data[('longitude', metaDataName)] = np.array(g['All_Data']['ATMS-SDR-GEO_All']['Longitude'][:, :].flatten(), dtype='float32') # example: dimension ( 180, 96, 22 ) == dimension( nscan, nbeam_pos, nchannel ) try: - nchans = len(obs_data[('channelNumber', 'MetaData')]) - nlocs = len(obs_data[('latitude', 'MetaData')]) -# V2 obs_data[('brightnessTemperature', "ObsValue")] = np.array(np.vstack(g['antenna_temp']), dtype='float32') -# V2 obs_data[('brightnessTemperature', "ObsError")] = np.full((nlocs, nchans), 5.0, dtype='float32') -# V2 obs_data[('brightnessTemperature', "PreQC")] = np.full((nlocs, nchans), 0, dtype='int32') - obs_data[('brightness_temperature', "ObsValue")] = np.array(np.vstack(g['antenna_temp']), dtype='float32') - obs_data[('brightness_temperature', "ObsError")] = np.full((nlocs, nchans), 5.0, dtype='float32') - obs_data[('brightness_temperature', "PreQC")] = np.full((nlocs, nchans), 0, dtype='int32') + nchans = len(obs_data[('sensorChannelNumber', metaDataName)]) + nlocs = len(obs_data[('latitude', metaDataName)]) + 
obs_data[('brightnessTemperature', obsValName)] = np.array(np.vstack(g['antenna_temp']), dtype='float32') + obs_data[('brightnessTemperature', obsErrName)] = np.full((nlocs, nchans), 5.0, dtype='float32') + obs_data[('brightnessTemperature', qcName)] = np.full((nlocs, nchans), 0, dtype='int32') except BaseException: # this section is for the NOAA CLASS files and need to be tested scaled_data = np.vstack(f['All_Data']['ATMS-SDR_All']['BrightnessTemperature']) scale_fac = f['All_Data']['ATMS-SDR_All']['BrightnessTemperatureFactors'][:].flatten() - obs_data[('brightnessTemperature', "ObsValue")] = np.array((scaled_data * scale_fac[0]) + scale_fac[1], dtype='float32') - obs_data[('brightnessTemperature', "ObsError")] = np.full((nlocs, nchans), 5.0, dtype='float32') - obs_data[('brightnessTemperature', "PreQC")] = np.full((nlocs, nchans), 0, dtype='int32') + obs_data[('brightnessTemperature', obsValName)] = np.array((scaled_data * scale_fac[0]) + scale_fac[1], dtype='float32') + obs_data[('brightnessTemperature', obsErrName)] = np.full((nlocs, nchans), 5.0, dtype='float32') + obs_data[('brightnessTemperature', qcName)] = np.full((nlocs, nchans), 0, dtype='int32') return obs_data @@ -257,45 +248,38 @@ def get_WMO_satellite_ID(filename): return WMO_sat_ID -def get_string_dtg(obs_time_utc): +def get_observation_time(obs_time_utc): year = obs_time_utc[:, :, 0].flatten() month = obs_time_utc[:, :, 1].flatten() day = obs_time_utc[:, :, 2].flatten() hour = obs_time_utc[:, :, 3].flatten() minute = obs_time_utc[:, :, 4].flatten() + second = 0 dtg = [] for i, yyyy in enumerate(year): - cdtg = ("%4i-%.2i-%.2iT%.2i:%.2i:00Z" % (yyyy, month[i], day[i], hour[i], minute[i])) - dtg.append(cdtg) + observation_time = datetime(yyyy, month[i], day[i], hour[i], minute[i], second) + dtg.append(observation_time) return dtg def init_obs_loc(): - # V2 ('brightnessTemperature', "ObsValue"): [], - # V2 ('brightnessTemperature', "ObsError"): [], - # V2 ('brightnessTemperature', "PreQC"): [], - # 
V2 ('fieldOfViewNumber', 'MetaData'): [], - # V2 ('solarZenithAngle', 'MetaData'): [], - # V2 ('solarAzimuthAngle', 'MetaData'): [], - # V2 ('sensorZenithAngle', 'MetaData'): [], - # V2 ('sensorAzimuthAngle', 'MetaData'): [], obs = { - ('brightness_temperature', "ObsValue"): [], - ('brightness_temperature', "ObsError"): [], - ('brightness_temperature', "PreQC"): [], - ('satelliteId', 'MetaData'): [], - ('channelNumber', 'MetaData'): [], - ('latitude', 'MetaData'): [], - ('longitude', 'MetaData'): [], - ('datetime', 'MetaData'): [], - ('scan_position', 'MetaData'): [], - ('solar_zenith_angle', 'MetaData'): [], - ('solar_azimuth_angle', 'MetaData'): [], - ('sensor_zenith_angle', 'MetaData'): [], - ('sensor_view_angle', 'MetaData'): [], - ('sensor_azimuth_angle', 'MetaData'): [], + ('brightnessTemperature', obsValName): [], + ('brightnessTemperature', obsErrName): [], + ('brightnessTemperature', qcName): [], + ('sensorChannelNumber', metaDataName): [], + ('latitude', metaDataName): [], + ('longitude', metaDataName): [], + ('dateTime', metaDataName): [], + ('sensorScanPosition', metaDataName): [], + ('fieldOfViewNumber', metaDataName): [], + ('solarZenithAngle', metaDataName): [], + ('solarAzimuthAngle', metaDataName): [], + ('sensorZenithAngle', metaDataName): [], + ('sensorAzimuthAngle', metaDataName): [], + ('sensorViewAngle', metaDataName): [], } return obs @@ -312,6 +296,27 @@ def concat_obs_dict(obs_data, append_obs_data): print("WARNING: ", gv_key, " is missing from append_obs_data dictionary") +def set_metadata_attributes(VarAttrs): + VarAttrs[('sensorZenithAngle', metaDataName)]['units'] = 'degree' + VarAttrs[('sensorViewAngle', metaDataName)]['units'] = 'degree' + VarAttrs[('solarZenithAngle', metaDataName)]['units'] = 'degree' + VarAttrs[('sensorAzimuthAngle', metaDataName)]['units'] = 'degree' + VarAttrs[('solarAzimuthAngle', metaDataName)]['units'] = 'degree' + + return VarAttrs + + +def set_obspace_attributes(VarAttrs): + 
VarAttrs[('brightnessTemperature', obsValName)]['units'] = 'K' + VarAttrs[('brightnessTemperature', obsErrName)]['units'] = 'K' + + VarAttrs[('brightnessTemperature', obsValName)]['_FillValue'] = float_missing_value + VarAttrs[('brightnessTemperature', obsErrName)]['_FillValue'] = float_missing_value + VarAttrs[('brightnessTemperature', qcName)]['_FillValue'] = int_missing_value + + return VarAttrs + + if __name__ == "__main__": parser = argparse.ArgumentParser( diff --git a/src/land/CMakeLists.txt b/src/land/CMakeLists.txt index e0d7c4b67..f85915e17 100644 --- a/src/land/CMakeLists.txt +++ b/src/land/CMakeLists.txt @@ -1,6 +1,4 @@ list(APPEND programs - ims_scf2ioda.py - afwa_snod2ioda.py ghcn_snod2ioda.py smap_ssm2ioda.py smos_ssm2ioda.py diff --git a/src/land/afwa_snod2ioda.py b/src/land/afwa_snod2ioda.py deleted file mode 100644 index b7547ce6a..000000000 --- a/src/land/afwa_snod2ioda.py +++ /dev/null @@ -1,176 +0,0 @@ -#!/usr/bin/env python3 -# -# (C) Copyright 2021 NOAA/NWS/NCEP/EMC -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
-################################################### -import pygrib -import time, os, sys -import argparse -import netCDF4 as nc -import numpy as np -import pyproj -from datetime import datetime, timedelta -from pathlib import Path - -IODA_CONV_PATH = Path(__file__).parent/"@SCRIPT_LIB_PATH@" -if not IODA_CONV_PATH.is_dir(): - IODA_CONV_PATH = Path(__file__).parent/'..'/'lib-python' -sys.path.append(str(IODA_CONV_PATH.resolve())) - -import ioda_conv_engines as iconv -from collections import defaultdict, OrderedDict -from orddicts import DefaultOrderedDict - -locationKeyList = [ - ("latitude", "float"), - ("longitude", "float"), - ("datetime", "string") -] - -obsvars = { - 'snow_depth': 'snowDepth', -} - -AttrData = { - 'converter': os.path.basename(__file__), -} - -DimDict = { -} - -VarDims = { - 'snowDepth': ['nlocs'], -} - -os.environ["TZ"] = "UTC" - - -class AFWA(object): - - def __init__(self, filename, mask): - self.filename = filename - self.mask = mask - self.varDict = defaultdict(lambda: defaultdict(dict)) - self.outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) - self.varAttrs = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) - self._read() - - def _read(self): - # set up variable names for IODA - for iodavar in ['snowDepth']: - self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() - self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() - self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() - self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OerrName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'm' - self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'm' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' - - data = pygrib.open(self.filename) - lat, lon = data[1].latlons() - lons = lon[:].ravel() - lats = lat[:].ravel() - 
vals = data[1].values[:].ravel() - - # use stereographic projection to calculate lat/lon as read of - # lat/lon is not correct for afwa grib1 file - lat1 = data[1]['latitudeOfFirstGridPointInDegrees'] - lon1 = data[1]['longitudeOfFirstGridPointInDegrees'] - nx = data[1]['Nx'] - ny = data[1]['Ny'] - dx = data[1]['DxInMetres'] - dy = data[1]['DyInMetres'] - # this works for corner assumption to get symmetric data - dxfac = 1.000376522 - dx = dxfac*dx - dy = dxfac*dy - - myparams = data[1].projparams - # reset Lat of True Origin(lat_ts)for Soutern Hemisphere grib file - if myparams['lat_0'] == -90.0: - myparams['lat_ts'] = -60.0 - - pj = pyproj.Proj(myparams) - llcrnrx, llcrnry = pj(lon1, lat1) - x = llcrnrx - dx*np.arange(nx) - y = llcrnry + dy*np.arange(ny) - x, y = np.meshgrid(x, y) - lon, lat = pj(x, y, inverse=True) - lons = lon[:].ravel() - lats = lat[:].ravel() - # setup snowCover and mask_flag - vals = vals.astype('float32') - errs = 0.0*vals.astype('float32') - qflg = 0*vals.astype('int32') - lats = lats.astype('float32') - lons = lons.astype('float32') - times = np.empty_like(vals, dtype=object) - - if self.mask == "maskout": - mask = np.logical_not(vals.mask) - vals = vals[mask] - errs = errs[mask] - qflg = qflg[mask] - lons = lons[mask] - lats = lats[mask] - times = times[mask] - # get global attributes - start_datetime = data[1].analDate - base_datetime = start_datetime.isoformat() + "Z" - data.close() - AttrData['date_time_string'] = base_datetime - - for i in range(len(lons)): - times[i] = base_datetime - - # add metadata variables - self.outdata[('datetime', 'MetaData')] = times - self.outdata[('latitude', 'MetaData')] = lats - self.outdata[('longitude', 'MetaData')] = lons - - # add output variables - for iodavar in ['snowDepth']: - self.outdata[self.varDict[iodavar]['valKey']] = vals - self.outdata[self.varDict[iodavar]['errKey']] = errs - self.outdata[self.varDict[iodavar]['qcKey']] = qflg - DimDict['nlocs'] = len(self.outdata[('datetime', 
'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) - - -def main(): - - parser = argparse.ArgumentParser( - description=('Read AFWA snow depth file(s) and Converter' - ' of native grib format for observations of snow' - ' depth to IODA netCDF format.') - ) - parser.add_argument('-i', '--input', - help="name of afwa snow depth input file(s)", - type=str, required=True) - parser.add_argument('-o', '--output', - help="name of ioda output file", - type=str, required=True) - optional = parser.add_argument_group(title='optional arguments') - optional.add_argument( - '-m', '--mask', - help="maskout missing values: maskout/default, default=none", - type=str, required=True) - - args = parser.parse_args() - - # Read in the AFWA snow depth data - snod = AFWA(args.input, args.mask) - - # setup the IODA writer - writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) - - # write everything out - writer.BuildIoda(snod.outdata, VarDims, snod.varAttrs, AttrData) - - -if __name__ == '__main__': - main() diff --git a/src/land/ascat_ssm2ioda.py b/src/land/ascat_ssm2ioda.py index 3ebc8730d..63ff9bfb4 100755 --- a/src/land/ascat_ssm2ioda.py +++ b/src/land/ascat_ssm2ioda.py @@ -27,7 +27,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] obsvars = { @@ -42,9 +42,12 @@ } VarDims = { - 'soilMoistureNormalized': ['nlocs'], + 'soilMoistureNormalized': ['Location'], } +iso8601_string = '1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[:-1]) + class ascat(object): def __init__(self, filename, mask): @@ -65,19 +68,20 @@ def _read(self): self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' self.varAttrs[iodavar, iconv.OerrName()]['coordinates'] = 'longitude latitude' self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'percent' - self.varAttrs[iodavar, iconv.OerrName()]['units'] = 
'percent' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' + self.varAttrs[iodavar, iconv.OvalName()]['units'] = '1' + self.varAttrs[iodavar, iconv.OerrName()]['units'] = '1' # open input file name ncd = nc.Dataset(self.filename, 'r') # set and get global attributes - AttrData["satellite"] = ncd.getncattr('source') + # A satellite ID is expected for satellite attribute, use sensor instead + AttrData['sensor'] = ncd.getncattr('source') AttrData['platform'] = ncd.getncattr('platform_long_name') lats = ncd.variables['lat'][:].ravel() lons = ncd.variables['lon'][:].ravel() vals = ncd.variables['soil_moisture'][:].ravel() errs = ncd.variables['soil_moisture_error'][:].ravel() + errs = errs*0.01 # Scale from zero to one. wflg = ncd.variables['wetland_flag'][:].ravel() tflg = ncd.variables['topography_flag'][:].ravel() times = np.empty_like(vals, dtype=object) @@ -88,6 +92,7 @@ def _read(self): vals = vals.astype('float32') lats = lats.astype('float32') lons = lons.astype('float32') + vals = vals*0.01 qflg = 0*vals.astype('int32') wflg = wflg.astype('int32') tflg = tflg.astype('int32') @@ -104,25 +109,26 @@ def _read(self): secs = secs[mask] times = times[mask] - for i in range(len(lons)): + for i in range(len(secs)): base_date = datetime(2000, 1, 1) + timedelta(seconds=int(secs[i])) - base_datetime = base_date.strftime("%Y-%m-%dT%H:%M:%SZ") - AttrData['date_time_string'] = base_datetime - times[i] = base_datetime + time_offset = np.int64(round((base_date - epoch).total_seconds())) + times[i] = time_offset + + self.varAttrs['dateTime', 'MetaData']['units'] = 'seconds since ' + iso8601_string # add metadata variables - self.outdata[('datetime', 'MetaData')] = times - self.outdata[('latitude', 'MetaData')] = lats - self.outdata[('longitude', 'MetaData')] = lons - self.outdata[('wetlandFraction', 'MetaData')] = wflg - self.outdata[('topographyComplexity', 'MetaData')] = tflg + self.outdata[('dateTime', 'MetaData')] = np.array(times, dtype=np.int64) + 
self.outdata[('latitude', 'MetaData')] = np.array(lats, dtype=np.float32) + self.outdata[('longitude', 'MetaData')] = np.array(lons, dtype=np.float32) + self.outdata[('wetlandFraction', 'MetaData')] = np.array(wflg, dtype=np.float32) + self.outdata[('topographyComplexity', 'MetaData')] = np.array(tflg, dtype=np.float32) - for iodavar in ['soilMoistureNormalized']: - self.outdata[self.varDict[iodavar]['valKey']] = vals - self.outdata[self.varDict[iodavar]['errKey']] = errs - self.outdata[self.varDict[iodavar]['qcKey']] = qflg - DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + iodavar = 'soilMoistureNormalized' + self.outdata[self.varDict[iodavar]['valKey']] = np.array(vals, dtype=np.float32) + self.outdata[self.varDict[iodavar]['errKey']] = np.array(errs, dtype=np.float32) + self.outdata[self.varDict[iodavar]['qcKey']] = np.array(qflg, dtype=np.int32) + + DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')]) def main(): @@ -156,8 +162,8 @@ def main(): # setup the IODA writer writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) - ssm.varAttrs[('wetlandFraction', 'MetaData')]['units'] = 'unitless' - ssm.varAttrs[('topographyComplexity', 'MetaData')]['units'] = 'unitless' + ssm.varAttrs[('wetlandFraction', 'MetaData')]['units'] = '1' + ssm.varAttrs[('topographyComplexity', 'MetaData')]['units'] = '1' # write everything out writer.BuildIoda(ssm.outdata, VarDims, ssm.varAttrs, AttrData) diff --git a/src/land/ghcn_snod2ioda.py b/src/land/ghcn_snod2ioda.py index a6f53721b..f48d128c5 100755 --- a/src/land/ghcn_snod2ioda.py +++ b/src/land/ghcn_snod2ioda.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# (C) Copyright 2021 NOAA/NWS/NCEP/EMC +# (C) Copyright 2021-2022 NOAA/NWS/NCEP/EMC # # This software is licensed under the terms of the Apache Licence Version 2.0 # @@ -27,7 +27,7 @@ ("latitude", "float"), ("longitude", "float"), ("height", "float"), - ("datetime", "string") + ("dateTime", 
"string") ] obsvars = { @@ -35,14 +35,13 @@ } AttrData = { - 'converter': os.path.basename(__file__), } DimDict = { } VarDims = { - 'totalSnowDepth': ['nlocs'], + 'totalSnowDepth': ['Location'], } @@ -70,9 +69,8 @@ def _read(self): self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' self.varAttrs[iodavar, iconv.OerrName()]['coordinates'] = 'longitude latitude' self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'mm' - self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'mm' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' + self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'm' + self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'm' def assignValue(colrowValue, df400): if colrowValue == '' or pd.isnull(colrowValue): @@ -162,14 +160,14 @@ def assignValue(colrowValue, df400): my_date = datetime.strptime(startdate, "%Y%m%d") start_datetime = my_date.strftime('%Y-%m-%d') base_datetime = start_datetime + 'T18:00:00Z' - AttrData['date_time_string'] = base_datetime for i in range(len(vals)): if vals[i] >= 0.0: - errs[i] = 40.0 + errs[i] = 0.04 + vals[i] = 0.001*vals[i] times[i] = base_datetime # add metadata variables - self.outdata[('datetime', 'MetaData')] = times + self.outdata[('dateTime', 'MetaData')] = times self.outdata[('stationIdentification', 'MetaData')] = sites self.outdata[('latitude', 'MetaData')] = lats self.outdata[('longitude', 'MetaData')] = lons @@ -180,8 +178,7 @@ def assignValue(colrowValue, df400): self.outdata[self.varDict[iodavar]['errKey']] = errs self.outdata[self.varDict[iodavar]['qcKey']] = qflg - DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')]) def main(): diff --git a/src/land/ims_scf2ioda.py b/src/land/ims_scf2ioda.py deleted file mode 100644 index 78476567a..000000000 --- 
a/src/land/ims_scf2ioda.py +++ /dev/null @@ -1,155 +0,0 @@ -#!/usr/bin/env python3 -# -# (C) Copyright 2021 NOAA/NWS/NCEP/EMC -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. -# -import pygrib -import time, os, sys -import argparse -import netCDF4 as nc -import numpy as np -from datetime import datetime, timedelta -from pathlib import Path - -IODA_CONV_PATH = Path(__file__).parent/"@SCRIPT_LIB_PATH@" -if not IODA_CONV_PATH.is_dir(): - IODA_CONV_PATH = Path(__file__).parent/'..'/'lib-python' -sys.path.append(str(IODA_CONV_PATH.resolve())) - -import ioda_conv_engines as iconv -from collections import defaultdict, OrderedDict -from orddicts import DefaultOrderedDict - -locationKeyList = [ - ("latitude", "float"), - ("longitude", "float"), - ("datetime", "string") -] - -obsvars = { - 'snow_cover_fraction': 'snowCover', -} - -AttrData = { - 'converter': os.path.basename(__file__), -} - -DimDict = { -} - -VarDims = { - 'snowCover': ['nlocs'], -} - - -class imsscf(object): - - def __init__(self, filename, mask): - self.filename = filename - self.mask = mask - self.varDict = defaultdict(lambda: defaultdict(dict)) - self.outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) - self.varAttrs = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) - self._read() - - def _read(self): - - # set up variable names for IODA - for iodavar in ['snowCover']: - self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() - self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() - self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() - self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OerrName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' - self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'percent' - self.varAttrs[iodavar, 
iconv.OerrName()]['units'] = 'percent' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' - - # read self.filename to get data - data = pygrib.open(self.filename) - lat, lon = data[1].latlons() - lons = lon[:].ravel() - lats = lat[:].ravel() - vals = data.select(name='Snow cover')[0].values[:].ravel() - # defined errors and qc - vals = vals.astype('float32') - lats = lats.astype('float32') - lons = lons.astype('float32') - errs = 0.08*vals - qflg = 0*vals.astype('int32') - times = np.empty_like(vals, dtype=object) - - if self.mask == "maskout": - mask = np.logical_not(vals.mask) - vals = vals[mask] - errs = errs[mask] - qflg = qflg[mask] - lons = lons[mask] - lats = lats[mask] - times = times[mask] - # get global attributes - start_datetime = data[1].analDate - base_datetime = start_datetime.isoformat() + "Z" - # grbs.close() - - # write global attributes out - self.satellite = "POES/GOES" - self.sensor = "IMS.Multisensor" - AttrData["observation_type"] = "Snow Cover Fraction" - AttrData["satellite"] = self.satellite - AttrData["sensor"] = self.sensor - AttrData['date_time_string'] = base_datetime - - for i in range(len(lons)): - times[i] = base_datetime - - # add metadata variables - self.outdata[('datetime', 'MetaData')] = times - self.outdata[('latitude', 'MetaData')] = lats - self.outdata[('longitude', 'MetaData')] = lons - - # add output variables - for iodavar in ['snowCover']: - self.outdata[self.varDict[iodavar]['valKey']] = vals - self.outdata[self.varDict[iodavar]['errKey']] = errs - self.outdata[self.varDict[iodavar]['qcKey']] = qflg - DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) - - -def main(): - - parser = argparse.ArgumentParser( - description=('Read IMS snow cover fraction file(s) and Converter' - ' of native grib2 format for observations of snow' - ' cover fraction to IODA netCDF format.') - ) - parser.add_argument('-i', '--input', - help="name of ims snow cover input 
file(s)", - type=str, required=True) - parser.add_argument('-o', '--output', - help="name of ioda output file", - type=str, required=True) - optional = parser.add_argument_group(title='optional arguments') - optional.add_argument( - '-m', '--mask', - help="maskout missing values: maskout/default, default=none", - type=str, required=True) - - args = parser.parse_args() - - # Read in the IMS snow cover data - scf = imsscf(args.input, args.mask) - - # setup the IODA writer - writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) - - # write the data out - writer.BuildIoda(scf.outdata, VarDims, scf.varAttrs, AttrData) - - -if __name__ == '__main__': - main() diff --git a/src/land/imsfv3_scf2ioda.py b/src/land/imsfv3_scf2ioda.py index 800e5a6b1..bd805d72a 100644 --- a/src/land/imsfv3_scf2ioda.py +++ b/src/land/imsfv3_scf2ioda.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# (C) Copyright 2021 NOAA/NWS/NCEP/EMC +# (C) Copyright 2020-2022 NOAA/NWS/NCEP/EMC # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -28,7 +28,7 @@ ("latitude", "float"), ("longitude", "float"), ("height", "float"), - ("datetime", "string") + ("dateTime", "string") ] obsvars = { @@ -37,15 +37,14 @@ } AttrData = { - 'converter': os.path.basename(__file__), } DimDict = { } VarDims = { - 'snowCoverFraction': ['nlocs'], - 'totalSnowDepth': ['nlocs'], + 'snowCoverFraction': ['Location'], + 'totalSnowDepth': ['Location'], } @@ -71,22 +70,19 @@ def _read(self): if iodavar == 'snowCoverFraction': self.varAttrs[iodavar, iconv.OvalName()]['units'] = '1' self.varAttrs[iodavar, iconv.OerrName()]['units'] = '1' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' self.varAttrs[iodavar, iconv.OvalName()]['_FillValue'] = -999. self.varAttrs[iodavar, iconv.OerrName()]['_FillValue'] = -999. 
self.varAttrs[iodavar, iconv.OqcName()]['_FillValue'] = -999 if iodavar == 'totalSnowDepth': - self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'mm' - self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'mm' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' + self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'm' + self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'm' self.varAttrs[iodavar, iconv.OvalName()]['_FillValue'] = -999. self.varAttrs[iodavar, iconv.OerrName()]['_FillValue'] = -999. self.varAttrs[iodavar, iconv.OqcName()]['_FillValue'] = -999 # read netcdf file ncd = nc.Dataset(self.filename) - AttrData["sensor"] = "IMS Multisensor" lons = ncd.variables['lon'][:] lats = ncd.variables['lat'][:] oros = ncd.variables['oro'][:] @@ -103,7 +99,7 @@ def _read(self): qdflg = 0*sndv.astype('int32') errsc = 0.0*sncv errsd = 0.0*sndv - errsd[:] = 80.0 + errsd[:] = 0.08 ncd.close() times = np.empty_like(sncv, dtype=object) @@ -113,13 +109,13 @@ def _read(self): my_date = datetime.strptime(str_date, "%Y%m%d") start_datetime = my_date.strftime('%Y-%m-%d') base_datetime = start_datetime + 'T18:00:00Z' - AttrData['date_time_string'] = base_datetime for i in range(len(lats)): times[i] = base_datetime + sndv[i] = 0.001*sndv[i] # add metadata variables - self.outdata[('datetime', 'MetaData')] = times + self.outdata[('dateTime', 'MetaData')] = times self.outdata[('latitude', 'MetaData')] = lats self.outdata[('longitude', 'MetaData')] = lons self.outdata[('height', 'MetaData')] = oros @@ -136,8 +132,7 @@ def _read(self): self.outdata[self.varDict[iodavar]['valKey']] = sndv self.outdata[self.varDict[iodavar]['errKey']] = errsd self.outdata[self.varDict[iodavar]['qcKey']] = qdflg - DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')]) def main(): diff --git a/src/land/owp_snow_obs.py b/src/land/owp_snow_obs.py index 
475ed3e10..62fb62aa8 100644 --- a/src/land/owp_snow_obs.py +++ b/src/land/owp_snow_obs.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# (C) Copyright 2019 UCAR +# (C) Copyright 2019-2022 UCAR # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -32,11 +32,11 @@ to IODA output files. """) # obs file name -> ioda file name -output_var_dict = {'snow_depth_m': 'snow_depth', 'snow_water_equivalent_mm': 'swe'} +output_var_dict = {'snow_depth_m': 'totalSnowDepth', 'snow_water_equivalent_mm': 'snowWaterEquivalent'} # ioda file_name -> ioda file units -output_var_unit_dict = {'snow_depth': 'm', 'swe': 'mm'} +output_var_unit_dict = {'totalSnowDepth': 'm', 'snowWaterEquivalent': 'kg m-2'} one = 1.00000000000000 -output_conversion_factor = {'snow_depth': one, 'swe': one} +output_conversion_factor = {'totalSnowDepth': one, 'snowWaterEquivalent': one} col_types = { 'StnObjID': np.int32, @@ -48,25 +48,22 @@ 'rec_elev_m': np.float32, 'latitude': np.float64, 'longitude': np.float64, - 'datetime': str, + 'dateTime': str, 'variable_name': str} location_key_list = [ ("latitude", "float"), ("longitude", "float"), ("height", "integer"), - ("datetime", "string"), ] + ("dateTime", "string"), ] dim_dict = {} var_dims = { - 'snow_depth': ['nlocs'], - 'swe': ['nlocs'], } + 'totalSnowDepth': ['Location'], + 'snowWaterEquivalent': ['Location'], } -attr_data = { - 'converter': os.path.basename(__file__), - 'converter_version': 0.3, - 'nvars': np.int32(len(var_dims)), } +attr_data = {} fill_value = 9.96921e+36 @@ -108,7 +105,6 @@ def __init__( def _read(self): # print(f"Reading: {self.file_in}") - self.attr_data['obs_file'] = str(self.file_in.split('/')[-1]) # use pandas to get the data lined up obs_df = pd.read_csv(self.file_in, header=0, index_col=False, dtype=col_types) @@ -135,7 +131,7 @@ def _read(self): .sort_index() .reset_index()) - self.attr_data['ref_date_time'] = ( + 
self.attr_data['datetimeReference'] = ( pd.to_datetime(obs_df.datetime[0]) .round('D') .strftime('%Y-%m-%dT%H:%M:%SZ')) @@ -173,14 +169,13 @@ def _read(self): wh_not_var_other = np.where(np.isnan(obs_df[f'ObsValue {var_other}']))[0] # 1-D obs_df = obs_df.drop(wh_not_var_other).reset_index() - self.data[('datetime', 'MetaData')] = obs_df.datetime.values + self.data[('dateTime', 'MetaData')] = obs_df.datetime.values self.data[('latitude', 'MetaData')] = obs_df.latitude.values.astype('float32') self.data[('longitude', 'MetaData')] = obs_df.longitude.values.astype('float32') self.data[('height', 'MetaData')] = obs_df.rec_elev_m.values.astype('float32') - self.data[('station_id', 'MetaData')] = obs_df.StnID.values + self.data[('stationIdentification', 'MetaData')] = obs_df.StnID.values self.var_metadata[('height', 'MetaData')]['units'] = 'm' - self.var_metadata[('station_id', 'MetaData')]['units'] = 'unitless' for obs_var, ioda_var in output_var_dict.items(): # define the ioda variable @@ -190,10 +185,9 @@ def _read(self): # define ioda meta/ancillary for name in [iconv.OvalName(), iconv.OerrName(), iconv.OqcName()]: self.var_metadata[ioda_var, name]['coordinates'] = 'longitude latitude' - self.var_metadata[ioda_var, name]['units'] = output_var_unit_dict[ioda_var] # not really for Oqc... but - # just kidding for OqcName... 
a lazy tag along above, fix now (less code to overwrite) - self.var_metadata[ioda_var, iconv.OqcName()]['units'] = 'unitless' - # the data + if(iconv.OqcName() != name): + self.var_metadata[ioda_var, name]['units'] = output_var_unit_dict[ioda_var] + conv_fact = output_conversion_factor[ioda_var] self.data[self.var_dict[ioda_var]['valKey']] = ( mask_nans(obs_df[f'ObsValue {obs_var}'].values * conv_fact)) @@ -206,9 +200,7 @@ def _read(self): self.data[self.var_dict[ioda_var]['qcKey']] = ( mask_nans(obs_df[f'PreQC {obs_var}'].values * conv_fact)) - nlocs = len(self.data[('datetime', 'MetaData')]) - dim_dict['nlocs'] = nlocs - attr_data['nlocs'] = np.int32(nlocs) + dim_dict['Location'] = len(self.data[('dateTime', 'MetaData')]) def write(self): writer = iconv.IodaWriter(self.file_out, location_key_list, dim_dict) diff --git a/src/land/smap9km_ssm2ioda.py b/src/land/smap9km_ssm2ioda.py index b5ccbd0d2..35937fba5 100644 --- a/src/land/smap9km_ssm2ioda.py +++ b/src/land/smap9km_ssm2ioda.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# (C) Copyright 2022 EMC/NCEP/NWS/NOAA +# (C) Copyright 2020-2022 EMC/NCEP/NWS/NOAA # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
@@ -22,11 +22,13 @@ from collections import defaultdict, OrderedDict from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" + locationKeyList = [ ("latitude", "float"), ("longitude", "float"), ("depthBelowSoilSurface", "float"), - ("datetime", "string") + ("dateTime", "long") ] obsvars = { @@ -34,16 +36,19 @@ } AttrData = { - 'converter': os.path.basename(__file__), } DimDict = { } VarDims = { - 'soilMoistureVolumetric': ['nlocs'], + 'soilMoistureVolumetric': ['Location'], } +# Usual reference time for these data is 12UTC 1Jan2000 +iso8601_string = 'seconds since 2000-01-01T12:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class smap(object): def __init__(self, args): @@ -67,20 +72,18 @@ def _read(self): self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'm3 m-3' self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'm3 m-3' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' # open input file name ncd = nc.Dataset(self.filename, 'r') # set and get global attributes - self.satellite = "SMAP" - self.sensor = "radar and radiometer" - AttrData["satellite"] = self.satellite - AttrData["sensor"] = self.sensor + self.satellite = 789 + self.sensor = 432 + AttrData["platform"] = np.array([self.satellite], dtype=np.int32) + AttrData["sensor"] = np.array([self.sensor], dtype=np.int32) data = ncd.groups['Soil_Moisture_Retrieval_Data'].variables['soil_moisture'][:] vals = data[:].ravel() _FillValue = ncd.groups['Soil_Moisture_Retrieval_Data'].variables['soil_moisture'].getncattr('_FillValue') - self.varAttrs['soilMoistureVolumetric', iconv.OvalName()]['_FillValue'] = _FillValue valid_max = ncd.groups['Soil_Moisture_Retrieval_Data'].variables['soil_moisture'].getncattr('valid_max') valid_min = ncd.groups['Soil_Moisture_Retrieval_Data'].variables['soil_moisture'].getncattr('valid_min') @@ -95,7 +98,7 @@ def _read(self): refsec = 
ncd.groups['Soil_Moisture_Retrieval_Data'].variables['tb_time_seconds'][:].ravel() deps = np.full_like(vals, self.assumedSoilDepth) - times = np.empty_like(vals, dtype=object) + times = np.empty_like(vals, dtype=np.int64) if self.mask: with np.errstate(invalid='ignore'): @@ -113,8 +116,6 @@ def _read(self): refsec = refsec[mask] times = times[mask] - # get datetime and reference time 12UTC 1Jan2000 - base_date = datetime(2000, 1, 1, 12, 0) vals = vals.astype('float32') lats = lats.astype('float32') lons = lons.astype('float32') @@ -127,34 +128,28 @@ def _read(self): ecoli = ecoli.astype('int32') for i in range(len(lons)): - dt = base_date + timedelta(seconds=int(refsec[i])) - times[i] = dt.strftime("%Y-%m-%dT%H:%M:%SZ") + times[i] = int(refsec[i]) errs[i] = 0.04 # add metadata variables - self.outdata[('datetime', 'MetaData')] = times + self.outdata[('dateTime', 'MetaData')] = times + self.varAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string self.outdata[('latitude', 'MetaData')] = lats self.outdata[('longitude', 'MetaData')] = lons self.varAttrs[('latitude', 'MetaData')]['units'] = 'degree_north' self.varAttrs[('longitude', 'MetaData')]['units'] = 'degree_east' self.outdata[('depthBelowSoilSurface', 'MetaData')] = deps self.varAttrs[('depthBelowSoilSurface', 'MetaData')]['units'] = 'm' - self.outdata[('surfaceFlag', 'MetaData')] = sflg - self.varAttrs[('surfaceFlag', 'MetaData')]['units'] = 'unitless' + self.outdata[('surfaceQualifier', 'MetaData')] = sflg self.outdata[('vegetationOpacity', 'MetaData')] = vegop - self.varAttrs[('vegetationOpacity', 'MetaData')]['units'] = 'unitless' self.outdata[('easeRowIndex', 'MetaData')] = erowi - self.varAttrs[('easeRowIndex', 'MetaData')]['units'] = '1' self.outdata[('easeColumnIndex', 'MetaData')] = ecoli - self.varAttrs[('easeColumnIndex', 'MetaData')]['units'] = '1' for iodavar in ['soilMoistureVolumetric']: self.outdata[self.varDict[iodavar]['valKey']] = vals self.outdata[self.varDict[iodavar]['errKey']] = errs 
self.outdata[self.varDict[iodavar]['qcKey']] = qflg - AttrData['date_time_string'] = times[0] - DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')]) def main(): diff --git a/src/land/smap_ssm2ioda.py b/src/land/smap_ssm2ioda.py index 8e6a81bec..d8d6447ae 100644 --- a/src/land/smap_ssm2ioda.py +++ b/src/land/smap_ssm2ioda.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# (C) Copyright 2021 EMC/NCEP/NWS/NOAA +# (C) Copyright 2021-2022 EMC/NCEP/NWS/NOAA # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -22,11 +22,13 @@ from collections import defaultdict, OrderedDict from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" + locationKeyList = [ ("latitude", "float"), ("longitude", "float"), ("depthBelowSoilSurface", "float"), - ("datetime", "string") + ("dateTime", "long") ] obsvars = { @@ -34,16 +36,18 @@ } AttrData = { - 'converter': os.path.basename(__file__), } DimDict = { } VarDims = { - 'soilMoistureVolumetric': ['nlocs'], + 'soilMoistureVolumetric': ['Location'], } +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class smap(object): def __init__(self, args): @@ -67,15 +71,14 @@ def _read(self): self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'm3 m-3' self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'm3 m-3' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' # open input file name ncd = nc.Dataset(self.filename, 'r') # set and get global attributes - self.satellite = "SMAP" - self.sensor = "radar and radiometer" - AttrData["satellite"] = self.satellite - AttrData["sensor"] = self.sensor + satelliteID = 789 + sensorID = 432 + AttrData["platform"] = np.array([satelliteID], 
dtype=np.int32) + AttrData["sensor"] = np.array([sensorID], dtype=np.int32) data = ncd.groups['Soil_Moisture_Retrieval_Data'].variables['soil_moisture'][:] vals = data[:].ravel() @@ -89,7 +92,7 @@ def _read(self): qflg = ncd.groups['Soil_Moisture_Retrieval_Data'].variables['retrieval_qual_flag'][:].ravel() deps = np.full_like(vals, self.assumedSoilDepth) - times = np.empty_like(vals, dtype=object) + times = np.empty_like(vals, dtype=np.int64) if self.mask: with np.errstate(invalid='ignore'): @@ -106,14 +109,13 @@ def _read(self): str_split = self.filename.split("_") str_datetime = str_split[7] my_datetime = datetime.strptime(str_datetime, "%Y%m%dT%H%M%S") - base_datetime = my_datetime.strftime('%Y-%m-%dT%H:%M:%SZ') + time_offset = round((my_datetime - epoch).total_seconds()) vals = vals.astype('float32') lats = lats.astype('float32') lons = lons.astype('float32') deps = deps.astype('float32') errs = errs.astype('float32') qflg = qflg.astype('int32') - AttrData['date_time_string'] = base_datetime for i in range(len(lons)): @@ -127,10 +129,11 @@ def _read(self): else: qflg[i] = 1 - times[i] = base_datetime + times[i] = time_offset # add metadata variables - self.outdata[('datetime', 'MetaData')] = times + self.outdata[('dateTime', 'MetaData')] = times + self.varAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string self.outdata[('latitude', 'MetaData')] = lats self.outdata[('longitude', 'MetaData')] = lons self.outdata[('depthBelowSoilSurface', 'MetaData')] = deps @@ -141,8 +144,7 @@ def _read(self): self.outdata[self.varDict[iodavar]['errKey']] = errs self.outdata[self.varDict[iodavar]['qcKey']] = qflg - DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')]) def main(): diff --git a/src/land/smos_ssm2ioda.py b/src/land/smos_ssm2ioda.py index c59645334..0dd501d7f 100644 --- a/src/land/smos_ssm2ioda.py +++ b/src/land/smos_ssm2ioda.py @@ -1,6 
+1,6 @@ #!/usr/bin/env python3 # -# (C) Copyright 2021 NOAA/NWS/NCEP/EMC +# (C) Copyright 2020-2022 NOAA/NWS/NCEP/EMC # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -24,7 +24,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "string") ] obsvars = { @@ -32,14 +32,13 @@ } AttrData = { - 'converter': os.path.basename(__file__), } DimDict = { } VarDims = { - 'soilMoistureVolumetric': ['nlocs'], + 'soilMoistureVolumetric': ['Location'], } @@ -65,14 +64,14 @@ def _read(self): self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'm3 m-3' self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'm3 m-3' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' # open input file name ncd = nc.Dataset(self.filename, 'r') # set and get global attributes - AttrData["observation_type"] = "surface soil moisture" - AttrData["satellite"] = "SMOS" - AttrData["sensor"] = "MIRAS" + satelliteID = 46 + sensorID = 176 + AttrData["platform"] = np.array([satelliteID], dtype=np.int32) + AttrData["sensor"] = np.array([sensorID], dtype=np.int32) lons = ncd.variables['longitude'][:] lats = ncd.variables['latitude'][:] @@ -112,11 +111,10 @@ def _read(self): base_date = datetime(2000, 1, 1) + timedelta(days=int(ddys[i])) dt = base_date + timedelta(seconds=int(secs[i])) base_datetime = dt.strftime("%Y-%m-%dT%H:%M:%SZ") - AttrData['date_time_string'] = base_datetime times[i] = base_datetime # add metadata variables - self.outdata[('datetime', 'MetaData')] = times + self.outdata[('dateTime', 'MetaData')] = times self.outdata[('latitude', 'MetaData')] = lats self.outdata[('longitude', 'MetaData')] = lons @@ -125,8 +123,7 @@ def _read(self): self.outdata[self.varDict[iodavar]['errKey']] = errs self.outdata[self.varDict[iodavar]['qcKey']] = qflg - 
DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')]) def main(): diff --git a/src/lib-python/ioda_conv_engines.py b/src/lib-python/ioda_conv_engines.py index ad7fb57b2..7b357871b 100644 --- a/src/lib-python/ioda_conv_engines.py +++ b/src/lib-python/ioda_conv_engines.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +import datetime as dt import ioda_obs_space as ioda_os import numpy as np from collections import OrderedDict @@ -38,7 +39,7 @@ def OqcName(): return _oqc_name -def get_default_fill_val(mydtype): +def get_default_fill_val(mydtype, isDateTime=False): dtype_tmp = np.array([], dtype=mydtype) NumpyDtype = dtype_tmp.dtype if (NumpyDtype == np.dtype('float64')): @@ -58,7 +59,10 @@ def get_default_fill_val(mydtype): elif (NumpyDtype == np.dtype('U1')): fillval = '\x00' elif (NumpyDtype == np.dtype('object')): - fillval = '\x00' + if (isDateTime): + fillval = dt.datetime(2200, 1, 1, tzinfo=dt.timezone.utc) + else: + fillval = '\x00' else: print("ERROR: Unrecognized data type", NumpyDtype) exit(-2) @@ -76,6 +80,7 @@ def get_default_fill_val(mydtype): class IodaWriter(object): # Constructor def __init__(self, Fname, LocKeyList, DimDict, TestKeyList=None): + # note: loc_key_list does nothing self._loc_key_list = LocKeyList self._dim_dict = DimDict self._test_key_list = TestKeyList @@ -91,8 +96,8 @@ def WriteGeoVars(self, GeoVars, GeoVarDims, GeoVarAttrs): dims = GeoVarDims[VarName] else: # assume it is just nlocs - dims = ['nlocs'] - fillval = get_default_fill_val(Vvals.dtype) + dims = ['Location'] + fillval = get_default_fill_val(Vvals.dtype, isinstance(Vvals[0], dt.datetime)) # get fill value if VarName in GeoVarAttrs.keys(): if '_FillValue' in GeoVarAttrs[VarName].keys(): @@ -120,8 +125,8 @@ def WriteObsVars(self, ObsVars, VarDims, VarAttrs): dims = VarDims[Vname] else: # assume it is just nlocs - dims = ['nlocs'] - fillval = 
get_default_fill_val(Vvals.dtype) + dims = ['Location'] + fillval = get_default_fill_val(Vvals.dtype, isinstance(Vvals[0], dt.datetime)) # get fill value if VarKey in VarAttrs.keys(): if '_FillValue' in VarAttrs[VarKey].keys(): @@ -148,6 +153,25 @@ def WriteObsVars(self, ObsVars, VarDims, VarAttrs): except KeyError: pass # no metadata for this variable + def VerifyDateTime(self, ObsVars): + # this method will check if the variable + # MetaData/dateTime is a string or datetime object + # if string, convert to datetime object + VarKey = ('dateTime', 'MetaData') + if VarKey not in ObsVars.keys(): + raise KeyError("Required variable 'MetaData/dateTime' does not exist.") + dtvar = ObsVars[VarKey] + # check if the array is type 'object' or not + # otherwise we will assume it is an integer and already set up + if (dtvar.dtype == np.dtype('object')): + # object is used for strings or datetime objects + if (isinstance(dtvar[0], str)): + # convert ISO date strings to datetime objects + newdtvar = [dt.datetime.strptime(x, "%Y-%m-%dT%H:%M:%SZ") for x in dtvar] + ObsVars[VarKey] = np.array(newdtvar, dtype=object) + + return ObsVars + def WriteGlobalAttrs(self, GlobalAttrs): # this method will create global attributes from GlobalAttrs dictionary for AttrKey, AttrVal in GlobalAttrs.items(): @@ -155,6 +179,8 @@ def WriteGlobalAttrs(self, GlobalAttrs): def BuildIoda(self, ObsVars, VarDims, VarAttrs, GlobalAttrs, TestData=None, geovals=False): + # check and fix dateTime if necessary + ObsVars = self.VerifyDateTime(ObsVars) if geovals: self.WriteGeoVars(ObsVars, VarDims, VarAttrs) else: @@ -176,6 +202,7 @@ def ExtractObsData(ObsData, loc_key_list): # can be preallocated, and variable numbers can be assigned ObsVarList = [] ObsVarExamples = [] + ObsVarTypes = [] for LocKey, LocDict in ObsData.items(): _nlocs += 1 for VarKey, VarVal in LocDict.items(): @@ -184,6 +211,7 @@ def ExtractObsData(ObsData, loc_key_list): if (VarKey not in ObsVarList): ObsVarList.append(VarKey) 
ObsVarExamples.append(VarVal) + ObsVarTypes.append(type(VarVal)) # Extract the locations metadata encoded in the keys for i in range(len(loc_key_list)): (LocVname, LocVtype) = loc_key_list[i] @@ -191,18 +219,23 @@ def ExtractObsData(ObsData, loc_key_list): if (locvar not in ObsVarList): ObsVarList.append(locvar) ObsVarExamples.append(LocKey[i]) + if (LocVtype == "long"): + # For case where MetaData/dateTime is directly assigned 64-bit integers + ObsVarTypes.append(np.int64) + else: + ObsVarTypes.append(type(LocKey[i])) # Preallocate arrays and fill them up with data from the dictionary ObsVars = OrderedDict() for o in range(len(ObsVarList)): - VarType = type(ObsVarExamples[o]) + VarType = ObsVarTypes[o] if (VarType in [float, np.float32, np.float64]): defaultval = get_default_fill_val(np.float32) defaultvaltype = np.float32 - elif ((VarType in [np.int64]) and (o == ('dateTime', 'MetaData'))): + elif (VarType in [np.int64]): defaultval = get_default_fill_val(np.int64) defaultvaltype = np.int64 - elif (VarType in [int, np.int64, np.int32, np.int8]): + elif (VarType in [int, np.int32, np.int8]): defaultval = get_default_fill_val(np.int32) defaultvaltype = np.int32 elif (VarType in [str, np.str_]): diff --git a/src/lib-python/meteo_sounding_utils.py b/src/lib-python/meteo_sounding_utils.py index e2f0879bb..73c161421 100644 --- a/src/lib-python/meteo_sounding_utils.py +++ b/src/lib-python/meteo_sounding_utils.py @@ -66,11 +66,6 @@ def zext_up(pres_lower, pres_upper, temperature, hght_lower): :param hght_lower: Height of lower layer (m) :return: Extrapolated height (m) """ - if pres_upper is None or pres_lower is None: - return None - if pres_upper <= 0 or pres_lower <= 0 or temperature is None or hght_lower is None: - return None - return hght_lower + (Rd * (temperature + CTOK) / G) * math.log(pres_lower / pres_upper) # --+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+ @@ -86,11 +81,6 @@ def zext_down(pres_lower, pres_upper, temperature, 
hght_upper): :param hght_upper: Height of upper layer (m) :return: Extrapolated height (m) """ - if pres_upper is None or pres_lower is None: - return None - if pres_upper <= 0 or pres_lower <= 0 or temperature is None or hght_upper is None: - return None - return hght_upper - (Rd * (temperature + CTOK) / G) * math.log(pres_lower / pres_upper) # --+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+ diff --git a/src/marine/amsr2_icec2ioda.py b/src/marine/amsr2_icec2ioda.py index ee1494f10..44b2cfd76 100755 --- a/src/marine/amsr2_icec2ioda.py +++ b/src/marine/amsr2_icec2ioda.py @@ -7,7 +7,7 @@ # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. # -import sys +import os, sys import argparse import numpy as np from datetime import datetime, timedelta @@ -22,17 +22,24 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" -vName = "sea_ice_area_fraction" +vName = "seaIceFraction" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] +icec_FillValue = None +icec_units = '' + GlobalAttrs = {} +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class iceconc(object): def __init__(self, filenames, date): @@ -48,6 +55,9 @@ def _read(self): errKey = vName, iconv.OerrName() qcKey = vName, iconv.OqcName() + global icec_FillValue + global icec_units + for f in self.filenames: print(" Reading file: ", f) ncd = nc.Dataset(f, 'r') @@ -62,7 +72,8 @@ def _read(self): lat = ncd.variables['Latitude'][:] icec = ncd.variables['NASA_Team_2_Ice_Concentration'][:] icec_FillValue = ncd.variables['NASA_Team_2_Ice_Concentration']._FillValue - icec_units = ncd.variables['NASA_Team_2_Ice_Concentration'].units + icec_units = str(ncd.variables['NASA_Team_2_Ice_Concentration'].units) + print(f" units: {icec_units}") icec_qc = ncd.variables['Flags'][:] qc_FillValue = 
ncd.variables['Flags']._FillValue qc_units = ncd.variables['Flags'].units @@ -71,7 +82,7 @@ def _read(self): lon = lon[mask] lat = lat[mask] icec = icec[mask] - icec_qc = icec_qc[mask] + icec_qc = icec_qc[mask].astype(np.int32) for i in range(len(lon)): # get date from filename @@ -80,16 +91,11 @@ def _read(self): date1 = datetime.strptime(datestart, "%Y-%m-%dT%H:%M:%S.%fZ") date2 = datetime.strptime(dateend, "%Y-%m-%dT%H:%M:%S.%fZ") avg = date1 + (date2 - date1) * 0.5 - locKey = lat[i], lon[i], avg.strftime("%Y-%m-%dT%H:%M:%SZ") + time_offset = round((avg - epoch).total_seconds()) + locKey = lat[i], lon[i], time_offset self.data[locKey][valKey] = icec[i] * 0.01 - self.VarAttrs[locKey][valKey]['_FillValue'] = icec_FillValue - self.VarAttrs[locKey][valKey]['units'] = icec_units self.data[locKey][errKey] = 0.1 - self.VarAttrs[locKey][errKey]['_FillValue'] = icec_FillValue - self.VarAttrs[locKey][errKey]['units'] = icec_units self.data[locKey][qcKey] = icec_qc[i] - self.VarAttrs[locKey][qcKey]['_FillValue'] = qc_FillValue - self.VarAttrs[locKey][qcKey]['units'] = qc_units ncd.close() @@ -117,7 +123,7 @@ def main(): fdate = datetime.strptime(args.date, '%Y%m%d%H') # VarDims = { - 'sea_ice_area_fraction': ['nlocs'], + vName: ['Location'], } # Read in the Ice concentration @@ -126,9 +132,14 @@ def main(): # write them out ObsVars, nlocs = iconv.ExtractObsData(icec.data, locationKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': nlocs} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) + icec.VarAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string + icec.VarAttrs[(vName, 'ObsValue')]['units'] = icec_units + icec.VarAttrs[(vName, 'ObsValue')]['_FillValue'] = icec_FillValue + icec.VarAttrs[(vName, 'ObsError')]['units'] = icec_units + icec.VarAttrs[(vName, 'ObsError')]['_FillValue'] = icec_FillValue writer.BuildIoda(ObsVars, VarDims, icec.VarAttrs, GlobalAttrs) diff --git a/src/marine/argoClim2ioda.py b/src/marine/argoClim2ioda.py index 
14ca29e34..5731cdf8b 100755 --- a/src/marine/argoClim2ioda.py +++ b/src/marine/argoClim2ioda.py @@ -67,9 +67,12 @@ def _readData(self): assert self.varname in ['TEMPERATURE', 'SALINITY'],\ "%s is not a valid variable name" % self.varname + self.varname2 = 'waterTemperature' + lon = nc.variables['LONGITUDE'][:] lat = nc.variables['LATITUDE'][:] - pres = nc.variables['PRESSURE'][:] + pres = nc.variables['PRESSURE'][:] # pressure in decibar + depth = pres # 1 decibar = 1 meter, so you can use pressure as depth # Get absolute time instead of time since epoch dtime = nc.variables['TIME'] @@ -100,9 +103,10 @@ def _readData(self): time = timeArray[bI:eI] mean = nc.variables['ARGO_%s_MEAN' % self.varname][:] + meanK = [x+273.15 for x in mean] # create a full field from mean and anomaly - fullField = anomaly + np.tile(mean, (anomaly.shape[0], 1, 1, 1)) + fullField = anomaly + np.tile(meanK, (anomaly.shape[0], 1, 1, 1)) try: nc.close() @@ -115,7 +119,7 @@ def _readData(self): self.data = {} self.data['lat'] = lat self.data['lon'] = lon - self.data['pres'] = pres + self.data['depth'] = depth self.data['time'] = time self.data['field'] = fullField.data @@ -137,8 +141,8 @@ def __init__(self, filename, date, argo): self.locKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("pressure", "float"), - ("datetime", "string") + ("depthBelowWaterSurface", "float"), + ("dateTime", "string") ] self.GlobalAttrs = { @@ -149,17 +153,17 @@ def __init__(self, filename, date, argo): self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - valKey = argo.varname, iconv.OvalName() - errKey = argo.varname, iconv.OerrName() - qcKey = argo.varname, iconv.OqcName() + valKey = argo.varname2, iconv.OvalName() + errKey = argo.varname2, iconv.OerrName() + qcKey = argo.varname2, iconv.OqcName() # There has to be a better way than explicitly looping over 4! 
dimensions for t, time in enumerate(argo.data['time']): for y, lat in enumerate(argo.data['lat']): for x, lon in enumerate(argo.data['lon']): - for z, pres in enumerate(argo.data['pres']): + for z, depth in enumerate(argo.data['depth']): - locKey = lat, lon, pres, time.strftime('%Y-%m-%dT%H:%M:%SZ') + locKey = lat, lon, depth, time.strftime('%Y-%m-%dT%H:%M:%SZ') val = argo.data['field'][t, z, y, x] err = 0. @@ -169,21 +173,22 @@ def __init__(self, filename, date, argo): self.data[locKey][errKey] = err self.data[locKey][qcKey] = qc - self.varAttrs[argo.varname, iconv.OvalName()]['_FillValue'] = -999. - self.varAttrs[argo.varname, iconv.OerrName()]['_FillValue'] = -999. - self.varAttrs[argo.varname, iconv.OqcName()]['_FillValue'] = -999 - self.varAttrs[argo.varname, iconv.OvalName()]['units'] = 'degree_C' - self.varAttrs[argo.varname, iconv.OerrName()]['units'] = 'degree_C' - self.varAttrs[argo.varname, iconv.OqcName()]['units'] = 'unitless' + self.varAttrs[argo.varname2, iconv.OvalName()]['_FillValue'] = -999. + self.varAttrs[argo.varname2, iconv.OerrName()]['_FillValue'] = -999. 
+ self.varAttrs[argo.varname2, iconv.OqcName()]['_FillValue'] = -999 + self.varAttrs[argo.varname2, iconv.OvalName()]['units'] = 'K' + self.varAttrs[argo.varname2, iconv.OerrName()]['units'] = 'K' + self.varAttrs['depthBelowWaterSurface', 'MetaData']['units'] = 'm' # Extract obs - ObsVars, nlocs = iconv.ExtractObsData(self.data, self.locKeyList) - DimDict = {'nlocs': nlocs} + ObsVars, Location = iconv.ExtractObsData(self.data, self.locKeyList) + DimDict = {'Location': Location} varDims = { - 'TEMPERATURE': ['nlocs'] + 'waterTemperature': ['Location'] } # Set up IODA writer self.writer = iconv.IodaWriter(self.filename, self.locKeyList, DimDict) + # Write out observations self.writer.BuildIoda(ObsVars, varDims, self.varAttrs, self.GlobalAttrs) @@ -227,7 +232,7 @@ def main(): } varDims = { - 'TEMPERATURE': ['nlocs'] + 'waterTemperature': ['Location'] } IODA(foutput, fdate, argo) diff --git a/src/marine/avhrr_radiance2ioda.py b/src/marine/avhrr_radiance2ioda.py index a1f41f56c..a3ba60183 100755 --- a/src/marine/avhrr_radiance2ioda.py +++ b/src/marine/avhrr_radiance2ioda.py @@ -25,12 +25,12 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict -output_var_names = ["brightness_temperature"] +output_var_names = ["brightnessTemperature"] locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = {} @@ -43,6 +43,8 @@ chan_number = {3, 4, 5} +time_units = '' # Will fill in below. 
+ def read_input(input_args): """ @@ -64,9 +66,14 @@ def read_input(input_args): print("Reading ", input_file) ncd = nc.Dataset(input_file, 'r') + global time_units # get the base time (should only have 1 or 2 time slots) time_base = ncd.variables['time'][:] - basetime = dateutil.parser.parse(ncd.variables['time'].units[-20:]) + time_units = ncd.variables['time'].units[-19:] + 'Z' + s = list(time_units) + s[10] = "T" + time_units = 'seconds since ' + "".join(s) + basetime = dateutil.parser.parse(ncd.variables['time'].units[-19:]) # get some of the global attributes that we are interested in @@ -138,9 +145,8 @@ def read_input(input_args): # create a string version of the date for each observation dates = [] for i in range(len(lons)): - obs_date = basetime + \ - timedelta(seconds=float(time[i]+3600*data_in['scan_line_time'][i])) # check later - dates.append(obs_date.strftime("%Y-%m-%dT%H:%M:%SZ")) + dates.append(round(3600*data_in['scan_line_time'][i])) + dates = np.array(dates, dtype=np.int64) # output values nchans = len(chan_number) @@ -162,19 +168,18 @@ def read_input(input_args): # allocate space for output depending on which variables are to be saved obs_data = {} - obs_data[('datetime', 'MetaData')] = np.empty(len(dates), dtype=object) - obs_data[('datetime', 'MetaData')][:] = dates + obs_data[('dateTime', 'MetaData')] = dates.astype(np.int64) obs_data[('latitude', 'MetaData')] = lats obs_data[('longitude', 'MetaData')] = lons - obs_data[('record_number', 'MetaData')] = data_in['scan_line_number'] - obs_data[('height_above_mean_sea_level', 'MetaData')] = 840*np.ones((obs_dim)) - obs_data[('sensor_azimuth_angle', 'MetaData')] = data_in['sensor_azimuth_angle'] - obs_data[('sensor_zenith_angle', 'MetaData')] = data_in['sensor_zenith_angle'] - obs_data[('solar_zenith_angle', 'MetaData')] = data_in['solar_zenith_angle'] - obs_data[('solar_azimuth_angle', 'MetaData')] = data_in['solar_azimuth_angle'] - obs_data[('scan_position', 'MetaData')] = 
data_in['scan_line_number'] - obs_data[output_var_names[0], global_config['oval_name']] = val_tb - obs_data[output_var_names[0], global_config['oerr_name']] = err + obs_data[('sequenceNumber', 'MetaData')] = data_in['scan_line_number'] + obs_data[('height', 'MetaData')] = 840*np.ones((obs_dim)).astype('float32') + obs_data[('sensorAzimuthAngle', 'MetaData')] = data_in['sensor_azimuth_angle'] + obs_data[('sensorZenithAngle', 'MetaData')] = data_in['sensor_zenith_angle'] + obs_data[('solarZenithAngle', 'MetaData')] = data_in['solar_zenith_angle'] + obs_data[('solarAzimuthAngle', 'MetaData')] = data_in['solar_azimuth_angle'] + obs_data[('scanPosition', 'MetaData')] = data_in['scan_line_number'] + obs_data[output_var_names[0], global_config['oval_name']] = val_tb.astype('float32') + obs_data[output_var_names[0], global_config['oerr_name']] = err.astype('float32') obs_data[output_var_names[0], global_config['opqc_name']] = qc.astype('int32') return (obs_data, GlobalAttrs) @@ -248,7 +253,7 @@ def main(): # prepare global attributes we want to output in the file, # in addition to the ones already loaded in from the input file - GlobalAttrs['date_time_string'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") GlobalAttrs['thinning'] = args.thin GlobalAttrs['converter'] = os.path.basename(__file__) @@ -259,24 +264,24 @@ def main(): # pass parameters to the IODA writer # (needed because we are bypassing ExtractObsData within BuildNetcdf) VarDims = { - 'brightness_temperature': ['nlocs', 'nchans'] + 'brightnessTemperature': ['Location', 'Channel'] } nchans = len(chan_number) nlocs = len(obs_data[('longitude', 'MetaData')]) DimDict = { - 'nlocs': nlocs, - 'nchans': list(chan_number) + 'Location': nlocs, + 'Channel': list(chan_number) } writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) - VarAttrs[('brightness_temperature', 'ObsValue')]['units'] = 'Kelvin' - VarAttrs[('brightness_temperature', 
'ObsError')]['units'] = 'Kelvin' - VarAttrs[('brightness_temperature', 'PreQC')]['units'] = 'unitless' - VarAttrs[('brightness_temperature', 'ObsValue')]['_FillValue'] = 999 - VarAttrs[('brightness_temperature', 'ObsError')]['_FillValue'] = 999 - VarAttrs[('brightness_temperature', 'PreQC')]['_FillValue'] = 999 + VarAttrs[('dateTime', 'MetaData')]['units'] = time_units + VarAttrs[('brightnessTemperature', 'ObsValue')]['units'] = 'K' + VarAttrs[('brightnessTemperature', 'ObsError')]['units'] = 'K' + VarAttrs[('brightnessTemperature', 'ObsValue')]['_FillValue'] = 999 + VarAttrs[('brightnessTemperature', 'ObsError')]['_FillValue'] = 999 + VarAttrs[('brightnessTemperature', 'PreQC')]['_FillValue'] = 999 writer.BuildIoda(obs_data, VarDims, VarAttrs, GlobalAttrs) diff --git a/src/marine/copernicus_l3swh2ioda.py b/src/marine/copernicus_l3swh2ioda.py index 3e9732a4e..f35043ef6 100755 --- a/src/marine/copernicus_l3swh2ioda.py +++ b/src/marine/copernicus_l3swh2ioda.py @@ -28,23 +28,24 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] +time_units = 'seconds since 2000-01-01T00:00:00Z' + obsvars = { 'swh': 'swh', } AttrData = { 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), } DimDict = { } VarDims = { - 'swh': ['nlocs'], + 'swh': ['Location'], } @@ -62,9 +63,9 @@ def __init__(self, filename, factor): ncd.close() # set time stamp for all obs - self.datetime = np.empty_like(self.swh, dtype=object) + self.datetime = np.empty_like(self.swh, dtype=np.int64) for t in range(len(self.datetime)): - self.datetime[t] = datetime.utcfromtimestamp(self.time[t]+datetime(2000, 1, 1, 0, tzinfo=pytz.UTC).timestamp()).strftime("%Y-%m-%dT%H:%M:%SZ") + self.datetime[t] = self.time[t] # Remove observations out of sane bounds qci = np.where(self.swh > 0.0) @@ -89,7 +90,7 @@ def __init__(self, filename, factor): # Open input file and read relevant info def _read(self): # set up variable names for IODA 
- iodavar = 'sea_surface_wave_significant_height' + iodavar = 'waveHeightSignificant' self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() @@ -100,15 +101,15 @@ def _read(self): swh = copernicus(self.filename, self.factor) # map copernicus to ioda data structure - self.outdata[('datetime', 'MetaData')] = swh.datetime + self.outdata[('dateTime', 'MetaData')] = swh.datetime + self.var_mdata[('dateTime', 'MetaData')]['units'] = time_units self.outdata[('latitude', 'MetaData')] = swh.lats self.outdata[('longitude', 'MetaData')] = swh.lons self.outdata[self.varDict[iodavar]['valKey']] = swh.swh self.outdata[self.varDict[iodavar]['errKey']] = swh.err self.outdata[self.varDict[iodavar]['qcKey']] = np.zeros(swh.nlocs, dtype=np.int32) - DimDict['nlocs'] = swh.nlocs - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = swh.nlocs def main(): diff --git a/src/marine/copernicus_l4adt2ioda.py b/src/marine/copernicus_l4adt2ioda.py index 30d0cb7e7..709f3d89c 100755 --- a/src/marine/copernicus_l4adt2ioda.py +++ b/src/marine/copernicus_l4adt2ioda.py @@ -26,7 +26,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] obsvars = { @@ -35,16 +35,18 @@ AttrData = { 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), } DimDict = { } VarDims = { - 'adt': ['nlocs'], + 'adt': ['Location'], } +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class copernicus(object): def __init__(self, filename): @@ -58,7 +60,7 @@ def __init__(self, filename): self.lats = self.lats.ravel() self.adt = np.squeeze(ncd.variables['adt'][:]).ravel() self.err = np.squeeze(ncd.variables['err_sla'][:]).ravel() - self.date = ncd.getncattr('time_coverage_start') + self.time_coverage_start = ncd.getncattr('time_coverage_start') 
ncd.close() # Remove observations out of sane bounds @@ -68,9 +70,6 @@ def __init__(self, filename): self.lats = self.lats[qci] self.adt = self.adt[qci].astype(np.single) self.err = self.err[qci].astype(np.single) - # Same time stamp for all obs within 1 file - self.datetime = np.empty_like(self.adt, dtype=object) - self.datetime[:] = self.date class copernicus_l4adt2ioda(object): @@ -87,7 +86,7 @@ def __init__(self, filename, datetime=None): # Open input file and read relevant info def _read(self): # set up variable names for IODA - iodavar = 'absolute_dynamic_topography' + iodavar = 'absoluteDynamicTopography' self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() @@ -97,20 +96,33 @@ def _read(self): # read input filename adt = copernicus(self.filename) # put the time at 00 between start and end coverage time - if self.datetime is not None: - ymdh = datetime.strptime(self.datetime, "%Y%m%d") - ymdhm = ymdh.strftime("%Y-%m-%dT%H:%M:%SZ") - adt.datetime[:] = ymdhm + if adt.time_coverage_start is not None: + this_datetime = datetime.strptime(adt.time_coverage_start[:-1], "%Y-%m-%dT%H:%M:%S") + time_offset = round((this_datetime - epoch).total_seconds()) + else: + try: + this_datetime = datetime.strptime(adt.datetime, "%Y-%m-%dT%H:%M:%SZ") + time_offset = round((this_datetime - epoch).total_seconds()) + except Exception: + print(f"ABORT, failure to find timestamp; check format," + " it should be like 2014-07-29T12:00:00Z whereas" + " you entered {self.datetime}") + sys.exit() + + # Same time stamp for all obs within 1 file + adt.datetime = np.empty_like(adt.adt, dtype=np.int64) + adt.datetime[:] = time_offset + # map copernicus to ioda data structure - self.outdata[('datetime', 'MetaData')] = adt.datetime + self.outdata[('dateTime', 'MetaData')] = adt.datetime + self.var_mdata[('dateTime', 'MetaData')]['units'] = iso8601_string 
self.outdata[('latitude', 'MetaData')] = adt.lats self.outdata[('longitude', 'MetaData')] = adt.lons self.outdata[self.varDict[iodavar]['valKey']] = adt.adt self.outdata[self.varDict[iodavar]['errKey']] = adt.err self.outdata[self.varDict[iodavar]['qcKey']] = np.zeros(adt.nlocs, dtype=np.int32) - DimDict['nlocs'] = adt.nlocs - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = adt.nlocs def main(): diff --git a/src/marine/cryosat_ice2ioda.py b/src/marine/cryosat_ice2ioda.py index 2fb41623e..b4c878a2c 100755 --- a/src/marine/cryosat_ice2ioda.py +++ b/src/marine/cryosat_ice2ioda.py @@ -76,12 +76,12 @@ def _read(self): os.remove("cryosat_nc4classic.nc") -vName = "sea_ice_freeboard" +vName = "seaIceFreeboard" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "string") ] GlobalAttrs = { @@ -98,7 +98,7 @@ def main(): required = parser.add_argument_group(title='required arguments') required.add_argument( '-i', '--input', - help="Cryosat-2 ice freeboard obs input file(s)", + help="Cryosat-2 sea ice freeboard obs input file(s)", type=str, nargs='+', required=True) required.add_argument( '-o', '--output', @@ -119,7 +119,7 @@ def main(): args = parser.parse_args() fdate = datetime.strptime(args.date, '%Y%m%d%H') VarDims = { - vName: ['nlocs'], + vName: ['Location'], } # Read in @@ -127,14 +127,13 @@ def main(): # write them out - ObsVars, nlocs = iconv.ExtractObsData(ice.data, locationKeyList) - DimDict = {'nlocs': nlocs} + ObsVars, Location = iconv.ExtractObsData(ice.data, locationKeyList) + DimDict = {'Location': Location} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) VarAttrs[vName, iconv.OvalName()]['units'] = 'm' VarAttrs[vName, iconv.OerrName()]['units'] = 'm' - VarAttrs[vName, iconv.OqcName()]['units'] = 'unitless' VarAttrs[vName, iconv.OvalName()]['_FillValue'] = -32768 VarAttrs[vName, 
iconv.OerrName()]['_FillValue'] = -999. VarAttrs[vName, iconv.OqcName()]['_FillValue'] = -2147483648 diff --git a/src/marine/emc_ice2ioda.py b/src/marine/emc_ice2ioda.py index 717f22c07..4350f9de6 100755 --- a/src/marine/emc_ice2ioda.py +++ b/src/marine/emc_ice2ioda.py @@ -74,12 +74,12 @@ def _read(self, date): self.data[locKey][qcKey] = qc[i] -vName = "sea_ice_area_fraction" +vName = "seaIceFraction" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "string") ] GlobalAttrs = { @@ -96,7 +96,7 @@ def main(): required = parser.add_argument_group(title='required arguments') required.add_argument( '-i', '--input', - help="EMC ice fraction obs input file(s)", + help="EMC sea ice fraction obs input file(s)", type=str, nargs='+', required=True) required.add_argument( '-o', '--output', @@ -117,20 +117,19 @@ def main(): args = parser.parse_args() fdate = datetime.strptime(args.date, '%Y%m%d%H') VarDims = { - vName: ['nlocs'], + vName: ['Location'], } # Read in ice = Observation(args.input, args.thin, fdate) # write them out - ObsVars, nlocs = iconv.ExtractObsData(ice.data, locationKeyList) - DimDict = {'nlocs': nlocs} + ObsVars, Location = iconv.ExtractObsData(ice.data, locationKeyList) + DimDict = {'Location': Location} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) VarAttrs[vName, iconv.OvalName()]['units'] = '1' VarAttrs[vName, iconv.OerrName()]['units'] = '1' - VarAttrs[vName, iconv.OqcName()]['units'] = 'unitless' writer.BuildIoda(ObsVars, VarDims, VarAttrs, GlobalAttrs) diff --git a/src/marine/gds2_sst2ioda.py b/src/marine/gds2_sst2ioda.py index b6efe4af0..82a843150 100755 --- a/src/marine/gds2_sst2ioda.py +++ b/src/marine/gds2_sst2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2019-2021 UCAR +# (C) Copyright 2019-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which 
can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -26,23 +26,59 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict -output_var_names = [ - "sea_surface_temperature", - "sea_surface_skin_temperature"] +os.environ["TZ"] = "UTC" -locationKeyList = [ - ("latitude", "float"), - ("longitude", "float"), - ("datetime", "string") -] - -GlobalAttrs = {} - -VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) +# The first section of three variables are related to the IODA output file. -DimDict = {} +varInfo = [('sst', 'seaSurfaceTemperature', 'K', 999.0), + ('skin_sst', 'seaSurfaceSkinTemperature', 'K', 999.0)] +var_keys = [var_key[0] for var_key in varInfo] -VarDims = {} +locationKeyList = [ + ("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") +] +meta_keys = [m_item[0] for m_item in locationKeyList] + +GlobalAttrs = { + 'odb_version': 1, + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'description': "Sea-surface temperature from GHRRST Data Specification (GDS2.0) L2/3 formatted" +} + +# This list of variables is from the input file. 
+ +incoming_vars = ['quality_level', + 'sses_bias', + 'sses_standard_deviation', + 'sea_surface_temperature'] + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = 999.0 # More typically nc.default_fillvals['f4'] +int_missing_value = nc.default_fillvals['i4'] +double_missing_value = nc.default_fillvals['f8'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} def read_input(input_args): @@ -62,7 +98,7 @@ def read_input(input_args): input_file = input_args[0] global_config = input_args[1] - print("Reading ", input_file) + print(f"Reading input file: {input_file}") ncd = nc.Dataset(input_file, 'r') # get the base time (should only have 1 or 2 time slots) @@ -72,7 +108,11 @@ def read_input(input_args): # get some of the global attributes that we are interested in for v in ('platform', 'sensor', 'processing_level'): - GlobalAttrs[v] = ncd.getncattr(v) + if v == 'processing_level': + v2 = 'processingLevel' + else: + v2 = v + GlobalAttrs[v2] = ncd.getncattr(v) # get the QC flags, and calculate a mask from the non-missing values # (since L3 files are mostly empty, fields need a mask applied immediately @@ -87,7 +127,7 @@ def read_input(input_args): # If len(time) > 1, also need to repeat the lat/lon vals lons = ncd.variables['lon'][:].ravel() lats = ncd.variables['lat'][:].ravel() - if GlobalAttrs['processing_level'][:2] == 'L3': + if GlobalAttrs['processingLevel'][:2] == 'L3': len_grid = len(lons)*len(lats) lons, lats = np.meshgrid(lons, 
lats, copy=False) lons = np.tile(lons.ravel(), len(time_base)).ravel()[mask] @@ -100,14 +140,11 @@ def read_input(input_args): # calculate the basetime offsets time = np.tile(np.atleast_2d(time_base).T, (1, len_grid)).ravel()[mask] + # Determine the observed time. + data_in['sst_dtime'] = ncd.variables['sst_dtime'][:].ravel()[mask] + # load in all the other data and apply the missing value mask - input_vars = ( - 'quality_level', - 'sst_dtime', - 'sses_bias', - 'sses_standard_deviation', - 'sea_surface_temperature') - for v in input_vars: + for v in incoming_vars: if v not in data_in: data_in[v] = ncd.variables[v][:].ravel()[mask] ncd.close() @@ -119,7 +156,7 @@ def read_input(input_args): # also, sometimes certain input variables have their own mask due to # missing values - for v in input_vars: + for v in incoming_vars: if np.ma.is_masked(data_in[v]): mask = np.logical_and(mask, np.logical_not(data_in[v].mask)) @@ -127,57 +164,84 @@ def read_input(input_args): time = time[mask] lons = lons[mask] lats = lats[mask] - for v in input_vars: + for v in incoming_vars: data_in[v] = data_in[v][mask] # create a string version of the date for each observation dates = [] + data_in['sst_dtime'] = data_in['sst_dtime'][mask] for i in range(len(lons)): - obs_date = basetime + \ - timedelta(seconds=float(time[i]+data_in['sst_dtime'][i])) - dates.append(obs_date.strftime("%Y-%m-%dT%H:%M:%SZ")) + obs_date = basetime + timedelta(seconds=float(time[i]+data_in['sst_dtime'][i])) + obs_date = int((obs_date - epoch).total_seconds()) + dates.append(np.int64(obs_date)) # calculate output values # Note: the qc flags in GDS2.0 run from 0 to 5, with higher numbers # being better. IODA typically expects 0 to be good, and higher numbers # are bad, so the qc flags flipped here. - # TODO change everything in soca to handle K instead of C ? 
- val_sst_skin = data_in['sea_surface_temperature'] - 273.15 + val_sst_skin = data_in['sea_surface_temperature'] val_sst = val_sst_skin - data_in['sses_bias'] err = data_in['sses_standard_deviation'] qc = 5 - data_in['quality_level'] - # allocate space for output depending on which variables are to be saved - - obs_dim = (len(lons)) obs_data = {} - if global_config['output_sst']: - obs_data[(output_var_names[0], global_config['oval_name'])] = np.zeros(obs_dim) - obs_data[(output_var_names[0], global_config['oerr_name'])] = np.zeros(obs_dim) - obs_data[(output_var_names[0], global_config['opqc_name'])] = np.zeros(obs_dim) - - if global_config['output_skin_sst']: - obs_data[(output_var_names[1], global_config['oval_name'])] = np.zeros(obs_dim) - obs_data[(output_var_names[1], global_config['oerr_name'])] = np.zeros(obs_dim) - obs_data[(output_var_names[1], global_config['opqc_name'])] = np.zeros(obs_dim) - obs_data[('datetime', 'MetaData')] = np.empty(len(dates), dtype=object) - obs_data[('datetime', 'MetaData')][:] = dates - obs_data[('latitude', 'MetaData')] = lats - obs_data[('longitude', 'MetaData')] = lons + # First, populate the MetaData. + obs_data[('dateTime', metaDataName)] = dates + obs_data[('latitude', metaDataName)] = lats + obs_data[('longitude', metaDataName)] = lons + # Next, populate the observed variables. 
if global_config['output_sst']: - obs_data[output_var_names[0], global_config['oval_name']] = val_sst - obs_data[output_var_names[0], global_config['oerr_name']] = err - obs_data[output_var_names[0], global_config['opqc_name']] = qc.astype('int32') + obs_data[('sst', obsValName)] = val_sst.astype('float32') + obs_data[('sst', obsErrName)] = err.astype('float32') + obs_data[('sst', qcName)] = qc.astype('int32') + if global_config['output_skin_sst']: - obs_data[output_var_names[1], global_config['oval_name']] = val_sst_skin - obs_data[output_var_names[1], global_config['oerr_name']] = err - obs_data[output_var_names[1], global_config['opqc_name']] = qc.astype('int32') + obs_data[('skin_sst', obsValName)] = val_sst_skin.astype('float32') + obs_data[('skin_sst', obsErrName)] = err.astype('float32') + obs_data[('skin_sst', qcName)] = qc.astype('int32') return (obs_data, GlobalAttrs) +def IODA(filename, GlobalAttrs, nlocs, obs_data): + + DimDict = {'Location': nlocs} + varDims = {} + varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + + # Set units and FillValue attributes for groups associated with observed variable. 
+ for key in var_keys: + if (key, obsValName) in obs_data: + var_name = varInfo[var_keys.index(key)][1] + varDims[var_name] = ['Location'] + varAttrs[(var_name, obsValName)]['units'] = varInfo[var_keys.index(key)][2] + varAttrs[(var_name, obsErrName)]['units'] = varInfo[var_keys.index(key)][2] + varAttrs[(var_name, obsValName)]['_FillValue'] = varInfo[var_keys.index(key)][3] + varAttrs[(var_name, obsErrName)]['_FillValue'] = varInfo[var_keys.index(key)][3] + varAttrs[(var_name, qcName)]['_FillValue'] = int_missing_value + + data[(var_name, obsValName)] = np.array(obs_data[(key, obsValName)], dtype=np.float32) + data[(var_name, obsErrName)] = np.array(obs_data[(key, obsErrName)], dtype=np.float32) + data[(var_name, qcName)] = np.array(obs_data[(key, qcName)], dtype=np.int32) + + # Set units of the MetaData variables and all _FillValues. + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if locationKeyList[meta_keys.index(key)][2]: + varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + data[(key, metaDataName)] = np.array(obs_data[(key, metaDataName)], dtype=dtypes[dtypestr]) + + # Initialize the writer, then write the file. 
+ writer = iconv.IodaWriter(filename, locationKeyList, DimDict) + writer.BuildIoda(data, varDims, varAttrs, GlobalAttrs) + + return + + def main(): # Get command line arguments @@ -239,9 +303,6 @@ def main(): global_config = {} global_config['date'] = args.date global_config['thin'] = args.thin - global_config['oval_name'] = iconv.OvalName() - global_config['oerr_name'] = iconv.OerrName() - global_config['opqc_name'] = iconv.OqcName() global_config['output_sst'] = args.sst global_config['output_skin_sst'] = args.skin_sst pool_inputs = [(i, global_config) for i in args.input] @@ -253,52 +314,24 @@ def main(): # concatenate the data from the files obs_data, GlobalAttrs = obs[0] + # Add additional global attributes we want to output in the file. + GlobalAttrs['sourceFiles'] = ", ".join(args.input) + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") + GlobalAttrs['thinning'] = args.thin + for i in range(1, len(obs)): for k in obs_data: axis = len(obs[i][0][k].shape)-1 obs_data[k] = np.concatenate( (obs_data[k], obs[i][0][k]), axis=axis) - # prepare global attributes we want to output in the file, - # in addition to the ones already loaded in from the input file - GlobalAttrs['date_time_string'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") - GlobalAttrs['thinning'] = args.thin - GlobalAttrs['converter'] = os.path.basename(__file__) - - # determine which variables we are going to output - selected_names = [] - if args.sst: - selected_names.append(output_var_names[0]) - if args.skin_sst: - selected_names.append(output_var_names[1]) - - # pass parameters to the IODA writer - # (needed because we are bypassing ExtractObsData within BuildNetcdf) - VarDims = { - 'sea_surface_temperature': ['nlocs'], - 'sea_surface_skin_temperature': ['nlocs'], - } - - nlocs = len(obs_data[('longitude', 'MetaData')]) - - DimDict = {'nlocs': nlocs} - - writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) - - VarAttrs[('sea_surface_temperature', 
'ObsValue')]['units'] = 'celsius' - VarAttrs[('sea_surface_temperature', 'ObsError')]['units'] = 'celsius' - VarAttrs[('sea_surface_temperature', 'PreQC')]['units'] = 'unitless' - VarAttrs[('sea_surface_skin_temperature', 'ObsValue')]['units'] = 'celsius' - VarAttrs[('sea_surface_skin_temperature', 'ObsError')]['units'] = 'celsius' - VarAttrs[('sea_surface_skin_temperature', 'PreQC')]['units'] = 'unitless' - VarAttrs[('sea_surface_temperature', 'ObsValue')]['_FillValue'] = 999 - VarAttrs[('sea_surface_temperature', 'ObsError')]['_FillValue'] = 999 - VarAttrs[('sea_surface_temperature', 'PreQC')]['_FillValue'] = 999 - VarAttrs[('sea_surface_skin_temperature', 'ObsValue')]['_FillValue'] = 999 - VarAttrs[('sea_surface_skin_temperature', 'ObsError')]['_FillValue'] = 999 - VarAttrs[('sea_surface_skin_temperature', 'PreQC')]['_FillValue'] = 999 - - writer.BuildIoda(obs_data, VarDims, VarAttrs, GlobalAttrs) + # Total number of observations. + nlocs = len(obs_data[('dateTime', metaDataName)]) + + print(f"Preparing to write {nlocs} observations to {args.output}") + + # Write out the file. + IODA(args.output, GlobalAttrs, nlocs, obs_data) if __name__ == '__main__': diff --git a/src/marine/glider2ioda.py b/src/marine/glider2ioda.py index b09cc2570..de9449aef 100644 --- a/src/marine/glider2ioda.py +++ b/src/marine/glider2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2019 UCAR +# (C) Copyright 2019-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
@@ -25,14 +25,14 @@ from orddicts import DefaultOrderedDict vName = [ - "sea_water_temperature", - "sea_water_salinity"] + "waterTemperature", + "salinity"] locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("depth", "float"), - ("datetime", "string") + ("depthBelowWaterSurface", "float"), + ("dateTime", "long") ] GlobalAttrs = { @@ -74,7 +74,7 @@ def _read(self): Sqcs = ncd.variables['salinity_qc'][:]-1 errs = np.squeeze(errs) ncd.close() - base_date = datetime(1970, 1, 1) + ii = int((self.lon_eth_HAT10-self.lon_wth_HAT10)/self.dxy)+10 jj = int((self.lat_nth_HAT10-self.lat_sth_HAT10)/self.dxy)+10 kk = int((self.mdep)/self.dz)+10 @@ -93,10 +93,10 @@ def _read(self): valKey = vName[j], iconv.OvalName() errKey = vName[j], iconv.OerrName() qcKey = vName[j], iconv.OqcName() - dt = base_date + timedelta(seconds=int(time[i])) - locKey = lats[i], lons[i], dpth[i], dt.strftime("%Y-%m-%dT%H:%M:%SZ") + dt = int(time[i]) + locKey = lats[i], lons[i], dpth[i], dt if j == 0: - self.data[locKey][valKey] = temperature[i] + self.data[locKey][valKey] = temperature[i] + 273.15 self.data[locKey][errKey] = errs[i] self.data[locKey][qcKey] = Tqcs[i] else: @@ -132,20 +132,21 @@ def main(): yamlfile = args.yaml fdate = datetime.strptime(args.date, '%Y%m%d%H') VarDims = { - 'sea_water_temperature': ['nlocs'], - 'sea_water_salinity': ['nlocs']} + 'waterTemperature': ['Location'], + 'salinity': ['Location']} prof = Profile(args.input, args.yaml, fdate) - GlobalAttrs['date_time_string'] = fdate.strftime("%Y-%m-%dT%H:%M:%SZ") ObsVars, nlocs = iconv.ExtractObsData(prof.data, locationKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': nlocs} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('sea_water_temperature', 'ObsValue')]['units'] = 'celsius' - VarAttrs[('sea_water_salinity', 'ObsValue')]['units'] = 'psu' - VarAttrs[('sea_water_temperature', 'ObsValue')]['_FillValue'] = 
-32767 - VarAttrs[('sea_water_salinity', 'ObsValue')]['_FillValue'] = -32767 - VarAttrs[('sea_water_temperature', 'PreQC')]['units'] = '' - VarAttrs[('sea_water_salinity', 'ObsError')]['units'] = '' + VarAttrs[('waterTemperature', 'ObsValue')]['units'] = 'K' + VarAttrs[('waterTemperature', 'ObsError')]['units'] = 'K' + VarAttrs[('salinity', 'ObsValue')]['units'] = '1' + VarAttrs[('salinity', 'ObsError')]['units'] = '1' + VarAttrs[('depthBelowWaterSurface', 'MetaData')]['units'] = 'm' + VarAttrs[('dateTime', 'MetaData')]['units'] = 'seconds since 1970-01-01T00:00:00Z' + VarAttrs[('waterTemperature', 'ObsValue')]['_FillValue'] = -32767 + VarAttrs[('salinity', 'ObsValue')]['_FillValue'] = -32767 writer.BuildIoda(ObsVars, VarDims, VarAttrs, GlobalAttrs) diff --git a/src/marine/gmao_obs2ioda.py b/src/marine/gmao_obs2ioda.py index bce852e5a..dfddcc2c4 100755 --- a/src/marine/gmao_obs2ioda.py +++ b/src/marine/gmao_obs2ioda.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# (C) Copyright 2019 UCAR +# (C) Copyright 2019-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -11,8 +11,9 @@ from __future__ import print_function import sys +import os from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter -import netCDF4 as nc4 +import netCDF4 as nc import numpy as np from datetime import datetime from pathlib import Path @@ -22,43 +23,55 @@ IODA_CONV_PATH = Path(__file__).parent/'..'/'lib-python' sys.path.append(str(IODA_CONV_PATH.resolve())) -import ioda_conv_ncio as iconv +import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" -# obsIdDict is defined as obsid_dict in ocean_obs.py -obsIdDict = { - 5521: 'sea_water_salinity', # Salinity - 3073: 'sea_water_temperature', # Temperature - 5525: 'sea_surface_temperature', # SST - 5526: 'absolute_dynamic_topography', # SSH (Not used ...) 
- 5351: 'absolute_dynamic_topography', # SSH - 6000: 'sea_ice_area_fraction', # AICE - 6001: 'sea_ice_thickness' # HICE -} - - +# varDict has a numerical code to match a variable type attribute in the netCDF input file +# followed by an abbreviated name, IODA output var name, units, then acceptable min/max vals. varDict = { - 'sea_water_salinity': 'sal', - 'sea_water_temperature': 'temp', - 'sea_surface_temperature': 'sst', - 'absolute_dynamic_topography': 'adt', - 'sea_ice_area_fraction': 'frac', - 'sea_ice_thickness': 'thick' + 5521: ['sal', 'salinity', '1', 0.0, 50.0], + 3073: ['temp', 'waterTemperature', 'K', 271.0, 325.0], + 5525: ['sst', 'seaSurfaceTemperature', 'K', 271.0, 325.0], + 5526: ['adt', 'absoluteDynamicTopography', 'm', -5.0, 5.0], + 5351: ['adt', 'absoluteDynamicTopography', 'm', -5.0, 5.0], # not used + 6000: ['frac', 'seaIceFraction', '1', 0.0, 1.0], + 6001: ['thick', 'iceThickness', 'm', 0.001, 5000.0], } - -def flipDict(dictIn): - - dictOut = {} - - for key, value in dictIn.items(): - if value not in dictOut: - dictOut[value] = [key] - else: - dictOut[value].append(key) - - return dictOut +locationKeyList = [ + ("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("depthBelowWaterSurface", "float", "m"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") +] +meta_keys = [m_item[0] for m_item in locationKeyList] + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = nc.default_fillvals['f4'] +int_missing_value = nc.default_fillvals['i4'] +double_missing_value = nc.default_fillvals['f8'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': 
float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} class GMAOobs(object): @@ -69,38 +82,43 @@ def __init__(self, filename, date): self.date = date # Read GMAO data - self._read() + self._read(self.date) return - def _read(self): - - data = {} - - nc = nc4.Dataset(self.filename) + def _read(self, date): - data['nobs'] = len(nc.dimensions['nobs']) + self.date = date - data['typ'] = nc.variables['typ'][:].data - data['lon'] = nc.variables['lon'][:].data - data['lat'] = nc.variables['lat'][:].data - data['depth'] = nc.variables['depth'][:].data - data['value'] = nc.variables['value'][:].data - data['oerr'] = nc.variables['oerr'][:].data + data = {} - nc.close() + ncd = nc.Dataset(self.filename) - self.data = data + nobs = len(ncd.dimensions['nobs']) + data['nobs'] = nobs - return + # The input file(s) contain no date information, so take it from command line info. + data['dateTime'] = np.full(nobs, np.int64(round((self.date - epoch).total_seconds()))) + data['longitude'] = ncd.variables['lon'][:].data + data['latitude'] = ncd.variables['lat'][:].data + data['depthBelowWaterSurface'] = ncd.variables['depth'][:].data + types = ncd.variables['typ'][:].data + values = ncd.variables['value'][:].data + values[np.isnan(values)] = float_missing_value + errors = ncd.variables['oerr'][:].data -class refGMAOobs(object): + for key in varDict.keys(): + key_var = varDict[key][0] + "_vals" + key_err = varDict[key][0] + "_errs" + data[key_var] = np.full(nobs, float_missing_value) + data[key_err] = np.full(nobs, float_missing_value) + ind = np.where(types == key) + data[key_var][ind] = values[ind] + data[key_err][ind] = errors[ind] - def __init__(self, filename, date, data): + ncd.close() - self.filename = filename - self.date = date self.data = data return @@ -108,28 +126,40 @@ def __init__(self, filename, date, data): class IODA(object): - def __init__(self, 
filename, date, varName, obsList): - ''' - Initialize IODA writer class, - transform to IODA data structure and, - write out to IODA file. - ''' + def __init__(self, files_input, filename, date, obsList): + self.files_input = files_input self.filename = filename self.date = date - self.locKeyList = [ - ("latitude", "float"), - ("longitude", "float"), - ("depth", "float"), - ("datetime", "string") - ] - - self.AttrData = { + GlobalAttrs = { 'odb_version': 1, - 'date_time_string': self.date.strftime("%Y-%m-%dT%H:%M:%SZ") + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'sourceFiles': ", ".join(self.files_input), + 'datetimeReference': self.date.strftime('%Y-%m-%dT%H:%M:%S%z'), + 'description': "GMAO Ocean Observations" } + varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + + # Set units of the MetaData variables and all _FillValues. + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if locationKeyList[meta_keys.index(key)][2]: + varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + + # Set units and FillValue attributes for groups associated with observed variable. + for key in varDict.keys(): + variable = varDict[key][1] + units = varDict[key][2] + varAttrs[(variable, obsValName)]['units'] = units + varAttrs[(variable, obsErrName)]['units'] = units + varAttrs[(variable, obsValName)]['_FillValue'] = float_missing_value + varAttrs[(variable, obsErrName)]['_FillValue'] = float_missing_value + varAttrs[(variable, qcName)]['_FillValue'] = int_missing_value + # Skip out if there are no obs! totalObs = 0 for obs in obsList: @@ -140,126 +170,78 @@ def __init__(self, filename, date, varName, obsList): print('No %s observations for IODA!' 
% varName) return - self.writer = iconv.NcWriter(self.filename, self.locKeyList) - - self.keyDict = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - - self.keyDict[varName]['valKey'] = varName, self.writer.OvalName() - self.keyDict[varName]['errKey'] = varName, self.writer.OerrName() - self.keyDict[varName]['qcKey'] = varName, self.writer.OqcName() - # data is the dictionary containing IODA friendly data structure - self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - - recKey = 0 + data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + nlocs = 0 for obs in obsList: - - if obs.data['nobs'] <= 0: - continue - - for n in range(obs.data['nobs']): - - oval = obs.data['value'][n] - oerr = obs.data['oerr'][n] - - if discardOb(varName, oval): - continue - - lat = obs.data['lat'][n] - lon = obs.data['lon'][n] - lvl = obs.data['depth'][n] - - locKey = lat, lon, lvl, obs.date.strftime("%Y-%m-%dT%H:%M:%SZ") - - valKey = self.keyDict[varName]['valKey'] - errKey = self.keyDict[varName]['errKey'] - qcKey = self.keyDict[varName]['qcKey'] - - self.data[recKey][locKey][valKey] = oval - self.data[recKey][locKey][errKey] = oerr - self.data[recKey][locKey][qcKey] = 0 - - (ObsVars, LocMdata, VarMdata) = self.writer.ExtractObsData(self.data) - self.writer.BuildNetcdf(ObsVars, LocMdata, VarMdata, self.AttrData) + nlocs = obs.data['nobs'] + for n in range(nlocs): + # Transfer the MetaData info into the IODA final data container. + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + val = obs.data[key][n] + varVals = np.array(val, dtype=dtypes[dtypestr]) + # If on the first time through, set the data dict entry + # to a numpy array with a specified data type. Otherwise, append + # the incoming data to the current data. 
+ if (key, metaDataName) in data: + data[(key, metaDataName)] = np.append( + data[(key, metaDataName)], varVals) + else: + data[(key, metaDataName)] = varVals + + varDims = {} + # Fill up the final array of observed values and obsErrors + for key in varDict.keys(): + key_var = varDict[key][0] + "_vals" + key_err = varDict[key][0] + "_errs" + variable = varDict[key][1] + min_val = varDict[key][3] + max_val = varDict[key][4] + + if all(x == float_missing_value for x in obs.data[key_var]): + continue + + varDims = {key_var: ['Location']} + + # ObsValue + varVals = np.array(obs.data[key_var][n], dtype=dtypes['float']) + if 'Temperature' in variable: + if varVals != float_missing_value: + varVals = varVals + 273.15 + if (varVals < min_val or varVals > max_val): + varVals = float_missing_value + if (variable, obsValName) in data: + data[(variable, obsValName)] = np.append( + data[(variable, obsValName)], np.float32(varVals)) + else: + data[(variable, obsValName)] = np.float32(varVals) + + # ObsError + varVals = np.array(obs.data[key_err][n], dtype=dtypes['float']) + if (variable, obsErrName) in data: + data[(variable, obsErrName)] = np.append( + data[(variable, obsErrName)], np.float32(varVals)) + else: + data[(variable, obsErrName)] = np.float32(varVals) + + # QC (preQC) value (zero for now) + if (variable, qcName) in data: + data[(variable, qcName)] = np.append(data[(variable, qcName)], np.int32(0)) + else: + data[(variable, qcName)] = np.int32(0) + + print(f"Found a total number of observations: {nlocs}") + + # Initialize the writer, then write the file. 
+ DimDict = {'Location': nlocs} + self.writer = iconv.IodaWriter(self.filename, locationKeyList, DimDict) + self.writer.BuildIoda(data, varDims, varAttrs, GlobalAttrs) return -def separateObs(obsList): - - obsDict = {} - for key in obsIdDict.keys(): - - obsListKey = [] - - for obs in obsList: - - date = obs.date - filename = obs.filename - - ind = np.where(obs.data['typ'] == key) - - data = {} - data['nobs'] = len(ind[0]) - data['typ'] = obs.data['typ'][ind] - data['lon'] = obs.data['lon'][ind] - data['lat'] = obs.data['lat'][ind] - data['depth'] = obs.data['depth'][ind] - data['value'] = obs.data['value'][ind] - data['oerr'] = obs.data['oerr'][ind] - - obsListKey.append(refGMAOobs(filename, date, data)) - - obsDict[key] = obsListKey - - return obsDict - - -def sortDict(obsDictIn): - - # Flip the obsIdDict - obsIdDictFlipped = flipDict(obsIdDict) - - obsDictOut = {} - - # Loop over flipped obsIdDict - for key, values in obsIdDictFlipped.items(): - - obsList = [] - for value in values: - obsList.append(obsDictIn[value]) - - # Flatten the newly created list of lists - obsDictOut[key] = [item for sublist in obsList for item in sublist] - - return obsDictOut - - -def discardOb(varName, obsValue): - - discardOb = True - - if varName in ["sea_water_salinity"]: - if 0. <= obsValue <= 50.: - discardOb = False - elif varName in ["sea_water_temperature", "sea_surface_temperature"]: - if -2. <= obsValue <= 100.: - discardOb = False - elif varName in ["absolute_dynamic_topography"]: - if -5. <= obsValue <= 5.: - discardOb = False - elif varName in ["sea_ice_area_fraction"]: - if 0. 
<= obsValue <= 1.: - discardOb = False - elif varName in ["sea_ice_thickness"]: - discardOb = False - else: - raise SystemExit("Unknown observation variable %s" % varName) - - return discardOb - - def main(): parser = ArgumentParser( @@ -293,15 +275,12 @@ def main(): obsList = [] for fname, idate in zip(fList, dList): - obsList.append(GMAOobs(fname, idate)) - - obsDict = separateObs(obsList) + if not os.path.isfile(fname): + parser.error('Input (-i option) file: ', fname, ' does not exist') - obsDictSorted = sortDict(obsDict) + obsList.append(GMAOobs(fname, idate)) - for key, value in varDict.items(): - fout = '%s_%s.nc' % (foutput, value) - IODA(fout, fdate, key, obsDictSorted[key]) + IODA(fList, foutput, fdate, obsList) if __name__ == '__main__': diff --git a/src/marine/godae_bgc_argo2ioda.py b/src/marine/godae_bgc_argo2ioda.py index 2eb23bdd3..ccc8c87df 100644 --- a/src/marine/godae_bgc_argo2ioda.py +++ b/src/marine/godae_bgc_argo2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2021 UCAR +# (C) Copyright 2021-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -9,7 +9,9 @@ from __future__ import print_function import sys +import os import argparse +import numpy as np import netCDF4 as nc from datetime import datetime, timedelta from pathlib import Path @@ -23,18 +25,43 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict -vName = { - 'CHL': "mass_concentration_of_chlorophyll_in_sea_water", -} +os.environ["TZ"] = "UTC" + +varInfo = ['chlorophyllMassConcentration', 'mg m-3', 999.] 
+varDims = {varInfo[0]: ['Location']} locationKeyList = [ - ("latitude", "float"), - ("longitude", "float"), - ("depth", "float"), - ("datetime", "string") + ("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("depthBelowWaterSurface", "float", "m"), + ("dateTime", "long", "seconds since 1950-01-01T00:00:00Z") ] - -GlobalAttrs = {} +meta_keys = [m_item[0] for m_item in locationKeyList] + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = 999. +int_missing_value = 99900 +double_missing_value = nc.default_fillvals['f8'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} class Profile(object): @@ -56,23 +83,92 @@ def _read(self): qcs = ncd.variables['CHLA_ADJUSTED_QC'][:] ncd.close() - base_date = datetime(1950, 1, 1) + obs = {} + for key in meta_keys: + obs[key] = [] + obs['vals'] = [] + obs['errs'] = [] + obs['qc'] = [] for i in range(len(dpth[1])-1): if ma.getmask(vals)[1][i] == 1: continue - valKey = vName['CHL'], iconv.OvalName() - errKey = vName['CHL'], iconv.OerrName() - qcKey = vName['CHL'], iconv.OqcName() + dt = epoch + timedelta(days=float(time[1])) + obs['dateTime'].append(np.int64(round((dt - epoch).total_seconds()))) + obs['latitude'].append(ma.getdata(lats)[1]) + obs['longitude'].append(ma.getdata(lons)[1]) + obs['depthBelowWaterSurface'].append(ma.getdata(dpth)[1][i]) + obs['vals'].append(ma.getdata(vals)[1][i]) + obs['errs'].append(ma.getdata(errs)[1][i]) + obs['qc'].append(0) + + 
self.data = obs + + return + + +class IODA(object): + + def __init__(self, files_input, filename, date, obsList): - dt = base_date + timedelta(days=float(time[1])) - locKey = ma.getdata(lats)[1], ma.getdata(lons)[1], \ - ma.getdata(dpth)[1][i], dt.strftime("%Y-%m-%dT%H:%M:%SZ") - self.data[locKey][valKey] = ma.getdata(vals)[1][i] - self.data[locKey][errKey] = ma.getdata(errs)[1][i] - self.data[locKey][qcKey] = 0 + ''' + Initialize IODA writer class, + transform to IODA data structure and, + write out to IODA file. + ''' + + self.filename = filename + self.date = date + + self.GlobalAttrs = { + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'sourceFiles': ", ".join(files_input), + 'datetimeReference': self.date.strftime('%Y-%m-%dT%H:%M:%S%z'), + 'description': "GODAE Profile Observations of chlorophyll (BGC-Argo)" + } + + self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + + # Set units and FillValue attributes for groups associated with observed variable. + self.varAttrs[(varInfo[0], obsValName)]['units'] = varInfo[1] + self.varAttrs[(varInfo[0], obsErrName)]['units'] = varInfo[1] + self.varAttrs[(varInfo[0], obsValName)]['_FillValue'] = varInfo[2] + self.varAttrs[(varInfo[0], obsErrName)]['_FillValue'] = varInfo[2] + self.varAttrs[(varInfo[0], qcName)]['_FillValue'] = int_missing_value + + # data is the dictionary containing IODA friendly data structure + self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + + nobs = 0 + for obs in obsList: + + nobs += len(obs.data['vals']) + if nobs <= 0: + print('No observations for IODA!') + continue + + # Set units of the MetaData variables and all _FillValues. 
+ for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if locationKeyList[meta_keys.index(key)][2]: + self.varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + self.varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + self.data[(key, metaDataName)] = np.array(obs.data[key], dtype=dtypes[dtypestr]) + + # Fill up the final array of observed values, obsErrors, and Qc + self.data[(varInfo[0], obsValName)] = np.array(obs.data['vals'], dtype=float) + self.data[(varInfo[0], obsErrName)] = np.array(obs.data['errs'], dtype=float) + self.data[(varInfo[0], qcName)] = np.array(obs.data['qc'], dtype=np.int32) + + # Initialize the writer, then write the file. + DimDict = {'Location': nobs} + self.writer = iconv.IodaWriter(self.filename, locationKeyList, DimDict) + self.writer.BuildIoda(self.data, varDims, self.varAttrs, self.GlobalAttrs) + + return def main(): @@ -80,7 +176,7 @@ def main(): parser = argparse.ArgumentParser( description=( 'Read BGC-Argo chlorophyll profile from godae' - ' and convert to IODA v2 format.' + ' and convert to IODA v2 format.' 
) ) @@ -88,7 +184,7 @@ def main(): required.add_argument( '-i', '--input', help="name of BGC-Argo observation input file(s)", - type=str, required=True) + type=str, nargs='+', required=True) required.add_argument( '-o', '--output', help="path of ioda output file", @@ -98,29 +194,18 @@ def main(): help="base date for the center of the window", metavar="YYYYMMDDHH", type=str, required=True) args = parser.parse_args() - fdate = datetime.strptime(args.date, '%Y%m%d%H') - VarDims = { - 'mass_concentration_of_chlorophyll_in_sea_water': ['nlocs'], - } - - # Read in argo profiles - prof = Profile(args.input, fdate) - - # write them out - ObsVars, nlocs = iconv.ExtractObsData(prof.data, locationKeyList) + fList = args.input + foutput = args.output + fdate = datetime.strptime(args.date, '%Y%m%d%H') - DimDict = {'nlocs': nlocs} - writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) + obsList = [] + for fname in fList: + obs = Profile(fname, fdate) # Read in argo profiles + obsList.append(obs) - VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('mass_concentration_of_chlorophyll_in_sea_water', 'ObsValue')]['units'] = 'mg m-3' - VarAttrs[('mass_concentration_of_chlorophyll_in_sea_water', 'ObsError')]['units'] = 'mg m-3' - VarAttrs[('mass_concentration_of_chlorophyll_in_sea_water', 'PreQC')]['units'] = 'unitless' - VarAttrs[('mass_concentration_of_chlorophyll_in_sea_water', 'ObsValue')]['_FillValue'] = 999. - VarAttrs[('mass_concentration_of_chlorophyll_in_sea_water', 'ObsError')]['_FillValue'] = 999. - VarAttrs[('mass_concentration_of_chlorophyll_in_sea_water', 'PreQC')]['_FillValue'] = 999 - writer.BuildIoda(ObsVars, VarDims, VarAttrs, GlobalAttrs) + # Write the output file. 
+ IODA(fList, foutput, fdate, obsList) if __name__ == '__main__': diff --git a/src/marine/godae_profile2ioda.py b/src/marine/godae_profile2ioda.py index 6fbb3f5c2..0b7ca904a 100755 --- a/src/marine/godae_profile2ioda.py +++ b/src/marine/godae_profile2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2019-2021 UCAR +# (C) Copyright 2019-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -9,6 +9,9 @@ from __future__ import print_function import sys +import os +import numpy as np +import netCDF4 as nc from datetime import datetime from scipy.io import FortranFile from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter @@ -22,6 +25,51 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" + +varDict = { + 'ob_tmp': ['waterTemperature', 'K'], + 'ob_sal': ['salinity', '1'] +} + +varDims = { + 'waterTemperature': ['Location'], + 'salinity': ['Location'], +} + +locationKeyList = [ + ("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("depthBelowWaterSurface", "float", "m"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") +] +meta_keys = [m_item[0] for m_item in locationKeyList] + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = -999. 
+int_missing_value = -99900 +double_missing_value = nc.default_fillvals['f8'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} + class profile(object): @@ -50,7 +98,7 @@ def _rd_prof(self): # data is the dictionary with data structure as in ocn_obs.f data = {} - + data['dateTime'] = [] data['n_obs'], data['n_lvl'], data['n_vrsn'] = fh.read_ints('>i4') print(' number profiles: %d' % data['n_obs']) @@ -62,8 +110,8 @@ def _rd_prof(self): return data['ob_btm'] = fh.read_reals('>f4') - data['ob_lat'] = fh.read_reals('>f4') - data['ob_lon'] = fh.read_reals('>f4') + data['latitude'] = fh.read_reals('>f4') + data['longitude'] = fh.read_reals('>f4') data['ob_ls'] = fh.read_reals('>i4') data['ob_lt'] = fh.read_reals('>i4') data['ob_ssh'] = fh.read_reals('>f4') @@ -73,7 +121,7 @@ def _rd_prof(self): data['ob_tmp_typ'] = fh.read_reals('>i4') data['ob_tmp_qc'] = fh.read_reals('>f4') - data['ob_lvl'] = [] + data['depthBelowWaterSurface'] = [] data['ob_sal'] = [] data['ob_sal_err'] = [] data['ob_sal_prb'] = [] @@ -82,7 +130,7 @@ def _rd_prof(self): data['ob_tmp_prb'] = [] for n in range(data['n_obs']): - data['ob_lvl'].append(fh.read_reals('>f4')) + data['depthBelowWaterSurface'].append(fh.read_reals('>f4')) data['ob_sal'].append(fh.read_reals('>f4')) data['ob_sal_err'].append(fh.read_reals('>f4')) data['ob_sal_prb'].append(fh.read_reals('>f4')) @@ -144,6 +192,15 @@ def _rd_prof(self): fh.close() + # Transfer timestamp into seconds since epoch and convert Celcius to Kelvin + for n in range(data['n_obs']): + dtg = datetime.strptime(data['ob_dtg'][n], '%Y%m%d%H%M') + time_offset = np.int64(round((dtg - epoch).total_seconds())) + 
data['dateTime'].append(time_offset) + + for k, depth in enumerate(data['depthBelowWaterSurface'][n]): + data['ob_tmp'][n][k] = data['ob_tmp'][n][k] + 273.15 + self.data = data return @@ -151,7 +208,7 @@ def _rd_prof(self): class IODA(object): - def __init__(self, filename, date, varDict, varDims, obsList): + def __init__(self, files_input, filename, date, obsList): ''' Initialize IODA writer class, transform to IODA data structure and, @@ -160,71 +217,103 @@ def __init__(self, filename, date, varDict, varDims, obsList): self.filename = filename self.date = date - self.varDict = varDict - - self.locKeyList = [ - ("latitude", "float"), - ("longitude", "float"), - ("depth", "float"), - ("datetime", "string") - ] - self.GlobalAttrs = { + GlobalAttrs = { 'odb_version': 1, + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'sourceFiles': ", ".join(files_input), + 'datetimeReference': self.date.strftime('%Y-%m-%dT%H:%M:%S%z'), + 'description': "GODAE Profile Observations of salinity and temperature" } - self.keyDict = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - for key in self.varDict.keys(): - value = self.varDict[key][0] - units = self.varDict[key][1] - self.keyDict[key]['valKey'] = value, iconv.OvalName() - self.keyDict[key]['errKey'] = value, iconv.OerrName() - self.keyDict[key]['qcKey'] = value, iconv.OqcName() - self.varAttrs[value, iconv.OvalName()]['_FillValue'] = -999. - self.varAttrs[value, iconv.OerrName()]['_FillValue'] = -999. - self.varAttrs[value, iconv.OqcName()]['_FillValue'] = -999 - self.varAttrs[value, iconv.OvalName()]['units'] = units - self.varAttrs[value, iconv.OerrName()]['units'] = units - self.varAttrs[value, iconv.OqcName()]['units'] = "unitless" + varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + + # Set units of the MetaData variables and all _FillValues. 
+ for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if locationKeyList[meta_keys.index(key)][2]: + varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + + # Set units and FillValue attributes for groups associated with observed variable. + for key in varDict.keys(): + value = varDict[key][0] + units = varDict[key][1] + varAttrs[(value, obsValName)]['units'] = units + varAttrs[(value, obsErrName)]['units'] = units + varAttrs[(value, obsValName)]['_FillValue'] = float_missing_value + varAttrs[(value, obsErrName)]['_FillValue'] = float_missing_value + varAttrs[(value, qcName)]['_FillValue'] = int_missing_value # data is the dictionary containing IODA friendly data structure - self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + nprofs = 0 + nlocs = 0 for obs in obsList: - if obs.data['n_obs'] <= 0: - print('No profile observations for IODA!') + nprofs = obs.data['n_obs'] + if nprofs <= 0: + print('No observations for IODA!') continue - for n in range(obs.data['n_obs']): - - lat = obs.data['ob_lat'][n] - lon = obs.data['ob_lon'][n] - dtg = datetime.strptime(obs.data['ob_dtg'][n], '%Y%m%d%H%M') - - for ilev, lvl in enumerate(obs.data['ob_lvl'][n]): - - locKey = lat, lon, lvl, dtg.strftime("%Y-%m-%dT%H:%M:%SZ") - - for key in self.varDict.keys(): - val = obs.data[key][n][ilev] - err = obs.data[key+'_err'][n][ilev] - qc = (100 * obs.data[key+'_qc'][n]).astype('i4') - if obs.data[key+'_prb'][n][ilev] >= 1.000: - val = -999.00 - valKey = self.keyDict[key]['valKey'] - errKey = self.keyDict[key]['errKey'] - qcKey = self.keyDict[key]['qcKey'] - self.data[locKey][valKey] = val - self.data[locKey][errKey] = err - self.data[locKey][qcKey] = qc - - ObsVars, nlocs = iconv.ExtractObsData(self.data, self.locKeyList) - DimDict = {'nlocs': nlocs} - self.writer = 
iconv.IodaWriter(self.filename, self.locKeyList, DimDict) - self.varAttrs['depth', 'MetaData']['units'] = "m" - self.writer.BuildIoda(ObsVars, varDims, self.varAttrs, self.GlobalAttrs) + for n in range(nprofs): + for k, depth in enumerate(obs.data['depthBelowWaterSurface'][n]): + nlocs += 1 + # Transfer the MetaData info into the IODA final data container. + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if isinstance(obs.data[key][n], list): + val = obs.data[key][n][k] + else: + val = obs.data[key][n] + varVals = np.array(val, dtype=dtypes[dtypestr]) + # If on the first time through, set the data dict entry + # to a numpy array with a specified data type. Otherwise, append + # the incoming data to the current data. + if (key, metaDataName) in data: + data[(key, metaDataName)] = np.append( + data[(key, metaDataName)], varVals) + else: + data[(key, metaDataName)] = varVals + + # Fill up the final array of observed values, obsErrors, and Qc + for key in varDict.keys(): + value = varDict[key][0] + varErr = key + '_err' + varQc = key + '_qc' + + # ObsValue + varVals = np.array(obs.data[key][n][k], dtype=dtypes['float']) + if (value, obsValName) in data: + data[(value, obsValName)] = np.append( + data[(value, obsValName)], varVals) + else: + data[(value, obsValName)] = varVals + + # ObsError + varVals = np.array(obs.data[varErr][n][k], dtype=dtypes['float']) + if (value, obsErrName) in data: + data[(value, obsErrName)] = np.append( + data[(value, obsErrName)], varVals) + else: + data[(value, obsErrName)] = varVals + + # QC + varVals = np.array(obs.data[varQc][n]*100, dtype=dtypes['integer']) + if (value, qcName) in data: + data[(value, qcName)] = np.append( + data[(value, qcName)], varVals) + else: + data[(value, qcName)] = varVals + + print(f"Found a total number of observations: {nlocs}") + + # Initialize the writer, then write the file. 
+ DimDict = {'Location': nlocs} + self.writer = iconv.IodaWriter(self.filename, locationKeyList, DimDict) + self.writer.BuildIoda(data, varDims, varAttrs, GlobalAttrs) return @@ -256,18 +345,7 @@ def main(): obs = profile(fname, fdate) obsList.append(obs) - varDict = { - # var name, units - 'ob_tmp': ['sea_water_temperature', 'C'], - 'ob_sal': ['sea_water_salinity', 'PSU'] - } - - varDims = { - 'sea_water_temperature': ['nlocs'], - 'sea_water_salinity': ['nlocs'], - } - - IODA(foutput, fdate, varDict, varDims, obsList) + IODA(fList, foutput, fdate, obsList) if __name__ == '__main__': diff --git a/src/marine/godae_ship2ioda.py b/src/marine/godae_ship2ioda.py index 096839ccf..64f47e51e 100755 --- a/src/marine/godae_ship2ioda.py +++ b/src/marine/godae_ship2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2019-2021 UCAR +# (C) Copyright 2019-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -9,7 +9,9 @@ from __future__ import print_function import sys +import os import numpy as np +import netCDF4 as nc from datetime import datetime from scipy.io import FortranFile from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter @@ -23,6 +25,43 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" + +varInfo = ['seaSurfaceTemperature', 'K', -999.] 
+varDims = {'seaSurfaceTemperature': ['Location']} + +locationKeyList = [ + ("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") +] +meta_keys = [m_item[0] for m_item in locationKeyList] + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = -999. +int_missing_value = -99900 +double_missing_value = nc.default_fillvals['f8'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} + class ship(object): @@ -51,7 +90,7 @@ def _rd_ship(self): # data is the dictionary with data structure as in ocn_obs.f data = {} - + data['dateTime'] = [] data['n_obs'], data['n_lvl'], data['n_vrsn'] = fh.read_ints('>i4') print(' number ship obs: %d' % data['n_obs']) @@ -64,8 +103,8 @@ def _rd_ship(self): data['ob_wm'] = fh.read_reals('>i4') data['ob_glb'] = fh.read_reals('>f4') - data['ob_lat'] = fh.read_reals('>f4') - data['ob_lon'] = fh.read_reals('>f4') + data['latitude'] = fh.read_reals('>f4') + data['longitude'] = fh.read_reals('>f4') data['ob_age'] = fh.read_reals('>f4') data['ob_clm'] = fh.read_reals('>f4') data['ob_qc'] = fh.read_reals('>f4') @@ -94,6 +133,13 @@ def _rd_ship(self): fh.close() + # Transfer timestamp into seconds since epoch and convert Celcius to Kelvin + for n in range(data['n_obs']): + dtg = datetime.strptime(data['ob_dtg'][n], '%Y%m%d%H%M') + time_offset = np.int64(round((dtg - epoch).total_seconds())) + data['dateTime'].append(time_offset) + 
data['ob_sst'][n] = data['ob_sst'][n] + 273.15 + self.data = data return @@ -101,7 +147,7 @@ def _rd_ship(self): class IODA(object): - def __init__(self, filename, date, varDict, varDims, obsList): + def __init__(self, files_input, filename, date, obsList): ''' Initialize IODA writer class, transform to IODA data structure and, @@ -110,71 +156,52 @@ def __init__(self, filename, date, varDict, varDims, obsList): self.filename = filename self.date = date - self.varDict = varDict - - self.locKeyList = [ - ("latitude", "float"), - ("longitude", "float"), - ("datetime", "string") - ] self.GlobalAttrs = { + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'sourceFiles': ", ".join(files_input), + 'datetimeReference': self.date.strftime('%Y-%m-%dT%H:%M:%S%z'), + 'description': "GODAE Ship Observations of sea surface temperature" } - self.keyDict = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - for key in self.varDict.keys(): - value = self.varDict[key] - self.keyDict[key]['valKey'] = value, iconv.OvalName() - self.keyDict[key]['errKey'] = value, iconv.OerrName() - self.keyDict[key]['qcKey'] = value, iconv.OqcName() - # TO DO the missing value should be the one defined in class ship - # instead of being hardcoded here - self.varAttrs[value, iconv.OvalName()]['_FillValue'] = -999. - self.varAttrs[value, iconv.OerrName()]['_FillValue'] = -999. - self.varAttrs[value, iconv.OqcName()]['_FillValue'] = -999 - self.varAttrs[value, iconv.OvalName()]['units'] = 'degree_C' - self.varAttrs[value, iconv.OerrName()]['units'] = 'degree_C' - self.varAttrs[value, iconv.OqcName()]['units'] = 'unitless' + + # Set units and FillValue attributes for groups associated with observed variable. 
+ self.varAttrs[(varInfo[0], obsValName)]['units'] = varInfo[1] + self.varAttrs[(varInfo[0], obsErrName)]['units'] = varInfo[1] + self.varAttrs[(varInfo[0], obsValName)]['_FillValue'] = varInfo[2] + self.varAttrs[(varInfo[0], obsErrName)]['_FillValue'] = varInfo[2] + self.varAttrs[(varInfo[0], qcName)]['_FillValue'] = int_missing_value # data is the dictionary containing IODA friendly data structure self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + nobs = 0 for obs in obsList: - if obs.data['n_obs'] <= 0: - print('No ship observations for IODA!') + nobs += obs.data['n_obs'] + if nobs <= 0: + print('No observations for IODA!') continue - for n in range(obs.data['n_obs']): - - lat = obs.data['ob_lat'][n] - lon = obs.data['ob_lon'][n] - dtg = datetime.strptime(obs.data['ob_dtg'][n], '%Y%m%d%H%M') - - locKey = lat, lon, dtg.strftime("%Y-%m-%dT%H:%M:%SZ") - - for key in self.varDict.keys(): + # Set units of the MetaData variables and all _FillValues. + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if locationKeyList[meta_keys.index(key)][2]: + self.varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + self.varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + self.data[(key, metaDataName)] = np.array(obs.data[key], dtype=dtypes[dtypestr]) - val = obs.data[key][n] - err = 0.5 - qc = (100*obs.data['ob_qc'][n]).astype('i4') + # Fill up the final array of observed values, obsErrors, and Qc + self.data[(varInfo[0], obsValName)] = np.array(obs.data['ob_sst'], dtype=np.float32) + self.data[(varInfo[0], obsErrName)] = np.full(nobs, 0.5, dtype=np.float32) + self.data[(varInfo[0], qcName)] = np.array(obs.data['ob_qc']*100, dtype=np.int32) - valKey = self.keyDict[key]['valKey'] - errKey = self.keyDict[key]['errKey'] - qcKey = self.keyDict[key]['qcKey'] - - self.data[locKey][valKey] = val - self.data[locKey][errKey] = err - self.data[locKey][qcKey] = qc - # Extract obs - ObsVars, nlocs = 
iconv.ExtractObsData(self.data, self.locKeyList) - DimDict = {'nlocs': nlocs} - - # Set up IODA writer - self.writer = iconv.IodaWriter(self.filename, self.locKeyList, DimDict) - # Write out observations - self.writer.BuildIoda(ObsVars, varDims, self.varAttrs, self.GlobalAttrs) + # Initialize the writer, then write the file. + DimDict = {'Location': nobs} + self.writer = iconv.IodaWriter(self.filename, locationKeyList, DimDict) + self.writer.BuildIoda(self.data, varDims, self.varAttrs, self.GlobalAttrs) return @@ -204,15 +231,7 @@ def main(): for fname in fList: obsList.append(ship(fname, fdate)) - varDict = { - 'ob_sst': 'sea_surface_temperature', - } - - varDims = { - 'sea_surface_temperature': ['nlocs'], - } - - IODA(foutput, fdate, varDict, varDims, obsList) + IODA(fList, foutput, fdate, obsList) if __name__ == '__main__': diff --git a/src/marine/godae_trak2ioda.py b/src/marine/godae_trak2ioda.py index d8a67628b..d5cc18ffd 100755 --- a/src/marine/godae_trak2ioda.py +++ b/src/marine/godae_trak2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2019-2021 UCAR +# (C) Copyright 2019-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
@@ -9,7 +9,9 @@ from __future__ import print_function import sys +import os import numpy as np +import netCDF4 as nc from datetime import datetime from scipy.io import FortranFile from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter @@ -23,13 +25,61 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" + +varDict = { + 'ob_sst': 'seaSurfaceTemperature', + 'ob_sal': 'seaSurfaceSalinity', + 'ob_uuu': 'seaSurfaceZonalWind', + 'ob_vvv': 'seaSurfaceMeridionalWind' +} + +varDims = { + 'seaSurfaceTemperature': ['Location'], + 'seaSurfaceSalinity': ['Location'], + 'seaSurfaceZonalWind': ['Location'], + 'seaSurfaceMeridionalWind': ['Location'] +} + unitDict = { - 'ob_sst': 'degree_C', - 'ob_sal': 'PSU', - 'ob_uuu': 'm/s', - 'ob_vvv': 'm/s' + 'ob_sst': 'K', + 'ob_sal': '1', + 'ob_uuu': 'm s-1', + 'ob_vvv': 'm s-1' } +locationKeyList = [ + ("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") +] +meta_keys = [m_item[0] for m_item in locationKeyList] + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = -999. 
+int_missing_value = -99900 +double_missing_value = nc.default_fillvals['f8'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} + class trak(object): @@ -58,7 +108,7 @@ def _rd_trak(self): # data is the dictionary with data structure as in ocn_obs.f data = {} - + data['dateTime'] = [] data['n_obs'], data['n_lvl'], data['n_vrsn'] = fh.read_ints('>i4') print(' number trak obs: %d' % data['n_obs']) @@ -72,8 +122,8 @@ def _rd_trak(self): data['ob_wm'] = fh.read_reals('>i4') data['ob_gsal'] = fh.read_reals('>f4') data['ob_gsst'] = fh.read_reals('>f4') - data['ob_lat'] = fh.read_reals('>f4') - data['ob_lon'] = fh.read_reals('>f4') + data['latitude'] = fh.read_reals('>f4') + data['longitude'] = fh.read_reals('>f4') data['ob_age'] = fh.read_reals('>f4') data['ob_csal'] = fh.read_reals('>f4') data['ob_csst'] = fh.read_reals('>f4') @@ -97,6 +147,13 @@ def _rd_trak(self): fh.close() + # Transfer timestamp into seconds since epoch and convert Celcius to Kelvin + for n in range(data['n_obs']): + dtg = datetime.strptime(data['ob_dtg'][n], '%Y%m%d%H%M') + time_offset = np.int64(round((dtg - epoch).total_seconds())) + data['dateTime'].append(time_offset) + data['ob_sst'][n] = data['ob_sst'][n] + 273.15 + self.data = data return @@ -104,7 +161,7 @@ def _rd_trak(self): class IODA(object): - def __init__(self, filename, date, varDict, varDims, obsList): + def __init__(self, files_input, filename, date, obsList): ''' Initialize IODA writer class, transform to IODA data structure and, @@ -113,75 +170,61 @@ def __init__(self, filename, date, varDict, varDims, obsList): self.filename = filename self.date = date - self.varDict = varDict - - self.locKeyList = [ - 
("latitude", "float"), - ("longitude", "float"), - ("datetime", "string") - ] self.GlobalAttrs = { + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'sourceFiles': ", ".join(files_input), + 'datetimeReference': self.date.strftime('%Y-%m-%dT%H:%M:%S%z'), + 'description': "GODAE Ship Observations of sea surface temperature" } - self.keyDict = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - for key in self.varDict.keys(): - value = self.varDict[key] - self.keyDict[key]['valKey'] = value, iconv.OvalName() - self.keyDict[key]['errKey'] = value, iconv.OerrName() - self.keyDict[key]['qcKey'] = value, iconv.OqcName() - self.varAttrs[value, iconv.OvalName()]['_FillValue'] = -999. - self.varAttrs[value, iconv.OerrName()]['_FillValue'] = -999. - self.varAttrs[value, iconv.OqcName()]['_FillValue'] = -999 - self.varAttrs[value, iconv.OvalName()]['units'] = unitDict[key] - self.varAttrs[value, iconv.OerrName()]['units'] = unitDict[key] - self.varAttrs[value, iconv.OqcName()]['units'] = 'unitless' + + # Set units and FillValue attributes for groups associated with observed variable. 
+ for key in varDict.keys(): + value = varDict[key] + self.varAttrs[(value, obsValName)]['units'] = unitDict[key] + self.varAttrs[(value, obsErrName)]['units'] = unitDict[key] + self.varAttrs[(value, obsValName)]['_FillValue'] = float_missing_value + self.varAttrs[(value, obsErrName)]['_FillValue'] = float_missing_value + self.varAttrs[(value, qcName)]['_FillValue'] = int_missing_value # data is the dictionary containing IODA friendly data structure self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + nobs = 0 for obs in obsList: - if obs.data['n_obs'] <= 0: - print('No trak observations for IODA!') + nobs += obs.data['n_obs'] + if nobs <= 0: + print('No observations for IODA!') continue - for n in range(obs.data['n_obs']): - - lat = obs.data['ob_lat'][n] - lon = obs.data['ob_lon'][n] - dtg = datetime.strptime(obs.data['ob_dtg'][n], '%Y%m%d%H%M') - - locKey = lat, lon, dtg.strftime("%Y-%m-%dT%H:%M:%SZ") - - for key in self.varDict.keys(): - - if key in ['ob_uuu', 'ob_vvv']: - varName = 'vel' - else: - varName = key.split('_')[-1] - - val = obs.data[key][n] - err = 1.0 - qc = (100*obs.data['ob_qc_'+varName][n]).astype('i4') - - valKey = self.keyDict[key]['valKey'] - errKey = self.keyDict[key]['errKey'] - qcKey = self.keyDict[key]['qcKey'] - - self.data[locKey][valKey] = val - self.data[locKey][errKey] = err - self.data[locKey][qcKey] = qc - # Extract obs - ObsVars, nlocs = iconv.ExtractObsData(self.data, self.locKeyList) - DimDict = {'nlocs': nlocs} - - # Set up IODA writer - self.writer = iconv.IodaWriter(self.filename, self.locKeyList, DimDict) - - # Write out observations - self.writer.BuildIoda(ObsVars, varDims, self.varAttrs, self.GlobalAttrs) + # Set units of the MetaData variables and all _FillValues. 
+ for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if locationKeyList[meta_keys.index(key)][2]: + self.varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + self.varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + self.data[(key, metaDataName)] = np.array(obs.data[key], dtype=dtypes[dtypestr]) + + # Fill up the final array of observed values, obsErrors, and Qc + for key in varDict.keys(): + value = varDict[key] + if key in ['ob_uuu', 'ob_vvv']: + varQc = 'ob_qc_vel' + else: + varQc = 'ob_' + key.split('_')[-1] + + self.data[(value, obsValName)] = np.array(obs.data[key], dtype=np.float32) + self.data[(value, obsErrName)] = np.full(nobs, 1.0, dtype=np.float32) + self.data[(value, qcName)] = np.array(obs.data[varQc]*100, dtype=np.int32) + + # Initialize the writer, then write the file. + DimDict = {'Location': nobs} + self.writer = iconv.IodaWriter(self.filename, locationKeyList, DimDict) + self.writer.BuildIoda(self.data, varDims, self.varAttrs, self.GlobalAttrs) return @@ -212,21 +255,7 @@ def main(): for fname in fList: obsList.append(trak(fname, fdate)) - varDict = { - 'ob_sst': 'sea_surface_temperature', - 'ob_sal': 'sea_surface_salinity', - 'ob_uuu': 'sea_surface_zonal_wind', - 'ob_vvv': 'sea_surface_meriodional_wind' - } - - varDims = { - 'sea_surface_temperature': ['nlocs'], - 'sea_surface_salinity': ['nlocs'], - 'sea_surface_zonal_wind': ['nlocs'], - 'sea_surface_meriodional_wind': ['nlocs'] - } - - IODA(foutput, fdate, varDict, varDims, obsList) + IODA(fList, foutput, fdate, obsList) if __name__ == '__main__': diff --git a/src/marine/hgodas_adt2ioda.py b/src/marine/hgodas_adt2ioda.py index 2d5d59c86..de6d86973 100755 --- a/src/marine/hgodas_adt2ioda.py +++ b/src/marine/hgodas_adt2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2019 UCAR +# (C) Copyright 2019-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be 
obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -9,8 +9,10 @@ from __future__ import print_function import sys +import os import argparse import netCDF4 as nc +import numpy as np from datetime import datetime, timedelta from pathlib import Path @@ -22,24 +24,41 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" -vName = "absolute_dynamic_topography" +varInfo = ['absoluteDynamicTopography', 'm'] locationKeyList = [ - ("latitude", "float"), - ("longitude", "float"), - ("datetime", "string") + ("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") ] - -GlobalAttrs = { -} - -VarDims = { - vName: ['nlocs'], -} - -DimDict = { -} +meta_keys = [m_item[0] for m_item in locationKeyList] + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = -999. 
+int_missing_value = -999 +double_missing_value = nc.default_fillvals['f8'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} class Observation(object): @@ -47,32 +66,27 @@ class Observation(object): def __init__(self, filename, date): self.filename = filename self.date = date - self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - self.VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) self._read() def _read(self): + + data = {} + data['dateTime'] = [] + data['vals'] = [] + data['errs'] = [] + data['qcs'] = [] + ncd = nc.Dataset(self.filename) + data['longitude'] = ncd.variables['lon'][:] + data['latitude'] = ncd.variables['lat'][:] time = ncd.variables['time'][:] - lons = ncd.variables['lon'][:] - lats = ncd.variables['lat'][:] hrs = ncd.variables['hr'][:] vals = ncd.variables['val'][:] errs = ncd.variables['err'][:] qcs = ncd.variables['qc'][:] ncd.close() - base_date = datetime(1970, 1, 1) + timedelta(seconds=int(time[0])) - - valKey = vName, iconv.OvalName() - errKey = vName, iconv.OerrName() - qcKey = vName, iconv.OqcName() - self.VarAttrs[vName, iconv.OvalName()]['_FillValue'] = -999. - self.VarAttrs[vName, iconv.OerrName()]['_FillValue'] = -999. 
- self.VarAttrs[vName, iconv.OqcName()]['_FillValue'] = -999 - self.VarAttrs[vName, iconv.OvalName()]['units'] = 'm' - self.VarAttrs[vName, iconv.OerrName()]['units'] = 'm' - self.VarAttrs[vName, iconv.OqcName()]['units'] = 'unitless' + base_date = epoch + timedelta(seconds=int(time[0])) for i in range(len(hrs)): # there shouldn't be any bad obs, but just in case remove them all @@ -80,10 +94,70 @@ def _read(self): continue dt = base_date + timedelta(hours=float(hrs[i])) - locKey = lats[i], lons[i], dt.strftime("%Y-%m-%dT%H:%M:%SZ") - self.data[locKey][valKey] = vals[i] - self.data[locKey][errKey] = errs[i] - self.data[locKey][qcKey] = qcs[i] + time_offset = np.int64(round((dt - epoch).total_seconds())) + data['dateTime'].append(time_offset) + data['vals'].append(vals[i]) + data['errs'].append(errs[i]) + data['qcs'].append(np.int32(qcs[i])) + + self.data = data + + +class IODA(object): + + def __init__(self, file_input, filename, date, obs): + + self.file_input = file_input + self.filename = filename + self.date = date + + self.GlobalAttrs = { + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'sourceFiles': self.file_input, + 'datetimeReference': self.date.strftime('%Y-%m-%dT%H:%M:%S%z'), + 'description': "Absolute Dynamic Topography (ADT) observations" + " used in Hybrid-GODAS" + } + + self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + + # Set units and FillValue attributes for groups associated with observed variable. + self.varAttrs[(varInfo[0], obsValName)]['units'] = varInfo[1] + self.varAttrs[(varInfo[0], obsErrName)]['units'] = varInfo[1] + self.varAttrs[(varInfo[0], obsValName)]['_FillValue'] = float_missing_value + self.varAttrs[(varInfo[0], obsErrName)]['_FillValue'] = float_missing_value + self.varAttrs[(varInfo[0], qcName)]['_FillValue'] = int_missing_value + + # Set units of the MetaData variables and all _FillValues. 
+ for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if locationKeyList[meta_keys.index(key)][2]: + self.varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + self.varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + + # data is the dictionary containing IODA friendly data structure + self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + self.data[(key, metaDataName)] = np.array(obs.data[key], dtype=dtypes[dtypestr]) + + # Fill up the final array of observed values, obsErrors, and Qc + self.data[(varInfo[0], obsValName)] = np.array(obs.data['vals'], dtype=np.float32) + self.data[(varInfo[0], obsErrName)] = np.array(obs.data['errs'], dtype=np.float32) + self.data[(varInfo[0], qcName)] = np.array(obs.data['qcs'], dtype=np.int32) + + nlocs = len(obs.data['vals']) + DimDict = {'Location': nlocs} + varDims = {varInfo[0]: ['Location']} + + # Initialize the writer, then write the file. + print(f"Writing the output file: {self.filename} with {nlocs} observations in it.") + self.writer = iconv.IodaWriter(self.filename, locationKeyList, DimDict) + self.writer.BuildIoda(self.data, varDims, self.varAttrs, self.GlobalAttrs) + + return def main(): @@ -113,17 +187,8 @@ def main(): # Read in the adt adt = Observation(args.input, fdate) - # Extract Obsdata - ObsVars, nlocs = iconv.ExtractObsData(adt.data, locationKeyList) - - # Set attributes - DimDict['nlocs'] = nlocs - - # Set up the IODA writer - writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) - - # write them out - writer.BuildIoda(ObsVars, VarDims, adt.VarAttrs, GlobalAttrs) + # Write out the IODA output file. 
+ IODA(args.input, args.output, fdate, adt) if __name__ == '__main__': diff --git a/src/marine/hgodas_insitu2ioda.py b/src/marine/hgodas_insitu2ioda.py index c6cc7b4b9..759a1b2d2 100755 --- a/src/marine/hgodas_insitu2ioda.py +++ b/src/marine/hgodas_insitu2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2019-2021 UCAR +# (C) Copyright 2019-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. @@ -10,8 +10,9 @@ from __future__ import print_function import sys import argparse +import numpy as np import netCDF4 as nc -from datetime import datetime, timedelta +from datetime import datetime from pathlib import Path IODA_CONV_PATH = Path(__file__).parent/"@SCRIPT_LIB_PATH@" @@ -25,15 +26,15 @@ vName = { # var name, units - 2210: ["sea_water_temperature", "C"], - 2220: ["sea_water_salinity", "PSU"] + 2210: ["waterTemperature", "K"], + 2220: ["salinity", "1"] } locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("depth", "float"), - ("datetime", "string") + ("depthBelowWaterSurface", "float"), + ("dateTime", "long") ] GlobalAttrs = { @@ -50,6 +51,11 @@ def __init__(self, filename, date): self.VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) self.floatDefFillVal = iconv.get_default_fill_val('float32') self.intDefFillVal = iconv.get_default_fill_val('int32') + + self.VarAttrs['depthBelowWaterSurface', 'MetaData']['units'] = 'm' + self.VarAttrs['latitude', 'MetaData']['units'] = 'degrees_north' + self.VarAttrs['longitude', 'MetaData']['units'] = 'degrees_east' + self.VarAttrs['dateTime', 'MetaData']['units'] = 'seconds since 1970-01-01T00:00:00Z' self._read() def _read(self): @@ -65,9 +71,7 @@ def _read(self): qcs = ncd.variables['qc'][:] ncd.close() - base_date = datetime(1970, 1, 1) + timedelta(seconds=int(time[0])) - - self.VarAttrs['depth', 'MetaData']['units'] = 'm' + dt = np.full(len(qcs), 0, dtype=np.int64) for i 
in range(len(hrs)): # there shouldn't be any bad obs, but just in case remove them all if qcs[i] != 0: @@ -80,15 +84,16 @@ def _read(self): self.VarAttrs[varName, iconv.OqcName()]['_FillValue'] = self.intDefFillVal self.VarAttrs[varName, iconv.OvalName()]['units'] = varUnits self.VarAttrs[varName, iconv.OerrName()]['units'] = varUnits - self.VarAttrs[varName, iconv.OqcName()]['units'] = 'unitless' valKey = varName, iconv.OvalName() errKey = varName, iconv.OerrName() qcKey = varName, iconv.OqcName() - dt = base_date + timedelta(hours=float(hrs[i])) - locKey = lats[i], lons[i], dpth[i], dt.strftime( - "%Y-%m-%dT%H:%M:%SZ") + if obid[i] == 2210: + vals[i] = vals[i] + 273.15 + + dt[i] = time[0] + hrs[i]*3600 + locKey = lats[i], lons[i], dpth[i], dt[i] self.data[locKey][valKey] = vals[i] self.data[locKey][errKey] = errs[i] self.data[locKey][qcKey] = qcs[i] @@ -119,18 +124,17 @@ def main(): fdate = datetime.strptime(args.date, '%Y%m%d%H') VarDims = { - 'sea_water_temperature': ['nlocs'], - 'sea_water_salinity': ['nlocs'], + 'watertemperature': ['Location'], + 'salinity': ['Location'], } # Read in the profiles prof = Profile(args.input, fdate) # write them out - GlobalAttrs['date_time_string'] = fdate.strftime("%Y-%m-%dT%H:%M:%SZ") ObsVars, nlocs = iconv.ExtractObsData(prof.data, locationKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': nlocs} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) writer.BuildIoda(ObsVars, VarDims, prof.VarAttrs, GlobalAttrs) diff --git a/src/marine/hgodas_sst2ioda.py b/src/marine/hgodas_sst2ioda.py index 8cb3041da..15b050fc4 100755 --- a/src/marine/hgodas_sst2ioda.py +++ b/src/marine/hgodas_sst2ioda.py @@ -24,13 +24,13 @@ vName = { - 'T': "sea_surface_temperature", + 'T': "seaSurfaceTemperature", } locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "string") ] GlobalAttrs = {} @@ -70,7 +70,7 @@ def _read(self): count += 1 dt = base_date + 
timedelta(hours=float(hrs[i])) locKey = lats[i], lons[i], dt.strftime("%Y-%m-%dT%H:%M:%SZ") - self.data[locKey][valKey] = vals[i] + self.data[locKey][valKey] = vals[i] + 273.15 self.data[locKey][errKey] = errs[i] self.data[locKey][qcKey] = qcs[i] @@ -93,25 +93,24 @@ def main(): fdate = datetime.strptime(args.date, '%Y%m%d%H') VarDims = { - 'sea_surface_temperature': ['nlocs'], + 'seaSurfaceTemperature': ['Location'], } # Read in the profiles prof = Profile(args.input, fdate) # write them out - ObsVars, nlocs = iconv.ExtractObsData(prof.data, locationKeyList) + ObsVars, Location = iconv.ExtractObsData(prof.data, locationKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': Location} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('sea_surface_temperature', 'ObsValue')]['units'] = 'Celsius' - VarAttrs[('sea_surface_temperature', 'ObsError')]['units'] = 'Celsius' - VarAttrs[('sea_surface_temperature', 'PreQC')]['units'] = 'unitless' - VarAttrs[('sea_surface_temperature', 'ObsValue')]['_FillValue'] = 999 - VarAttrs[('sea_surface_temperature', 'ObsError')]['_FillValue'] = 999 - VarAttrs[('sea_surface_temperature', 'PreQC')]['_FillValue'] = 999 + VarAttrs[('seaSurfaceTemperature', 'ObsValue')]['units'] = 'K' + VarAttrs[('seaSurfaceTemperature', 'ObsError')]['units'] = 'K' + VarAttrs[('seaSurfaceTemperature', 'ObsValue')]['_FillValue'] = 999 + VarAttrs[('seaSurfaceTemperature', 'ObsError')]['_FillValue'] = 999 + VarAttrs[('seaSurfaceTemperature', 'PreQC')]['_FillValue'] = 999 writer.BuildIoda(ObsVars, VarDims, VarAttrs, GlobalAttrs) diff --git a/src/marine/ndbc_hfradar2ioda.py b/src/marine/ndbc_hfradar2ioda.py index 83539fcd3..29d753672 100644 --- a/src/marine/ndbc_hfradar2ioda.py +++ b/src/marine/ndbc_hfradar2ioda.py @@ -22,15 +22,14 @@ from collections import defaultdict, OrderedDict from orddicts import DefaultOrderedDict -vName = ["sea_water_meridional_current", - 
"sea_water_zonal_current", - "DOPX", - "DOPY"] +vName = ["waterMeridionalVelocity", + "waterZonalVelocity", + ] locKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = { @@ -56,9 +55,8 @@ def _read(self): vals_v = ncd.variables['v'][:] dopx = ncd.variables['dopx'][:] dopy = ncd.variables['dopy'][:] - units = '1970-01-01 00:00:00' - reftime = dateutil.parser.parse(units) ncd.close() + lons, lats = np.meshgrid(lons, lats) lons = lons.flatten() lats = lats.flatten() @@ -75,8 +73,8 @@ def _read(self): qcKey = vName[j], iconv.OqcName() if vals_u[i] != '--': count += 1 - obs_date = reftime + timedelta(seconds=int(time[i])) - locKey = lats[i], lons[i], obs_date.strftime("%Y-%m-%dT%H:%M:%SZ") + obs_date = int(time[i]) + locKey = lats[i], lons[i], obs_date if j == 0: self.data[locKey][valKey] = vals_u[i] self.data[locKey][errKey] = 0.1 @@ -117,21 +115,23 @@ def main(): args = parser.parse_args() fdate = datetime.strptime(args.date, '%Y%m%d%H') VarDims = { - 'sea_water_meridional_current': ['nlocs'], - 'sea_water_zonal_current': ['nlocs']} + 'waterMeridionalVelocity': ['Location'], + 'waterZonalVelocity': ['Location']} radar = Observation(args.input, fdate) - GlobalAttrs['date_time_string'] = fdate.strftime("%Y-%m-%dT%H:%M:%SZ") ObsVars, nlocs = iconv.ExtractObsData(radar.data, locKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': nlocs} writer = iconv.IodaWriter(args.output, locKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('sea_water_meridional_current', 'ObsValue')]['units'] = 'm/s' - VarAttrs[('sea_water_zonal_current', 'ObsValue')]['units'] = 'm/s' - VarAttrs[('sea_water_meridional_current', 'ObsValue')]['_FillValue'] = -32767 - VarAttrs[('sea_water_zonal_current', 'ObsValue')]['_FillValue'] = -32767 - VarAttrs[('longitude', 'MetaData')]['units'] = 'degree' - VarAttrs[('latitude', 'MetaData')]['units'] = 'degree' + 
VarAttrs[('waterMeridionalVelocity', 'ObsValue')]['units'] = 'm s-1' + VarAttrs[('waterMeridionalVelocity', 'ObsError')]['units'] = 'm s-1' + VarAttrs[('waterZonalVelocity', 'ObsValue')]['units'] = 'm s-1' + VarAttrs[('waterZonalVelocity', 'ObsError')]['units'] = 'm s-1' + VarAttrs[('waterMeridionalVelocity', 'ObsValue')]['_FillValue'] = -32767 + VarAttrs[('waterZonalVelocity', 'ObsValue')]['_FillValue'] = -32767 + VarAttrs[('longitude', 'MetaData')]['units'] = 'degrees_east' + VarAttrs[('latitude', 'MetaData')]['units'] = 'degrees_north' + VarAttrs[('dateTime', 'MetaData')]['units'] = 'seconds since 1970-01-01T00:00:00Z' writer.BuildIoda(ObsVars, VarDims, VarAttrs, GlobalAttrs) diff --git a/src/marine/nsidc_l4cdr_ice2ioda.py b/src/marine/nsidc_l4cdr_ice2ioda.py index 82ac05c39..9c854fffd 100755 --- a/src/marine/nsidc_l4cdr_ice2ioda.py +++ b/src/marine/nsidc_l4cdr_ice2ioda.py @@ -8,7 +8,7 @@ # from __future__ import print_function -import sys +import os, sys import argparse import netCDF4 as nc from datetime import datetime, timedelta @@ -24,6 +24,9 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class Observation(object): @@ -41,6 +44,7 @@ def _read(self): datein = ncd.variables['time'][:] + 0.5 reftime = dateutil.parser.parse(ncd.variables['time'].units[-20:]) obs_date = reftime + timedelta(days=float(datein)) + time_offset = round((obs_date - epoch).total_seconds()) data_in = {} input_vars = ( @@ -90,7 +94,7 @@ def _read(self): qc = qc[mask_thin] for i in range(len(lons)): - locKey = lats[i], lons[i], obs_date.strftime("%Y-%m-%dT%H:%M:%SZ") + locKey = lats[i], lons[i], time_offset self.data[locKey][valKey] = vals[i] self.VarAttrs[locKey][valKey]['_FillValue'] = vals_FillValue self.VarAttrs[locKey][valKey]['units'] = vals_units @@ -100,20 +104,20 @@ def _read(self): self.VarAttrs[locKey][errKey]['_FillValue'] = 
errs_FillValue self.VarAttrs[locKey][errKey]['units'] = errs_units self.data[locKey][qcKey] = 1 - self.VarAttrs[locKey][qcKey]['units'] = 'unitless' - -vName = "sea_ice_area_fraction" +vName = "seaIceFraction" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = { - 'odb_version': 1, + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'odb_version': 1 } @@ -148,16 +152,17 @@ def main(): args = parser.parse_args() fdate = datetime.strptime(args.date, '%Y%m%d%H') VarDims = { - vName: ['nlocs'], + vName: ['Location'], } # Read in ice = Observation(args.input, args.thin, fdate) # write them out ObsVars, nlocs = iconv.ExtractObsData(ice.data, locationKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': nlocs} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) + ice.VarAttrs['dateTime', 'MetaData']['units'] = iso8601_string writer.BuildIoda(ObsVars, VarDims, ice.VarAttrs, GlobalAttrs) diff --git a/src/marine/ostia_l4sst2ioda.py b/src/marine/ostia_l4sst2ioda.py index 836b75856..7c278fc16 100755 --- a/src/marine/ostia_l4sst2ioda.py +++ b/src/marine/ostia_l4sst2ioda.py @@ -27,7 +27,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] obsvars = { @@ -35,15 +35,14 @@ } AttrData = { - 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), + 'converter': os.path.basename(__file__) } DimDict = { } VarDims = { - 'sst': ['nlocs'], + 'sst': ['Location'], } @@ -57,19 +56,16 @@ def __init__(self, filename): self.lons = self.lons.ravel() self.lats = self.lats.ravel() self.sst = np.squeeze(ncd.variables['analysed_sst'][:]).ravel() - self.sst = self.sst-273.15 self.err = np.squeeze(ncd.variables['analysis_error'][:]).ravel() - self.time = ncd.variables['time'][:] + this_datetime = ncd.variables['time'][:].astype(np.int) ncd.close() # Same time stamp for all obs within 1 file - 
self.datetime = np.empty_like(self.sst, dtype=object) - base_date = datetime(1981, 1, 1) - dt = base_date + timedelta(days=float(self.time/86400.0)) - self.datetime[:] = dt.strftime("%Y-%m-%dT%H:%M:%SZ") + self.datetime = np.full(len(self.lats), this_datetime) + self.time_units = 'seconds since 1981-01-01T00:00:00Z' # Remove observations out of sane bounds - qci = np.where(np.abs(self.sst) < 99.0) + qci = np.where(np.abs(self.sst) < 355.0) self.nlocs = len(qci[0]) self.lons = self.lons[qci].astype(np.single) self.lats = self.lats[qci].astype(np.single) @@ -82,7 +78,6 @@ class ostia_l4sst2ioda(object): def __init__(self, filename): self.filename = filename self.varDict = defaultdict(lambda: defaultdict(dict)) - self.metaDict = defaultdict(lambda: defaultdict(dict)) self.outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) self.var_mdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) self._read() @@ -90,26 +85,28 @@ def __init__(self, filename): # Open input file and read relevant info def _read(self): # set up variable names for IODA - iodavar = 'sea_surface_temperature' + iodavar = 'seaSurfaceTemperature' self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() - self.var_mdata[iodavar, iconv.OvalName()]['units'] = 'c' - self.var_mdata[iodavar, iconv.OerrName()]['units'] = 'c' + self.var_mdata[iodavar, iconv.OvalName()]['units'] = 'K' + self.var_mdata[iodavar, iconv.OerrName()]['units'] = 'K' # read input filename sst = ostia(self.filename) # map ostia to ioda data structure - self.outdata[('datetime', 'MetaData')] = sst.datetime + self.outdata[('dateTime', 'MetaData')] = sst.datetime + self.var_mdata[('dateTime', 'MetaData')]['units'] = sst.time_units self.outdata[('latitude', 'MetaData')] = sst.lats + self.var_mdata[('latitude', 'MetaData')]['units'] = 'degrees_north' self.outdata[('longitude', 'MetaData')] = sst.lons + 
self.var_mdata[('longitude', 'MetaData')]['units'] = 'degrees_east' self.outdata[self.varDict[iodavar]['valKey']] = sst.sst self.outdata[self.varDict[iodavar]['errKey']] = sst.err self.outdata[self.varDict[iodavar]['qcKey']] = np.zeros(sst.nlocs, dtype=np.int32) - DimDict['nlocs'] = sst.nlocs - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = sst.nlocs def main(): diff --git a/src/marine/pace_oc2ioda.py b/src/marine/pace_oc2ioda.py index 6262c5f63..b0ecd2f66 100755 --- a/src/marine/pace_oc2ioda.py +++ b/src/marine/pace_oc2ioda.py @@ -26,8 +26,9 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict -output_var_names = [ - "mass_concentration_of_chlorophyll_in_sea_water"] +os.environ["TZ"] = "UTC" + +output_var_names = ["chlorophyllMassConcentration"] DimDict = {} @@ -38,7 +39,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] @@ -62,9 +63,13 @@ def read_input(input_args): print("Reading ", input_file) ncd = nc.Dataset(input_file, 'r') - # get global attributes - for v in ('platform', 'instrument', 'processing_level'): - GlobalAttrs[v] = ncd.getncattr(v) + # get global attributes (dictionary key is incoming name, being renamed). 
+ global_attribs = {'platform': 'platformCommonName', + 'instrument': 'sensor', + 'processing_level': 'processingLevel'} + for v in global_attribs.keys(): + new_name = global_attribs[v] + GlobalAttrs[new_name] = ncd.getncattr(v) # get QC flags, and calculate a mask from the non-missing values # since L2 OC files are quite empty, need a mask applied immediately @@ -115,10 +120,9 @@ def read_input(input_args): data_in[v] = data_in[v][mask] # create a string version of the date for each observation - dates = [] + dates = np.empty(len(lons), dtype=np.int64) for i in range(len(lons)): - obs_date = basetime + timedelta(seconds=float(data_in['time'][i])) - dates.append(obs_date.strftime("%Y-%m-%dT%H:%M:%SZ")) + dates[i] = round(data_in['time'][i]) # allocate space for output depending on which variables are to be saved obs_dim = (len(lons)) @@ -129,8 +133,7 @@ def read_input(input_args): obs_data[(output_var_names[0], global_config['opqc_name'])] = np.zeros(obs_dim) # Add the metadata - obs_data[('datetime', 'MetaData')] = np.empty(len(dates), dtype=object) - obs_data[('datetime', 'MetaData')][:] = dates + obs_data[('dateTime', 'MetaData')] = dates obs_data[('latitude', 'MetaData')] = lats obs_data[('longitude', 'MetaData')] = lons @@ -139,7 +142,7 @@ def read_input(input_args): obs_data[output_var_names[0], global_config['oerr_name']] = data_in['chlor_a']*0.0 obs_data[output_var_names[0], global_config['opqc_name']] = data_in['l2_flags'] - return (obs_data, GlobalAttrs) + return (obs_data, basetime, GlobalAttrs) def main(): @@ -202,7 +205,7 @@ def main(): obs = pool.map(read_input, pool_inputs) # concatenate the data from the files - obs_data, GlobalAttrs = obs[0] + obs_data, basetime, GlobalAttrs = obs[0] for i in range(1, len(obs)): obs_data.update(obs[i][0]) # Get the nlocs @@ -210,19 +213,18 @@ def main(): # prepare global attributes we want to output in the file, # in addition to the ones already loaded in from the input file - GlobalAttrs['date_time_string'] = 
args.date.strftime("%Y-%m-%dT%H:%M:%SZ") + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") GlobalAttrs['thinning'] = args.thin GlobalAttrs['converter'] = os.path.basename(__file__) - DimDict['nlocs'] = nlocs - GlobalAttrs['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = nlocs - VarAttrs[output_var_names[0], global_config['oval_name']]['units'] = 'mg ^m-3' - VarAttrs[output_var_names[0], global_config['oerr_name']]['units'] = 'mg ^m-3' - VarAttrs[output_var_names[0], global_config['opqc_name']]['units'] = 'unitless' + VarAttrs['dateTime', 'MetaData']['units'] = 'seconds since ' + basetime.strftime("%Y-%m-%dT%H:%M:%SZ") + VarAttrs[output_var_names[0], global_config['oval_name']]['units'] = 'mg m-3' + VarAttrs[output_var_names[0], global_config['oerr_name']]['units'] = 'mg m-3' VarAttrs[output_var_names[0], global_config['oval_name']]['_FillValue'] = -32767. VarAttrs[output_var_names[0], global_config['oerr_name']]['_FillValue'] = -32767. VarAttrs[output_var_names[0], global_config['opqc_name']]['_FillValue'] = -32767 - VarDims["mass_concentration_of_chlorophyll_in_sea_water"] = ['nlocs'] + VarDims["chlorophyllMassConcentration"] = ['Location'] # setup the IODA writer writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) diff --git a/src/marine/pace_radiance2ioda.py b/src/marine/pace_radiance2ioda.py index 0d81cb2f1..bee0910a0 100755 --- a/src/marine/pace_radiance2ioda.py +++ b/src/marine/pace_radiance2ioda.py @@ -25,16 +25,21 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" +# The setting of basetime is temporary as it will get modified in the data reading routine. 
+basetime = datetime.fromisoformat('1970-01-01T00:00:00') + output_var_names = ["radiance"] locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = { 'odb_version': 1, + 'converter': os.path.basename(__file__) } VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) @@ -43,7 +48,7 @@ VarDims = {} -chan_number = range(1, 250) # we havew 120 Blue band 120 Red band and 9 SWRI +chan_number = range(1, 250) # we have 120 Blue band 120 Red band and 9 SWRI def read_input(input_args): @@ -66,6 +71,7 @@ def read_input(input_args): ncd = nc.Dataset(input_file, 'r') # get the base time (should only have 1 or 2 time slots) time_base = ncd.groups['scan_line_attributes'].variables['time'][:] + global basetime # We wish to overrride the initial setting of basetime. basetime = dateutil.parser.parse(ncd.groups['scan_line_attributes'].variables['time'].units[-20:]) # Determine the lat/lon grid. @@ -123,10 +129,9 @@ def read_input(input_args): lats = lats[mask] # create a string version of the date for each observation - dates = [] + dates = np.empty(len(lons), dtype=np.int64) for i in range(len(lons)): - obs_date = basetime + timedelta(seconds=float(time[i])) - dates.append(obs_date.strftime("%Y-%m-%dT%H:%M:%SZ")) + dates[i] = round(time[i]) # output values nchans = len(chan_number) @@ -147,18 +152,16 @@ def read_input(input_args): # allocate space for output depending on which variables are to be saved obs_data = {} - obs_data[('datetime', 'MetaData')] = np.empty(len(dates), dtype=object) - obs_data[('datetime', 'MetaData')][:] = dates + obs_data[('dateTime', 'MetaData')] = dates obs_data[('latitude', 'MetaData')] = lats obs_data[('longitude', 'MetaData')] = lons - obs_data[('time', 'MetaData')] = time.astype('float32') - obs_data[('height_above_mean_sea_level', 'MetaData')] = np.zeros((obs_dim), dtype=np.float32) - obs_data[('sensor_azimuth_angle', 'MetaData')] = data_in['sensor_azimuth'] - 
obs_data[('sensor_zenith_angle', 'MetaData')] = data_in['sensor_zenith'] - obs_data[('sensor_view_angle', 'MetaData')] = data_in['sensor_zenith'] - obs_data[('solar_zenith_angle', 'MetaData')] = data_in['solar_zenith'] - obs_data[('solar_azimuth_angle', 'MetaData')] = data_in['solar_azimuth'] - obs_data[('sensor_band_central_radiation_wavenumber', 'VarMetaData')] = wavelength.astype('float32') + obs_data[('height', 'MetaData')] = np.zeros((obs_dim), dtype=np.float32) + obs_data[('sensorAzimuthAngle', 'MetaData')] = data_in['sensor_azimuth'].astype('float32') + obs_data[('sensorZenithAngle', 'MetaData')] = data_in['sensor_zenith'].astype('float32') + obs_data[('sensorViewAngle', 'MetaData')] = data_in['sensor_zenith'].astype('float32') + obs_data[('solarZenithAngle', 'MetaData')] = data_in['solar_zenith'].astype('float32') + obs_data[('solarAzimuthAngle', 'MetaData')] = data_in['solar_azimuth'].astype('float32') + obs_data[('sensorCentralWavenumber', 'MetaData')] = wavelength.astype('float32') obs_data[output_var_names[0], global_config['oval_name']] = val_radiance.astype('float32') obs_data[output_var_names[0], global_config['oerr_name']] = err.astype('float32') obs_data[output_var_names[0], global_config['opqc_name']] = qc.astype('int32') @@ -221,9 +224,8 @@ def main(): # prepare global attributes we want to output in the file, # in addition to the ones already loaded in from the input file - GlobalAttrs['date_time_string'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") GlobalAttrs['thinning'] = args.thin - GlobalAttrs['converter'] = os.path.basename(__file__) # determine which variables we are going to output selected_names = [] @@ -232,28 +234,32 @@ def main(): # pass parameters to the IODA writer # (needed because we are bypassing ExtractObsData within BuildNetcdf) VarDims = { - 'radiance': ['nlocs', 'nchans'], - 'sensor_band_central_radiation_wavenumber': ['nchans'] + 'radiance': 
['Location', 'Channel'], + 'sensorCentralWavenumber': ['Channel'] } nchans = len(chan_number) nlocs = len(obs_data[('longitude', 'MetaData')]) - ndatetime = np.zeros((20), dtype=np.float32) DimDict = { - 'nlocs': nlocs, - 'nchans': list(chan_number), - 'nvars': list(chan_number), - 'ndatetime': list(ndatetime) + 'Location': nlocs, + 'Channel': list(chan_number), } writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) - VarAttrs[('radiance', 'ObsValue')]['units'] = 'W m^-2 um^-1 sr^-1' - VarAttrs[('radiance', 'ObsError')]['units'] = 'W m^-2 um^-1 sr^-1' - VarAttrs[('radiance', 'PreQC')]['units'] = 'unitless' + VarAttrs[('dateTime', 'MetaData')]['units'] = 'seconds since ' + basetime.strftime("%Y-%m-%dT%H:%M:%SZ") + VarAttrs[('radiance', 'ObsValue')]['units'] = 'W m-2 sr-1' + VarAttrs[('radiance', 'ObsError')]['units'] = 'W m-2 sr-1' VarAttrs[('radiance', 'ObsValue')]['_FillValue'] = -32767 VarAttrs[('radiance', 'ObsError')]['_FillValue'] = 999 VarAttrs[('radiance', 'PreQC')]['_FillValue'] = 999 + for k in list(obs_data.keys()): + if 'angle' in k[0].lower(): + VarAttrs[(k[0], k[1])]['units'] = 'degree' + elif 'wavenumber' in k[0].lower(): + VarAttrs[(k[0], k[1])]['units'] = 'm-1' + elif k[0] == 'height': + VarAttrs[(k[0], k[1])]['units'] = 'm' writer.BuildIoda(obs_data, VarDims, VarAttrs, GlobalAttrs) diff --git a/src/marine/rads_adt2ioda.py b/src/marine/rads_adt2ioda.py index 11bd5d854..05e2e22ff 100755 --- a/src/marine/rads_adt2ioda.py +++ b/src/marine/rads_adt2ioda.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -# (C) Copyright 2019 UCAR +# (C) Copyright 2019-2022 UCAR # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
@@ -9,8 +9,10 @@ from __future__ import print_function import sys +import os import argparse import netCDF4 as nc +import numpy as np from datetime import datetime, timedelta import dateutil.parser from pathlib import Path @@ -23,84 +25,150 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict -vName = "absolute_dynamic_topography" +os.environ["TZ"] = "UTC" + +varInfo = ['absoluteDynamicTopography', 'm'] locationKeyList = [ - ("latitude", "float"), - ("longitude", "float"), - ("datetime", "string") + ("latitude", "float", "degrees_north"), + ("longitude", "float", "degrees_east"), + ("dateTime", "long", "seconds since 1970-01-01T00:00:00Z") ] - -GlobalAttrs = { -} - -VarDims = { - vName: ['nlocs'], -} - -DimDict = { -} +meta_keys = [m_item[0] for m_item in locationKeyList] + +iso8601_string = locationKeyList[meta_keys.index('dateTime')][2] +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + +metaDataName = iconv.MetaDataName() +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + +float_missing_value = nc.default_fillvals['f4'] +int_missing_value = nc.default_fillvals['i4'] +double_missing_value = nc.default_fillvals['f8'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +missing_vals = {'string': string_missing_value, + 'integer': int_missing_value, + 'long': long_missing_value, + 'float': float_missing_value, + 'double': double_missing_value} +dtypes = {'string': object, + 'integer': np.int32, + 'long': np.int64, + 'float': np.float32, + 'double': np.float64} class Observation(object): def __init__(self, filename, date): - self.filename = filename self.date = date - self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - self.VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) self._read() def _read(self): - ncd = nc.MFDataset(self.filename) + data = {} + data['dateTime'] = [] + data['vals'] = [] + data['errs'] = [] + data['qcs'] = [] + + ncd = 
nc.Dataset(self.filename) time = ncd.variables['time_mjd'][:] - lons = ncd.variables['lon'][:] - lats = ncd.variables['lat'][:] - vals = ncd.variables['adt_egm2008'][:] - val_units = ncd.variables['adt_egm2008'].units - Fill_val = ncd.variables['adt_egm2008']._FillValue - scale_factor = ncd.variables['adt_egm2008'].scale_factor + data['longitude'] = ncd.variables['lon'][:] + data['latitude'] = ncd.variables['lat'][:] + vals = ncd.variables['adt_xgm2016'][:] + data['val_units'] = ncd.variables['adt_xgm2016'].units + data['Fill_val'] = ncd.variables['adt_xgm2016']._FillValue + scale_factor = ncd.variables['adt_xgm2016'].scale_factor units = ncd.variables['time_mjd'].units[-23:-4] reftime = dateutil.parser.parse(units) ncd.close() - valKey = vName, iconv.OvalName() - errKey = vName, iconv.OerrName() - qcKey = vName, iconv.OqcName() - self.VarAttrs[vName, iconv.OvalName()]['_FillValue'] = Fill_val - self.VarAttrs[vName, iconv.OerrName()]['_FillValue'] = Fill_val - self.VarAttrs[vName, iconv.OqcName()]['_FillValue'] = Fill_val - self.VarAttrs[vName, iconv.OvalName()]['units'] = val_units - self.VarAttrs[vName, iconv.OerrName()]['units'] = val_units - self.VarAttrs[vName, iconv.OqcName()]['units'] = 'unitless' + for i in range(len(time)): + dt = reftime + timedelta(days=time[i]) + time_offset = np.int64(round((dt - epoch).total_seconds())) + data['dateTime'].append(time_offset) + data['vals'].append(vals[i]) + data['errs'].append(0.1) + data['qcs'].append(0) - for i in range(len(lons)): + self.data = data - obs_date = reftime + timedelta(days=time[i]) - locKey = lats[i], lons[i], obs_date.strftime("%Y-%m-%dT%H:%M:%SZ") - self.data[locKey][valKey] = vals[i] - self.data[locKey][errKey] = 0.1 - self.data[locKey][qcKey] = 0 +class IODA(object): + def __init__(self, file_input, filename, date, obs): -def main(): + self.file_input = file_input + self.filename = filename + self.date = date + + self.GlobalAttrs = { + 'converter': os.path.basename(__file__), + 'ioda_version': 2, 
+ 'sourceFiles': self.file_input, + 'datetimeReference': self.date.strftime('%Y-%m-%dT%H:%M:%S%z'), + 'description': "Absolute Dynamic Topography (ADT) observations from NESDIS" + } + + self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + + # Set units and FillValue attributes for groups associated with observed variable. + self.varAttrs[(varInfo[0], obsValName)]['units'] = obs.data['val_units'] + self.varAttrs[(varInfo[0], obsErrName)]['units'] = obs.data['val_units'] + self.varAttrs[(varInfo[0], obsValName)]['_FillValue'] = obs.data['Fill_val'] + self.varAttrs[(varInfo[0], obsErrName)]['_FillValue'] = obs.data['Fill_val'] + self.varAttrs[(varInfo[0], qcName)]['_FillValue'] = int_missing_value + + # Set units of the MetaData variables and all _FillValues. + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + if locationKeyList[meta_keys.index(key)][2]: + self.varAttrs[(key, metaDataName)]['units'] = locationKeyList[meta_keys.index(key)][2] + self.varAttrs[(key, metaDataName)]['_FillValue'] = missing_vals[dtypestr] + + # data is the dictionary containing IODA friendly data structure + self.data = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - # Get command line arguments + for key in meta_keys: + dtypestr = locationKeyList[meta_keys.index(key)][1] + self.data[(key, metaDataName)] = np.array(obs.data[key], dtype=dtypes[dtypestr]) + + # Fill up the final array of observed values, obsErrors, and Qc + self.data[(varInfo[0], obsValName)] = np.array(obs.data['vals'], dtype=np.float32) + self.data[(varInfo[0], obsErrName)] = np.array(obs.data['errs'], dtype=np.float32) + self.data[(varInfo[0], qcName)] = np.array(obs.data['qcs'], dtype=np.int32) + + nlocs = len(obs.data['vals']) + DimDict = {'Location': nlocs} + varDims = {varInfo[0]: ['Location']} + + # Initialize the writer, then write the file. 
+ print(f"Writing the output file: {self.filename} with {nlocs} observations in it.") + self.writer = iconv.IodaWriter(self.filename, locationKeyList, DimDict) + self.writer.BuildIoda(self.data, varDims, self.varAttrs, self.GlobalAttrs) + + return + + +def main(): parser = argparse.ArgumentParser( description=( - 'Reads absolute dynamic topography (ADT) observations' - ' from NESDIS file(s) and converts into IODA formatted' - ' output files') + 'Read absolute dynamic topography (ADT) observations' + ' file(s) that have already been QCd and thinned for use in' + ' Hybrid-GODAS system.') ) required = parser.add_argument_group(title='required arguments') required.add_argument( '-i', '--input', - help="RADS observation input file(s)", - type=str, nargs='+', required=True) + help="name of RADS observation input file(s)", + type=str, required=True) required.add_argument( '-o', '--output', help="path of ioda output file", @@ -109,24 +177,14 @@ def main(): '-d', '--date', help="base date for the center of the window", metavar="YYYYMMDDHH", type=str, required=True) - args = parser.parse_args() fdate = datetime.strptime(args.date, '%Y%m%d%H') - # Read in the altimeter - altim = Observation(args.input, fdate) - - # Extract Obsdata - ObsVars, nlocs = iconv.ExtractObsData(altim.data, locationKeyList) - - # Set attributes - DimDict['nlocs'] = nlocs - - # Set up the IODA writer - writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) + # Read in the adt (altimeter) data + adt = Observation(args.input, fdate) - # Write the obs out - writer.BuildIoda(ObsVars, VarDims, altim.VarAttrs, GlobalAttrs) + # Write out the IODA output file. 
+ IODA(args.input, args.output, fdate, adt) if __name__ == '__main__': diff --git a/src/marine/smap_sss2ioda.py b/src/marine/smap_sss2ioda.py index bac861662..ef8524f7b 100755 --- a/src/marine/smap_sss2ioda.py +++ b/src/marine/smap_sss2ioda.py @@ -8,6 +8,7 @@ # import sys +import os import argparse import numpy as np from datetime import datetime, timedelta @@ -24,17 +25,21 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" -vName = "sea_surface_salinity" +vName = "seaSurfaceSalinity" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = {} +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class Salinity(object): def __init__(self, filenames, date): @@ -95,7 +100,7 @@ def _read(self): data = {} for v in source_var_name: if v == 'sss_qc': - data[v] = ncd.variables[source_var_name[v]][:].flatten().astype(int) + data[v] = ncd.variables[source_var_name[v]][:].flatten().astype(np.int32) else: data[v] = ncd.variables[source_var_name[v]][:].flatten() @@ -114,8 +119,8 @@ def _read(self): # for each observation for i in range(len(data['time'])): obs_date = basetime + timedelta(seconds=float(data['time'][i])) - locKey = data['lat'][i], data['lon'][i], obs_date.strftime( - "%Y-%m-%dT%H:%M:%SZ") + time_offset = round((obs_date - epoch).total_seconds()) + locKey = data['lat'][i], data['lon'][i], time_offset self.data[locKey][valKey] = data['sss'][i] # if source == 'JPL': #RTOFS-DA # if data['sss_qc'][i] <= 4: @@ -134,7 +139,7 @@ def main(): parser = argparse.ArgumentParser( description=( - 'Read JPL/RSS SMAP sea surface salinity (SSS) file(s) and convert' + 'Read JPL/RSS SMAP seaSurfaceSalinity (SSS) file(s) and convert' ' to a concatenated IODA formatted output file.') ) required = parser.add_argument_group(title='required arguments') @@ -154,25 +159,25 @@ def main(): fdate = 
datetime.strptime(args.date, '%Y%m%d%H') VarDims = { - 'sea_surface_salinity': ['nlocs'], + 'seaSurfaceSalinity': ['Location'], } - # Read in the salinity + # Read in the seaSurfaceSalinity sal = Salinity(args.input, fdate) # write them out - ObsVars, nlocs = iconv.ExtractObsData(sal.data, locationKeyList) + ObsVars, Location = iconv.ExtractObsData(sal.data, locationKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': Location} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('sea_surface_salinity', 'ObsValue')]['units'] = 'PSU' - VarAttrs[('sea_surface_salinity', 'ObsError')]['units'] = 'PSU' - VarAttrs[('sea_surface_salinity', 'PreQC')]['units'] = 'unitless' - VarAttrs[('sea_surface_salinity', 'ObsValue')]['_FillValue'] = 999 - VarAttrs[('sea_surface_salinity', 'ObsError')]['_FillValue'] = 999 - VarAttrs[('sea_surface_salinity', 'PreQC')]['_FillValue'] = 999 + VarAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string + VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['units'] = 'PSU' + VarAttrs[('seaSurfaceSalinity', 'ObsError')]['units'] = 'PSU' + VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['_FillValue'] = 999 + VarAttrs[('seaSurfaceSalinity', 'ObsError')]['_FillValue'] = 999 + VarAttrs[('seaSurfaceSalinity', 'PreQC')]['_FillValue'] = 999 writer.BuildIoda(ObsVars, VarDims, VarAttrs, GlobalAttrs) diff --git a/src/marine/smos_sss2ioda.py b/src/marine/smos_sss2ioda.py index 38ac34430..781d00494 100755 --- a/src/marine/smos_sss2ioda.py +++ b/src/marine/smos_sss2ioda.py @@ -8,6 +8,7 @@ # import sys +import os import argparse import numpy as np from datetime import datetime, timedelta @@ -22,17 +23,21 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" -vName = "sea_surface_salinity" +vName = "seaSurfaceSalinity" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", 
"long") ] GlobalAttrs = {} +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class Salinity(object): def __init__(self, filenames, date): @@ -63,7 +68,7 @@ def _read(self): sss = ncd.variables['SSS_corr'][:] sss_err = ncd.variables['Sigma_SSS_corr'][:] sss_qc = ncd.variables['Dg_quality_SSS_corr'][:] - sss_qc = sss_qc.astype(int) + sss_qc = sss_qc.astype(np.int32) mask = np.logical_not(sss.mask) lon = lon[mask] @@ -84,11 +89,9 @@ def _read(self): MM1 = f[n+19+11:n+19+13] SS1 = f[n+19+13:n+19+15] # - seconds = (datetime.strptime(date1+HH1+MM1+SS1, '%Y%m%d%H%M%S') - datetime.strptime( - date1, '%Y%m%d')).total_seconds() - basetime = datetime.strptime(date1, '%Y%m%d') - obs_date = basetime + timedelta(seconds=int(seconds)) - locKey = lat[i], lon[i], obs_date.strftime("%Y-%m-%dT%H:%M:%SZ") + this_dt = datetime.strptime(date1+HH1+MM1+SS1, '%Y%m%d%H%M%S') + time_offset = round((this_dt - epoch).total_seconds()) + locKey = lat[i], lon[i], time_offset self.data[locKey][valKey] = sss[i] self.data[locKey][errKey] = sss_err[i] self.data[locKey][qcKey] = sss_qc[i] @@ -99,7 +102,7 @@ def main(): parser = argparse.ArgumentParser( description=( - 'Read JPL/RSS SMOS sea surface salinity (SSS) file(s) and convert' + 'Read JPL/RSS SMOS seaSurfaceSalinity (SSS) file(s) and convert' ' to a concatenated IODA formatted output file.') ) required = parser.add_argument_group(title='required arguments') @@ -119,25 +122,25 @@ def main(): fdate = datetime.strptime(args.date, '%Y%m%d%H') # VarDims = { - 'sea_surface_salinity': ['nlocs'], + 'seaSurfaceSalinity': ['Location'], } - # Read in the salinity + # Read in the seaSurfaceSalinity sal = Salinity(args.input, fdate) # write them out - ObsVars, nlocs = iconv.ExtractObsData(sal.data, locationKeyList) + ObsVars, Location = iconv.ExtractObsData(sal.data, locationKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': Location} writer = iconv.IodaWriter(args.output, 
locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('sea_surface_salinity', 'ObsValue')]['units'] = 'PSU' - VarAttrs[('sea_surface_salinity', 'ObsError')]['units'] = 'PSU' - VarAttrs[('sea_surface_salinity', 'PreQC')]['units'] = 'unitless' - VarAttrs[('sea_surface_salinity', 'ObsValue')]['_FillValue'] = 999 - VarAttrs[('sea_surface_salinity', 'ObsError')]['_FillValue'] = 999 - VarAttrs[('sea_surface_salinity', 'PreQC')]['_FillValue'] = 999 + VarAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string + VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['units'] = 'PSU' + VarAttrs[('seaSurfaceSalinity', 'ObsError')]['units'] = 'PSU' + VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['_FillValue'] = 999 + VarAttrs[('seaSurfaceSalinity', 'ObsError')]['_FillValue'] = 999 + VarAttrs[('seaSurfaceSalinity', 'PreQC')]['_FillValue'] = 999 writer.BuildIoda(ObsVars, VarDims, VarAttrs, GlobalAttrs) diff --git a/src/marine/swot_l2adt2ioda.py b/src/marine/swot_l2adt2ioda.py index 265aba6e5..1617f070e 100755 --- a/src/marine/swot_l2adt2ioda.py +++ b/src/marine/swot_l2adt2ioda.py @@ -28,7 +28,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] obsvars = { @@ -36,15 +36,14 @@ } AttrData = { - 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), + 'converter': os.path.basename(__file__) } DimDict = { } VarDims = { - 'adt': ['nlocs'], + 'adt': ['Location'], } @@ -52,7 +51,6 @@ class swot_l2adt2ioda(object): def __init__(self, filename): self.filename = filename self.varDict = defaultdict(lambda: defaultdict(dict)) - self.metaDict = defaultdict(lambda: defaultdict(dict)) self.outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) self.var_mdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) self._read() @@ -73,20 +71,25 @@ def _read(self): err_units = ncd.variables['ssh_karin'].units err_scale_factor = 
ncd.variables['ssh_karin'].scale_factor self.qcflag = ncd.variables['ssha_karin_qual'][:].ravel() - # get the time data, convert to timestamps + # get the time data, chop off milliseconds, set the units time_var = ncd.variables['time'] + time_units = time_var.units[:-2] + "Z" + s = list(time_units) + s[24] = "T" + time_units = "".join(s) num_pixels = ncd.dimensions['num_pixels'].size - self.time = nc.num2date(np.repeat(time_var[:], num_pixels), - time_var.units) # only_use_cftime_datetimes=False) - for t in range(len(self.time)): - self.time[t] = self.time[t].strftime("%Y-%m-%dT%H:%M:%SZ") + self.time = np.zeros(len(time_var)*num_pixels, dtype=np.int64) + for t in range(len(time_var)): + for n in range(num_pixels): + self.time[n + t*num_pixels] = np.round(time_var[t]) + ncd.close() # estimate adt from SSH and Geoid height adt = np.where(self.ssha == Fillvalue, Fillvalue, self.ssha + self.mssh - self.geoid) # set up variable names for IODA - iodavar = 'absolute_dynamic_topography' + iodavar = 'absoluteDynamicTopography' self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() @@ -94,20 +97,22 @@ def _read(self): self.var_mdata[iodavar, iconv.OerrName()]['units'] = err_units self.var_mdata[iodavar, iconv.OvalName()]['_FillValue'] = Fillvalue self.var_mdata[iodavar, iconv.OerrName()]['_FillValue'] = err_Fillvalue - self.var_mdata[iodavar, iconv.OvalName()]['scale_factor'] = scale_factor - self.var_mdata[iodavar, iconv.OerrName()]['scale_factor'] = err_scale_factor + # self.var_mdata[iodavar, iconv.OvalName()]['scaleFactor'] = scale_factor + # self.var_mdata[iodavar, iconv.OerrName()]['scaleFactor'] = err_scale_factor # map swot adt to ioda data structure - self.outdata[('datetime', 'MetaData')] = self.time - self.outdata[('latitude', 'MetaData')] = self.lats - self.outdata[('longitude', 'MetaData')] = self.lons + self.outdata[('dateTime', 
'MetaData')] = self.time + self.var_mdata[('dateTime', 'MetaData')]['units'] = time_units + self.outdata[('latitude', 'MetaData')] = self.lats.astype('float32') + self.var_mdata[('latitude', 'MetaData')]['units'] = "degrees_north" + self.outdata[('longitude', 'MetaData')] = self.lons.astype('float32') + self.var_mdata[('longitude', 'MetaData')]['units'] = "degrees_east" self.outdata[self.varDict[iodavar]['valKey']] = adt # The current uncertainity values seem to be wrong so setting error to 1 self.outdata[self.varDict[iodavar]['errKey']] = np.ones(np.shape(self.err)) self.outdata[self.varDict[iodavar]['qcKey']] = self.qcflag.astype('int32') - DimDict['nlocs'] = len(adt) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = len(adt) def main(): diff --git a/src/marine/viirs_modis_l2_oc2ioda.py b/src/marine/viirs_modis_l2_oc2ioda.py index 419ddfe2f..5343e303f 100755 --- a/src/marine/viirs_modis_l2_oc2ioda.py +++ b/src/marine/viirs_modis_l2_oc2ioda.py @@ -27,8 +27,8 @@ from orddicts import DefaultOrderedDict output_var_names = [ - "ocean_mass_content_of_particulate_organic_matter_expressed_as_carbon", - "mass_concentration_of_chlorophyll_in_sea_water"] + "oceanMassParticulateAsCarbon", + "chlorophyllMassConcentration"] DimDict = {} @@ -39,9 +39,13 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] +obsValName = iconv.OvalName() +obsErrName = iconv.OerrName() +qcName = iconv.OqcName() + def read_input(input_args): """ @@ -64,8 +68,9 @@ def read_input(input_args): ncd = nc.Dataset(input_file, 'r') # get global attributes - for v in ('platform', 'instrument', 'processing_level'): - GlobalAttrs[v] = ncd.getncattr(v) + GlobalAttrs['platformCommonName'] = ncd.getncattr('platform') + GlobalAttrs['sensor'] = ncd.getncattr('instrument') + GlobalAttrs['processingLevel'] = str(ncd.getncattr('processing_level')+' processing') # get QC flags, and calculate a mask from the non-missing values # 
since L2 OC files are quite empty, need a mask applied immediately @@ -91,6 +96,7 @@ def read_input(input_args): time = (np.repeat(sla.variables['msec'][:].ravel(), pixels_per_line).ravel() - sla.variables['msec'][0])/1000.0 data_in['time'] = time[mask] + time_units = basetime.strftime("%Y-%m-%dT%H:%M:%SZ") # load in all the other data and apply the missing value mask input_vars = ('poc', 'chlor_a') @@ -115,49 +121,36 @@ def read_input(input_args): # create a string version of the date for each observation dates = [] for i in range(len(lons)): - obs_date = basetime + timedelta(seconds=float(data_in['time'][i])) - dates.append(obs_date.strftime("%Y-%m-%dT%H:%M:%SZ")) + dates.append(np.int64(data_in['time'][i])) # allocate space for output depending on which variables are to be saved obs_dim = (len(lons)) obs_data = {} if global_config['output_poc']: - obs_data[(output_var_names[0], global_config['oval_name'])] = \ - np.zeros(obs_dim) - obs_data[(output_var_names[0], global_config['oerr_name'])] = \ - np.zeros(obs_dim) - obs_data[(output_var_names[0], global_config['opqc_name'])] = \ - np.zeros(obs_dim) + obs_data[(output_var_names[0], obsValName)] = np.zeros(obs_dim) + obs_data[(output_var_names[0], obsErrName)] = np.zeros(obs_dim) + obs_data[(output_var_names[0], qcName)] = np.zeros(obs_dim) if global_config['output_chl']: - obs_data[(output_var_names[1], global_config['oval_name'])] = \ - np.zeros(obs_dim) - obs_data[(output_var_names[1], global_config['oerr_name'])] = \ - np.zeros(obs_dim) - obs_data[(output_var_names[1], global_config['opqc_name'])] = \ - np.zeros(obs_dim) + obs_data[(output_var_names[1], obsValName)] = np.zeros(obs_dim) + obs_data[(output_var_names[1], obsErrName)] = np.zeros(obs_dim) + obs_data[(output_var_names[1], qcName)] = np.zeros(obs_dim) # Add the metadata - obs_data[('datetime', 'MetaData')] = np.empty(len(dates), dtype=object) - obs_data[('datetime', 'MetaData')][:] = dates + obs_data[('dateTime', 'MetaData')] = np.empty(len(dates), 
dtype=np.int64) + obs_data[('dateTime', 'MetaData')][:] = dates obs_data[('latitude', 'MetaData')] = lats obs_data[('longitude', 'MetaData')] = lons if global_config['output_poc']: - obs_data[output_var_names[0], global_config['oval_name']] = \ - data_in['poc'] - obs_data[output_var_names[0], global_config['oerr_name']] = \ - data_in['poc']*0.0 - obs_data[output_var_names[0], global_config['opqc_name']] = \ - data_in['l2_flags'] + obs_data[output_var_names[0], obsValName] = data_in['poc'] + obs_data[output_var_names[0], obsErrName] = data_in['poc']*0.0 + obs_data[output_var_names[0], qcName] = data_in['l2_flags'] if global_config['output_chl']: - obs_data[output_var_names[1], global_config['oval_name']] = \ - data_in['chlor_a'] - obs_data[output_var_names[1], global_config['oerr_name']] = \ - data_in['chlor_a']*0.0 - obs_data[output_var_names[1], global_config['opqc_name']] = \ - data_in['l2_flags'] + obs_data[output_var_names[1], obsValName] = data_in['chlor_a'] + obs_data[output_var_names[1], obsErrName] = data_in['chlor_a']*0.0 + obs_data[output_var_names[1], qcName] = data_in['l2_flags'] - return (obs_data, GlobalAttrs) + return (obs_data, GlobalAttrs, time_units) def main(): @@ -218,9 +211,6 @@ def main(): global_config = {} global_config['date'] = args.date global_config['thin'] = args.thin - global_config['oval_name'] = iconv.OvalName() - global_config['oerr_name'] = iconv.OerrName() - global_config['opqc_name'] = iconv.OqcName() global_config['output_poc'] = args.poc global_config['output_chl'] = args.chl @@ -235,50 +225,39 @@ def main(): obs = pool.map(read_input, pool_inputs) # concatenate the data from the files - obs_data, GlobalAttrs = obs[0] + obs_data, GlobalAttrs, time_units = obs[0] for i in range(1, len(obs)): obs_data.update(obs[i][0]) - # Get the nlocs - nlocs = len(obs_data[('longitude', 'MetaData')]) + # Get the Location + Location = len(obs_data[('longitude', 'MetaData')]) # prepare global attributes we want to output in the file, # in 
addition to the ones already loaded in from the input file - GlobalAttrs['date_time_string'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") GlobalAttrs['thinning'] = args.thin GlobalAttrs['converter'] = os.path.basename(__file__) - DimDict['nlocs'] = nlocs - GlobalAttrs['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = Location + + VarAttrs[('dateTime', 'MetaData')]['units'] = 'seconds since ' + time_units + VarAttrs[('latitude', 'MetaData')]['units'] = 'degrees_north' + VarAttrs[('longitude', 'MetaData')]['units'] = 'degrees_east' # determine which variables we are going to output if args.poc: - VarAttrs[output_var_names[0], global_config['oval_name']]['units'] = \ - 'mg ^m-3' - VarAttrs[output_var_names[0], global_config['oerr_name']]['units'] = \ - 'mg ^m-3' - VarAttrs[output_var_names[0], global_config['opqc_name']]['units'] = \ - 'unitless' - VarAttrs[output_var_names[0], global_config['oval_name']]['_FillValue'] = \ - -32767. - VarAttrs[output_var_names[0], global_config['oerr_name']]['_FillValue'] = \ - -32767. - VarAttrs[output_var_names[0], global_config['opqc_name']]['_FillValue'] = \ - -32767 - VarDims["ocean_mass_content_of_particulate_organic_matter_expressed_as_carbon"] = ['nlocs'] + VarAttrs[output_var_names[0], obsValName]['units'] = 'mg m-3' + VarAttrs[output_var_names[0], obsErrName]['units'] = 'mg m-3' + VarAttrs[output_var_names[0], obsValName]['_FillValue'] = -32767. + VarAttrs[output_var_names[0], obsErrName]['_FillValue'] = -32767. 
+ VarAttrs[output_var_names[0], qcName]['_FillValue'] = -32767 + VarDims["oceanMassParticulateAsCarbon"] = ['Location'] if args.chl: - VarAttrs[output_var_names[1], global_config['oval_name']]['units'] = \ - 'mg ^m-3' - VarAttrs[output_var_names[1], global_config['oerr_name']]['units'] = \ - 'mg ^m-3' - VarAttrs[output_var_names[1], global_config['opqc_name']]['units'] = \ - 'unitless' - VarAttrs[output_var_names[1], global_config['oval_name']]['_FillValue'] = \ - -32767. - VarAttrs[output_var_names[1], global_config['oerr_name']]['_FillValue'] = \ - -32767. - VarAttrs[output_var_names[1], global_config['opqc_name']]['_FillValue'] = \ - -32767 - VarDims["mass_concentration_of_chlorophyll_in_sea_water"] = ['nlocs'] + VarAttrs[output_var_names[1], obsValName]['units'] = 'mg m-3' + VarAttrs[output_var_names[1], obsErrName]['units'] = 'mg m-3' + VarAttrs[output_var_names[1], obsValName]['_FillValue'] = -32767. + VarAttrs[output_var_names[1], obsErrName]['_FillValue'] = -32767. + VarAttrs[output_var_names[1], qcName]['_FillValue'] = -32767 + VarDims["chlorophyllMassConcentration"] = ['Location'] # setup the IODA writer writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) diff --git a/src/marine/viirs_modis_l3_oc2ioda.py b/src/marine/viirs_modis_l3_oc2ioda.py index 38ae8b7e3..84a45c10a 100755 --- a/src/marine/viirs_modis_l3_oc2ioda.py +++ b/src/marine/viirs_modis_l3_oc2ioda.py @@ -26,11 +26,11 @@ vName = { - 'chlor_a': "mass_concentration_of_chlorophyll_in_sea_water", + 'chlor_a': "chlorophyllMassConcentration", } VarDims = { - vName['chlor_a']: ['nlocs'] + vName['chlor_a']: ['Location'] } DimDict = {} @@ -38,11 +38,15 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = {} +# Prepare dateTime info +iso8601_string = '1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[:-1]) + class OCL3(object): @@ -66,12 +70,18 @@ def _read(self): lons = lons.ravel()[mask] lats 
= lats.ravel()[mask] - # get global attributes - for v in ('platform', 'instrument', 'processing_version', - 'time_coverage_start'): - GlobalAttrs[v] = ncd.getncattr(v) + GlobalAttrs['platform'] = ncd.getncattr('platform') + GlobalAttrs['sensor'] = ncd.getncattr('instrument') + GlobalAttrs['description'] = str(ncd.getncattr('processing_level')+' processing') + + timevar = ncd.getncattr('time_coverage_start') + this_time = datetime.fromisoformat(timevar[:19]) + obstime = np.int64(round((this_time - epoch).total_seconds())) + ncd.close() + # Convert obstime from string to seconds since blah blah + valKey = vName['chlor_a'], iconv.OvalName() errKey = vName['chlor_a'], iconv.OerrName() qcKey = vName['chlor_a'], iconv.OqcName() @@ -81,7 +91,10 @@ def _read(self): self.VarAttrs[vName['chlor_a'], iconv.OqcName()]['_FillValue'] = -32767 self.VarAttrs[vName['chlor_a'], iconv.OvalName()]['units'] = 'mg m^-3' self.VarAttrs[vName['chlor_a'], iconv.OerrName()]['units'] = 'mg m^-3' - self.VarAttrs[vName['chlor_a'], iconv.OqcName()]['units'] = 'unitless' + + self.VarAttrs[('dateTime', 'MetaData')]['units'] = 'seconds since ' + iso8601_string + self.VarAttrs[('latitude', 'MetaData')]['units'] = 'degrees_north' + self.VarAttrs[('longitude', 'MetaData')]['units'] = 'degrees_east' # apply thinning mask if self.thin > 0.0: @@ -91,7 +104,7 @@ def _read(self): vals = vals[mask_thin] for i in range(len(vals)): - locKey = lats[i], lons[i], GlobalAttrs['time_coverage_start'] + locKey = lats[i], lons[i], obstime self.data[locKey][valKey] = vals[i] self.data[locKey][errKey] = vals[i] * 0.25 self.data[locKey][qcKey] = 0 @@ -123,12 +136,12 @@ def main(): chl = OCL3(args.input, fdate, args.thin) # Extract the obs data - ObsVars, nlocs = iconv.ExtractObsData(chl.data, locationKeyList) + ObsVars, Location = iconv.ExtractObsData(chl.data, locationKeyList) # Set Attributes GlobalAttrs['thinning'] = args.thin GlobalAttrs['converter'] = os.path.basename(__file__) - DimDict['nlocs'] = nlocs + 
DimDict['Location'] = Location # Set up the writer writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index 712bd8b72..872c400a2 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -1082,6 +1082,7 @@ ecbuild_add_test( TARGET test_${PROJECT_NAME}_aeronet_aaod ecbuild_add_test( TARGET test_${PROJECT_NAME}_airnow TYPE SCRIPT + ENVIRONMENT "PYTHONPATH=${IODACONV_PYTHONPATH}" COMMAND bash ARGS ${CMAKE_BINARY_DIR}/bin/iodaconv_comp.sh netcdf @@ -1095,7 +1096,7 @@ ecbuild_add_test( TARGET test_${PROJECT_NAME}_tropomi_co_total TYPE SCRIPT ENVIRONMENT "PYTHONPATH=${IODACONV_PYTHONPATH}" COMMAND bash - ARGS ${CMAKE_BINARY_DIR}/bin/iodaconv_comp.sh + ARGS ${CMAKE_BINARY_DIR}/bin/iodaconv_comp.sh netcdf "${Python3_EXECUTABLE} ${CMAKE_BINARY_DIR}/bin/tropomi_no2_co_nc2ioda.py -i testinput/tropomi_co.nc @@ -1103,7 +1104,7 @@ ecbuild_add_test( TARGET test_${PROJECT_NAME}_tropomi_co_total -v co -q 0.5 -c total" - tropomi_co_total.nc ${IODA_CONV_COMP_TOL_ZERO}) + tropomi_co_total.nc ${IODA_CONV_COMP_TOL_ZERO}) #============================================================================== # Bufr Ingester tests @@ -1537,7 +1538,7 @@ if( iodaconv_eccodes_ENABLED ) COMMAND bash ARGS ${CMAKE_BINARY_DIR}/bin/iodaconv_comp.sh netcdf - "${Python3_EXECUTABLE} ${CMAKE_BINARY_DIR}/bin/decode_bufr_LDM_raob.py + "${Python3_EXECUTABLE} ${CMAKE_BINARY_DIR}/bin/sonde_bufr2ioda.py -i testinput/sonde_wmo_double.bufr -o testrun/wmo_raob_double.nc4" wmo_raob_double.nc4 ${IODA_CONV_COMP_TOL_ZERO}) diff --git a/test/testinput/SNDR.SNPP.ATMS.20211206T0100.m06.g011.L1B.std.v03_15.G.211206074531.nc4 b/test/testinput/SNDR.SNPP.ATMS.20211206T0100.m06.g011.L1B.std.v03_15.G.211206074531.nc4 deleted file mode 100644 index 6b49d9c87..000000000 --- a/test/testinput/SNDR.SNPP.ATMS.20211206T0100.m06.g011.L1B.std.v03_15.G.211206074531.nc4 +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:407cd2434710ce14c726085ea659cc0ba5f68cb84dcefc3b2410383fe0f165db -size 7697499 diff --git a/test/testinput/adpupa_prepbufr.yaml b/test/testinput/adpupa_prepbufr.yaml index 17381a260..eb3c4291b 100644 --- a/test/testinput/adpupa_prepbufr.yaml +++ b/test/testinput/adpupa_prepbufr.yaml @@ -129,17 +129,17 @@ observations: path: "*/PRSLEVEL" - name: Depth path: "*/SST_INFO/SSTEVENT" - - name: cloudseq_Dim + - name: CloudSequence path: "*/CLOUDSEQ" - - name: pevent_Dim + - name: PressureEvent path: "*/PRSLEVEL/P___INFO/P__EVENT" - - name: qevent_Dim + - name: HumidityEvent path: "*/PRSLEVEL/Q___INFO/Q__EVENT" - - name: tevent_Dim + - name: TemperatureEvent path: "*/PRSLEVEL/T___INFO/T__EVENT" - - name: zevent_Dim + - name: HeightEvent path: "*/PRSLEVEL/Z___INFO/Z__EVENT" - - name: wevent_Dim + - name: WindEvent path: "*/PRSLEVEL/W___INFO/W__EVENT" variables: @@ -178,7 +178,7 @@ observations: longName: "Station elevation" units: "m" - - name: "MetaData/air_pressure" + - name: "MetaData/pressure" coordinates: "longitude latitude Level" source: variables/pressure longName: "Pressure" @@ -190,43 +190,43 @@ observations: longName: "Height" units: "m" - - name: "ObsValue/air_temperature" + - name: "ObsValue/airTemperature" coordinates: "longitude latitude Level" source: variables/airTemperature longName: "Temperature" units: "K" - - name: "ObsValue/dewpoint_temperature" + - name: "ObsValue/dewPointTemperature" coordinates: "longitude latitude Level" source: variables/dewpointTemperature longName: "Dewpoint temperature" units: "K" - - name: "ObsValue/virtual_temperature" + - name: "ObsValue/virtualTemperature" coordinates: "longitude latitude Level" source: variables/virtualTemperature longName: "Virtual temperature" units: "K" - - name: "ObsValue/specific_humidity" + - name: "ObsValue/specificHumidity" coordinates: "longitude latitude Level" source: variables/specificHumidity longName: "Specific humidity" units: "kg kg-1" - - name: "ObsValue/eastward_wind" + - name: 
"ObsValue/windEastward" coordinates: "longitude latitude Level" source: variables/windEastward longName: "Eastward wind" units: "m s-1" - - name: "ObsValue/northward_wind" + - name: "ObsValue/windNorthward" coordinates: "longitude latitude Level" source: variables/windNorthward longName: "Northward wind" units: "m s-1" - - name: "ObsValue/sea_surface_temperature" + - name: "ObsValue/seaSurfaceTemperature" coordinates: "longitude latitude Depth" source: variables/seaSurfaceTemperature longName: "Sea surface temperature" @@ -243,27 +243,27 @@ observations: source: variables/heightQM longName: "Height quality marker" - - name: "QualityMarker/air_temperature" + - name: "QualityMarker/airTemperature" coordinates: "longitude latitude Level" source: variables/airTemperatureQM longName: "Temperature quality marker" - - name: "QualityMarker/specific_humidity" + - name: "QualityMarker/specificHumidity" coordinates: "longitude latitude Level" source: variables/specificHumidityQM longName: "Moisture quality marker" - - name: "QualityMarker/eastward_wind" + - name: "QualityMarker/windEastward" coordinates: "longitude latitude Level" source: variables/windQM longName: "U-Component of wind quality marker" - - name: "QualityMarker/northward_wind" + - name: "QualityMarker/windNorthward" coordinates: "longitude latitude Level" source: variables/windQM longName: "V-Component of wind quality marker" - - name: "QualityMarker/sea_surface_temperature" + - name: "QualityMarker/seaSurfaceTemperature" coordinates: "longitude latitude Depth" source: variables/seaSurfaceTemperatureQM longName: "Sea surface temperature quality marker" @@ -275,61 +275,61 @@ observations: longName: "Pressure ObsError" units: "Pa" - - name: "ObsError/air_temperature" + - name: "ObsError/airTemperature" coordinates: "longitude latitude Level" source: variables/airTemperatureError longName: "Temperature ObsError" units: "K" - - name: "ObsError/relative_humidity" + - name: "ObsError/relativeHumidity" coordinates: 
"longitude latitude Level" source: variables/relativeHumidityError longName: "Relative Humidity ObsError" units: "1" - - name: "ObsError/eastward_wind" + - name: "ObsError/windEastward" coordinates: "longitude latitude Level" source: variables/windError longName: "Easthward wind ObsError" units: "m s-1" - - name: "ObsError/northward_wind" + - name: "ObsError/windNorthward" coordinates: "longitude latitude Level" source: variables/windError longName: "Northward wind ObsError" units: "m s-1" - - name: "ObsError/sea_surface_temperature" + - name: "ObsError/seaSurfaceTemperature" coordinates: "longitude latitude Depth" source: variables/seaSurfaceTemperatureError longName: "Sea surface temperature ObsError" units: "K" # Extra Info - - name: "MetaData/verticalSignificance" - coordinates: "longitude latitude" - source: variables/verticalSignificance - longName: "Vertical Significance" - - - name: "MetaData/prepbufrType" - coordinates: "longitude latitude" - source: variables/prepbufrReportType - longName: "Prepbufr report type" - - - name: "MetaData/dumpReportType" - coordinates: "longitude latitude" - source: variables/dumpReportType - longName: "Data dump report type" - - - name: "MetaData/prepbufrDataLvlCat" - coordinates: "longitude latitude Level" - source: variables/prepbufrDataLvlCat - longName: "Prepbufr data level category" - - - name: "MetaData/waterTemperatureMethod" - coordinates: "longitude latitude" - source: variables/waterTemperatureMethod - longName: "Method of sea surface temperature measurement" + # - name: "MetaData/verticalSignificance" + # coordinates: "longitude latitude" + # source: variables/verticalSignificance + # longName: "Vertical Significance" + + # - name: "MetaData/prepbufrType" + # coordinates: "longitude latitude" + # source: variables/prepbufrReportType + # longName: "Prepbufr report type" + + # - name: "MetaData/dumpReportType" + # coordinates: "longitude latitude" + # source: variables/dumpReportType + # longName: "Data dump report 
type" + + # - name: "MetaData/prepbufrDataLvlCat" + # coordinates: "longitude latitude Level" + # source: variables/prepbufrDataLvlCat + # longName: "Prepbufr data level category" + + # - name: "MetaData/waterTemperatureMethod" + # coordinates: "longitude latitude" + # source: variables/waterTemperatureMethod + # longName: "Method of sea surface temperature measurement" - name: "ObsValue/presentWeather" coordinates: "longitude latitude" @@ -340,6 +340,7 @@ observations: coordinates: "longitude latitude" source: variables/cloudAmount longName: "Cloud Amount" + units: "1" - name: "ObsValue/cloudType" coordinates: "longitude latitude" @@ -358,7 +359,7 @@ observations: longName: "Cloud Cover" units: "1" - - name: "MetaData/depthBelowSeaSurface" + - name: "MetaData/depthBelowWaterSurface" coordinates: "longitude latitude Depth" source: variables/depthBelowSeaSurface longName: "Depth below sea surface" diff --git a/test/testinput/afwa_snod_24km.grib b/test/testinput/afwa_snod_24km.grib deleted file mode 100644 index 4a831391c..000000000 --- a/test/testinput/afwa_snod_24km.grib +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b137f42aa86c587dbcbd9f194789fd4df70d62af6c3628be9051531954f988a3 -size 2679 diff --git a/test/testinput/aircar_BUFR2ioda.yaml b/test/testinput/aircar_BUFR2ioda.yaml index 71d37d752..c557ea6bb 100644 --- a/test/testinput/aircar_BUFR2ioda.yaml +++ b/test/testinput/aircar_BUFR2ioda.yaml @@ -14,7 +14,7 @@ observations: - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, SECO] - mnemonics: [ACID, ACRN] - mnemonics: [CLAT, CLON] - - mnemonics: [IALT, DPOF, ROLQ] + - mnemonics: [IALT, DPOF, ROLL, ROLQ] - mnemonics: [MIXR, TMDB, WDIR, WSPD] - mnemonics: [TASP, ACTH] @@ -34,19 +34,21 @@ observations: mnemonic: CLON height: mnemonic: IALT - aircraftFlightNum: + aircraftIdentifier: mnemonic: ACID - aircraftRegistrationNum: + aircraftTailNumber: mnemonic: ACRN aircraftFlightPhase: mnemonic: DPOF - aircraftTrueAirspeed: + 
aircraftVelocity: mnemonic: TASP aircraftHeading: mnemonic: ACTH aircraftRollAngle: + mnemonic: ROLL + aircraftRollAngleQuality: mnemonic: ROLQ - temperatureAir: + airTemperature: mnemonic: TMDB waterVaporMixingRatio: mnemonic: MIXR @@ -60,94 +62,100 @@ observations: obsdataout: "./testrun/gdas.aircar.t00z.20210801.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows variables: - name: "MetaData/dateTime" source: variables/timestamp - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/latitude" source: variables/latitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Latitude" units: "degrees_north" - name: "MetaData/longitude" source: variables/longitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Longitude" units: "degrees_east" - name: "MetaData/height" source: variables/height - dimensions: [ "nlocs" ] - longName: "Pressure altitude" + dimensions: [ "Location" ] + longName: "Height" units: "m" - - name: "MetaData/aircraftRegistrationNum" - source: variables/aircraftRegistrationNum - dimensions: [ "nlocs" ] + - name: "MetaData/aircraftTailNumber" + source: variables/aircraftTailNumber + dimensions: [ "Location" ] longName: "Aircraft registration number or other ID" units: "none" - - name: "MetaData/aircraftFlightNum" - source: variables/aircraftFlightNum - dimensions: [ "nlocs" ] + - name: "MetaData/aircraftIdentifier" + source: variables/aircraftIdentifier + dimensions: [ "Location" ] longName: "Aircraft flight number" units: "none" - name: "MetaData/aircraftFlightPhase" source: variables/aircraftFlightPhase - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Aircraft flight phase (ascending/descending/level)" units: "none" - - name: "MetaData/aircraftTrueAirspeed" - source: variables/aircraftTrueAirspeed - dimensions: [ "nlocs" ] - longName: "Aircraft true airspeed" + - name: 
"MetaData/aircraftVelocity" + source: variables/aircraftVelocity + dimensions: [ "Location" ] + longName: "Aircraft velocity" units: "m s-1" - name: "MetaData/aircraftHeading" source: variables/aircraftHeading - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Aircraft heading" - units: "deg" + units: "degrees" - name: "MetaData/aircraftRollAngle" source: variables/aircraftRollAngle - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] + longName: "Aircraft roll angle" + units: "degrees" + + - name: "MetaData/aircraftRollAngleQuality" + source: variables/aircraftRollAngleQuality + dimensions: [ "Location" ] longName: "Aircraft roll angle quality" units: "none" - - name: "ObsValue/air_temperature" + - name: "ObsValue/airTemperature" + source: variables/airTemperature coordinates: "longitude latitude" - source: variables/temperatureAir - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Air Temperature" units: "K" - - name: "ObsValue/specific_humidity" - coordinates: "longitude latitude" + - name: "ObsValue/waterVaporMixingRatio" source: variables/waterVaporMixingRatio - dimensions: [ "nlocs" ] - longName: "specific humidity" + coordinates: "longitude latitude" + dimensions: [ "Location" ] + longName: "Water Vapor Mixing Ratio" units: "kg kg-1" - - name: "ObsValue/wind_direction" - coordinates: "longitude latitude" + - name: "ObsValue/windDirection" source: variables/windDirection - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Direction" units: "degrees" - - name: "ObsValue/wind_speed" - coordinates: "longitude latitude" + - name: "ObsValue/windSpeed" source: variables/windSpeed - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Speed" units: "m s-1" diff --git a/test/testinput/airep_wmoBUFR2ioda.yaml b/test/testinput/airep_wmoBUFR2ioda.yaml index 3a7b989c0..659c21483 100644 --- a/test/testinput/airep_wmoBUFR2ioda.yaml +++ 
b/test/testinput/airep_wmoBUFR2ioda.yaml @@ -28,13 +28,13 @@ observations: mnemonic: CLATH longitude: mnemonic: CLONH - altitude: + height: mnemonic: HMSL - aircraftFlightNum: + aircraftIdentifier: mnemonic: ACID aircraftFlightPhase: mnemonic: POAF - temperatureAir: + airTemperature: mnemonic: TMDBST windDirection: mnemonic: WDIR @@ -46,63 +46,63 @@ observations: obsdataout: "./testrun/airep_multi.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows variables: - name: "MetaData/dateTime" source: variables/timestamp - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/latitude" source: variables/latitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Latitude" units: "degrees_north" - name: "MetaData/longitude" source: variables/longitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Longitude" units: "degrees_east" - - name: "MetaData/altitude" - source: variables/altitude - dimensions: [ "nlocs" ] - longName: "Altitude" + - name: "MetaData/height" + source: variables/height + dimensions: [ "Location" ] + longName: "Height" units: "m" - - name: "MetaData/aircraftFlightNum" - source: variables/aircraftFlightNum - dimensions: [ "nlocs" ] - longName: "Aircraft flight number" + - name: "MetaData/aircraftIdentifier" + source: variables/aircraftIdentifier + dimensions: [ "Location" ] + longName: "Aircraft identifier" units: "" - name: "MetaData/aircraftFlightPhase" source: variables/aircraftFlightPhase - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Aircraft flight phase (ascending/descending/level)" units: "" - - name: "ObsValue/temperatureAir" + - name: "ObsValue/airTemperature" + source: variables/airTemperature coordinates: "longitude latitude" - source: variables/temperatureAir - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Air Temperature" units: "K" - name: 
"ObsValue/windDirection" - coordinates: "longitude latitude" source: variables/windDirection - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Direction" - units: "degrees true" + units: "degrees" - name: "ObsValue/windSpeed" - coordinates: "longitude latitude" source: variables/windSpeed - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Speed" - units: "m/s" + units: "m s-1" diff --git a/test/testinput/bufr_airs_v2.yaml b/test/testinput/bufr_airs.yaml similarity index 63% rename from test/testinput/bufr_airs_v2.yaml rename to test/testinput/bufr_airs.yaml index 80126e211..6a554197f 100644 --- a/test/testinput/bufr_airs_v2.yaml +++ b/test/testinput/bufr_airs.yaml @@ -6,14 +6,16 @@ observations: - obs space: name: bufr - obsdatain: "./testinput/gdas.t12z.airsev.tm00.bufr_d" + obsdatain: "./testinput/gdas.t00z.airsev.tm00.bufr_d" mnemonicSets: - - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, CLATH, CLONH] - - mnemonics: [SAID, SIID, FOVN, SAZA, SOZA, BEARAZ, SOLAZI, TOCC] -# - mnemonics: [ALBD] -# channels: 1-4 - - mnemonics: [CHNM, LOGRCW, TMBR] - channels: 1-10 + - mnemonics: [SAID, SLNM, SOZA, SOLAZI] + - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, SECO] + - mnemonics: [CLATH, CLONH, SAZA, BEARAZ, FOVN] + - mnemonics: [ALBD] + channels: 1-4 + - mnemonics: [CHNM, TMBR] + channels: 1-15 + - mnemonics: [TOCC] exports: variables: @@ -24,18 +26,15 @@ observations: day: DAYS hour: HOUR minute: MINU + second: SECO latitude: mnemonic: CLATH longitude: mnemonic: CLONH - satelliteId: + satelliteIdentifier: mnemonic: SAID - sensorID: - mnemonic: SIID - channelNumber: + sensorChannelNumber: mnemonic: CHNM - centralWavenumber: - mnemonic: LOGRCW fieldOfViewNumber: mnemonic: FOVN solarZenithAngle: @@ -48,127 +47,123 @@ observations: mnemonic: BEARAZ cloudAmount: mnemonic: TOCC + transforms: + - scale: 0.01 # albedo: # mnemonic: ALBD +# transforms: +# - scale: 0.01 
brightnessTemperature: mnemonic: TMBR ioda: backend: netcdf - obsdataout: "./testrun/gdas.t12z.airsev.tm00.nc" + obsdataout: "./testrun/gdas.t00z.airsev.tm00.nc" dimensions: - - name: "nlocs" - size: variables/brightnessTemperature.nrows - - name: "nchans" + - name: "Location" + size: variables/latitude.nrows + - name: "Channel" size: variables/brightnessTemperature.ncols -# - name: "nchans_albedo" -# size: variables/albedo.ncols globals: - - name: "MetaData/platformCommonName" + - name: "platformCommonName" type: string value: "AIRS" - - name: "MetaData/platformLongDescription" + - name: "platformLongDescription" type: string value: "MTYP 021-249 EVERY FOV AIRS/AMSU-A/HSB 1B BTEMPS(AQUA)" variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - dimensions: ["nlocs"] - longName: "Satellite identification" - units: "" - - - name: "MetaData/channelNumber" - source: variables/channelNumber - dimensions: ["nchans"] - longName: "Channel number" - units: "" - - - name: "MetaData/centralWavenumber" - source: variables/centralWavenumber - dimensions: ["nchans"] - longName: "Central wavenumber (log10)" + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] + longName: "Satellite identifier" units: "" - name: "MetaData/latitude" source: variables/latitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Latitude" units: "degrees_north" range: [-90, 90] - name: "MetaData/longitude" source: variables/longitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Longitude" units: "degrees_east" range: [-180, 180] - name: "MetaData/dateTime" source: variables/timestamp - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/fieldOfViewNumber" source: variables/fieldOfViewNumber - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Field of view number" units: "" - name: "MetaData/solarZenithAngle" 
source: variables/solarZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Solar zenith angle" units: "degrees" range: [0, 180] - name: "MetaData/solarAzimuthAngle" source: variables/solarAzimuthAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Solar azimuth angle" units: "degrees" range: [0, 360] - name: "MetaData/sensorZenithAngle" source: variables/sensorZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Sensor zenith angle" units: "degrees" range: [0, 90] - name: "MetaData/sensorAzimuthAngle" source: variables/sensorAzimuthAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Sensor azimuth angle" units: "degrees" range: [0, 360] - - name: "MetaData/cloudAmount" + - name: "MetaData/sensorChannelNumber" + source: variables/sensorChannelNumber + dimensions: ["Channel"] + longName: "Channel number" + units: "" + + - name: "ObsValue/cloudAmount" coordinates: "longitude latitude" source: variables/cloudAmount - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Cloud fraction" - units: "percent" - range: [0, 100] + units: "1" + range: [0, 1] # - name: "ObsValue/albedo" -# coordinates: "longitude latitude nchans_albedo" +# coordinates: "longitude latitude Channel" # source: variables/albedo -# dimensions: ["nlocs", "nchans_albedo"] +# dimensions: ["Location", "Channel"] # longName: "albedo" -# units: "percent" -# range: [0, 100] +# units: "1" +# range: [0, 1] # chunks: [1000, 15] # compressionLevel: 4 - name: "ObsValue/brightnessTemperature" - coordinates: "longitude latitude nchans" + coordinates: "longitude latitude Channel" source: variables/brightnessTemperature - dimensions: ["nlocs", "nchans"] + dimensions: ["Location", "Channel"] longName: "Brightness temperature" units: "K" range: [120, 500] diff --git a/test/testinput/bufr_amsua_v2.yaml b/test/testinput/bufr_amsua.yaml similarity index 71% rename from test/testinput/bufr_amsua_v2.yaml rename to 
test/testinput/bufr_amsua.yaml index 16c179110..d7a898804 100644 --- a/test/testinput/bufr_amsua_v2.yaml +++ b/test/testinput/bufr_amsua.yaml @@ -8,8 +8,8 @@ observations: name: bufr obsdatain: "./testinput/gdas.t00z.1bamua.tm00.bufr_d" mnemonicSets: - - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, CLAT, CLON] - - mnemonics: [SAID, SIID, FOVN, SAZA, SOZA, BEARAZ, SOLAZI] + - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, SECO, CLAT, CLON] + - mnemonics: [SAID, FOVN, LSQL, SAZA, SOZA, SOLAZI, BEARAZ] - mnemonics: [CHNM, TMBR] channels: 1-15 @@ -22,16 +22,13 @@ observations: day: DAYS hour: HOUR minute: MINU + second: SECO latitude: mnemonic: CLAT longitude: mnemonic: CLON - satelliteId: + satelliteIdentifier: mnemonic: SAID - sensorID: - mnemonic: SIID - channelNumber: - mnemonic: CHNM fieldOfViewNumber: mnemonic: FOVN solarZenithAngle: @@ -42,6 +39,8 @@ observations: mnemonic: SAZA sensorAzimuthAngle: mnemonic: BEARAZ + sensorChannelNumber: + mnemonic: CHNM brightnessTemperature: mnemonic: TMBR @@ -50,96 +49,96 @@ observations: obsdataout: "./testrun/gdas.t00z.1bamsua.tm00.nc" dimensions: - - name: "nlocs" - size: variables/brightnessTemperature.nrows - - name: "nchans" + - name: "Location" + size: variables/latitude.nrows + - name: "Channel" size: variables/brightnessTemperature.ncols globals: - - name: "MetaData/platformCommonName" + - name: "platformCommonName" type: string value: "AMSUA" - - name: "MetaData/platformLongDescription" + - name: "platformLongDescription" type: string value: "MTYP 021-023 PROC AMSU-A 1B Tb (NOAA-15-19, METOP-1,2)" - - name: "MetaData/sensorCentralFrequency" - type: floatVector - value: [23.8, 31.4, 50.3, 52.8, 53.596, 54.4, 54.94, 55.5, 57.290, - 57.290, 57.290, 57.290, 57.290, 57.290, 89.0] +# - name: "sensorCentralFrequency" +# type: floatVector +# value: [23.8, 31.4, 50.3, 52.8, 53.596, 54.4, 54.94, 55.5, 57.290, +# 57.290, 57.290, 57.290, 57.290, 57.290, 89.0] variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - 
dimensions: ["nlocs"] - longName: "Satellite identification" - units: "" - - - name: "MetaData/channelNumber" - source: variables/channelNumber - dimensions: ["nchans"] - longName: "Channel number" + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] + longName: "Satellite identifier" units: "" - name: "MetaData/latitude" source: variables/latitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Latitude" units: "degrees_north" range: [-90, 90] - name: "MetaData/longitude" source: variables/longitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Longitude" units: "degrees_east" range: [-180, 180] - name: "MetaData/dateTime" source: variables/timestamp - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/fieldOfViewNumber" source: variables/fieldOfViewNumber - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Field of view number" units: "" - name: "MetaData/solarZenithAngle" source: variables/solarZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Solar zenith angle" units: "degrees" range: [0, 180] - name: "MetaData/solarAzimuthAngle" source: variables/solarAzimuthAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Solar azimuth angle" units: "degrees" range: [0, 360] - name: "MetaData/sensorZenithAngle" source: variables/sensorZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Sensor zenith angle" units: "degrees" range: [0, 90] - name: "MetaData/sensorAzimuthAngle" source: variables/sensorAzimuthAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Sensor azimuth angle" units: "degrees" range: [0, 360] + - name: "MetaData/sensorChannelNumber" + source: variables/sensorChannelNumber + dimensions: ["Channel"] + longName: "Channel number" + units: "" + - name: "ObsValue/brightnessTemperature" - coordinates: "longitude 
latitude nchans" + coordinates: "longitude latitude Channel" source: variables/brightnessTemperature - dimensions: ["nlocs", "nchans"] + dimensions: ["Location", "Channel"] longName: "Brightness temperature" units: "K" range: [120, 500] diff --git a/test/testinput/bufr_atms_v2.yaml b/test/testinput/bufr_atms.yaml similarity index 65% rename from test/testinput/bufr_atms_v2.yaml rename to test/testinput/bufr_atms.yaml index afe99788a..0c5fb404c 100644 --- a/test/testinput/bufr_atms_v2.yaml +++ b/test/testinput/bufr_atms.yaml @@ -8,8 +8,10 @@ observations: name: bufr obsdatain: "./testinput/gdas.t00z.atms.tm00.bufr_d" mnemonicSets: - - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, CLATH, CLONH] - - mnemonics: [SAID, SIID, FOVN, SAZA, SOZA, BEARAZ, SOLAZI] + - mnemonics: [SAID, YEAR, MNTH, DAYS, HOUR, MINU, SECO] + - mnemonics: [SLNM, FOVN, ATMSSQ] + - mnemonics: [CLATH, CLONH] + - mnemonics: [SAZA, BEARAZ, SOZA, SOLAZI] - mnemonics: [CHNM, TMBR] channels: 1-22 @@ -22,26 +24,29 @@ observations: day: DAYS hour: HOUR minute: MINU + second: SECO latitude: mnemonic: CLATH longitude: mnemonic: CLONH - satelliteId: + satelliteIdentifier: mnemonic: SAID - sensorID: - mnemonic: SIID - channelNumber: - mnemonic: CHNM + scanLineNumber: + mnemonic: SLNM fieldOfViewNumber: mnemonic: FOVN - solarZenithAngle: - mnemonic: SOZA - solarAzimuthAngle: - mnemonic: SOLAZI + qualityFlags: + mnemonic: ATMSSQ sensorZenithAngle: mnemonic: SAZA sensorAzimuthAngle: mnemonic: BEARAZ + solarZenithAngle: + mnemonic: SOZA + solarAzimuthAngle: + mnemonic: SOLAZI + sensorChannelNumber: + mnemonic: CHNM brightnessTemperature: mnemonic: TMBR @@ -50,98 +55,104 @@ observations: obsdataout: "./testrun/gdas.t00z.atms.tm00.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/brightnessTemperature.nrows - - name: "nchans" + - name: "Channel" size: variables/brightnessTemperature.ncols globals: - - name: "MetaData/platformCommonName" + - name: "platformCommonName" type: string value: "ATMS" - - 
name: "MetaData/platformLongDescription" + - name: "platformLongDescription" type: string value: "MTYP 021-203 ATMS BRIGHTNESS TEMPERATURE DATA" - - name: "MetaData/sensorCentralFrequency" - type: floatVector - value: [23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.40, 54.94, 55.50, - 57.2903, 57.2903, 57.2903, 57.2903, 57.2903, 57.2903, - 88.20, 165.5, 183.31, 183.31, 183.31, 183.31, 183.31] +# - name: "sensorCentralFrequency" +# type: floatVector +# value: [23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.40, 54.94, 55.50, +# 57.2903, 57.2903, 57.2903, 57.2903, 57.2903, 57.2903, +# 88.20, 165.5, 183.31, 183.31, 183.31, 183.31, 183.31] variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - dimensions: ["nlocs"] - longName: "Satellite identification" - units: "" - - - name: "MetaData/channelNumber" - source: variables/channelNumber - dimensions: ["nchans"] - longName: "Channel number" + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] + longName: "Satellite identifier" units: "" - name: "MetaData/latitude" source: variables/latitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Latitude" units: "degrees_north" range: [-90, 90] - name: "MetaData/longitude" source: variables/longitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Longitude" units: "degrees_east" range: [-180, 180] - name: "MetaData/dateTime" source: variables/timestamp - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/fieldOfViewNumber" source: variables/fieldOfViewNumber - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Field of view number" units: "" + - name: "MetaData/scanLineNumber" + source: variables/scanLineNumber + dimensions: ["Location"] + longName: "Scan line number" + units: "" + - name: "MetaData/solarZenithAngle" source: variables/solarZenithAngle - dimensions: ["nlocs"] + dimensions: 
["Location"] longName: "Solar zenith angle" units: "degrees" range: [0, 180] - name: "MetaData/solarAzimuthAngle" source: variables/solarAzimuthAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Solar azimuth angle" units: "degrees" range: [0, 360] - name: "MetaData/sensorZenithAngle" source: variables/sensorZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Sensor zenith angle" units: "degrees" range: [0, 90] - name: "MetaData/sensorAzimuthAngle" source: variables/sensorAzimuthAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Sensor azimuth angle" units: "degrees" range: [0, 360] + - name: "MetaData/sensorChannelNumber" + source: variables/sensorChannelNumber + dimensions: ["Channel"] + longName: "Channel number" + units: "" + - name: "ObsValue/brightnessTemperature" - coordinates: "longitude latitude nchans" + coordinates: "longitude latitude Channel" source: variables/brightnessTemperature - dimensions: ["nlocs", "nchans"] + dimensions: ["Location", "Channel"] longName: "Brightness temperature" units: "K" range: [120, 500] - chunks: [1000, 15] + chunks: [1000, 22] diff --git a/test/testinput/bufr_avhrr_v2.yaml b/test/testinput/bufr_avhrr.yaml similarity index 65% rename from test/testinput/bufr_avhrr_v2.yaml rename to test/testinput/bufr_avhrr.yaml index 240b7d5d7..1f8b3ccea 100644 --- a/test/testinput/bufr_avhrr_v2.yaml +++ b/test/testinput/bufr_avhrr.yaml @@ -8,8 +8,9 @@ observations: name: bufr obsdatain: "./testinput/gdas.t00z.avcsam.tm00.bufr_d" mnemonicSets: - - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, CLATH, CLONH] - - mnemonics: [SAID, FOVN, SAZA, SOZA] + - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU] + - mnemonics: [CLATH, CLONH, SAID] + - mnemonics: [FOVN, SAZA, SOZA] - mnemonics: [INCN, ALBD, TMBR] channels : 1-5 @@ -26,113 +27,115 @@ observations: mnemonic: CLATH longitude: mnemonic: CLONH - satelliteId: + satelliteIdentifier: mnemonic: SAID - channelNumber: - mnemonic: INCN fieldOfViewNumber: 
mnemonic: FOVN solarZenithAngle: mnemonic: SOZA sensorZenithAngle: mnemonic: SAZA + sensorChannelNumber: + mnemonic: INCN albedo: mnemonic: ALBD + transforms: + - scale: 0.01 brightnessTemperature: mnemonic: TMBR ioda: backend: netcdf - obsdataout: "./testrun/gdas.t12z.avcsam.tm00.nc" + obsdataout: "./testrun/gdas.t00z.avcsam.tm00.nc" dimensions: - - name: "nlocs" - size: variables/brightnessTemperature.nrows - - name: "nchans" + - name: "Location" + size: variables/latitude.nrows + - name: "Channel" size: variables/brightnessTemperature.ncols globals: - - name: "MetaData/platformCommonName" + - name: "platformCommonName" type: string value: "AVHRR" - - name: "MetaData/platformLongDescription" + - name: "platformLongDescription" type: string value: "MTYP 021-051 PROC AVHRR(GAC) 1B Tb-CLR & SEA (N-17,N-18,M-2)" - - name: "MetaData/sensorCentralWavelength" - type: floatVector - value: [0.630, 0.865, 3.74, 10.8, 12.0] # μm +# - name: "sensorCentralWavelength" +# type: floatVector +# value: [0.630, 0.865, 3.74, 10.8, 12.0] # μm variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - dimensions: ["nlocs"] - longName: "Satellite identification" - units: "" - - - name: "MetaData/channelNumber" - source: variables/channelNumber - dimensions: ["nchans"] - longName: "Channel number" + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] + longName: "Satellite identifier" units: "" - name: "MetaData/latitude" source: variables/latitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Latitude" units: "degrees_north" range: [-90, 90] - name: "MetaData/longitude" source: variables/longitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Longitude" units: "degrees_east" range: [-180, 180] - name: "MetaData/dateTime" source: variables/timestamp - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: 
"MetaData/fieldOfViewNumber" source: variables/fieldOfViewNumber - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Field of view number" units: "" - name: "MetaData/solarZenithAngle" source: variables/solarZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Solar zenith angle" units: "degrees" range: [0, 180] - name: "MetaData/sensorZenithAngle" source: variables/sensorZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Sensor zenith angle" units: "degrees" range: [0, 90] + - name: "MetaData/sensorChannelNumber" + source: variables/sensorChannelNumber + dimensions: ["Channel"] + longName: "Channel number" + units: "" + - name: "ObsValue/albedo" - coordinates: "longitude latitude nchans" - source: variables/brightnessTemperature - dimensions: ["nlocs", "nchans"] + source: variables/albedo + coordinates: "longitude latitude Channel" + dimensions: ["Location", "Channel"] longName: "Albedo" - units: "" - range: [120, 500] - chunks: [1000, 15] + units: "1" + range: [0, 1.0] + chunks: [1000, 5] compressionLevel: 4 - name: "ObsValue/brightnessTemperature" - coordinates: "longitude latitude nchans" source: variables/brightnessTemperature - dimensions: ["nlocs", "nchans"] + coordinates: "longitude latitude Channel" + dimensions: ["Location", "Channel"] longName: "Brightness temperature" units: "K" range: [120, 500] - chunks: [1000, 15] + chunks: [1000, 5] compressionLevel: 4 diff --git a/test/testinput/bufr_cris.yaml b/test/testinput/bufr_cris.yaml new file mode 100644 index 000000000..52119aee5 --- /dev/null +++ b/test/testinput/bufr_cris.yaml @@ -0,0 +1,189 @@ +# (C) Copyright 2021 UCAR +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
+ +observations: + - obs space: + name: bufr + obsdatain: "./testinput/gdas.t00z.crisf4.tm00.bufr_d" + mnemonicSets: + - mnemonics: [SAID, YEAR, MNTH, DAYS, HOUR, MINU, SECO, CLATH, CLONH] + - mnemonics: [SAZA, BEARAZ, SOZA, SOLAZI, STKO] + - mnemonics: [SLNM, FOVN, HOLS] + - mnemonics: [TOCC, HOCT, NSQF] + - mnemonics: [STCH, ENCH, CHSF] + - mnemonics: [CHNM, SRAD] + channels : 1-430 +# - mnemonics: [CHNM, SCHRAD] # VIIRS radiance +# channels : 1-16 + + exports: + variables: + timestamp: + datetime: + year: YEAR + month: MNTH + day: DAYS + hour: HOUR + minute: MINU + second: SECO + latitude: + mnemonic: CLATH + longitude: + mnemonic: CLONH + satelliteIdentifier: + mnemonic: SAID + scanLineNumber: + mnemonic: SLNM + satelliteAscendingFlag: + mnemonic: STKO + fieldOfViewNumber: + mnemonic: FOVN + solarZenithAngle: + mnemonic: SOZA + solarAzimuthAngle: + mnemonic: SOLAZI + sensorZenithAngle: + mnemonic: SAZA + sensorAzimuthAngle: + mnemonic: BEARAZ + cloudAmount: + mnemonic: TOCC + transforms: + - scale: 0.01 + heightOfTopOfCloud: + mnemonic: HOCT + qualityFlags: + mnemonic: NSQF + sensorChannelStart: + mnemonic: STCH + sensorChannelEnd: + mnemonic: ENCH + channelScaleFactor: + mnemonic: CHSF + sensorChannelNumber: + mnemonic: CHNM + radiance: # This is actually a scaled radiance, needs a fix + mnemonic: SRAD + + ioda: + backend: netcdf + obsdataout: "./testrun/gdas.t00z.crisf4.tm00.nc" + + dimensions: + - name: "Location" + size: variables/latitude.nrows + - name: "Channel" + size: variables/radiance.ncols + + globals: + - name: "platformCommonName" + type: string + value: "SNPP" + + - name: "platformLongDescription" + type: string + value: "MTYP 021-206 CrIS FULL SPCTRL RADIANCE (431 CHN SUBSET)" + + variables: + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] + longName: "Satellite identifier" + units: "" + + - name: "MetaData/latitude" + source: variables/latitude + dimensions: ["Location"] + longName: 
"Latitude" + units: "degrees_north" + range: [-90, 90] + + - name: "MetaData/longitude" + source: variables/longitude + dimensions: ["Location"] + longName: "Longitude" + units: "degrees_east" + range: [-180, 180] + + - name: "MetaData/dateTime" + source: variables/timestamp + dimensions: ["Location"] + longName: "dateTime" + units: "seconds since 1970-01-01T00:00:00Z" + + - name: "MetaData/fieldOfViewNumber" + source: variables/fieldOfViewNumber + dimensions: ["Location"] + longName: "Field of view number" + units: "" + + - name: "MetaData/solarZenithAngle" + source: variables/solarZenithAngle + dimensions: ["Location"] + longName: "Solar zenith angle" + units: "degrees" + range: [0, 180] + + - name: "MetaData/solarAzimuthAngle" + source: variables/solarAzimuthAngle + dimensions: ["Location"] + longName: "Solar azimuth angle" + units: "degrees" + range: [0, 360] + + - name: "MetaData/sensorZenithAngle" + source: variables/sensorZenithAngle + dimensions: ["Location"] + longName: "Sensor zenith angle" + units: "degrees" + range: [0, 90] + + - name: "MetaData/sensorAzimuthAngle" + source: variables/sensorAzimuthAngle + dimensions: ["Location"] + longName: "Sensor azimuth angle" + units: "degrees" + range: [0, 360] + + - name: "MetaData/scanLineNumber" + source: variables/scanLineNumber + dimensions: ["Location"] + longName: "Sensor scan line number" + units: "" + + - name: "MetaData/qualityFlags" + source: variables/qualityFlags + dimensions: ["Location"] + longName: "Scan level quality flags" + units: "" + + - name: "MetaData/sensorChannelNumber" + source: variables/sensorChannelNumber + dimensions: ["Channel"] + longName: "Channel number" + units: "" + + - name: "ObsValue/cloudAmount" + source: variables/cloudAmount + coordinates: "longitude latitude" + dimensions: ["Location"] + longName: "Cloud fraction" + units: "1" + + - name: "ObsValue/heightOfTopOfCloud" + source: variables/heightOfTopOfCloud + coordinates: "longitude latitude" + dimensions: ["Location"] + 
longName: "Cloud top height" + units: "m" + + - name: "ObsValue/radiance" + source: variables/radiance + coordinates: "longitude latitude Channel" + dimensions: ["Location", "Channel"] + longName: "radiance" + units: "W m-2 sr-1" + chunks: [1000, 430] + compressionLevel: 4 diff --git a/test/testinput/bufr_cris_v2.yaml b/test/testinput/bufr_cris_v2.yaml deleted file mode 100644 index 54f8030d9..000000000 --- a/test/testinput/bufr_cris_v2.yaml +++ /dev/null @@ -1,234 +0,0 @@ -# (C) Copyright 2021 UCAR -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. - -observations: - - obs space: - name: bufr - obsdatain: "./testinput/gdas.t00z.crisf4.tm00.bufr_d" - mnemonicSets: - - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, CLAT, CLON] - - mnemonics: [SAID, SIID, MYPE, SLNM, FORN, FOVN, SAZA, SOZA, BEARAZ, SOLAZI] - - mnemonics: [TOCC, HOCT, SAZA, SOZA, BEARAZ, SOLAZI] - - mnemonics: [NSQF, QMRKH] - - mnemonics: [NCQF, NFQF] - channels : 1-3 - - mnemonics: [CHNM, SRAD] - channels : 1-616 - - mnemonics: [CHNM, SCHRAD] - channels : 1-16 - - exports: - variables: - timestamp: - datetime: - year: YEAR - month: MNTH - day: DAYS - hour: HOUR - minute: MINU - latitude: - mnemonic: CLAT - longitude: - mnemonic: CLON - satelliteId: - mnemonic: SAID - sensorID: - mnemonic: SIID - measurementType: - mnemonic: MTYP - channelNumber: - mnemonic: CHNM - sensorScanLineNumber: - mnemonic: SLNM - fieldOfRegardNumber: - mnemonic: FORN - fieldOfViewNumber: - mnemonic: FOVN - solarZenithAngle: - mnemonic: SOZA - solarAzimuthAngle: - mnemonic: SOLAZI - sensorZenithAngle: - mnemonic: SAZA - sensorAzimuthAngle: - mnemonic: BEARAZ - cloudAmount: - mnemonic: TOCC - cloudTopHeight: - mnemonic: HOCT - qualityInfo: - mnemonic: QMRKH - qualityFlagsScan: - mnemonic: NSQF - qualityFlagsCalibration: - mnemonic: NCQF - qualityFlagsFieldOfView: - mnemonic: NFQF - radiance: - mnemonic: SRAD - radianceVIIRS: - 
mnemonic: SCHRAD - - ioda: - backend: netcdf - obsdataout: "./testrun/gdas.t00z.crisf4.tm00.nc" - - dimensions: - - name: "nlocs" - size: variables/radiance.nrows - - name: "nchans" - size: variables/radiance.ncols - - name: "nchans2" - size: variables/radianceVIIRS.ncols - - name: "nqualityFlags" - size: variables/qualityFlagsCalibration.ncols - - globals: - - name: "MetaData/platformCommonName" - type: string - value: "SNPP" - - - name: "MetaData/platformLongDescription" - type: string - value: "MTYP 021-206 CrIS FULL SPCTRL RADIANCE (431 CHN SUBSET)" - - variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - dimensions: ["nlocs"] - longName: "Satellite identification" - units: "" - - - name: "MetaData/channelNumber" - source: variables/channelNumber - dimensions: ["nchans"] - longName: "Channel number" - units: "" - - - name: "MetaData/measurementType" - source: variables/measurementType - dimensions: ["nchans"] - longName: "Measurement type" - units: "" - - - name: "MetaData/latitude" - source: variables/latitude - dimensions: ["nlocs"] - longName: "Latitude" - units: "degrees_north" - range: [-90, 90] - - - name: "MetaData/longitude" - source: variables/longitude - dimensions: ["nlocs"] - longName: "Longitude" - units: "degrees_east" - range: [-180, 180] - - - name: "MetaData/dateTime" - source: variables/timestamp - dimensions: ["nlocs"] - longName: "dateTime" - units: "seconds since 1970-01-01T00:00:00Z" - - - name: "MetaData/fieldOfRegardNumber" - source: variables/fieldOfRegardNumber - dimensions: ["nlocs"] - longName: "Field of regard number" - units: "" - - - name: "MetaData/fieldOfViewNumber" - source: variables/fieldOfViewNumber - dimensions: ["nlocs"] - longName: "Field of view number" - units: "" - - - name: "MetaData/solarZenithAngle" - source: variables/solarZenithAngle - dimensions: ["nlocs"] - longName: "Solar zenith angle" - units: "degrees" - range: [0, 180] - - - name: "MetaData/solarAzimuthAngle" - source: 
variables/solarAzimuthAngle - dimensions: ["nlocs"] - longName: "Solar azimuth angle" - units: "degrees" - range: [0, 360] - - - name: "MetaData/sensorZenithAngle" - source: variables/sensorZenithAngle - dimensions: ["nlocs"] - longName: "Sensor zenith angle" - units: "degrees" - range: [0, 90] - - - name: "MetaData/sensorAzimuthAngle" - source: variables/sensorAzimuthAngle - dimensions: ["nlocs"] - longName: "Sensor azimuth angle" - units: "degrees" - range: [0, 360] - - - name: "MetaData/sensorScanLineNumber" - source: variables/sensorScanLineNumber - dimensions: ["nlocs"] - longName: "Sensor scan line number" - units: "" - - - name: "MetaData/cloudAmount" - source: variables/cloudAmount - dimensions: ["nlocs"] - longName: "Cloud fraction" - units: "percent" - - - name: "MetaData/cloudTopHeight" - source: variables/cloudTopHeight - dimensions: ["nlocs"] - longName: "Cloud top height" - units: "m" - - - name: "MetaData/qualityInfo" - source: variables/qualityInfo - dimensions: ["nlocs"] - longName: "Quality information" - units: "" - - - name: "MetaData/qualityFlagsScan" - source: variables/qualityFlagsScan - dimensions: ["nlocs"] - longName: "Scan level quality flags" - units: "" - - - name: "MetaData/qualityFlagsCalibration" - source: variables/qualityFlagsCalibration - dimensions: ["nlocs", "nqualityFlags"] - longName: "Calibration quality flags" - units: "" - - - name: "MetaData/qualityFlagsFieldOfView" - source: variables/qualityFlagsFieldOfView - dimensions: ["nlocs", "nqualityFlags"] - longName: "Field of view quality flags" - units: "" - - - name: "ObsValue/radiance" - coordinates: "longitude latitude nchans" - source: variables/radiance - dimensions: ["nlocs", "nchans"] - longName: "radiance" - units: "W m-2 sr-1 cm" - chunks: [1000, 15] - compressionLevel: 4 - - - name: "ObsValue/radianceVIIRS" - coordinates: "longitude latitude nchans" - source: variables/radianceVIIRS - dimensions: ["nlocs", "nchans2"] - longName: "radiance" - units: "W m-2 sr-1 um-1" 
- chunks: [1000, 15] - compressionLevel: 4 diff --git a/test/testinput/bufr_empty_fields.yaml b/test/testinput/bufr_empty_fields.yaml index f742dfd1f..b2e3f20f9 100644 --- a/test/testinput/bufr_empty_fields.yaml +++ b/test/testinput/bufr_empty_fields.yaml @@ -18,6 +18,10 @@ observations: day: "*/DAYS" hour: "*/HOUR" minute: "*/MINU" + lat: + query: "*/CLAT" + lon: + query: "*/CLON" swellWavesDirection: query: "NC000000/WAVSQ3/DOSW" heightOfSwellWaves: @@ -31,26 +35,36 @@ observations: obsdataout: "./testrun/bufr_empty_fields.nc" variables: - - name: "MetaData/datetime" + - name: "MetaData/dateTime" source: variables/timestamp longName: "Datetime" - units: "datetime" + units: "seconds since 1970-01-01T00:00:00Z" - - name: "ObsValue/swellWavesDirection" + - name: "MetaData/latitude" + source: variables/lat + longName: "latitude" + units: "degree_north" + + - name: "MetaData/longitude" + source: variables/lon + longName: "longitude" + units: "degree_east" + + - name: "ObsValue/meanWavePropagationDirection" coordinates: "longitude latitude" source: variables/swellWavesDirection longName: "Swell Waves Direction" - units: "Degrees True" + units: "degree" - - name: "ObsValue/heightOfSwellWaves" + - name: "ObsValue/heightOfWaves" coordinates: "longitude latitude" source: variables/heightOfSwellWaves longName: "Height of Swell Waves" - units: "Meter" + units: "m" - - name: "ObsValue/periodOfSwellWaves" + - name: "ObsValue/meanPeriodOfWaves" coordinates: "longitude latitude" source: variables/periodOfSwellWaves longName: "Period of Swell Waves" - units: "Second" + units: "s" diff --git a/test/testinput/bufr_filter_split.yaml b/test/testinput/bufr_filter_split.yaml index 24b71b01b..5ef6f01aa 100644 --- a/test/testinput/bufr_filter_split.yaml +++ b/test/testinput/bufr_filter_split.yaml @@ -22,7 +22,7 @@ observations: query: "*/CLON" latitude: query: "*/CLAT" - radiance: + brightnessTemp: query: "[*/BRITCSTC/TMBR, */BRIT/TMBR]" filters: @@ -56,27 +56,27 @@ observations: - 
"*/BRIT" variables: - - name: "dateTime@MetaData" + - name: "MetaData/dateTime" source: variables/timestamp longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - - name: "latitude@MetaData" + - name: "MetaData/latitude" source: variables/latitude longName: "Latitude" units: "degrees_north" range: [-90, 90] - - name: "longitude@MetaData" + - name: "MetaData/longitude" source: variables/longitude longName: "Longitude" units: "degrees_east" range: [-180, 180] - - name: "radiance@ObsValue" + - name: "ObsValue/brightnessTemperature" coordinates: "longitude latitude Channel" - source: variables/radiance - longName: "Radiance" + source: variables/brightnessTemp + longName: "Brightness temperature" units: "K" range: [120, 500] chunks: [1000, 15] diff --git a/test/testinput/bufr_filtering.yaml b/test/testinput/bufr_filtering.yaml index b16f1128e..463fd4466 100644 --- a/test/testinput/bufr_filtering.yaml +++ b/test/testinput/bufr_filtering.yaml @@ -48,18 +48,18 @@ observations: - "*/BRIT" variables: - - name: "dateTime@MetaData" + - name: "MetaData/dateTime" source: variables/timestamp longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - - name: "latitude@MetaData" + - name: "MetaData/latitude" source: variables/latitude longName: "Latitude" units: "degrees_north" range: [-90, 90] - - name: "longitude@MetaData" + - name: "MetaData/longitude" source: variables/longitude longName: "Longitude" units: "degrees_east" diff --git a/test/testinput/bufr_hirs_v2.yaml b/test/testinput/bufr_hirs_v2.yaml deleted file mode 100644 index 310acfa1b..000000000 --- a/test/testinput/bufr_hirs_v2.yaml +++ /dev/null @@ -1,147 +0,0 @@ -# (C) Copyright 2021 UCAR -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
- -observations: - - obs space: - name: bufr - obsdatain: "./testinput/gdas.t00z.1bhrs4.tm00.bufr_d" - mnemonicSets: - - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, CLAT, CLON] - - mnemonics: [SAID, SIID, FOVN, SAZA, SOZA, BEARAZ, SOLAZI] - - mnemonics: [CHNM, TMBR] - channels: 1-20 - - exports: - variables: - timestamp: - datetime: - year: YEAR - month: MNTH - day: DAYS - hour: HOUR - minute: MINU - latitude: - mnemonic: CLAT - longitude: - mnemonic: CLON - satelliteId: - mnemonic: SAID - sensorID: - mnemonic: SIID - channelNumber: - mnemonic: CHNM - fieldOfViewNumber: - mnemonic: FOVN - solarZenithAngle: - mnemonic: SOZA - solarAzimuthAngle: - mnemonic: SOLAZI - sensorZenithAngle: - mnemonic: SAZA - sensorAzimuthAngle: - mnemonic: BEARAZ - brightnessTemperature: - mnemonic: TMBR - - ioda: - backend: netcdf - obsdataout: "./testrun/gdas.t00z.hirs.tm00.nc" - - dimensions: - - name: "nlocs" - size: variables/brightnessTemperature.nrows - - name: "nchans" - size: variables/brightnessTemperature.ncols - - globals: - - name: "MetaData/platformCommonName" - type: string - value: "HIRS" - - - name: "MetaData/platformLongDescription" - type: string - value: "MTYP 021-028 PROC HIRS-4 1B Tb (NOAA-18-19, METOP-1,2)" - -# - name: "MetaData/sensorCentralFrequency" -# type: floatVector -# value: [23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.40, 54.94, 55.50, -# 57.2903, 57.2903, 57.2903, 57.2903, 57.2903, 57.2903, -# 88.20, 165.5, 183.31, 183.31, 183.31, 183.31, 183.31] - - variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - dimensions: ["nlocs"] - longName: "Satellite identification" - units: "" - - - name: "MetaData/channelNumber" - source: variables/channelNumber - dimensions: ["nchans"] - longName: "Channel number" - units: "" - - - name: "MetaData/latitude" - source: variables/latitude - dimensions: ["nlocs"] - longName: "Latitude" - units: "degrees_north" - range: [-90, 90] - - - name: "MetaData/longitude" - source: variables/longitude - dimensions: 
["nlocs"] - longName: "Longitude" - units: "degrees_east" - range: [-180, 180] - - - name: "MetaData/dateTime" - source: variables/timestamp - dimensions: ["nlocs"] - longName: "dateTime" - units: "seconds since 1970-01-01T00:00:00Z" - - - name: "MetaData/fieldOfViewNumber" - source: variables/fieldOfViewNumber - dimensions: ["nlocs"] - longName: "Field of view number" - units: "" - - - name: "MetaData/solarZenithAngle" - source: variables/solarZenithAngle - dimensions: ["nlocs"] - longName: "Solar zenith angle" - units: "degrees" - range: [0, 180] - - - name: "MetaData/solarAzimuthAngle" - source: variables/solarAzimuthAngle - dimensions: ["nlocs"] - longName: "Solar azimuth angle" - units: "degrees" - range: [0, 360] - - - name: "MetaData/sensorZenithAngle" - source: variables/sensorZenithAngle - dimensions: ["nlocs"] - longName: "Sensor zenith angle" - units: "degrees" - range: [0, 90] - - - name: "MetaData/sensorAzimuthAngle" - source: variables/sensorAzimuthAngle - dimensions: ["nlocs"] - longName: "Sensor azimuth angle" - units: "degrees" - range: [0, 360] - - - name: "ObsValue/brightnessTemperature" - coordinates: "longitude latitude nchans" - source: variables/brightnessTemperature - dimensions: ["nlocs", "nchans"] - longName: "Brightness temperature" - units: "K" - range: [120, 500] - chunks: [1000, 15] diff --git a/test/testinput/bufr_hrs.yaml b/test/testinput/bufr_hrs.yaml index 1c99f8988..e63d3199e 100644 --- a/test/testinput/bufr_hrs.yaml +++ b/test/testinput/bufr_hrs.yaml @@ -25,7 +25,7 @@ observations: query: "*/CLAT" channel: query: "[*/BRITCSTC/CHNM, */BRIT/CHNM]" - radiance: + brightnessTemp: query: "[*/BRITCSTC/TMBR, */BRIT/TMBR]" ioda: @@ -37,29 +37,29 @@ observations: paths: - "*/BRIT" - "*/BRITCSTC" -# source: variables/channel + source: variables/channel variables: - - name: "dateTime@MetaData" + - name: "MetaData/dateTime" source: variables/timestamp longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - - name: 
"latitude@MetaData" + - name: "MetaData/latitude" source: variables/latitude longName: "Latitude" units: "degrees_north" range: [-90, 90] - - name: "longitude@MetaData" + - name: "MetaData/longitude" source: variables/longitude longName: "Longitude" units: "degrees_east" range: [-180, 180] - - name: "radiance@ObsValue" + - name: "ObsValue/brightnessTemperature" coordinates: "longitude latitude Channel" - source: variables/radiance - longName: "Radiance" + source: variables/brightnessTemp + longName: "Brightness temperature" units: "K" range: [120, 500] diff --git a/test/testinput/bufr_iasi.yaml b/test/testinput/bufr_iasi.yaml new file mode 100644 index 000000000..2bc2beed8 --- /dev/null +++ b/test/testinput/bufr_iasi.yaml @@ -0,0 +1,176 @@ +# (C) Copyright 2022 UCAR +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. + +observations: + - obs space: + name: bufr + obsdatain: "./testinput/gdas.t00z.mtiasi.tm00.bufr_d" + mnemonicSets: + - mnemonics: [SAID, YEAR, MNTH, DAYS, HOUR, MINU, SECO] + - mnemonics: [CLATH, CLONH, SAZA, BEARAZ, SOZA, SOLAZI, FOVN, SLNM] + - mnemonics: [STCH, ENCH, CHSF] + channels: 1-10 + - mnemonics: [CHNM, SCRA] + channels: 1-616 + + exports: + variables: + timestamp: + datetime: + year: YEAR + month: MNTH + day: DAYS + hour: HOUR + minute: MINU + second: SECO + latitude: + mnemonic: CLATH + longitude: + mnemonic: CLONH + satelliteIdentifier: + mnemonic: SAID + scanLineNumber: + mnemonic: SLNM + fieldOfViewNumber: + mnemonic: FOVN + solarZenithAngle: + mnemonic: SOZA + solarAzimuthAngle: + mnemonic: SOLAZI + sensorZenithAngle: + mnemonic: SAZA + sensorAzimuthAngle: + mnemonic: BEARAZ + sensorChannelStart: + mnemonic: STCH + sensorChannelEnd: + mnemonic: ENCH + channelScaleFactor: + mnemonic: CHSF + sensorChannelNumber: + mnemonic: CHNM + radiance: # This is actually a scaled radiance, needs a fix + mnemonic: SCRA + + ioda: + backend: netcdf + 
obsdataout: "./testrun/gdas.t00z.mtiasi.tm00.nc" + + dimensions: + - name: "Location" + size: variables/latitude.nrows + - name: "Channel" + size: variables/radiance.ncols + # - name: "ChannelBlock" + # size: variables/sensorChannelStart.ncols + + globals: + - name: "platformCommonName" + type: string + value: "IASI" + + - name: "platformLongDescription" + type: string + value: "MTYP 021-241 IASI 1C RADIANCES (VARIABLE CHNS) (METOP)" + + variables: + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] + longName: "Satellite identifier" + units: "" + + - name: "MetaData/latitude" + source: variables/latitude + dimensions: ["Location"] + longName: "Latitude" + units: "degrees_north" + range: [-90, 90] + + - name: "MetaData/longitude" + source: variables/longitude + dimensions: ["Location"] + longName: "Longitude" + units: "degrees_east" + range: [-180, 180] + + - name: "MetaData/dateTime" + source: variables/timestamp + dimensions: ["Location"] + longName: "dateTime" + units: "seconds since 1970-01-01T00:00:00Z" + + - name: "MetaData/fieldOfViewNumber" + source: variables/fieldOfViewNumber + dimensions: ["Location"] + longName: "Field of view number" + units: "" + + - name: "MetaData/scanLineNumber" + source: variables/scanLineNumber + dimensions: ["Location"] + longName: "Scan line number" + units: "" + + - name: "MetaData/solarZenithAngle" + source: variables/solarZenithAngle + dimensions: ["Location"] + longName: "Solar zenith angle" + units: "degrees" + range: [0, 180] + + - name: "MetaData/solarAzimuthAngle" + source: variables/solarAzimuthAngle + dimensions: ["Location"] + longName: "Solar azimuth angle" + units: "degrees" + range: [0, 360] + + - name: "MetaData/sensorZenithAngle" + source: variables/sensorZenithAngle + dimensions: ["Location"] + longName: "Sensor zenith angle" + units: "degrees" + range: [0, 90] + + - name: "MetaData/sensorAzimuthAngle" + source: variables/sensorAzimuthAngle + dimensions: 
["Location"] + longName: "Sensor azimuth angle" + units: "degrees" + range: [0, 360] + +# - name: "MetaData/sensorChannelStart" +# source: variables/sensorChannelStart +# dimensions: ["ChannelBlock"] +# longName: "Starting channel number" +# units: "" + +# - name: "MetaData/sensorChannelEnd" +# source: variables/sensorChannelEnd +# dimensions: ["ChannelBlock"] +# longName: "Ending channel number" +# units: "" + +# - name: "MetaData/channelScaleFactor" +# source: variables/channelScaleFactor +# dimensions: ["ChannelBlock"] +# longName: "Channel scale factor" +# units: "" + + - name: "MetaData/sensorChannelNumber" + source: variables/sensorChannelNumber + dimensions: ["Channel"] + longName: "Channel number" + units: "" + + - name: "ObsValue/radiance" + source: variables/radiance + coordinates: "longitude latitude Channel" + dimensions: ["Location", "Channel"] + longName: "Scaled radiance" + units: "W m-2 sr-1" + chunks: [1000, 616] + compressionLevel: 4 diff --git a/test/testinput/bufr_mhs.yaml b/test/testinput/bufr_mhs.yaml index 279526979..04e91c0fc 100644 --- a/test/testinput/bufr_mhs.yaml +++ b/test/testinput/bufr_mhs.yaml @@ -20,8 +20,10 @@ observations: second: "*/SECO" height: query: "*/HMSL" + type: float hols: query: "*/HOLS" + type: float fovn: query: "*/FOVN" lsql: @@ -42,7 +44,7 @@ observations: query: "*/BEARAZ" channels: query: "[*/BRITCSTC/CHNM, */BRIT/CHNM]" - radiance: + brightnessTemp: query: "[*/BRITCSTC/TMBR, */BRIT/TMBR]" ioda: @@ -57,82 +59,85 @@ observations: source: variables/channels globals: - - name: "sensorCentralFrequency@MetaData" - type: floatVector - value: [89.0, 157.0, 183.311, 183.311, 190.311] - - name: "platformCommonName@MetaData" + - name: "platformCommonName" type: string value: "MHS" - - variables: - - - name: "stationElevation@MetaData" - source: variables/height - longName: "Station Elevation" - units: "meters" - - - name: "heightOfSurface@MetaData" - source: variables/hols - longName: "Height of Land Surface" - units: 
"meters" - - name: "fieldOfViewNumber@MetaData" - source: variables/fovn - longName: "Field of View Number" - units: "none" + - name: "platformLongDescription" + type: string + value: "MTYP 021-027 PROCESSED MHS Tb (NOAA-18-19, METOP-1,2)" - - name: "landSeaQualifier@MetaData" - source: variables/lsql - longName: "Land/Sea Qualifier" - units: "none" +# - name: "sensorCentralFrequency" +# type: floatVector +# value: [89.0, 157.0, 183.311, 183.311, 190.311] + + variables: - - name: "dateTime@MetaData" + - name: "MetaData/dateTime" source: variables/timestamp longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - - name: "latitude@MetaData" + - name: "MetaData/latitude" source: variables/latitude longName: "Latitude" units: "degrees_north" range: [-90, 90] - - name: "longitude@MetaData" + - name: "MetaData/longitude" source: variables/longitude longName: "Longitude" units: "degrees_east" range: [-180, 180] - - name: "solarZenithAngle@MetaData" + - name: "MetaData/height" + source: variables/height + longName: "height" + units: "m" + + - name: "MetaData/heightOfSurface" + source: variables/hols + longName: "Height of Land Surface" + units: "m" + + - name: "MetaData/fieldOfViewNumber" + source: variables/fovn + longName: "Field of View Number" + + - name: "MetaData/landSeaQualifier" + source: variables/lsql + longName: "Land/Sea Qualifier" + + - name: "MetaData/solarZenithAngle" source: variables/sza longName: "Solar Zenith Angle" units: "degrees" range: [0, 180] - - name: "solarAzimuthAngle@MetaData" + - name: "MetaData/solarAzimuthAngle" source: variables/saz longName: "Solar Azimuth Angle" units: "degrees" range: [-180, 180] - - name: "sensorZenithAngle@MetaData" + - name: "MetaData/sensorZenithAngle" source: variables/vza longName: "Sensor Zenith Angle" units: "degrees" range: [0, 180] - - name: "sensorAzimuthAngle@MetaData" + - name: "MetaData/sensorAzimuthAngle" source: variables/vaz longName: "Sensor Azimuth Angle" units: "degrees" range: [-180, 180] 
- - name: "brightnessTemperature@ObsValue" + - name: "ObsValue/brightnessTemperature" coordinates: "longitude latitude Channel" - source: variables/radiance + source: variables/brightnessTemp longName: "Brightness Temperature" units: "K" range: [120, 500] - chunks: [1000, 15] + chunks: [1000, 5] compressionLevel: 4 diff --git a/test/testinput/bufr_mhs_v2.yaml b/test/testinput/bufr_mhs_v2.yaml deleted file mode 100644 index 52bbdfe23..000000000 --- a/test/testinput/bufr_mhs_v2.yaml +++ /dev/null @@ -1,146 +0,0 @@ -# (C) Copyright 2021 UCAR -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. - -observations: - - obs space: - name: bufr - obsdatain: "./testinput/gdas.t18z.1bmhs.tm00.bufr_d" - mnemonicSets: - - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, CLAT, CLON] - - mnemonics: [SAID, SIID, FOVN, SAZA, SOZA, BEARAZ, SOLAZI] - - mnemonics: [CHNM, TMBR] - channels : 1-5 - - exports: - variables: - timestamp: - datetime: - year: YEAR - month: MNTH - day: DAYS - hour: HOUR - minute: MINU - latitude: - mnemonic: CLAT - longitude: - mnemonic: CLON - satelliteId: - mnemonic: SAID - sensorID: - mnemonic: SIID - channelNumber: - mnemonic: CHNM - fieldOfViewNumber: - mnemonic: FOVN - solarZenithAngle: - mnemonic: SOZA - solarAzimuthAngle: - mnemonic: SOLAZI - sensorZenithAngle: - mnemonic: SAZA - sensorAzimuthAngle: - mnemonic: BEARAZ - brightnessTemperature: - mnemonic: TMBR - - ioda: - backend: netcdf - obsdataout: "./testrun/gdas.t18z.1bmhs.tm00.nc" - - dimensions: - - name: "nlocs" - size: variables/brightnessTemperature.nrows - - name: "nchans" - size: variables/brightnessTemperature.ncols - - globals: - - name: "MetaData/platformCommonName" - type: string - value: "MHS" - - - name: "MetaData/platformLongDescription" - type: string - value: "MTYP 021-027 PROCESSED MHS Tb (NOAA-18-19, METOP-1,2)" - - - name: "MetaData/sensorCentralFrequency" - type: floatVector - value: 
[89.0, 157.0, 183.311, 183.311, 190.311] - - variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - dimensions: ["nlocs"] - longName: "Satellite identification" - units: "" - - - name: "MetaData/channelNumber" - source: variables/channelNumber - dimensions: ["nchans"] - longName: "Channel number" - units: "" - - - name: "MetaData/latitude" - source: variables/latitude - dimensions: ["nlocs"] - longName: "Latitude" - units: "degrees_north" - range: [-90, 90] - - - name: "MetaData/longitude" - source: variables/longitude - dimensions: ["nlocs"] - longName: "Longitude" - units: "degrees_east" - range: [-180, 180] - - - name: "MetaData/dateTime" - source: variables/timestamp - dimensions: ["nlocs"] - longName: "dateTime" - units: "seconds since 1970-01-01T00:00:00Z" - - - name: "MetaData/fieldOfViewNumber" - source: variables/fieldOfViewNumber - dimensions: ["nlocs"] - longName: "Field of view number" - units: "" - - - name: "MetaData/solarZenithAngle" - source: variables/solarZenithAngle - dimensions: ["nlocs"] - longName: "Solar zenith angle" - units: "degrees" - range: [0, 180] - - - name: "MetaData/solarAzimuthAngle" - source: variables/solarAzimuthAngle - dimensions: ["nlocs"] - longName: "Solar azimuth angle" - units: "degrees" - range: [0, 360] - - - name: "MetaData/sensorZenithAngle" - source: variables/sensorZenithAngle - dimensions: ["nlocs"] - longName: "Sensor zenith angle" - units: "degrees" - range: [0, 90] - - - name: "MetaData/sensorAzimuthAngle" - source: variables/sensorAzimuthAngle - dimensions: ["nlocs"] - longName: "Sensor azimuth angle" - units: "degrees" - range: [0, 360] - - - name: "ObsValue/brightnessTemperature" - coordinates: "longitude latitude nchans" - source: variables/brightnessTemperature - dimensions: ["nlocs", "nchans"] - longName: "Brightness temperature" - units: "K" - range: [120, 500] - chunks: [1000, 15] - compressionLevel: 4 diff --git a/test/testinput/bufr_ncep_aircft_AMDAR103.yaml 
b/test/testinput/bufr_ncep_aircft_AMDAR103.yaml index e48040922..83bad12f1 100644 --- a/test/testinput/bufr_ncep_aircft_AMDAR103.yaml +++ b/test/testinput/bufr_ncep_aircft_AMDAR103.yaml @@ -5,7 +5,7 @@ # observations: - obs space: - name: bufr + name: bufr_amdar obsdatain: "testinput/gdas.t12z.aircft.tm00.bufr_d" @@ -88,15 +88,15 @@ observations: source: variables/aircraftFlightNumber longName: "Aircraft Flight Number" - - name: "MetaData/aircraftNavigationalSystem" - source: variables/aircraftNavigationalSystem - longName: "Aircraft Navigational System" + # - name: "MetaData/aircraftNavigationalSystem" + # source: variables/aircraftNavigationalSystem + # longName: "Aircraft Navigational System" - name: "MetaData/aircraftIdentifier" source: variables/aircraftIdentifier longName: "Aircraft Identifier (Station Identification)" - - name: "MetaData/aircraftAltitude" + - name: "MetaData/height" source: variables/aircraftAltitude longName: "Aircraft Altitude" units: "m" @@ -107,7 +107,7 @@ observations: longName: "Air Temperature" units: "K" - - name: "ObsValue/dewpointTemperature" + - name: "ObsValue/dewPointTemperature" source: variables/dewpointTemperature longName: "Dewpoint Temperature" units: "K" @@ -143,4 +143,3 @@ observations: - name: "QualityMarker/windDirection" source: variables/windQM longName: "Quality Indicator for Wind Direction" - diff --git a/test/testinput/bufr_ncep_aircft_noAMDAR103.yaml b/test/testinput/bufr_ncep_aircft_noAMDAR103.yaml index 07f7075d4..f55f3a8ef 100644 --- a/test/testinput/bufr_ncep_aircft_noAMDAR103.yaml +++ b/test/testinput/bufr_ncep_aircft_noAMDAR103.yaml @@ -5,7 +5,7 @@ # observations: - obs space: - name: bufr + name: bufr_aircft_no_amdar obsdatain: "testinput/gdas.t12z.aircft.tm00.bufr_d" @@ -128,13 +128,13 @@ observations: source: variables/aircraftFlightNumber longName: "Aircraft Flight Number" - - name: "MetaData/aircraftNavigationalSystem" - source: variables/aircraftNavigationalSystem - longName: "Aircraft Navigational 
System" + # - name: "MetaData/aircraftNavigationalSystem" + # source: variables/aircraftNavigationalSystem + # longName: "Aircraft Navigational System" - - name: "MetaData/commercialAircraftType" - source: variables/commercialAircraftType - longName: "Commercial Aircraft Type" + # - name: "MetaData/commercialAircraftType" + # source: variables/commercialAircraftType + # longName: "Commercial Aircraft Type" - name: "MetaData/aircraftFlightPhase" source: variables/aircraftFlightPhase @@ -148,31 +148,31 @@ observations: source: variables/dataProviderRestricted longName: "Data Provider Restricted" - - name: "MetaData/dataRestrictedExpiration" - source: variables/dataRestrictedExpiration - longName: "Restricted Data Expiration" - units: "Hour" + # - name: "MetaData/dataRestrictedExpiration" + # source: variables/dataRestrictedExpiration + # longName: "Restricted Data Expiration" + # units: "Hour" - - name: "MetaData/dataReceiptTimeHour" - source: variables/dataReceiptTimeHour - longName: "Receipt Time (Hour)" - units: "Hour" + # - name: "MetaData/dataReceiptTimeHour" + # source: variables/dataReceiptTimeHour + # longName: "Receipt Time (Hour)" + # units: "Hour" - - name: "MetaData/dataReceiptTimeMinute" - source: variables/dataReceiptTimeMinute - longName: "Data Receipt Time (Minute)" - units: "Minute" + # - name: "MetaData/dataReceiptTimeMinute" + # source: variables/dataReceiptTimeMinute + # longName: "Data Receipt Time (Minute)" + # units: "Minute" - - name: "MetaData/dataReceiptTimeSignificance" - source: variables/dataReceiptTimeSignificance - longName: "Data Receipt Time Significance" + # - name: "MetaData/dataReceiptTimeSignificance" + # source: variables/dataReceiptTimeSignificance + # longName: "Data Receipt Time Significance" - - name: "MetaData/aircraftAltitude" + - name: "MetaData/height" source: variables/aircraftAltitude longName: "Aircraft Altitude" units: "m" - - name: "MetaData/relativeHumidityPercentConfidence" + - name: 
"MetaData/humidityPercentConfidence" source: variables/percentConfidenceRH longName: "Percent Confidence of Relative Humidity Quality" units: "percent" @@ -236,4 +236,3 @@ observations: - name: "QualityMarker/windDirection" source: variables/windQM longName: "Quality Indicator for Wind Direction" - diff --git a/test/testinput/bufr_ncep_sevcsr.yaml b/test/testinput/bufr_ncep_sevcsr.yaml index 93cfaf048..d2bd27687 100644 --- a/test/testinput/bufr_ncep_sevcsr.yaml +++ b/test/testinput/bufr_ncep_sevcsr.yaml @@ -6,7 +6,7 @@ observations: - obs space: - name: bufr + name: bufr_sevcsr obsdatain: "./testinput/gdas.t00z.sevcsr.tm00.bufr_d" exports: @@ -104,7 +104,7 @@ observations: longName: "Brightness temperature" units: "K" range: [150, 350] - chunks: [1000, 15] + chunks: [1000, 12] - name: "ObsValue/brightnessTemperatureStandardDeviation" coordinates: "longitude latitude Channel" diff --git a/test/testinput/bufr_read_wmo_radiosonde.yaml b/test/testinput/bufr_read_wmo_radiosonde.yaml index 43da84ef2..4e2587b1b 100644 --- a/test/testinput/bufr_read_wmo_radiosonde.yaml +++ b/test/testinput/bufr_read_wmo_radiosonde.yaml @@ -5,13 +5,20 @@ observations: - obs space: - name: bufr + name: bufr_wmo_sonde obsdatain: "./testinput/bufr_read_wmo_radiosonde.bufr" isWmoFormat: true tablepath: "./testinput/bufr_tables" exports: variables: + timestamp: + datetime: + year: "*/YEAR" + month: "*/MNTH" + day: "*/DAYS" + hour: "*/HOUR" + minute: "*/MINU" latitude: query: "*/CLATH" longitude: @@ -30,41 +37,45 @@ observations: obsdataout: "./testrun/bufr_read_wmo_radiosonde.nc" dimensions: - - name: Channel + - name: Level path: "*/TDWPRAOB" - variables: - - name: "latitude@MetaData" + - name: "MetaData/dateTime" + source: variables/timestamp + longName: "dateTime" + units: "seconds since 1970-01-01T00:00:00Z" + + - name: "MetaData/latitude" source: variables/latitude longName: "Latitude" units: "degrees_north" - - name: "longitude@MetaData" + - name: "MetaData/longitude" source: 
variables/longitude longName: "Longitude" units: "degrees_east" - - name: "air_temperature@ObsValue" - coordinates: "longitude latitude" + - name: "ObsValue/airTemperature" + coordinates: "longitude latitude Level" source: variables/air_temperature longName: "Air Temperature" units: "K" - - name: "dewpoint_temperature@ObsValue" - coordinates: "longitude latitude" + - name: "ObsValue/dewPointTemperature" + coordinates: "longitude latitude Level" source: variables/dewpoint_temperature longName: "Dewpoint Temperature" units: "K" - - name: "wind_direction@ObsValue" - coordinates: "longitude latitude" + - name: "ObsValue/windDirection" + coordinates: "longitude latitude Level" source: variables/wind_direction longName: "Wind Direction" - units: "degrees true" + units: "degree" - - name: "wind_speed@ObsValue" - coordinates: "longitude latitude" + - name: "ObsValue/windSpeed" + coordinates: "longitude latitude Level" source: variables/wind_speed longName: "Wind Speed" - units: "m/s" + units: "m s-1" diff --git a/test/testinput/bufr_satwnd_new_format.yaml b/test/testinput/bufr_satwnd_new_format.yaml index 29e059fea..3caeb0459 100644 --- a/test/testinput/bufr_satwnd_new_format.yaml +++ b/test/testinput/bufr_satwnd_new_format.yaml @@ -38,8 +38,10 @@ observations: query: "*/EHAM" pressure: query: "*/PRLC[1]" + type: float wind_direction: query: "*/WDIR" + type: float wind_speed: query: "*/WSPD" tracking_correlation_of_vector: @@ -60,96 +62,88 @@ observations: backend: netcdf obsdataout: "./testrun/NC005031.nc" + dimensions: + - name: "Confidence" + path: "*/AMVQIC" + variables: - - name: "satellite@MetaData" + - name: "MetaData/satelliteIdentifier" source: variables/satellite longName: "Satellite ID" - units: "id" - - name: "dateTime@MetaData" + - name: "MetaData/dateTime" source: variables/timestamp longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - - name: "longitude@MetaData" + - name: "MetaData/longitude" source: variables/longitude longName: "Longitude" - 
units: "degrees" + units: "degree_east" range: [-180, 180] - - name: "latitude@MetaData" + - name: "MetaData/latitude" source: variables/latitude longName: "Latitude" - units: "degrees" + units: "degree_north" range: [-90, 90] - - name: "satellite_wind_calculation_method@MetaData" + - name: "MetaData/windComputationMethod" source: variables/satellite_wind_calculation_method longName: "Satellite Wind Calculation Method" - units: "id" - - name: "satellite_zenith_angle@MetaData" + - name: "MetaData/satelliteZenithAngle" source: variables/satellite_zenith_angle longName: "Satellite Zenith Angle" - units: "degrees" + units: "degree" - - name: "satellite_channel_center_frequency@MetaData" + - name: "MetaData/sensorCentralFrequency" source: variables/satellite_channel_center_frequency longName: "Satellite Channel Center Frequency" - units: "hz" + units: "Hz" - - name: "originating_generating_center@MetaData" + - name: "MetaData/dataProviderOrigin" source: variables/originating_generating_center longName: "Originating/Generating Center" - units: "id" - - name: "satellite_wind_quality_mark@MetaData" + - name: "QualityMarker/qualityFlags" source: variables/satellite_wind_quality_mark longName: "Satellite Wind Quality Mark" - units: "id" - - name: "extended_height_assignment_method@MetaData" + - name: "MetaData/windHeightAssignMethod" source: variables/extended_height_assignment_method longName: "Height Assignment Method" - units: "id" - - name: "pressure@ObsValue" + - name: "MetaData/windTrackingCorrelation" + coordinates: "longitude latitude" + source: variables/tracking_correlation_of_vector + longName: "Tracking Correlation of Vector" + units: "1" + + - name: "MetaData/windGeneratingApplication" + source: variables/generating_application + longName: "Generating Application" + + - name: "MetaData/windPercentConfidence" + coordinates: "longitude latitude Confidence" + source: variables/percent_confidence + longName: "Percent Confidence" + units: "percent" + + - name: 
"MetaData/pressure" coordinates: "longitude latitude" source: variables/pressure longName: "Pressure" - units: "pascals" + units: "Pa" - - name: "wind_direction@ObsValue" + - name: "ObsValue/windDirection" coordinates: "longitude latitude" source: variables/wind_direction longName: "Wind Direction" - units: "degrees true" + units: "degree" - - name: "wind_speed@ObsValue" + - name: "ObsValue/windSpeed" coordinates: "longitude latitude" source: variables/wind_speed longName: "Wind Speed" - units: "meters per second" - - - name: "tracking_correlation_of_vector@ObsValue" - coordinates: "longitude latitude" - source: variables/tracking_correlation_of_vector - longName: "Tracking Correlation of Vector" - units: "numeric" - - - name: "coefficient_of_variation@ObsValue" - coordinates: "longitude latitude" - source: variables/coefficient_of_variation - longName: "Coefficient of Variation" - units: "numeric" - - - name: "generating_application@MetaData" - source: variables/generating_application - longName: "Generating Application" - units: "id" - - - name: "percent_confidence@ObsValue" - coordinates: "longitude latitude" - source: variables/percent_confidence - longName: "Percent Confidence" - units: "percent" + units: "m s-1" diff --git a/test/testinput/bufr_satwnd_old_format.yaml b/test/testinput/bufr_satwnd_old_format.yaml index abebb783d..8bc45f74b 100644 --- a/test/testinput/bufr_satwnd_old_format.yaml +++ b/test/testinput/bufr_satwnd_old_format.yaml @@ -38,8 +38,10 @@ observations: query: "*/MDPT/HAMD" pressure: query: "*/PRLC" + type: float wind_direction: query: "*/WDIR" + type: float wind_speed: query: "*/WSPD" generating_application: @@ -57,86 +59,83 @@ observations: backend: netcdf obsdataout: "./testrun/NC005066.nc" + dimensions: + - name: "Confidence" + path: "*/QCPRMS" variables: - - name: "satellite@MetaData" + - name: "MetaData/satelliteIdentifier" source: variables/satellite longName: "Satellite ID" - units: "id" - - name: "dateTime@MetaData" + - name: 
"MetaData/dateTime" source: variables/timestamp - dimensions: [ "nlocs" ] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - - name: "longitude@MetaData" + - name: "MetaData/longitude" source: variables/longitude longName: "Longitude" - units: "degrees" + units: "degree_east" range: [-180, 180] - - name: "latitude@MetaData" + - name: "MetaData/latitude" source: variables/latitude longName: "Latitude" - units: "degrees" + units: "degree_north" range: [-90, 90] - - name: "satellite_wind_calculation_method@MetaData" + - name: "MetaData/windComputationMethod" source: variables/satellite_wind_calculation_method longName: "Satellite Wind Calculation Method" - units: "id" - - name: "satellite_channel_center_frequency@MetaData" + - name: "MetaData/sensorCentralFrequency" source: variables/satellite_channel_center_frequency longName: "Satellite Channel Center Frequency" - units: "hz" + units: "Hz" - - name: "satellite_zenith_angle@MetaData" + - name: "MetaData/satelliteZenithAngle" source: variables/satellite_zenith_angle longName: "Satellite Zenith Angle" - units: "degrees" + units: "degree" - - name: "originating_generating_center@MetaData" + - name: "MetaData/dataProviderOrigin" source: variables/originating_generating_center longName: "Originating/Generating Center" - units: "id" - - name: "satellite_wind_quality_mark@MetaData" + - name: "QualityMarker/qualityFlags" source: variables/satellite_wind_quality_mark longName: "Satellite Wind Quality Mark" - units: "id" - - name: "height_assignment_method@MetaData" + - name: "MetaData/windHeightAssignMethod" source: variables/height_assignment_method longName: "Height Assignment Method" - units: "id" - - name: "generating_application@MetaData" + - name: "MetaData/windGeneratingApplication" + coordinates: "longitude latitude Confidence" source: variables/generating_application longName: "Generating Application" - units: "id" - - name: "pressure@ObsValue" + - name: "MetaData/pressure" coordinates: "longitude 
latitude" source: variables/pressure longName: "Pressure" - units: "pascals" + units: "Pa" - - name: "wind_direction@ObsValue" + - name: "ObsValue/windDirection" coordinates: "longitude latitude" source: variables/wind_direction longName: "Wind Direction" - units: "degrees true" + units: "degree" - - name: "wind_speed@ObsValue" + - name: "ObsValue/windSpeed" coordinates: "longitude latitude" source: variables/wind_speed longName: "Wind Speed" - units: "meters per second" + units: "m s-1" - - name: "percent_confidence@ObsValue" - coordinates: "longitude latitude" + - name: "MetaData/windPercentConfidence" + coordinates: "longitude latitude Confidence" source: variables/percent_confidence longName: "Percent Confidence" units: "percent" diff --git a/test/testinput/bufr_simple_groupby.yaml b/test/testinput/bufr_simple_groupby.yaml index b6bdb563c..3f9e4cc9d 100644 --- a/test/testinput/bufr_simple_groupby.yaml +++ b/test/testinput/bufr_simple_groupby.yaml @@ -34,34 +34,34 @@ observations: variables: - - name: "datetime@MetaData" + - name: "MetaData/dateTime" source: variables/timestamp - longName: "Datetime" - units: "datetime" + longName: "dateTime" + units: "seconds since 1970-01-01T00:00:00Z" - - name: "latitude@MetaData" + - name: "MetaData/latitude" source: variables/latitude longName: "Latitude" units: "degrees_north" range: [-90, 90] - - name: "longitude@MetaData" + - name: "MetaData/longitude" source: variables/longitude longName: "Longitude" units: "degrees_east" range: [-180, 180] - - name: "depth@MetaData" + - name: "MetaData/depthBelowWaterSurface" source: variables/depth - longName: "Depth below sea surface" + longName: "Depth below water surface" units: "m" - - name: "sea_water_temperature@ObsValue" + - name: "ObsValue/waterTemperature" source: variables/temp - longName: "Temperature at depth" - units: "deg K" + longName: "water temperature at depth" + units: "K" - - name: "sea_water_salinity@ObsValue" + - name: "ObsValue/salinity" source: variables/saln 
longName: "Salinity at depth" units: "PSU" diff --git a/test/testinput/bufr_splitting.yaml b/test/testinput/bufr_splitting.yaml index 4b6a45948..c87b0d5c7 100644 --- a/test/testinput/bufr_splitting.yaml +++ b/test/testinput/bufr_splitting.yaml @@ -46,18 +46,18 @@ observations: path: "*/BRITCSTC" variables: - - name: "dateTime@MetaData" + - name: "MetaData/dateTime" source: variables/timestamp longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - - name: "latitude@MetaData" + - name: "MetaData/latitude" source: variables/latitude longName: "Latitude" units: "degrees_north" range: [-90, 90] - - name: "longitude@MetaData" + - name: "MetaData/longitude" source: variables/longitude longName: "Longitude" units: "degrees_east" diff --git a/test/testinput/bufr_ssmis_v2.yaml b/test/testinput/bufr_ssmis.yaml similarity index 55% rename from test/testinput/bufr_ssmis_v2.yaml rename to test/testinput/bufr_ssmis.yaml index 67b5c0dbc..7e24e2c15 100644 --- a/test/testinput/bufr_ssmis_v2.yaml +++ b/test/testinput/bufr_ssmis.yaml @@ -8,8 +8,10 @@ observations: name: bufr obsdatain: "./testinput/gdas.t00z.ssmisu.tm00.bufr_d" mnemonicSets: - - mnemonics: [YEAR, MNTH, DAYS, HOUR, MINU, CLAT, CLON] - - mnemonics: [SAID, FOVN, BEARAZ, SFLG, RFLAG] + - mnemonics: [SAID, YEAR, MNTH, DAYS, HOUR, MINU, SECO] + - mnemonics: [SLNM, FOVN] + - mnemonics: [CLAT, CLON] + - mnemonics: [SFLG, RFLAG] - mnemonics: [CHNM, TMBR] channels: 1-24 @@ -22,22 +24,23 @@ observations: day: DAYS hour: HOUR minute: MINU + second: SECO + satelliteIdentifier: + mnemonic: SAID latitude: mnemonic: CLAT longitude: mnemonic: CLON - satelliteId: - mnemonic: SAID - channelNumber: - mnemonic: CHNM + scanLineNumber: + mnemonic: SLNM fieldOfViewNumber: mnemonic: FOVN - sensorAzimuthAngle: - mnemonic: BEARAZ - surfaceFlag: + earthSurfaceType: mnemonic: SFLG - rainFlag: + qualityFlags: mnemonic: RFLAG + sensorChannelNumber: + mnemonic: CHNM brightnessTemperature: mnemonic: TMBR @@ -46,92 +49,90 @@ observations: 
obsdataout: "./testrun/gdas.t00z.ssmisu.tm00.nc" dimensions: - - name: "nlocs" - size: variables/brightnessTemperature.nrows - - name: "nchans" + - name: "Location" + size: variables/latitude.nrows + - name: "Channel" size: variables/brightnessTemperature.ncols globals: - - name: "MetaData/platformCommonName" + - name: "platformCommonName" type: string - value: "SSMIS" + value: "SSMIS-DMSP18" - - name: "MetaData/platformLongDescription" + - name: "platformLongDescription" type: string value: "MTYP 021-201 DMSP SSM/IS Tb (UNIFIED PRE-PROCESSOR)" - - name: "MetaData/sensorCentralFrequency" - type: floatVector - value: [19.35, 19.35, 22.235, 37.0, 37.0, 50.3, 52.8, 53.596, - 54.4, 55.5, 57.29, 59.4, 63.283248, 60.792668, 60.792668, 60.792668, - 60.792668, 60.792668, 91.665, 91.665, 150, 183.311, 183.311, 183.311] +# - name: "sensorCentralFrequency" +# type: floatVector +# dimensions: ["Channel"] +# value: [19.35, 19.35, 22.235, 37.0, 37.0, 50.3, 52.8, 53.596, +# 54.4, 55.5, 57.29, 59.4, 63.283248, 60.792668, 60.792668, 60.792668, +# 60.792668, 60.792668, 91.665, 91.665, 150, 183.311, 183.311, 183.311] variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - dimensions: ["nlocs"] + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] longName: "Satellite identification" units: "" - - name: "MetaData/channelNumber" - source: variables/channelNumber - dimensions: ["nchans"] - longName: "Channel number" - units: "" - - name: "MetaData/latitude" source: variables/latitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Latitude" units: "degrees_north" range: [-90, 90] - name: "MetaData/longitude" source: variables/longitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Longitude" units: "degrees_east" range: [-180, 180] - name: "MetaData/dateTime" source: variables/timestamp - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "dateTime" units: "seconds since 
1970-01-01T00:00:00Z" + - name: "MetaData/scanLineNumber" + source: variables/scanLineNumber + dimensions: ["Location"] + longName: "Scan line number" + units: "" + - name: "MetaData/fieldOfViewNumber" source: variables/fieldOfViewNumber - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Field of view number" units: "" - - name: "MetaData/sensorAzimuthAngle" - source: variables/sensorAzimuthAngle - dimensions: ["nlocs"] - longName: "Sensor azimuth angle" - units: "degrees" - range: [0, 360] - - - name: "MetaData/surfaceFlag" - coordinates: "longitude latitude" - source: variables/surfaceFlag - dimensions: ["nlocs"] + - name: "MetaData/earthSurfaceType" + source: variables/earthSurfaceType + dimensions: ["Location"] longName: "Suface flag" - units: "code" + units: "" - - name: "MetaData/rainFlag" - coordinates: "longitude latitude" - source: variables/rainFlag - dimensions: ["nlocs"] + - name: "MetaData/qualityFlags" + source: variables/qualityFlags + dimensions: ["Location"] longName: "Rain flag" - units: "code" + units: "" + + - name: "MetaData/sensorChannelNumber" + source: variables/sensorChannelNumber + dimensions: ["Channel"] + longName: "Channel number" + units: "" - name: "ObsValue/brightnessTemperature" - coordinates: "longitude latitude nchans" source: variables/brightnessTemperature - dimensions: ["nlocs", "nchans"] + coordinates: "longitude latitude Channel" + dimensions: ["Location", "Channel"] longName: "Brightness temperature" units: "K" range: [120, 500] - chunks: [1000, 15] + chunks: [1000, 24] compressionLevel: 4 diff --git a/test/testinput/bufr_wmo_amdar_multi.yaml b/test/testinput/bufr_wmo_amdar_multi.yaml index 75019aeb8..eff8cfc0a 100644 --- a/test/testinput/bufr_wmo_amdar_multi.yaml +++ b/test/testinput/bufr_wmo_amdar_multi.yaml @@ -26,6 +26,7 @@ observations: query: "*/CLONH" pressureAltitude: query: "[*/FLVLST, */AMDARNOL/FLVLST]" + type: float aircraftRegistrationNum: query: "*/ACRN" aircraftFlightNum: @@ -40,7 +41,7 @@ 
observations: query: "*/TASP" aircraftHeading: query: "*/ACTH" - aircraftRollAngle: + aircraftRollAngleQuality: query: "[*/ROLQ, */AMDARNOL/ROLQ]" temperatureAir: query: "[*/TMDB, */AMDARNOL/TMDB, */TMDBST]" @@ -55,6 +56,10 @@ observations: backend: netcdf obsdataout: "./testrun/bufr_wmo_amdar_multi.nc" + dimensions: + - name: AmdarSequence + path: "*/AMDARNOL" + variables: - name: "MetaData/dateTime" source: variables/timestamp @@ -72,23 +77,24 @@ observations: units: "degrees_east" - name: "MetaData/height" + coordinates: "longitude latitude AmdarSequence" source: variables/pressureAltitude longName: "Pressure altitude" units: "m" - - name: "MetaData/aircraftRegistrationNum" + - name: "MetaData/aircraftIdentifier" source: variables/aircraftRegistrationNum longName: "Aircraft registration number or other ID" - - name: "MetaData/aircraftFlightNum" + - name: "MetaData/aircraftFlightNumber" source: variables/aircraftFlightNum longName: "Aircraft flight number" - - name: "MetaData/aircraftTailNum" + - name: "MetaData/aircraftTailNumber" source: variables/aircraftTailNum longName: "Aircraft tail number" - - name: "MetaData/observationSequenceNum" + - name: "MetaData/sequenceNumber" source: variables/observationSequenceNum longName: "Observation sequence number" @@ -96,7 +102,7 @@ observations: source: variables/aircraftFlightPhase longName: "Aircraft flight phase (ascending/descending/level)" - - name: "MetaData/aircraftTrueAirspeed" + - name: "MetaData/aircraftVelocity" source: variables/aircraftTrueAirspeed longName: "Aircraft true airspeed" units: "m s-1" @@ -104,32 +110,33 @@ observations: - name: "MetaData/aircraftHeading" source: variables/aircraftHeading longName: "Aircraft heading" - units: "deg" + units: "degree" - - name: "MetaData/aircraftRollAngle" - source: variables/aircraftRollAngle - longName: "Aircraft roll angle quality" + # - name: "MetaData/aircraftRollAngleQuality" + # coordinates: "longitude latitude AmdarSequence" + # source: 
variables/aircraftRollAngleQuality + # longName: "Aircraft roll angle quality" - - name: "ObsValue/air_temperature" - coordinates: "longitude latitude" + - name: "ObsValue/airTemperature" + coordinates: "longitude latitude AmdarSequence" source: variables/temperatureAir longName: "Air Temperature" units: "K" - - name: "ObsValue/specific_humidity" + - name: "ObsValue/specificHumidity" coordinates: "longitude latitude" source: variables/waterVaporMixingRatio longName: "specific humidity" units: "kg kg-1" - - name: "ObsValue/wind_direction" - coordinates: "longitude latitude" + - name: "ObsValue/windDirection" + coordinates: "longitude latitude AmdarSequence" source: variables/windDirection longName: "Wind Direction" units: "degrees" - - name: "ObsValue/wind_speed" - coordinates: "longitude latitude" + - name: "ObsValue/windSpeed" + coordinates: "longitude latitude AmdarSequence" source: variables/windSpeed longName: "Wind Speed" units: "m s-1" diff --git a/test/testinput/buoy_wmoBUFR2ioda.yaml b/test/testinput/buoy_wmoBUFR2ioda.yaml index 3fc9fc9e4..d7a9e7dc8 100644 --- a/test/testinput/buoy_wmoBUFR2ioda.yaml +++ b/test/testinput/buoy_wmoBUFR2ioda.yaml @@ -37,19 +37,19 @@ observations: mnemonic: CLONH stationElevation: mnemonic: HSAWS - temperatureAir: + airTemperature: mnemonic: TMDB - temperatureDewpoint: + dewpointTemperature: mnemonic: TMDP windDirection: mnemonic: WDIR windSpeed: mnemonic: WSPD - pressureStation: + stationPressure: mnemonic: PRES - meanSeaLevelPressure: + pressureReducedToMeanSeaLevel: mnemonic: PMSL - temperatureSeaSurface: + seaSurfaceTemperature: mnemonic: SST0 ioda: @@ -57,97 +57,97 @@ observations: obsdataout: "./testrun/buoy_wmo_multi.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows variables: - name: "MetaData/dateTime" source: variables/timestamp - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/latitude" source: 
variables/latitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Latitude" units: "degrees_north" - name: "MetaData/longitude" source: variables/longitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Longitude" units: "degrees_east" - - name: "MetaData/station_id" + - name: "MetaData/stationIdentification" source: variables/stationIdentification - dimensions: [ "nlocs" ] - longName: "Station Identification" + dimensions: [ "Location" ] + longName: "Station Identifier" units: "none" - name: "MetaData/stationLongName" source: variables/stationLongName - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Station long name" units: "none" - - name: "MetaData/station_elevation" + - name: "MetaData/stationElevation" source: variables/stationElevation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Station Elevation" units: "m" - name: "MetaData/height" source: variables/stationElevation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Height" units: "m" - - name: "ObsValue/air_temperature" + - name: "ObsValue/airTemperature" + source: variables/airTemperature coordinates: "longitude latitude" - source: variables/temperatureAir - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Air Temperature" units: "K" - - name: "ObsValue/dewpoint_temperature" + - name: "ObsValue/dewpointTemperature" + source: variables/dewpointTemperature coordinates: "longitude latitude" - source: variables/temperatureDewpoint - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Dewpoint Temperature" units: "K" - - name: "ObsValue/wind_direction" - coordinates: "longitude latitude" + - name: "ObsValue/windDirection" source: variables/windDirection - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Direction" units: "degrees" - - name: "ObsValue/wind_speed" - coordinates: "longitude latitude" + - name: "ObsValue/windSpeed" source: 
variables/windSpeed - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Speed" units: "m s-1" - - name: "ObsValue/surface_pressure" + - name: "ObsValue/stationPressure" + source: variables/stationPressure coordinates: "longitude latitude" - source: variables/pressureStation - dimensions: [ "nlocs" ] - longName: "Surface Pressure" + dimensions: [ "Location" ] + longName: "Surface station pressure" units: "Pa" - - name: "ObsValue/sea_level_pressure" + - name: "ObsValue/pressureReducedToMeanSeaLevel" + source: variables/pressureReducedToMeanSeaLevel coordinates: "longitude latitude" - source: variables/meanSeaLevelPressure - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Mean Sea Level Pressure" units: "Pa" - - name: "ObsValue/sea_surface_temperature" + - name: "ObsValue/seaSurfaceTemperature" + source: variables/seaSurfaceTemperature coordinates: "longitude latitude" - source: variables/temperatureSeaSurface - dimensions: [ "nlocs" ] - longName: "Sea Surface Temperature" + dimensions: [ "Location" ] + longName: "Sea-surface temperature" units: "K" diff --git a/test/testinput/gdas.t00z.1bamua.tm00.bufr_d b/test/testinput/gdas.t00z.1bamua.tm00.bufr_d new file mode 100644 index 000000000..2b53e63f3 --- /dev/null +++ b/test/testinput/gdas.t00z.1bamua.tm00.bufr_d @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abd06cad28d4bb5d18ee03ffd90a78bc6099ba543f76d6cdb99b0f8b90f6a5d8 +size 492555 diff --git a/test/testinput/gdas.t00z.1bhrs4.tm00.bufr_d b/test/testinput/gdas.t00z.1bhrs4.tm00.bufr_d index cdc7fe9d2..bbc30b310 100644 --- a/test/testinput/gdas.t00z.1bhrs4.tm00.bufr_d +++ b/test/testinput/gdas.t00z.1bhrs4.tm00.bufr_d @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6aa27e06ca5cca41952f6a39728001b138c2079d41bc2a1f59cad2a436417eb4 -size 501104 +oid sha256:dda9ac9eda2e1c5dffcce9b3774a1836e1d46181003e93984a20abe443b8b74c +size 271749 diff --git 
a/test/testinput/gdas.t00z.1bmhs.tm00.bufr_d b/test/testinput/gdas.t00z.1bmhs.tm00.bufr_d new file mode 100644 index 000000000..e91430c52 --- /dev/null +++ b/test/testinput/gdas.t00z.1bmhs.tm00.bufr_d @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c91f1d846f75c808a3944c6f6c2bb126f3632920dbdd0e167f3178f9e83f11c8 +size 941961 diff --git a/test/testinput/gdas.t00z.airsev.tm00.bufr_d b/test/testinput/gdas.t00z.airsev.tm00.bufr_d new file mode 100644 index 000000000..37b9ad514 --- /dev/null +++ b/test/testinput/gdas.t00z.airsev.tm00.bufr_d @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2f929d61d06c3ec8612cb56a21c9ae391c9d79376d356bb52c49b67f2131a574 +size 380537 diff --git a/test/testinput/gdas.t00z.atms.tm00.bufr_d b/test/testinput/gdas.t00z.atms.tm00.bufr_d new file mode 100644 index 000000000..b0cd69743 --- /dev/null +++ b/test/testinput/gdas.t00z.atms.tm00.bufr_d @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:982c56d7173d765dde58797bfac37bbcfe28b8c2d22bdcf92ef4b7520607f679 +size 584798 diff --git a/test/testinput/gdas.t00z.avcsam.tm00.bufr_d b/test/testinput/gdas.t00z.avcsam.tm00.bufr_d new file mode 100644 index 000000000..445dc3989 --- /dev/null +++ b/test/testinput/gdas.t00z.avcsam.tm00.bufr_d @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:20f37fe4b9b0b7f0eca694c562696540f1b84e6851909651cbb466cf0aed41d5 +size 405237 diff --git a/test/testinput/gdas.t00z.crisf4.tm00.bufr_d b/test/testinput/gdas.t00z.crisf4.tm00.bufr_d new file mode 100644 index 000000000..024d7a5fa --- /dev/null +++ b/test/testinput/gdas.t00z.crisf4.tm00.bufr_d @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3bb2ebff85458875644569f2a503e0dcfa5da6fa344b8876ec409972c8e64af0 +size 792000 diff --git a/test/testinput/gdas.t00z.mtiasi.tm00.bufr_d b/test/testinput/gdas.t00z.mtiasi.tm00.bufr_d new file mode 100644 index 000000000..9fb5902dc --- /dev/null +++ 
b/test/testinput/gdas.t00z.mtiasi.tm00.bufr_d @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:521fd7f05f1fa15a95386b74cf856dba340180587d4d4d4f2ac43b095cfa242b +size 574611 diff --git a/test/testinput/gdas.t00z.ssmisu.tm00.bufr_d b/test/testinput/gdas.t00z.ssmisu.tm00.bufr_d new file mode 100644 index 000000000..c01801747 --- /dev/null +++ b/test/testinput/gdas.t00z.ssmisu.tm00.bufr_d @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd00bffeef1f3d3a4a5fb8275310a426e97fb77ba87adfdd007d29ba08696868 +size 604509 diff --git a/test/testinput/gdas.t12z.aircft.tm00.bufr_d b/test/testinput/gdas.t12z.aircft.tm00.bufr_d index e1a2a7e89..704f6fa51 100644 --- a/test/testinput/gdas.t12z.aircft.tm00.bufr_d +++ b/test/testinput/gdas.t12z.aircft.tm00.bufr_d @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:f6d73827ee889642bb30366eade2a4dfbee134ff2f2c6d93bcd9d123071419bb -size 331688 +oid sha256:41b1ee8552c02f8015a32fe3eea1708fa3fd39c8bf02bd5cd4a7a42d50c2adf9 +size 123088 diff --git a/test/testinput/gmao-obs-20180415.nc b/test/testinput/gmao-obs-20180415.nc new file mode 100644 index 000000000..8045a2450 --- /dev/null +++ b/test/testinput/gmao-obs-20180415.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55bb4525755dcf3b76667b5e1ea276d4b67df3f790a343084d93df4971fd88e4 +size 192616 diff --git a/test/testinput/gnssro_wmoBUFR2ioda.yaml b/test/testinput/gnssro_wmoBUFR2ioda.yaml index 8cf315c04..0bfc57cfc 100644 --- a/test/testinput/gnssro_wmoBUFR2ioda.yaml +++ b/test/testinput/gnssro_wmoBUFR2ioda.yaml @@ -37,7 +37,7 @@ observations: mnemonic: HEIT geopotentialHeight: mnemonic: GPHTST - airPressure: + pressure: mnemonic: PRES airTemperature: mnemonic: TMDBST @@ -48,28 +48,26 @@ observations: mnemonic: SAID instrumentIdentifier: mnemonic: SIID - satelliteClassification: - mnemonic: SCLF platformTransmitterId: mnemonic: PTID - originatingCenter: + dataProviderOrigin: mnemonic: OGCE qualityFlags: 
mnemonic: QFRO percentConfidence: mnemonic: PCCF - impactParameter: + impactParameterRO: mnemonic: IMPP geoidUndulation: mnemonic: GEODU - azimuth: + sensorAzimuthAngle: mnemonic: BEARAZ earthRadiusCurvature: mnemonic: ELRC bendingAngle: mnemonic: BNDA - refractivity: + atmosphericRefractivity: mnemonic: ARFR ioda: @@ -77,141 +75,124 @@ observations: obsdataout: "./testrun/gnssro_2020-306-2358C2E6.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows variables: - name: "MetaData/dateTime" source: variables/timestamp - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/latitude" source: variables/latitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Latitude" units: "degrees_north" - name: "MetaData/longitude" source: variables/longitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Longitude" units: "degrees_east" - name: "MetaData/height" source: variables/height - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Height" units: "m" - name: "MetaData/geopotentialHeight" source: variables/geopotentialHeight - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Geopotential height" units: "m" - - name: "MetaData/airPressure" - coordinates: "longitude latitude" - source: variables/airPressure - dimensions: [ "nlocs" ] + - name: "MetaData/pressure" + source: variables/pressure + dimensions: [ "Location" ] longName: "Air pressure" units: "Pa" - - name: "MetaData/process_center - coordinates: "longitude latitude" - source: variables/originatingCenter - dimensions: [ "nlocs" ] + - name: "MetaData/dataProviderOrigin" + source: variables/dataProviderOrigin + dimensions: [ "Location" ] longName: "Originating center" - units: "unitless" + units: "" - - name: "MetaData/gnss_sat_class - coordinates: "longitude latitude" - source: variables/satelliteClassification - dimensions: [ "nlocs" ] - 
longName: "GNSS satellite classification" - units: "unitless" - - - name: "MetaData/reference_sat_id - coordinates: "longitude latitude" + - name: "MetaData/satelliteIdentifier" source: variables/satelliteIdentifier - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "GNSS satellite ID" - units: "unitless" + units: "" - - name: "MetaData/occulting_sat_is - coordinates: "longitude latitude" + - name: "MetaData/instrumentIdentifier" source: variables/instrumentIdentifier - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Instrument ID" - units: "unitless" + units: "" - - name: "MetaData/occulting_sat_id - coordinates: "longitude latitude" + - name: "MetaData/platformTransmitterId" source: variables/platformTransmitterId - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "GNSS platform transmitter ID" - units: "unitless" + units: "" - - name: "MetaData/qualityFlags - coordinates: "longitude latitude" + - name: "QualityMarker/qualityFlags" source: variables/qualityFlags - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Quality flags" - units: "unitless" + units: "" - - name: "MetaData/earth_radius_of_curvature" - coordinates: "longitude latitude" + - name: "MetaData/earthRadiusCurvature" source: variables/earthRadiusCurvature - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Earth local radius of curvature" units: "m" - - name: "MetaData/geoid_height_above_reference_ellipsoid - coordinates: "longitude latitude" + - name: "MetaData/geoidUndulation" source: variables/geoidUndulation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Geoid undulation" units: "m" - - name: "MetaData/impact_parameter - coordinates: "longitude latitude" - source: variables/impactParameter - dimensions: [ "nlocs" ] + - name: "MetaData/impactParameterRO" + source: variables/impactParameterRO + dimensions: [ "Location" ] longName: "Impact parameter" - units: "m" + units: "" - - name: 
"MetaData/sensor_azimuth_angle - coordinates: "longitude latitude" - source: variables/azimuth - dimensions: [ "nlocs" ] + - name: "MetaData/sensorAzimuthAngle" + source: variables/sensorAzimuthAngle + dimensions: [ "Location" ] longName: "Sensor azimuth angle" units: "degrees" - name: "ObsValue/airTemperature" - coordinates: "longitude latitude" source: variables/airTemperature - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Air temperature" units: "K" - name: "ObsValue/specificHumidity" - coordinates: "longitude latitude" source: variables/specificHumidity - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Specific humidity" units: "kg kg-1" - - name: "ObsValue/bending_angle" - coordinates: "longitude latitude" + - name: "ObsValue/bendingAngle" source: variables/bendingAngle - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Bending angle" units: "radians" - - name: "ObsValue/refractivity" + - name: "ObsValue/atmosphericRefractivity" + source: variables/atmosphericRefractivity coordinates: "longitude latitude" - source: variables/refractivity - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Refractivity" - units: "N units" + units: "INVALID N units" diff --git a/test/testinput/imssnow_24km.grib2 b/test/testinput/imssnow_24km.grib2 deleted file mode 100644 index a15cd5705..000000000 --- a/test/testinput/imssnow_24km.grib2 +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:98e353147e3375927fcdfe9e976e9d19c4a2b3881990fd7c23ae4f25eb443462 -size 7241 diff --git a/test/testinput/rass_wmoBUFR2ioda.yaml b/test/testinput/rass_wmoBUFR2ioda.yaml index e8b793fc2..d9b190a65 100644 --- a/test/testinput/rass_wmoBUFR2ioda.yaml +++ b/test/testinput/rass_wmoBUFR2ioda.yaml @@ -26,9 +26,9 @@ observations: day: DAYS hour: HOUR minute: MINU - stationIdWMOblock: + wmoBlockNumber: 
mnemonic: WMOB - stationIdWMOstation: + wmoStationNumber: mnemonic: WMOS latitude: mnemonic: CLAT @@ -50,68 +50,69 @@ observations: obsdataout: "./testrun/rass_wmo_multi.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows variables: - name: "MetaData/dateTime" source: variables/timestamp - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/latitude" source: variables/latitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Latitude" units: "degrees_north" - name: "MetaData/longitude" source: variables/longitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Longitude" units: "degrees_east" - - name: "MetaData/stationIdWMOblock" - source: variables/stationIdWMOblock - dimensions: [ "nlocs" ] + - name: "MetaData/wmoBlockNumber" + source: variables/wmoBlockNumber + dimensions: [ "Location" ] longName: "Station Identification WMO block number" - units: "none" + units: "" - - name: "MetaData/stationIdWMOstation" - source: variables/stationIdWMOstation - dimensions: [ "nlocs" ] + - name: "MetaData/wmoStationNumber" + source: variables/wmoStationNumber + dimensions: [ "Location" ] longName: "Station Identification WMO station number" - units: "none" + units: "" - - name: "MetaData/station_elevation" + - name: "MetaData/stationElevation" source: variables/stationElevation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Station Elevation" units: "m" - name: "MetaData/height" source: variables/height - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Height" units: "m" # ADD this in later # - name: "MetaData/qualityIndicator" # source: variables/qualityIndicator - # dimensions: [ "nlocs" ] + # dimensions: [ "Location" ] # longName: "Quality Indicator (0=good, 1=no-good)" # units: "unitless" - - name: "MetaData/signalToNoiseRatio" + - name: "ObsValue/signalToNoiseRatio" source: 
variables/signalToNoiseRatio - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Signal-to-noise Ratio" - units: "unitless" + units: "0.1 lg(re 0.001 m2 kg s-3)" - - name: "ObsValue/virtual_temperature" - coordinates: "longitude latitude" + - name: "ObsValue/virtualTemperature" source: variables/virtualTemperature - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Virtual Temperature" units: "K" diff --git a/test/testinput/satwind_EUMet_wmoBUFR2ioda.yaml b/test/testinput/satwind_EUMet_wmoBUFR2ioda.yaml index 2bf8b503e..7c539a97a 100644 --- a/test/testinput/satwind_EUMet_wmoBUFR2ioda.yaml +++ b/test/testinput/satwind_EUMet_wmoBUFR2ioda.yaml @@ -38,7 +38,7 @@ observations: mnemonic: CLATH longitude: mnemonic: CLONH - satelliteId: + satelliteIdentifier: mnemonic: SAID generatingApplication: mnemonic: GNAPS @@ -54,7 +54,7 @@ observations: mnemonic: AMVQ sensorZenithAngle: mnemonic: SAZA - pressureAir: + pressure: mnemonic: PRLC windEastward: mnemonic: UWND @@ -68,103 +68,97 @@ observations: obsdataout: "./testrun/satwind_EUMet.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows - - name: "nconfidences" + - name: "Confidence" size: variables/windPercentConfidence.ncols globals: - - name: "MetaData/platformCommonName" + - name: "platform" type: string value: "EUMetSat_AMV" - - name: "MetaData/platformLongDescription" - type: string - value: "EUMetSat AMV from IR cloudy regions" - variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - dimensions: ["nlocs"] - longName: "Satellite identification" + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] + longName: "Satellite identifier" units: "" - name: "MetaData/latitude" source: variables/latitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Latitude" units: "degrees" range: [-90, 90] - name: 
"MetaData/longitude" source: variables/longitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Longitude" units: "degrees" range: [-180, 180] - name: "MetaData/dateTime" source: variables/timestamp - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/generatingApplication" source: variables/generatingApplication - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Generating application" - units: "unitless" + units: "" - name: "MetaData/windComputationMethod" source: variables/windComputationMethod - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Satellite wind calculation method" - units: "unitless" + units: "" - name: "MetaData/windHeightAssignMethod" source: variables/windHeightAssignMethod - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Satellite wind height assignment method" - units: "unitless" + units: "" - name: "MetaData/sensorZenithAngle" - coordinates: "longitude latitude" source: variables/sensorZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Satellite zenith angle" units: "degrees" - - name: "MetaData/pressureAir" - coordinates: "longitude latitude" - source: variables/pressureAir - dimensions: ["nlocs"] + - name: "MetaData/pressure" + source: variables/pressure + dimensions: ["Location"] longName: "Pressure" units: "Pa" - name: "ObsValue/windEastward" - coordinates: "longitude latitude" source: variables/windEastward - dimensions: ["nlocs"] + coordinates: "longitude latitude" + dimensions: ["Location"] longName: "Wind eastward component" units: "m s-1" - name: "ObsValue/windNorthward" - coordinates: "longitude latitude" source: variables/windNorthward - dimensions: ["nlocs"] + coordinates: "longitude latitude" + dimensions: ["Location"] longName: "Wind northward component" units: "m s-1" - name: "MetaData/windPercentConfidence" - coordinates: "longitude latitude nconfidences" source: 
variables/windPercentConfidence - dimensions: ["nlocs", "nconfidences"] + coordinates: "longitude latitude Confidence" + dimensions: ["Location", "Confidence"] longName: "Percent confidence" units: "percent" - - name: "MetaData/sensorCentralFrequency" - coordinates: "longitude latitude nconfidences" - source: variables/sensorCentralFrequency - dimensions: ["nlocs"] - longName: "Sensor Central Frequency" - units: "hz" +# - name: "MetaData/sensorCentralFrequency" +# source: variables/sensorCentralFrequency +# coordinates: "longitude latitude Confidence" +# dimensions: ["Location", "Confidence"] +# longName: "Sensor Central Frequency" +# units: "Hz" diff --git a/test/testinput/satwind_Himawari_wmoBUFR2ioda.yaml b/test/testinput/satwind_Himawari_wmoBUFR2ioda.yaml index 1059bc7e5..f4f041c43 100644 --- a/test/testinput/satwind_Himawari_wmoBUFR2ioda.yaml +++ b/test/testinput/satwind_Himawari_wmoBUFR2ioda.yaml @@ -40,7 +40,7 @@ observations: mnemonic: CLATH longitude: mnemonic: CLONH - satelliteId: + satelliteIdentifier: mnemonic: SAID dataProviderOrigin: mnemonic: GCLONG @@ -56,7 +56,7 @@ observations: mnemonic: TCMD sensorZenithAngle: mnemonic: SAZA - pressureAir: + pressure: mnemonic: PRLC windDirection: mnemonic: WDIR @@ -70,109 +70,103 @@ observations: obsdataout: "./testrun/satwind_Himawari.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows - - name: "nconfidences" + - name: "Confidence" size: variables/windPercentConfidence.ncols globals: - - name: "MetaData/platformCommonName" + - name: "platform" type: string value: "Himiwari_AMV" - - name: "MetaData/platformLongDescription" - type: string - value: "Himiwari AMV from IR cloudy regions" - variables: - - name: "MetaData/satelliteId" - source: variables/satelliteId - dimensions: ["nlocs"] + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] longName: "Satellite identification" units: "" - name: "MetaData/latitude" source: 
variables/latitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Latitude" units: "degrees" range: [-90, 90] - name: "MetaData/longitude" source: variables/longitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Longitude" units: "degrees" range: [-180, 180] - name: "MetaData/dateTime" source: variables/timestamp - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/dataProviderOrigin" source: variables/dataProviderOrigin - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Data provider origin" units: "" - name: "MetaData/generatingApplication" source: variables/generatingApplication - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Generating application" units: "" - name: "MetaData/windComputationMethod" source: variables/windComputationMethod - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Satellite wind calculation method" - units: " " + units: "" - name: "MetaData/windHeightAssignMethod" source: variables/windHeightAssignMethod - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Satellite wind height assignment method" - units: " " + units: "" - name: "MetaData/sensorZenithAngle" - coordinates: "longitude latitude" source: variables/sensorZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Satellite zenith angle" units: "degrees" - - name: "MetaData/pressureAir" - coordinates: "longitude latitude" - source: variables/pressureAir - dimensions: ["nlocs"] + - name: "MetaData/pressure" + source: variables/pressure + dimensions: ["Location"] longName: "Pressure" units: "Pa" - name: "ObsValue/windDirection" - coordinates: "longitude latitude" source: variables/windDirection - dimensions: ["nlocs"] + coordinates: "longitude latitude" + dimensions: ["Location"] longName: "Wind direction" - units: "degrees true" + units: "degrees" - name: "ObsValue/windSpeed" - coordinates: "longitude 
latitude" source: variables/windSpeed - dimensions: ["nlocs"] + coordinates: "longitude latitude" + dimensions: ["Location"] longName: "Wind Speed" units: "m s-1" - name: "MetaData/windPercentConfidence" - coordinates: "longitude latitude nconfidences" source: variables/windPercentConfidence - dimensions: ["nlocs", "nconfidences"] + coordinates: "longitude latitude Confidence" + dimensions: ["Location", "Confidence"] longName: "Percent confidence" units: "percent" - - name: "MetaData/sensorCentralFrequency" - coordinates: "longitude latitude nconfidences" - source: variables/sensorCentralFrequency - dimensions: ["nlocs"] - longName: "Sensor Central Frequency" - units: "hz" +# - name: "MetaData/sensorCentralFrequency" +# source: variables/sensorCentralFrequency +# coordinates: "longitude latitude nconfidences" +# dimensions: ["Location"] +# longName: "Sensor Central Frequency" +# units: "Hz" diff --git a/test/testinput/satwind_Insat_wmoBUFR2ioda.yaml b/test/testinput/satwind_Insat_wmoBUFR2ioda.yaml index 078970ad5..c35f816ba 100644 --- a/test/testinput/satwind_Insat_wmoBUFR2ioda.yaml +++ b/test/testinput/satwind_Insat_wmoBUFR2ioda.yaml @@ -39,7 +39,7 @@ observations: mnemonic: CLATH longitude: mnemonic: CLONH - satelliteId: + satelliteIdentifier: mnemonic: SAID dataProviderOrigin: mnemonic: GCLONG @@ -55,7 +55,7 @@ observations: mnemonic: TCMD sensorZenithAngle: mnemonic: SAZA - pressureAir: + pressure: mnemonic: PRLC windDirection: mnemonic: WDIR @@ -69,109 +69,103 @@ observations: obsdataout: "./testrun/satwind_Insat.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows - - name: "nconfidences" + - name: "Confidence" size: variables/windPercentConfidence.ncols globals: - - name: "MetaData/platformCommonName" + - name: "platform" type: string value: "Insat_AMV" - - name: "MetaData/platformLongDescription" - type: string - value: "Insat (Indian) AMV from IR cloudy regions" - variables: - - name: "MetaData/satelliteId" - source: 
variables/satelliteId - dimensions: ["nlocs"] + - name: "MetaData/satelliteIdentifier" + source: variables/satelliteIdentifier + dimensions: ["Location"] longName: "Satellite identification" units: "" - name: "MetaData/latitude" source: variables/latitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Latitude" units: "degrees" range: [-90, 90] - name: "MetaData/longitude" source: variables/longitude - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Longitude" units: "degrees" range: [-180, 180] - name: "MetaData/dateTime" source: variables/timestamp - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/dataProviderOrigin" source: variables/dataProviderOrigin - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Data provider origin" units: "" - name: "MetaData/generatingApplication" source: variables/generatingApplication - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Generating application" units: "" - name: "MetaData/windComputationMethod" source: variables/windComputationMethod - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Satellite wind calculation method" - units: " " + units: "" - name: "MetaData/windHeightAssignMethod" source: variables/windHeightAssignMethod - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Satellite wind height assignment method" - units: " " + units: "" - name: "MetaData/sensorZenithAngle" - coordinates: "longitude latitude" source: variables/sensorZenithAngle - dimensions: ["nlocs"] + dimensions: ["Location"] longName: "Satellite zenith angle" units: "degrees" - - name: "MetaData/air_pressure" - coordinates: "longitude latitude" - source: variables/pressureAir - dimensions: ["nlocs"] + - name: "MetaData/pressure" + source: variables/pressure + dimensions: ["Location"] longName: "Pressure" units: "Pa" + - name: "MetaData/windPercentConfidence" + source: 
variables/windPercentConfidence + coordinates: "longitude latitude Confidence" + dimensions: ["Location", "Confidence"] + longName: "Percent confidence" + units: "percent" + +# - name: "MetaData/sensorCentralFrequency" +# source: variables/sensorCentralFrequency +# coordinates: "longitude latitude Confidence" +# dimensions: ["Location"] +# longName: "Sensor Central Frequency" +# units: "Hz" + - name: "ObsValue/windDirection" - coordinates: "longitude latitude" source: variables/windDirection - dimensions: ["nlocs"] + coordinates: "longitude latitude" + dimensions: ["Location"] longName: "Wind direction" - units: "degrees true" + units: "degrees" - name: "ObsValue/windSpeed" - coordinates: "longitude latitude" source: variables/windSpeed - dimensions: ["nlocs"] + coordinates: "longitude latitude" + dimensions: ["Location"] longName: "Wind Speed" units: "m s-1" - - - name: "MetaData/windPercentConfidence" - coordinates: "longitude latitude nconfidences" - source: variables/windPercentConfidence - dimensions: ["nlocs", "nconfidences"] - longName: "Percent confidence" - units: "percent" - - - name: "MetaData/sensorCentralFrequency" - coordinates: "longitude latitude nconfidences" - source: variables/sensorCentralFrequency - dimensions: ["nlocs"] - longName: "Sensor Central Frequency" - units: "hz" diff --git a/test/testinput/ship_wmoBUFR2ioda.yaml b/test/testinput/ship_wmoBUFR2ioda.yaml index 6e4522076..12f079466 100644 --- a/test/testinput/ship_wmoBUFR2ioda.yaml +++ b/test/testinput/ship_wmoBUFR2ioda.yaml @@ -40,21 +40,21 @@ observations: mnemonic: CLON stationElevation: mnemonic: HBMSL - temperatureAir: + airTemperature: mnemonic: TMDB - temperatureDewpoint: + dewpointTemperature: mnemonic: TMDP - temperatureWetbulb: + wetBulbTemperature: mnemonic: TMWB windDirection: mnemonic: WDIR windSpeed: mnemonic: WSPD - pressureStation: + stationPressure: mnemonic: PRES - pressureMeanSeaLevel: + pressureReducedToMeanSeaLevel: mnemonic: PMSL - temperatureSeaSurface: + 
seaSurfaceTemperature: mnemonic: SST1 ioda: @@ -62,97 +62,104 @@ observations: obsdataout: "./testrun/ship_wmo_multi.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows variables: - name: "MetaData/dateTime" source: variables/timestamp - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/latitude" source: variables/latitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Latitude" units: "degrees_north" - name: "MetaData/longitude" source: variables/longitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Longitude" units: "degrees_east" - name: "MetaData/stationIdentification" source: variables/stationIdentification - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Station Identification" units: "none" - name: "MetaData/stationType" source: variables/stationType - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Station type" units: "none" - - name: "MetaData/station_elevation" + - name: "MetaData/stationElevation" source: variables/stationElevation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Station Elevation" units: "m" - name: "MetaData/height" source: variables/stationElevation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Height" units: "m" - - name: "ObsValue/air_temperature" + - name: "ObsValue/airTemperature" + source: variables/airTemperature coordinates: "longitude latitude" - source: variables/temperatureAir - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Air Temperature" units: "K" - - name: "ObsValue/dewpoint_temperature" + - name: "ObsValue/dewpointTemperature" + source: variables/dewpointTemperature coordinates: "longitude latitude" - source: variables/temperatureDewpoint - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Dewpoint Temperature" units: "K" - - name: 
"ObsValue/wetbulb_temperature" + - name: "ObsValue/wetBulbTemperature" + source: variables/wetBulbTemperature coordinates: "longitude latitude" - source: variables/temperatureWetbulb - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Wetbulb Temperature" units: "K" - - name: "ObsValue/wind_direction" - coordinates: "longitude latitude" + - name: "ObsValue/windDirection" source: variables/windDirection - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Direction" units: "degrees" - - name: "ObsValue/wind_speed" - coordinates: "longitude latitude" + - name: "ObsValue/windSpeed" source: variables/windSpeed - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Speed" units: "m s-1" - - name: "ObsValue/surface_pressure" + - name: "ObsValue/pressureReducedToMeanSeaLevel" + source: variables/pressureReducedToMeanSeaLevel + coordinates: "longitude latitude" + dimensions: [ "Location" ] + longName: "Mean sea-level pressure" + units: "Pa" + + - name: "ObsValue/stationPressure" + source: variables/stationPressure coordinates: "longitude latitude" - source: variables/pressureStation - dimensions: [ "nlocs" ] - longName: "Surface Pressure" + dimensions: [ "Location" ] + longName: "Surface station Pressure" units: "Pa" - - name: "ObsValue/sea_surface_temperature" + - name: "ObsValue/seaSurfaceTemperature" + source: variables/seaSurfaceTemperature coordinates: "longitude latitude" - source: variables/temperatureSeaSurface - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Sea Surface Temperature" units: "K" diff --git a/test/testinput/sonde_wmo_multi.bufr b/test/testinput/sonde_wmo_multi.bufr new file mode 100644 index 000000000..f619a0dc6 --- /dev/null +++ b/test/testinput/sonde_wmo_multi.bufr @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:988697588688cd7074c535b7e0ff399a230800f5af753ea79a6bed2a41cffbeb +size 147187 
diff --git a/test/testinput/synop_wmoBUFR2ioda.yaml b/test/testinput/synop_wmoBUFR2ioda.yaml index fd1747eb5..3997e9f93 100644 --- a/test/testinput/synop_wmoBUFR2ioda.yaml +++ b/test/testinput/synop_wmoBUFR2ioda.yaml @@ -25,9 +25,9 @@ observations: day: DAYS hour: HOUR minute: MINU - stationIdWMOblock: + wmoBlockNumber: mnemonic: WMOB - stationIdWMOstation: + wmoStationNumber: mnemonic: WMOS stationLongName: mnemonic: STSN @@ -37,15 +37,15 @@ observations: mnemonic: CLONH stationElevation: mnemonic: HSMSL - temperatureAir: + airTemperature: mnemonic: TMDB - temperatureDewpoint: + dewpointTemperature: mnemonic: TMDP windDirection: mnemonic: WDIR windSpeed: mnemonic: WSPD - pressureStation: + stationPressure: mnemonic: PRES ioda: @@ -53,83 +53,83 @@ observations: obsdataout: "./testrun/synop_wmo_multi.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows variables: - name: "MetaData/dateTime" source: variables/timestamp - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/latitude" source: variables/latitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Latitude" units: "degrees_north" - name: "MetaData/longitude" source: variables/longitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Longitude" units: "degrees_east" - - name: "MetaData/stationIdWMOblock" - source: variables/stationIdWMOblock - dimensions: [ "nlocs" ] + - name: "MetaData/wmoBlockNumber" + source: variables/wmoBlockNumber + dimensions: [ "Location" ] longName: "Station Identification WMO block number" units: "none" - - name: "MetaData/stationIdWMOstation" - source: variables/stationIdWMOstation - dimensions: [ "nlocs" ] + - name: "MetaData/wmoStationNumber" + source: variables/wmoStationNumber + dimensions: [ "Location" ] longName: "Station Identification WMO station number" units: "none" - - name: "MetaData/station_elevation" + - name: 
"MetaData/stationElevation" source: variables/stationElevation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Station Elevation" units: "m" - name: "MetaData/height" source: variables/stationElevation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Height" units: "m" - - name: "ObsValue/air_temperature" + - name: "ObsValue/airTemperature" + source: variables/airTemperature coordinates: "longitude latitude" - source: variables/temperatureAir - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Air Temperature" units: "K" - - name: "ObsValue/dewpoint_temperature" + - name: "ObsValue/dewpointTemperature" + source: variables/dewpointTemperature coordinates: "longitude latitude" - source: variables/temperatureDewpoint - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Dewpoint Temperature" units: "K" - - name: "ObsValue/wind_direction" - coordinates: "longitude latitude" + - name: "ObsValue/windDirection" source: variables/windDirection - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Direction" units: "degrees" - - name: "ObsValue/wind_speed" - coordinates: "longitude latitude" + - name: "ObsValue/windSpeed" source: variables/windSpeed - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Speed" units: "m s-1" - - name: "ObsValue/surface_pressure" + - name: "ObsValue/stationPressure" + source: variables/stationPressure coordinates: "longitude latitude" - source: variables/pressureStation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Surface Pressure" units: "Pa" diff --git a/test/testinput/vadwinds_wmoBUFR2ioda.yaml b/test/testinput/vadwinds_wmoBUFR2ioda.yaml index 49e546617..44b3c8be8 100644 --- a/test/testinput/vadwinds_wmoBUFR2ioda.yaml +++ b/test/testinput/vadwinds_wmoBUFR2ioda.yaml @@ -25,9 +25,9 @@ observations: day: DAYS hour: HOUR minute: MINU - stationIdWMOblock: + 
wmoBlockNumber: mnemonic: WMOB - stationIdWMOstation: + wmoStationNumber: mnemonic: WMOS latitude: mnemonic: CLAT @@ -49,69 +49,69 @@ observations: obsdataout: "./testrun/vadwinds_wmo_multi.nc" dimensions: - - name: "nlocs" + - name: "Location" size: variables/latitude.nrows variables: - name: "MetaData/dateTime" source: variables/timestamp - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "dateTime" units: "seconds since 1970-01-01T00:00:00Z" - name: "MetaData/latitude" source: variables/latitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Latitude" units: "degrees_north" - name: "MetaData/longitude" source: variables/longitude - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Longitude" units: "degrees_east" - - name: "MetaData/stationIdWMOblock" - source: variables/stationIdWMOblock - dimensions: [ "nlocs" ] + - name: "MetaData/wmoBlockNumber" + source: variables/wmoBlockNumber + dimensions: [ "Location" ] longName: "Station Identification WMO block number" units: "none" - - name: "MetaData/stationIdWMOstation" - source: variables/stationIdWMOstation - dimensions: [ "nlocs" ] + - name: "MetaData/wmoStationNumber" + source: variables/wmoStationNumber + dimensions: [ "Location" ] longName: "Station Identification WMO station number" units: "none" - - name: "MetaData/station_elevation" + - name: "MetaData/stationElevation" source: variables/stationElevation - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Station Elevation" units: "m" - name: "MetaData/height" source: variables/height - dimensions: [ "nlocs" ] + dimensions: [ "Location" ] longName: "Height" units: "m" # ADD this in later # - name: "MetaData/qualityIndicator" # source: variables/qualityIndicator - # dimensions: [ "nlocs" ] + # dimensions: [ "Location" ] # longName: "Quality Indicator" # units: "unitless" - - name: "ObsValue/wind_direction" - coordinates: "longitude latitude" + - name: "ObsValue/windDirection" source: 
variables/windDirection - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Direction" units: "degrees" - - name: "ObsValue/wind_speed" - coordinates: "longitude latitude" + - name: "ObsValue/windSpeed" source: variables/windSpeed - dimensions: [ "nlocs" ] + coordinates: "longitude latitude" + dimensions: [ "Location" ] longName: "Wind Speed" units: "m s-1" diff --git a/test/testoutput/2020100106_metars_small.nc b/test/testoutput/2020100106_metars_small.nc index b2df9de81..a855888cd 100644 --- a/test/testoutput/2020100106_metars_small.nc +++ b/test/testoutput/2020100106_metars_small.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:644097a3c509be82cfbece9e127516b7c1d1de2d94b69bbe6b9a38f6ee9cb8b4 -size 28143 +oid sha256:22d1a4434d3ef77dfa3beb4d9aacb8262b17c83d9f209a51cee72c2f70162872 +size 27572 diff --git a/test/testoutput/2021081612_sonde_small.nc b/test/testoutput/2021081612_sonde_small.nc index a506d0c86..49826b760 100644 --- a/test/testoutput/2021081612_sonde_small.nc +++ b/test/testoutput/2021081612_sonde_small.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:83bb5c8519e0b5d043e8988a08654722cf7c9b84937394f11bf07306085cb5cf -size 69269 +oid sha256:3afff9e03d286457e5b3dce869a235277e70d014fb1e385c6486ba7328daeaa6 +size 54622 diff --git a/test/testoutput/2021120600_atms_sdr.nc4 b/test/testoutput/2021120600_atms_sdr.nc4 index f1f8c245e..e889cabca 100644 --- a/test/testoutput/2021120600_atms_sdr.nc4 +++ b/test/testoutput/2021120600_atms_sdr.nc4 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:1b05795f3abe9f7106046e3754b91c2b5e09458d004e2e508993dfac4c682ac5 -size 34987 +oid sha256:28242cb8c6e63bc2a3435144a4ed4e6f27ca578f535470f813559929ea05bcac +size 31283 diff --git a/test/testoutput/NC005031.nc b/test/testoutput/NC005031.nc index 4c40934c2..a3b767fd3 100644 --- a/test/testoutput/NC005031.nc +++ b/test/testoutput/NC005031.nc @@ -1,3 +1,3 @@ 
version https://git-lfs.github.com/spec/v1 -oid sha256:b37f6c880046f75137cc0398aba6b7e78a0a4a91e6389c20722d9e743ecc340d -size 215757 +oid sha256:5d8b72d1543a5a0e67e25d1a4986444ba461a1e1e89ffa9eee110c2aa1425ab5 +size 212606 diff --git a/test/testoutput/NC005066.nc b/test/testoutput/NC005066.nc index 5aee04f05..a9fb31ad2 100644 --- a/test/testoutput/NC005066.nc +++ b/test/testoutput/NC005066.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:997931919211bf20a19b0711d3e24e6d46ed41e40b724d1034487358d8e8e829 -size 129007 +oid sha256:83909059d5d5fa2f176d390e67ff45b5ac691310bdd6fc5b77c5b68589d78b94 +size 129367 diff --git a/test/testoutput/SWOT_L2_ADT.nc b/test/testoutput/SWOT_L2_ADT.nc index eeca2d0f2..ed9ef3e0f 100644 --- a/test/testoutput/SWOT_L2_ADT.nc +++ b/test/testoutput/SWOT_L2_ADT.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d5369824cb8f792de487b7abd7b4212818f437534914bbaad031c387704fd17d -size 1259650 +oid sha256:bf9cb8027603cf146856af57cf1303a9090c2e2cce45b7bbded9206ba80a5cc2 +size 298133 diff --git a/test/testoutput/adpupa_prepbufr.nc b/test/testoutput/adpupa_prepbufr.nc index 4808938c0..8988b8f93 100644 --- a/test/testoutput/adpupa_prepbufr.nc +++ b/test/testoutput/adpupa_prepbufr.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d2a0727a471571d9e9a09dd80e1f7125a999d137a875d7e9a62e2489c1084dcd -size 409232 +oid sha256:c77088226b7e8f8284d6b19c6fe261c77a2c3d28f9a17404c7d3c41f1116f5c9 +size 390505 diff --git a/test/testoutput/adpupa_prepbufr_group_by.nc b/test/testoutput/adpupa_prepbufr_group_by.nc deleted file mode 100644 index 50374d37c..000000000 --- a/test/testoutput/adpupa_prepbufr_group_by.nc +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9c31f373858d01bf89403ee97613719fb3fb184a481b6edd0c68ff03578eb551 -size 477389 diff --git a/test/testoutput/aeronet_aaod.nc b/test/testoutput/aeronet_aaod.nc index b87a71b4e..7cfb699e3 100644 --- 
a/test/testoutput/aeronet_aaod.nc +++ b/test/testoutput/aeronet_aaod.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:157c045d401834c601eedf642c410f44e49d896a9c6e3bb685c179198c5234c6 -size 35317 +oid sha256:95f7501f713037dbb85c6e17e4362fa9aa2ff85b0667429de4f07380f2315059 +size 26741 diff --git a/test/testoutput/aeronet_aod.nc b/test/testoutput/aeronet_aod.nc index 963233d81..c7f365a90 100644 --- a/test/testoutput/aeronet_aod.nc +++ b/test/testoutput/aeronet_aod.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:8e3f2b436cbde7dae0df5d118ec441f3021f45a8259e8c63e54c2519a36cefe7 -size 18323 +oid sha256:edcaac8e4bddee835c4ec13bd862d009289c13e7b92e0abf7d92af8b5cfc469b +size 16906 diff --git a/test/testoutput/afwa_snod.nc b/test/testoutput/afwa_snod.nc deleted file mode 100644 index ad87fb395..000000000 --- a/test/testoutput/afwa_snod.nc +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0fd163d4b281183c6e301e2ee878cc4bd43577d8cc8491c54272d49ca2c29e6d -size 89525 diff --git a/test/testoutput/aircraft_prepbufr_acftprofiles.nc b/test/testoutput/aircraft_prepbufr_acftprofiles.nc new file mode 100644 index 000000000..678968025 --- /dev/null +++ b/test/testoutput/aircraft_prepbufr_acftprofiles.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:66486f8d8abe5350794db0ddab4b2e0cd10f4a1c031e326e76311ada41bea2c2 +size 217400 diff --git a/test/testoutput/airep_multi.nc b/test/testoutput/airep_multi.nc index 1da573380..3913dd1c2 100644 --- a/test/testoutput/airep_multi.nc +++ b/test/testoutput/airep_multi.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:62d580598af8555471d9cec146653fa3ddecb8e172d9c23636b90413f4be7ba0 -size 33659 +oid sha256:b9a6569409cefda0378adbf2031bd0bb3bd529482d863f68f221676f977091ab +size 33577 diff --git a/test/testoutput/airnow_2020081306.nc b/test/testoutput/airnow_2020081306.nc index 986b67a5e..d22255473 100644 --- 
a/test/testoutput/airnow_2020081306.nc +++ b/test/testoutput/airnow_2020081306.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:b1b2d8e178d3c24266c88ded97b38a6a1f20e252222bb5eef34ab9aee206b82f -size 138122 +oid sha256:0158eec64aa8456dd27efc84d7dd497eb4123acf0a95117962f363b2bb3b8b83 +size 75443 diff --git a/test/testoutput/amdar_wmo_multi2.nc b/test/testoutput/amdar_wmo_multi2.nc index 8b165c904..022b0d44f 100644 --- a/test/testoutput/amdar_wmo_multi2.nc +++ b/test/testoutput/amdar_wmo_multi2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0bb45dbeb1cb09f81c52e542f3d447db67c56f653931164bcaa3ae8da3b1a5c6 -size 198005 +oid sha256:27f4d5448dc3ec99c37dbf67106e9084791ab01340dd5f48aedb574acd37e7b9 +size 146615 diff --git a/test/testoutput/amsr2_icec_l2p.nc b/test/testoutput/amsr2_icec_l2p.nc index 5ac443805..bab3bc66f 100644 --- a/test/testoutput/amsr2_icec_l2p.nc +++ b/test/testoutput/amsr2_icec_l2p.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:36c2d2b5e4f2719aef5749f6e1b5383c5431b4cc0ecab3d87e2f527f2d76fac2 -size 16951 +oid sha256:3525e8f4e685a0eefebfd8606ce82599d85a6588cbf0a70895fe0417797cf79e +size 13045 diff --git a/test/testoutput/amsua_aqua_obs_2018041500.nc4 b/test/testoutput/amsua_aqua_obs_2018041500.nc4 index dc36b97be..682fa5af2 100644 --- a/test/testoutput/amsua_aqua_obs_2018041500.nc4 +++ b/test/testoutput/amsua_aqua_obs_2018041500.nc4 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:773297fe6bc2bd2cac87b63982f988a1c785cbc6e6ebf2b98de61706f889cedd -size 64896 +oid sha256:74a5e41f9ad54cddbbcc9bc66e8052dba447ef1ba1d4ceba0515e2bd160d8ec1 +size 60622 diff --git a/test/testoutput/aod_viirs_obs_2018041500_s.nc4 b/test/testoutput/aod_viirs_obs_2018041500_s.nc4 deleted file mode 100644 index 0bc5dd02c..000000000 --- a/test/testoutput/aod_viirs_obs_2018041500_s.nc4 +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:951a9a397b3ea705c43d7681c707b93c235ba559632d1c5ae4e4b48b39cafa2d -size 39597 diff --git a/test/testoutput/argoclim.nc b/test/testoutput/argoclim.nc index bf8470524..90bb5ac98 100644 --- a/test/testoutput/argoclim.nc +++ b/test/testoutput/argoclim.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:f52814c88e6b2f0eecc2b0be522bb979ce151236481611bd7883be701a246194 -size 18121 +oid sha256:e2e634b3337857e2322f2cc0560fc899010a10dedec5108d9c0eedcce83a62d0 +size 13888 diff --git a/test/testoutput/ascat_ssm.nc b/test/testoutput/ascat_ssm.nc index 14c96788f..52300c185 100644 --- a/test/testoutput/ascat_ssm.nc +++ b/test/testoutput/ascat_ssm.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:97ca61128d62966925576c65807de0898d09ff0f89bc2e0e19a109ec5b9e15c3 -size 15471 +oid sha256:4d0ae27f5cbdce24646c8953a16c476d1371cc77434656eaf2687846eb5a653a +size 15347 diff --git a/test/testoutput/avhrr_radiance.nc b/test/testoutput/avhrr_radiance.nc index 736a57150..a1b7d07e5 100644 --- a/test/testoutput/avhrr_radiance.nc +++ b/test/testoutput/avhrr_radiance.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2b3c944bc34d2a9afff10cfa9764d0a924eac1b9dca90755099bc66d9e2da0ce -size 26734 +oid sha256:b9fc26d9574374e1f27eea0c64c292cb211186186bf0ce6d3009373c533de805 +size 22687 diff --git a/test/testoutput/bufr_empty_fields.nc b/test/testoutput/bufr_empty_fields.nc index 5d657f386..d696d2f62 100644 --- a/test/testoutput/bufr_empty_fields.nc +++ b/test/testoutput/bufr_empty_fields.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:9dae902e3517280891dbb4fbdf39f7d47ec72c066daedaac438f1520fa9de17f -size 24615 +oid sha256:9fd43140ff8bdade562ffa51f67def17b5c7e91e069e1a92ce68f95f86640f10 +size 29913 diff --git a/test/testoutput/bufr_read_wmo_radiosonde.nc b/test/testoutput/bufr_read_wmo_radiosonde.nc index fa1b2f41f..d24d4baf0 100644 --- a/test/testoutput/bufr_read_wmo_radiosonde.nc +++ 
b/test/testoutput/bufr_read_wmo_radiosonde.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:b362fa9a8366a099cf76b15721a0018c23843c17256728b9868b2a50764ee3c1 -size 130942 +oid sha256:926c03c2467d3816e196544ccae679383153e447726dfb64b13fcc76490498cb +size 133403 diff --git a/test/testoutput/bufr_simple_groupby.nc b/test/testoutput/bufr_simple_groupby.nc index cb27ee167..ccf948cd8 100644 --- a/test/testoutput/bufr_simple_groupby.nc +++ b/test/testoutput/bufr_simple_groupby.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:9450b5569481b60f7408568e41174b33926e7777ad43ebb338eea5025bb5bcb8 +oid sha256:a0bdeb657ace3bc2b58498e98cfef589d296d165c99acc4477722d7834463dcb size 48992 diff --git a/test/testoutput/bufr_specifying_subsets.nc b/test/testoutput/bufr_specifying_subsets.nc index 334a2e4dd..ecb61585f 100644 --- a/test/testoutput/bufr_specifying_subsets.nc +++ b/test/testoutput/bufr_specifying_subsets.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d097d617b383fa7c7f8aaf7a16ddc7afea98019eca836f59fa42c854541ea998 -size 29690 +oid sha256:4f3b86786b014bf39de2abaa815688ee982a32aadebe50149ea016acadf5646f +size 24062 diff --git a/test/testoutput/bufr_wmo_amdar_multi.nc b/test/testoutput/bufr_wmo_amdar_multi.nc index 6a4b7e26c..b2f5b80c0 100644 --- a/test/testoutput/bufr_wmo_amdar_multi.nc +++ b/test/testoutput/bufr_wmo_amdar_multi.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:c102e5a2bba7b3674263adf3cd4a4e7d4ba232ded173b4399a67033e542f8435 -size 198964 +oid sha256:d318df930041666b0b68d11350abc242ebcd0c5ad5caf655d67c31e65acbec64 +size 195827 diff --git a/test/testoutput/buoy_wmo_multi.nc b/test/testoutput/buoy_wmo_multi.nc index c2e45a473..9090719aa 100644 --- a/test/testoutput/buoy_wmo_multi.nc +++ b/test/testoutput/buoy_wmo_multi.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:979fdaa036c7e97ed2307d673458a80155efb574a1b4d457d4ad1ca6173999c9 -size 
47553 +oid sha256:01fee1267bf77e643dabdd0671db26f6b8f0581189186782d02b6e2b4ca83d81 +size 47373 diff --git a/test/testoutput/buoy_wmo_multi2.nc b/test/testoutput/buoy_wmo_multi2.nc index 3194b8dd9..2f608b947 100644 --- a/test/testoutput/buoy_wmo_multi2.nc +++ b/test/testoutput/buoy_wmo_multi2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:616201c4f19e90dcfe87e6146fbbe4393dfd5e91dc122f8af050d8c13d5cba35 -size 32297 +oid sha256:ca24658dd52d6bd16010582d4219987af9b6bd010192871a4dd6395bf9524e01 +size 31453 diff --git a/test/testoutput/cryosat2_L2.nc b/test/testoutput/cryosat2_L2.nc index 0f9adde5b..7938110ef 100644 --- a/test/testoutput/cryosat2_L2.nc +++ b/test/testoutput/cryosat2_L2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:dc8b78827a2cea5943bef40eb45532cf69b61daf56750ab16daa00d184a76d80 -size 17278 +oid sha256:16bdc636983ca93f6fcee945014cc0fc2f724f744dd440f788a1c4fa25ea75d1 +size 13045 diff --git a/test/testoutput/emc_ice_ioda2.nc b/test/testoutput/emc_ice_ioda2.nc index 0087bfc84..60e506762 100644 --- a/test/testoutput/emc_ice_ioda2.nc +++ b/test/testoutput/emc_ice_ioda2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7ae508722faa80a09bc145bc3d008b5630b4cff8e62f9d48b07106b55e57f522 -size 17278 +oid sha256:41498c982b0babebcad01deb7ec2cd8e333a2c1ee2231820e239f026c4e6dedf +size 13045 diff --git a/test/testoutput/gdas.aircar.t00z.20210801.nc b/test/testoutput/gdas.aircar.t00z.20210801.nc index 7bfc3a9cd..fd4942040 100644 --- a/test/testoutput/gdas.aircar.t00z.20210801.nc +++ b/test/testoutput/gdas.aircar.t00z.20210801.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:aec810bef04cbc5a159ad69d7a14b4c4b1a8ac4a0781f2c0cfdcb63d04a567d9 -size 357770 +oid sha256:6d2df314a0a55292db8badf4fd473bab5394a755b1780c1219265208803f52b7 +size 359813 diff --git a/test/testoutput/gdas.t00z.1bamsua.tm00.nc b/test/testoutput/gdas.t00z.1bamsua.tm00.nc new file mode 100644 index 
000000000..9d9c40777 --- /dev/null +++ b/test/testoutput/gdas.t00z.1bamsua.tm00.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3ccfe44e77b5d6e3043b623d82025379a436b72fca7c124e73a06b83878548ff +size 751996 diff --git a/test/testoutput/gdas.t00z.1bhrs4.tm00.nc b/test/testoutput/gdas.t00z.1bhrs4.tm00.nc index f793b4632..402e4f3f3 100644 --- a/test/testoutput/gdas.t00z.1bhrs4.tm00.nc +++ b/test/testoutput/gdas.t00z.1bhrs4.tm00.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:f2e3363afc3d34660d95864f81d529e3f840fe035ea1413e4cac1f0a5736c6fb -size 653448 +oid sha256:42927e06829bc7e814627e98bbd09760e51d173efab79bc71a2c3bad96183d2e +size 351832 diff --git a/test/testoutput/gdas.t00z.airsev.tm00.nc b/test/testoutput/gdas.t00z.airsev.tm00.nc new file mode 100644 index 000000000..d5ea4df44 --- /dev/null +++ b/test/testoutput/gdas.t00z.airsev.tm00.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e4add417ebb2b367300fa761c7d7af770496c5f6f793a44a98a282377c9dea4 +size 75289 diff --git a/test/testoutput/gdas.t00z.atms.tm00.nc b/test/testoutput/gdas.t00z.atms.tm00.nc new file mode 100644 index 000000000..76b35bc42 --- /dev/null +++ b/test/testoutput/gdas.t00z.atms.tm00.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:77d9251b041da01394737ed1da1cb9f2db6e950244cbb56adc0f5e099f43c89f +size 432204 diff --git a/test/testoutput/gdas.t00z.avcsam.tm00.nc b/test/testoutput/gdas.t00z.avcsam.tm00.nc new file mode 100644 index 000000000..fd78e96cd --- /dev/null +++ b/test/testoutput/gdas.t00z.avcsam.tm00.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9137c417faf5a03900c92d3625aac74104a8a0c3795b4e0b034324f4b58609ff +size 178778 diff --git a/test/testoutput/gdas.t00z.crisf4.tm00.nc b/test/testoutput/gdas.t00z.crisf4.tm00.nc new file mode 100644 index 000000000..604003c9e --- /dev/null +++ b/test/testoutput/gdas.t00z.crisf4.tm00.nc @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:415e75fbbd176ff5ae7c4fb82a94faf53005beccfb17117293d137f4584caca1 +size 1294387 diff --git a/test/testoutput/gdas.t00z.mtiasi.tm00.nc b/test/testoutput/gdas.t00z.mtiasi.tm00.nc new file mode 100644 index 000000000..790c6cb70 --- /dev/null +++ b/test/testoutput/gdas.t00z.mtiasi.tm00.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18b8d5f75790d58673fcd6ed846a0e9644b327efe4a63155c918209ab0057846 +size 658210 diff --git a/test/testoutput/gdas.t00z.sevcsr.tm00.nc b/test/testoutput/gdas.t00z.sevcsr.tm00.nc index 4cb60869a..236876737 100644 --- a/test/testoutput/gdas.t00z.sevcsr.tm00.nc +++ b/test/testoutput/gdas.t00z.sevcsr.tm00.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:8f323a107a1d9758c0222249aafe80f00eef792ced712786074b4381766cab75 +oid sha256:1f3b1be4165f7956905736ab3dba73aa153dcbe8e94b6537fa44920cb1ae6171 size 56554 diff --git a/test/testoutput/gdas.t00z.ssmisu.tm00.nc b/test/testoutput/gdas.t00z.ssmisu.tm00.nc new file mode 100644 index 000000000..cb1e3d33f --- /dev/null +++ b/test/testoutput/gdas.t00z.ssmisu.tm00.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:599961c8e519956026aa75900889f594a1021b3f251b0bca98f5e9fd756e1921 +size 732450 diff --git a/test/testoutput/gdas.t12z.aircft_AMDAR103.tm00.nc b/test/testoutput/gdas.t12z.aircft_AMDAR103.tm00.nc index 707fd2ea9..c7c7857b5 100644 --- a/test/testoutput/gdas.t12z.aircft_AMDAR103.tm00.nc +++ b/test/testoutput/gdas.t12z.aircft_AMDAR103.tm00.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:31e39bcbb8e5c96c6945e1e2052dacddf584eba852e45f9bfcee7dd438ded811 -size 72503 +oid sha256:150334fc6c5b48bd170a14175bdd90ed5009c1847d0c74416df7dc89f132677e +size 65766 diff --git a/test/testoutput/gdas.t12z.aircft_noAMDAR103.tm00.nc b/test/testoutput/gdas.t12z.aircft_noAMDAR103.tm00.nc index b821462c4..ec73a2f50 100644 --- a/test/testoutput/gdas.t12z.aircft_noAMDAR103.tm00.nc +++ 
b/test/testoutput/gdas.t12z.aircft_noAMDAR103.tm00.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2bad9ec8fbc0102eee629e1335e1ab130cecdaab6536f781353dffcb56f9661f -size 252208 +oid sha256:190e75722ba657dfa1aec9a6748b84c10ce4709383ec8b66fdaa1d5b6c453c46 +size 111768 diff --git a/test/testoutput/gdas.t12z.avcsam.tm00.nc b/test/testoutput/gdas.t12z.avcsam.tm00.nc new file mode 100644 index 000000000..b2b517da2 --- /dev/null +++ b/test/testoutput/gdas.t12z.avcsam.tm00.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07bfbcf1c8e1f914e7644369d788c70f2881cf598c3f25eb4389a026e1e9aabd +size 178778 diff --git a/test/testoutput/gdas.t18z.1bmhs.tm00.15.7.filter_split.nc b/test/testoutput/gdas.t18z.1bmhs.tm00.15.7.filter_split.nc index 0d3db7f22..33f2400d6 100644 --- a/test/testoutput/gdas.t18z.1bmhs.tm00.15.7.filter_split.nc +++ b/test/testoutput/gdas.t18z.1bmhs.tm00.15.7.filter_split.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:fb2278b9c7b894e243efd7c3705c59ec0b29e20dfafe6c516c33ed6438b83ab5 +oid sha256:945a67a855b23c8a2fcdf939a579a1fb614b4c4a7e879d6160e3be8616947735 size 57916 diff --git a/test/testoutput/gdas.t18z.1bmhs.tm00.nc b/test/testoutput/gdas.t18z.1bmhs.tm00.nc index 41e8fa886..262dbc60b 100644 --- a/test/testoutput/gdas.t18z.1bmhs.tm00.nc +++ b/test/testoutput/gdas.t18z.1bmhs.tm00.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:b8dde16bfd27b73a4777f48ddbaa76b86c32d82994c57091bf2f6eb4269192a1 -size 798784 +oid sha256:e2bc65e8cce714d0b253527f7e9e36591d963fdc299db10de06d86c09d8d53d2 +size 798562 diff --git a/test/testoutput/gds2_sst_l2p.nc b/test/testoutput/gds2_sst_l2p.nc index cc5ae6970..4e6c78bfd 100644 --- a/test/testoutput/gds2_sst_l2p.nc +++ b/test/testoutput/gds2_sst_l2p.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:8ae3caa9313de4670f6e8b2f112ccaaf97707db0b8607d551143f79cd5d43eb9 -size 20190 +oid 
sha256:f2f5fc17845dacb931aba0c7096b44bded5eac1e2be558ac29a3ab0a98c7e3fb +size 17767 diff --git a/test/testoutput/gds2_sst_l3u.nc b/test/testoutput/gds2_sst_l3u.nc index 248c3ce8c..b1f168cc1 100644 --- a/test/testoutput/gds2_sst_l3u.nc +++ b/test/testoutput/gds2_sst_l3u.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:55049b4906887a975cf1a16e67645297a2340993ec47ad4b71be3e86577c6ab7 -size 20194 +oid sha256:e33fadc7883fac36f093ba1128a6e00b75cca9843e32c8e44dfee687782335f9 +size 17799 diff --git a/test/testoutput/ghcn_snod_20200228.nc b/test/testoutput/ghcn_snod_20200228.nc index bbd010d03..26c866515 100644 --- a/test/testoutput/ghcn_snod_20200228.nc +++ b/test/testoutput/ghcn_snod_20200228.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7fa912f6fb85dc0ba3367275667ffc6f17ef27d33bd492b9b4e7d7af1f9c6347 -size 24459 +oid sha256:e0ab8ce7f4faf8d4ec8c9a31f4e2308bca584ca89c582ce51e12b8e8cc854a8c +size 19129 diff --git a/test/testoutput/gmao_oceanObs.nc b/test/testoutput/gmao_oceanObs.nc new file mode 100644 index 000000000..af74fee9f --- /dev/null +++ b/test/testoutput/gmao_oceanObs.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ccad2263b7af3aac27becb7e21dd131118bbafbd38674a109c11b4f0a9f7d32 +size 150021 diff --git a/test/testoutput/gnssro_2020-306-2358C2E6.nc b/test/testoutput/gnssro_2020-306-2358C2E6.nc index a7459bb25..46e4694a9 100644 --- a/test/testoutput/gnssro_2020-306-2358C2E6.nc +++ b/test/testoutput/gnssro_2020-306-2358C2E6.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d0693e36a339294ef235a9abfa9c90d26d95cc5a3550c4eada6d7dbc69b9f56a -size 65140 +oid sha256:ec91ca5584dedc5d9e0c22b8bbccc9aed6560edc7386b87f832e0e0bc7655602 +size 61514 diff --git a/test/testoutput/gnssro_cosmic2_2021080212.nc4 b/test/testoutput/gnssro_cosmic2_2021080212.nc4 index ec2663a6f..5a32a72b1 100644 --- a/test/testoutput/gnssro_cosmic2_2021080212.nc4 +++ 
b/test/testoutput/gnssro_cosmic2_2021080212.nc4 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:1e60ddf5b10dbbaf12fb5d68d0bdd8cbf2f22defd13fa88678ac86df78641fe4 -size 52741 +oid sha256:952cdd00d90fa6b08c62efae46d6948b1cfca367e0c7149fd261c515764e36cc +size 36106 diff --git a/test/testoutput/godae_bgc_argo.nc b/test/testoutput/godae_bgc_argo.nc index 7491a6298..17c26cf3c 100644 --- a/test/testoutput/godae_bgc_argo.nc +++ b/test/testoutput/godae_bgc_argo.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:e36507afc4a2a8cfb54dd5609967bc978d5dea8cc99a2d0d68311729f75e559e -size 18121 +oid sha256:4d8f983506567e8c8448c178020d09b2fbc1381c8c64b56828cf2095bbfce326 +size 14330 diff --git a/test/testoutput/godae_prof.nc b/test/testoutput/godae_prof.nc index 38df571a8..9563cd663 100644 --- a/test/testoutput/godae_prof.nc +++ b/test/testoutput/godae_prof.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:fe90fe3f60fdf2333dd4e6f72c3cdf7d559417db70adfed5056494030411a277 -size 20730 +oid sha256:b6dc6f95ba4f1934dfc7c46d97c2de9a71f0e18921bad49d2cd9952e51daf5e6 +size 16842 diff --git a/test/testoutput/godae_ship.nc b/test/testoutput/godae_ship.nc index 3cffb2317..546731439 100644 --- a/test/testoutput/godae_ship.nc +++ b/test/testoutput/godae_ship.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:eb5b6486dc23a4fb3bd8e1e6836963d84495682e76825e26973bbf23f8dc6fb1 -size 94944 +oid sha256:d0981e8a90dbe9d9c4ea87c3ce393996d7a6bd24042d50c6a773689d768676b3 +size 32180 diff --git a/test/testoutput/godae_trak.nc b/test/testoutput/godae_trak.nc index 7765671da..ae5e3daa1 100644 --- a/test/testoutput/godae_trak.nc +++ b/test/testoutput/godae_trak.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0ecce007f3bec66c166558b4353a79377b1d1757f6044eaa7caade5fdc954921 -size 25133 +oid sha256:d4ae8bcf7442e41135e283b0701043690b9d3283ed858280f6d52735fb347ea1 +size 20986 diff --git 
a/test/testoutput/hgodas_adt.nc b/test/testoutput/hgodas_adt.nc index a8177c9fd..7e8c38fab 100644 --- a/test/testoutput/hgodas_adt.nc +++ b/test/testoutput/hgodas_adt.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:839f04d876f27cf41ac9850a510bd7a50b1d974d7ddce79e8bf78cf949e6fb6b -size 23967 +oid sha256:e53c7d98047c59052646abf9630022c24da4d51ab047d86f48cf81c5d000c574 +size 17625 diff --git a/test/testoutput/hgodas_insitu.nc b/test/testoutput/hgodas_insitu.nc index 0d07e9b2b..e59ccf110 100644 --- a/test/testoutput/hgodas_insitu.nc +++ b/test/testoutput/hgodas_insitu.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7f2dbda139cac51380389670e5474f411e4f2951dc297ff6b112411177e8a32b -size 22807 +oid sha256:1251a83a04239cf4766f3eb22f69f7f3469f8f3c28607afb8c3419bc72c6ce42 +size 18545 diff --git a/test/testoutput/hgodas_sst.nc b/test/testoutput/hgodas_sst.nc index 91f598da4..8f7355fcd 100644 --- a/test/testoutput/hgodas_sst.nc +++ b/test/testoutput/hgodas_sst.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:68787f1cf3f94b411dc5ee49be0ce85b3209bfc483e2ed536852b4ad0a33fce3 -size 19871 +oid sha256:2f6488f24340011b7e6832caf728b78633a3ab5bb67eec5a26125daf65236dcf +size 15144 diff --git a/test/testoutput/imsfv3_scf.nc b/test/testoutput/imsfv3_scf.nc index 0795e3958..1636cd79c 100644 --- a/test/testoutput/imsfv3_scf.nc +++ b/test/testoutput/imsfv3_scf.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:8b0560a9f19ff661314b56a32d3de6bf6724f98178597ed97d1b4e87b5a96fc0 -size 225977 +oid sha256:522b1d3f752b33c42baab2067d21b3c3f2427c6b17d069a7a6a7392c1f370901 +size 67978 diff --git a/test/testoutput/imssnow_scf.nc b/test/testoutput/imssnow_scf.nc deleted file mode 100644 index 9dd91719b..000000000 --- a/test/testoutput/imssnow_scf.nc +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:aceabea45a7fac7cb333e033903ff8b598afcb9aef2a6d18577583907c7b9b5d -size 
430262 diff --git a/test/testoutput/ioda_dt_global_twosat_phy_l4_20190101_vDT2021.nc b/test/testoutput/ioda_dt_global_twosat_phy_l4_20190101_vDT2021.nc index f0429273b..b237ec98e 100644 --- a/test/testoutput/ioda_dt_global_twosat_phy_l4_20190101_vDT2021.nc +++ b/test/testoutput/ioda_dt_global_twosat_phy_l4_20190101_vDT2021.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:22c34ec032f3d07eec9537d04ccaa244e492c11f3b54fd279a1686a49d0f9b10 -size 210989 +oid sha256:9110199c196fef3566a0ac706c14617af020d293ff3124fbd8ba48ba36061f6a +size 53684 diff --git a/test/testoutput/ioda_global_vavh_l3_rt_s3a_20210930T18.nc b/test/testoutput/ioda_global_vavh_l3_rt_s3a_20210930T18.nc index 13af5c3b5..5d8d66bb2 100644 --- a/test/testoutput/ioda_global_vavh_l3_rt_s3a_20210930T18.nc +++ b/test/testoutput/ioda_global_vavh_l3_rt_s3a_20210930T18.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7ecf547e32167c57a48f165dcde0e82b1673ffde255c04619468f07d28eca10e -size 13172 +oid sha256:ef8cae7c44a66142e6a17e15db5e19ffc5e5d9b70cd66154dece8279f15f43b7 +size 13182 diff --git a/test/testoutput/mls_o3_l2.nc b/test/testoutput/mls_o3_l2.nc index 9b9bb1a25..b477f70bd 100644 --- a/test/testoutput/mls_o3_l2.nc +++ b/test/testoutput/mls_o3_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0c10b89fb04865b64292e7e5538feedbbce3df9af95e76584d2c5887ff52d440 -size 26380 +oid sha256:cf8598f69958cc90c5a477cc2f29823fb26bbd2a5f564d18a8d420aadc507acb +size 23308 diff --git a/test/testoutput/modis_aod.nc b/test/testoutput/modis_aod.nc index d908ce1fc..98455d5aa 100644 --- a/test/testoutput/modis_aod.nc +++ b/test/testoutput/modis_aod.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:dc2f73b0a3b80fb9f5e322ec6a57ec1b9828b09a4495f699d1c47fa064942a04 -size 13825 +oid sha256:ab9e4bfedbcdbf9f9e6dcda9dd45a523859eba33440fab096fa59effa42f783c +size 13678 diff --git a/test/testoutput/modis_aqua_oc_l2.nc 
b/test/testoutput/modis_aqua_oc_l2.nc index a0f196769..7fb4d2df9 100644 --- a/test/testoutput/modis_aqua_oc_l2.nc +++ b/test/testoutput/modis_aqua_oc_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:64ce19f609f14f5d8e86836fcdefedc6c195b3247b8a8423abdd12bc18e103a6 -size 278474 +oid sha256:5063fac43470b07f1af53f94ee9631eafdc462c55f45a569d86e71b6ee31666f +size 85990 diff --git a/test/testoutput/mopitt_co.nc b/test/testoutput/mopitt_co.nc index 33cccafeb..198927297 100644 --- a/test/testoutput/mopitt_co.nc +++ b/test/testoutput/mopitt_co.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:5a5703deff298e440fb7f4349a4d8585335d16b3d9b9e0e141f4927ac3f42ab4 -size 171846 +oid sha256:a85e9b3040c2dcf52afb7a5f43bddfdcb8a6bf654f63b68132335eaa070e371f +size 111294 diff --git a/test/testoutput/ndbc_hfradar_out.nc b/test/testoutput/ndbc_hfradar_out.nc index a23bc5abf..9ac674e11 100644 --- a/test/testoutput/ndbc_hfradar_out.nc +++ b/test/testoutput/ndbc_hfradar_out.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:8b03cd62c49130dbb2f608b275d560de963faccbf90462b5032596cbe8f80e87 -size 15354 +oid sha256:0a51b56df0638b660d9859bf08ff7e098baac46ae30649294a54bc0c5620594e +size 15574 diff --git a/test/testoutput/nsidc_l4_icec.nc b/test/testoutput/nsidc_l4_icec.nc index 3960e6353..fde521036 100644 --- a/test/testoutput/nsidc_l4_icec.nc +++ b/test/testoutput/nsidc_l4_icec.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:401d3b02f5287dd6eb7a8d2904770385fece53f499720183e414a5cfa3035695 -size 133049 +oid sha256:27db502e26407438741a7bc4f49c87f4d17eb5db759903d3899515a1dfe4200b +size 37020 diff --git a/test/testoutput/omi_o3_l2.nc b/test/testoutput/omi_o3_l2.nc index a651a6488..61d12ae5d 100644 --- a/test/testoutput/omi_o3_l2.nc +++ b/test/testoutput/omi_o3_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2b5965a3f258eb901fe23c21f542189dde2d759d40e16a682b6f81e670aa6951 
-size 838859 +oid sha256:bcf3e4ee1a875c3e76484eed9a66e65590d6dd78298c56fd77652918bfa0eace +size 861537 diff --git a/test/testoutput/ompsnm_o3_l2.nc b/test/testoutput/ompsnm_o3_l2.nc index f0cce6a4e..c98a72c0b 100644 --- a/test/testoutput/ompsnm_o3_l2.nc +++ b/test/testoutput/ompsnm_o3_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:51ec919979f11174f267aa001eb9d4854071bae6c0c991c27daf9b818b5f4051 -size 296953 +oid sha256:3592da2dca9d234bca6947683f0669fbc42207d82b7128671ef65c0e9483ad44 +size 369174 diff --git a/test/testoutput/owp_snow_obs_dup_thin_err_fn.nc b/test/testoutput/owp_snow_obs_dup_thin_err_fn.nc index dc34a1d30..8d6f1857d 100644 --- a/test/testoutput/owp_snow_obs_dup_thin_err_fn.nc +++ b/test/testoutput/owp_snow_obs_dup_thin_err_fn.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:48f799714c45565a43b978e4dfa061c670f40bdf5dbed9d7f8a973e30280e108 -size 15506 +oid sha256:6f0a000a4984ebd3f9bb98058f51e6695ec7292f4ba6fd31c0aca64a8d3db957 +size 15132 diff --git a/test/testoutput/pace_oc_l2.nc b/test/testoutput/pace_oc_l2.nc index 41cc206d4..252fde76b 100644 --- a/test/testoutput/pace_oc_l2.nc +++ b/test/testoutput/pace_oc_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:5e4cba5c84d5ce0a23eaec05691d93cdb4ecde72d9dc20dc58a879cd8cf82f27 -size 13545 +oid sha256:2a15e2e3062abb765a2cfd84042ffac27fdf14fa5d3487f103da2c89902245c1 +size 13464 diff --git a/test/testoutput/pace_radiance_L1B.nc b/test/testoutput/pace_radiance_L1B.nc index 2ba8e349d..b603463a2 100644 --- a/test/testoutput/pace_radiance_L1B.nc +++ b/test/testoutput/pace_radiance_L1B.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:ca2da8d7caae002d9e6e3f4d571341430daf063ff5aec6aa0b4d242932731205 -size 92888 +oid sha256:f488fc989e2363703368e2813c84936e7a8a0a25afaff79ce3cc7e151f4558aa +size 84161 diff --git a/test/testoutput/rads_adt.nc b/test/testoutput/rads_adt.nc index 81c4310c7..cd85dc901 100644 --- 
a/test/testoutput/rads_adt.nc +++ b/test/testoutput/rads_adt.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:88d99de258b09a6a591e5e7d37984be6c74c290d90acda7e1216a8750ed45e8f -size 17278 +oid sha256:e24415f20f3203d6141e24315447cbb5f4244da5bf37e8152afc94e9711cbaf7 +size 15519 diff --git a/test/testoutput/rass_wmo_multi.nc b/test/testoutput/rass_wmo_multi.nc index 28d5d7d4a..faf554056 100644 --- a/test/testoutput/rass_wmo_multi.nc +++ b/test/testoutput/rass_wmo_multi.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:e33e964986d3adef96cc3c8f0ef88ea2edc6017285d17dded21469e3cbcbb188 -size 33579 +oid sha256:b49142f3d4bca8fc17349e61731eaa7b3cb192a1fe1dd59fc5bf341f0febbb8d +size 33411 diff --git a/test/testoutput/satwind_EUMet.nc b/test/testoutput/satwind_EUMet.nc index 8522ebd07..19065dfb8 100644 --- a/test/testoutput/satwind_EUMet.nc +++ b/test/testoutput/satwind_EUMet.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:f3a5972a66ad38c489314764654cdd996c5e9d0b74e2ffbebabd3829999a8a63 -size 1479625 +oid sha256:4669b55b5e5969f63f16eb53c4b769c80743eebdd1d5c0a62ec75417570e255f +size 1476245 diff --git a/test/testoutput/satwind_Himawari.nc b/test/testoutput/satwind_Himawari.nc index 61b0e44db..d66f4ee68 100644 --- a/test/testoutput/satwind_Himawari.nc +++ b/test/testoutput/satwind_Himawari.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:540f64fd02a5d7297107f366e3187ae0852319a42be1055fa00925e2927843ba -size 62451 +oid sha256:a1b37129b086c40d6b6212fda399b787e0897836ad5570671d37286afc0340a0 +size 58760 diff --git a/test/testoutput/satwind_Insat.nc b/test/testoutput/satwind_Insat.nc index 92f7d4509..01b561b2e 100644 --- a/test/testoutput/satwind_Insat.nc +++ b/test/testoutput/satwind_Insat.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:53c69f2962c5a6e436be6540acf92c0ea6e52dd44cad708ff12cc1595822aae2 -size 144131 +oid 
sha256:b15bd7c3cb2e13c0969d56cb2ff36ceb4266996320f6578e9e723312cbffd445 +size 140365 diff --git a/test/testoutput/satwind_obs_2018041500.nc4 b/test/testoutput/satwind_obs_2018041500.nc4 index 61713e65f..ecfec9c73 100644 --- a/test/testoutput/satwind_obs_2018041500.nc4 +++ b/test/testoutput/satwind_obs_2018041500.nc4 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:fda9b93b1dfd73f0a0e6b1172664ce97378766e07e68c0546caa86075f3b8531 -size 59133 +oid sha256:ef098e6c8d12f6ac41b9068f8b3164bcc352ca2ddec18923a2b63f7dfd4737b7 +size 53661 diff --git a/test/testoutput/satwinds_ssec2021080103.nc b/test/testoutput/satwinds_ssec2021080103.nc index 8c99a3fb4..7cc7cfd7f 100644 --- a/test/testoutput/satwinds_ssec2021080103.nc +++ b/test/testoutput/satwinds_ssec2021080103.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:b3f2133d3228b9d9d77ec7eec2a08ff8de9b86c7438f360803add170ed6ca3ad -size 61483 +oid sha256:812bca662886a24c147c6a2534703d73d575885d5e8ba6f76b44b987fa14ffd8 +size 69439 diff --git a/test/testoutput/sfc_tv_obs_2018041500.nc4 b/test/testoutput/sfc_tv_obs_2018041500.nc4 index 2f7b62982..e8d94a6da 100644 --- a/test/testoutput/sfc_tv_obs_2018041500.nc4 +++ b/test/testoutput/sfc_tv_obs_2018041500.nc4 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:afa890d54e55b2a7acd27624c90d0adf1d9da8d10329a3b540a4355fd5bd119e -size 30423 +oid sha256:818f83e72837622382a3c0118ff0da34185abf50e57fee6d6efbc14bdebaff8c +size 28567 diff --git a/test/testoutput/ship_wmo_multi.nc b/test/testoutput/ship_wmo_multi.nc index 23027386f..6c3479333 100644 --- a/test/testoutput/ship_wmo_multi.nc +++ b/test/testoutput/ship_wmo_multi.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:83fbd1a46909cf5f103cbb3e708daae0bc9313ac9cd1c25ed1a106318001569a -size 73395 +oid sha256:64002bc2bed0a9b06c717a2c2671bc8f5916892d998cc887f75b123d6d1f4aec +size 78355 diff --git a/test/testoutput/ship_wmo_multi2.nc 
b/test/testoutput/ship_wmo_multi2.nc index ddb9333fd..b929fd3f9 100644 --- a/test/testoutput/ship_wmo_multi2.nc +++ b/test/testoutput/ship_wmo_multi2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:32822267c1547a48bd654c08c4f28bbf3c07f4473f811a1f71d2c7f6a32fa0d3 -size 133084 +oid sha256:afffe0c1e257550feed5c4d61508b73455054c8a02242deef5ab8e2be9636f9f +size 139331 diff --git a/test/testoutput/smap9km_ssm.nc b/test/testoutput/smap9km_ssm.nc index 6157f123a..9e7f0d770 100644 --- a/test/testoutput/smap9km_ssm.nc +++ b/test/testoutput/smap9km_ssm.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:13ed25faaf92745945d1a997c58935616e31035ad6d6068af6172761e47669d7 -size 1846040 +oid sha256:b10fdb9f77aeb56fd06ab5848fd36ef6a43e086c6e1b6d32653b37f490ea8e42 +size 386924 diff --git a/test/testoutput/smap_ssm.nc b/test/testoutput/smap_ssm.nc index 286d204c6..fc9aefbac 100644 --- a/test/testoutput/smap_ssm.nc +++ b/test/testoutput/smap_ssm.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:94801ada54e92e129059d0cc0ebb618c56ef3bbbed83f6e173b191ffee4dbb58 -size 415332 +oid sha256:0232524f086e41464c097f7092772c4e9d5eaa5e3b3f4d16d7d5a118c1dc53d5 +size 99937 diff --git a/test/testoutput/smap_sss_rss.nc b/test/testoutput/smap_sss_rss.nc index 1835e59d3..d9bbf16cf 100644 --- a/test/testoutput/smap_sss_rss.nc +++ b/test/testoutput/smap_sss_rss.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:8bd74f13c4583624502403987cdf47996e7389f013da9ea1affa92eb5855f084 -size 13162 +oid sha256:40659647fd83b477fd81ba3d8749f0a6305bc65b126aae08cc0b14c271b81cc1 +size 13021 diff --git a/test/testoutput/smos_ssm.nc b/test/testoutput/smos_ssm.nc index c83960d26..5ece4fc1e 100644 --- a/test/testoutput/smos_ssm.nc +++ b/test/testoutput/smos_ssm.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:beda15a0b7b5433a3dc1ae9922c78d679f841bd962a75452bbd67c9d2aa32ae4 -size 1236129 +oid 
sha256:9346736a2c8ce8bc760b4513ef173b9cf1f37ee62e35bc2165c1dcbbf7952560 +size 374772 diff --git a/test/testoutput/smos_sss_l2.nc b/test/testoutput/smos_sss_l2.nc index b1262a2f3..d81c12ce1 100644 --- a/test/testoutput/smos_sss_l2.nc +++ b/test/testoutput/smos_sss_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:88bd52c978fc38df8885be74aa321dbfc7aa84830a111962810ff16b0763438d -size 92978 +oid sha256:ca857e0f7bafa9350582c79981c3c7f1448ccfb0cad17084adce4a11988d66ff +size 29183 diff --git a/test/testoutput/sonde_wmo_multi.nc b/test/testoutput/sonde_wmo_multi.nc new file mode 100644 index 000000000..a017fcf4f --- /dev/null +++ b/test/testoutput/sonde_wmo_multi.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f44ea5a22aed43389b02a26900d22baa5aa164a77004db34ea3d07f9faf5b4ad +size 405222 diff --git a/test/testoutput/sst_ostia.nc b/test/testoutput/sst_ostia.nc index cc0b62c0b..99ae73687 100644 --- a/test/testoutput/sst_ostia.nc +++ b/test/testoutput/sst_ostia.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:8dd373cb95217395787e4821f957e4a5fd7fea8be351cc78fe0167600dd04f88 -size 468061 +oid sha256:3b1557ca76d226272427db416d3f6be6072fa13746e7d66e035030bbfd7dfbac +size 89386 diff --git a/test/testoutput/synop_wmo_multi.nc b/test/testoutput/synop_wmo_multi.nc index 8bf2a916e..fecc3fabf 100644 --- a/test/testoutput/synop_wmo_multi.nc +++ b/test/testoutput/synop_wmo_multi.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:11216f7bd66acc5481f83381be3905562c446daef3b0f13db19e7595f0cb1713 -size 71715 +oid sha256:8836c57fc59600b21b1d8f1a340c724d891cd4cc234f367d8468b791fd0bec4c +size 71524 diff --git a/test/testoutput/synop_wmo_multi2.nc b/test/testoutput/synop_wmo_multi2.nc index 975b576fa..3dd156efb 100644 --- a/test/testoutput/synop_wmo_multi2.nc +++ b/test/testoutput/synop_wmo_multi2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid 
sha256:6a19573817ad33395685ff3024ddd6107543f599ee612b60693d50cf2716a331 -size 99417 +oid sha256:a2f2b360e707c8bd8ca0d4f6f65c6ca5b73382baaacf73b148a25dc31bebfc52 +size 104167 diff --git a/test/testoutput/test_glider.nc b/test/testoutput/test_glider.nc index daa535f22..5d23cc35b 100644 --- a/test/testoutput/test_glider.nc +++ b/test/testoutput/test_glider.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0b2eec3253e1caffbf0771a2540f90199e59270c9543935f733b59636f79e47b -size 57253 +oid sha256:e0ac6cb75d2fb89a9ff366daa8f1ba1d9263501b289d9fe935821997b990085e +size 26382 diff --git a/test/testoutput/tropomi_co_total.nc b/test/testoutput/tropomi_co_total.nc index ee68500a3..8ffd7adeb 100644 --- a/test/testoutput/tropomi_co_total.nc +++ b/test/testoutput/tropomi_co_total.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6f849808e207c2fc62ba6c63f986c478d39ffdf52d300f51085c445ae0ecbde6 -size 173772 +oid sha256:4adfe705a0b97177f12664ab248b61c8e7793e118aac7271e44fd74b071cb92a +size 164142 diff --git a/test/testoutput/tropomi_no2_total.nc b/test/testoutput/tropomi_no2_total.nc index 25b22a419..5b833ecf7 100644 --- a/test/testoutput/tropomi_no2_total.nc +++ b/test/testoutput/tropomi_no2_total.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:3aa14976cfb990be77b1869ec0cb5e4830d752820f0dbbd3ce94f01b0e125a13 -size 116243 +oid sha256:8ad06871bbad75e935c19db1a2da4a03e2ad18e58b3eec896123f769414b005f +size 111804 diff --git a/test/testoutput/tropomi_no2_tropo.nc b/test/testoutput/tropomi_no2_tropo.nc index 18c15fab6..4c870fb63 100644 --- a/test/testoutput/tropomi_no2_tropo.nc +++ b/test/testoutput/tropomi_no2_tropo.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2dea49aa55cf13b61f1be1e96ca814fd85cc838814aed63b7957772d13489013 -size 108155 +oid sha256:bc3aaa051c22494dc7cb301c1d5b74b0bf68bd371a1c82d5de663e3c3e4e5d18 +size 103449 diff --git a/test/testoutput/vadwinds_wmo_multi.nc 
b/test/testoutput/vadwinds_wmo_multi.nc index e074f0f65..35582992f 100644 --- a/test/testoutput/vadwinds_wmo_multi.nc +++ b/test/testoutput/vadwinds_wmo_multi.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:3cfe2c86ccd07f412e04e6d3907703980107fa4ea60577be117d30a3c7721cbf -size 33606 +oid sha256:88040a8312a5d5401b25d1462aa963837faceb8902e2a7e74e134009c55e80d2 +size 33411 diff --git a/test/testoutput/viirs_aod.nc b/test/testoutput/viirs_aod.nc index dd0e32a05..bb89386ed 100644 --- a/test/testoutput/viirs_aod.nc +++ b/test/testoutput/viirs_aod.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:08baff6fb016ecffc7b7471cb8caffa39ed8be9bc84b1ceb71245be6ecffa9f2 -size 14480 +oid sha256:c849da808edeb5ccf871ff644339ffbe72bfd694509af61b19a82dc317a262d8 +size 13692 diff --git a/test/testoutput/viirs_jpss1_oc_l2.nc b/test/testoutput/viirs_jpss1_oc_l2.nc index 98c94d6b2..48f2acdcd 100644 --- a/test/testoutput/viirs_jpss1_oc_l2.nc +++ b/test/testoutput/viirs_jpss1_oc_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:40c149cd8f34db4211d8cb6966c8340919bbd2a88aff3afbe6b3c74e8bfbab2b -size 484634 +oid sha256:f427d03b07bda75d58f655a6ea9e4a119f217fda587b69db6547db6657ed7e49 +size 139085 diff --git a/test/testoutput/viirs_jpss1_oc_l3.nc b/test/testoutput/viirs_jpss1_oc_l3.nc index 43389a969..e1aeb7090 100644 --- a/test/testoutput/viirs_jpss1_oc_l3.nc +++ b/test/testoutput/viirs_jpss1_oc_l3.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:fb96f50b7fa2f0885bd6b23ef242c7b047042e305c9f863bdb5d6891ad1a22b8 -size 17642 +oid sha256:af4b39af59916a755248db68985c871cfa231175aeed0598e26bdbbc830a2b45 +size 13387 diff --git a/test/testoutput/wmo_raob_double.nc4 b/test/testoutput/wmo_raob_double.nc4 index 77b748252..ec5efc069 100644 --- a/test/testoutput/wmo_raob_double.nc4 +++ b/test/testoutput/wmo_raob_double.nc4 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid 
sha256:49cbb72d61aad85330a5382d0a454fccf578c146dc0e34c39fe65cb5cbd23a4c -size 190349 +oid sha256:76c154bb6c514751fbd67bf5875cd6c92d890eea2e40018beb6976b10c7c1f86 +size 202605