diff --git a/src/compo/mls_o3_nc2ioda.py b/src/compo/mls_o3_nc2ioda.py index e3a67056d..85442570b 100755 --- a/src/compo/mls_o3_nc2ioda.py +++ b/src/compo/mls_o3_nc2ioda.py @@ -29,36 +29,37 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("air_pressure", "float"), - ("dateTime", "integer"), + ("pressure", "float"), + ("dateTime", "long"), ] +varname_ozone = 'ozoneProfile' + ioda2nc = {} ioda2nc['latitude'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Latitude' ioda2nc['longitude'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Longitude' ioda2nc['dateTime'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Time' -ioda2nc['air_pressure'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Pressure' +ioda2nc['pressure'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/Pressure' ioda2nc['valKey'] = 'HDFEOS/SWATHS/O3/Data Fields/O3' ioda2nc['precision'] = 'HDFEOS/SWATHS/O3/Data Fields/O3Precision' ioda2nc['convergence'] = 'HDFEOS/SWATHS/O3/Data Fields/Convergence' ioda2nc['status'] = 'HDFEOS/SWATHS/O3/Data Fields/Status' ioda2nc['quality'] = 'HDFEOS/SWATHS/O3/Data Fields/Quality' -ioda2nc['solar_zenith_angle'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/SolarZenithAngle' +ioda2nc['solarZenithAngle'] = 'HDFEOS/SWATHS/O3/Geolocation Fields/SolarZenithAngle' obsvars = { - 'mole_fraction_of_ozone_in_air': 'mole_fraction_of_ozone_in_air', + 'mole_fraction_of_ozone_in_air': varname_ozone, } AttrData = { - 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), + 'converter': os.path.basename(__file__) } DimDict = { } VarDims = { - 'mole_fraction_of_ozone_in_air': ['nlocs'], + varname_ozone: ['Location'], } @@ -79,10 +80,10 @@ def __init__(self, filenames, lvmin, lvmax, sTAI, eTAI, nrt, qcOn, errorOn): if(v != 'valKey' and v != 'errKey'): self.outdata[(v, 'MetaData')] = [] self.outdata[('level', 'MetaData')] = [] - self._setVarDict('mole_fraction_of_ozone_in_air') - self.outdata[self.varDict['mole_fraction_of_ozone_in_air']['valKey']] = [] + self._setVarDict(varname_ozone) + 
self.outdata[self.varDict[varname_ozone]['valKey']] = [] if(self.qcOn): - self.outdata[self.varDict['mole_fraction_of_ozone_in_air']['errKey']] = [] + self.outdata[self.varDict[varname_ozone]['errKey']] = [] self._read() @@ -112,12 +113,8 @@ def _setVarAttr(self, iodavar): self.varAttrs[vkey]['units'] = 'seconds since 1993-01-01T00:00:00Z' elif('angle' in v.lower() or 'latitude' in v.lower() or 'longitude' in v.lower()): self.varAttrs[vkey]['units'] = 'degrees' - elif('flag' in v.lower()): - self.varAttrs[vkey]['units'] = 'unitless' elif('prior' in v.lower()): self.varAttrs[vkey]['units'] = 'ppmv' - else: - self.varAttrs[vkey]['units'] = 'unitless' # Read data needed from raw MLS file. def _read_nc(self, filename, ifile, maxfile): @@ -139,7 +136,7 @@ def _read_nc(self, filename, ifile, maxfile): d = {} for k in list(ioda2nc.keys()): - if (k == 'air_pressure'): + if (k == 'pressure'): d[k] = ncd[ioda2nc[k]][...]*100. # convert to Pa d[k].mask = False else: @@ -179,13 +176,13 @@ def _just_flatten(self, d): dd['precision'] = d['precision'][idx, self.lmin:self.lmax+1] lvec = np.arange(self.lmin+1, self.lmax+2) dd['level'], dd['status'] = np.meshgrid(np.arange(self.lmin+1, self.lmax+2), d['status'][idx]) - dd['air_pressure'], dd['dateTime'] = np.meshgrid(d['air_pressure'][self.lmin:self.lmax+1], d['dateTime'][idx]) + dd['pressure'], dd['dateTime'] = np.meshgrid(d['pressure'][self.lmin:self.lmax+1], d['dateTime'][idx]) dd['quality'] = np.tile(d['quality'][idx], (lvec.shape[0], 1)).T dd['convergence'] = np.tile(d['convergence'][idx], (lvec.shape[0], 1)).T dd['status'] = np.tile(d['status'][idx], (lvec.shape[0], 1)).T dd['latitude'] = np.tile(d['latitude'][idx], (lvec.shape[0], 1)).T dd['longitude'] = np.tile(d['longitude'][idx], (lvec.shape[0], 1)).T - dd['solar_zenith_angle'] = np.tile(d['solar_zenith_angle'][idx], (lvec.shape[0], 1)).T + dd['solarZenithAngle'] = np.tile(d['solarZenithAngle'][idx], (lvec.shape[0], 1)).T for k in list(dd.keys()): dd[k] = np.asarray(dd[k]) 
dd[k] = dd[k].flatten().tolist() @@ -209,9 +206,9 @@ def _do_qc(self, d): if(d['dateTime'][irec] < self.startTAI or d['dateTime'][irec] > self.endTAI): continue for k in list(d.keys()): - if (len(d[k].shape) == 1 and k != 'air_pressure'): + if (len(d[k].shape) == 1 and k != 'pressure'): dd[k].append(d[k][irec]) - elif (k == 'air_pressure'): + elif (k == 'pressure'): dd[k].append(d[k][ilev]) elif k != 'errKey': dd[k].append(d[k][irec, ilev]) @@ -220,7 +217,7 @@ def _do_qc(self, d): def _read(self): # set up variable names for IODA - self._setVarAttr('mole_fraction_of_ozone_in_air') + self._setVarAttr(varname_ozone) # loop through input filenames for i, f in enumerate(self.filenames): @@ -246,8 +243,7 @@ def _read(self): if(self.errorOn): self.outdata[self.varDict[iodavar]['errKey']].extend(d['errKey']) - DimDict['nlocs'] = np.float32(len(self.outdata[('dateTime', 'MetaData')])) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = np.float32(len(self.outdata[('dateTime', 'MetaData')])) for k in self.outdata.keys(): self.outdata[k] = np.asarray(self.outdata[k]) diff --git a/src/compo/modis_aod2ioda.py b/src/compo/modis_aod2ioda.py index a380d8bc8..561946204 100755 --- a/src/compo/modis_aod2ioda.py +++ b/src/compo/modis_aod2ioda.py @@ -14,6 +14,7 @@ from datetime import datetime, date, timedelta import os from pathlib import Path +from pyhdf.SD import SD, SDC IODA_CONV_PATH = Path(__file__).parent/"@SCRIPT_LIB_PATH@" if not IODA_CONV_PATH.is_dir(): @@ -33,8 +34,10 @@ obsvars = ["aerosolOpticalDepth"] # A dictionary of global attributes. More filled in further down. -AttrData = {} -AttrData['ioda_object_type'] = 'AOD at 550nm' +AttrData = { + 'converter': os.path.basename(__file__), + 'description': 'AOD at 550nm' +} # A dictionary of variable dimensions. 
DimDict = {} @@ -52,12 +55,19 @@ class AOD(object): - def __init__(self, filenames, obs_time_arg): + def __init__(self, filenames, obs_time, pltfrm): self.filenames = filenames - self.obs_time = obs_time_arg + self.obs_time = obs_time + self.pltfrm = pltfrm self.varDict = defaultdict(lambda: defaultdict(dict)) self.outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict)) self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) + # there's absolutely no difference in the hdf4 files attributes + # between Terra and Aqua files. So it is user specified + AttrData['platform'] = pltfrm + # sensor would be always MODIS for this converter + AttrData['sensor'] = 'MODIS' + AttrData['datetimeReference'] = obs_time self._read() def _read(self): @@ -79,50 +89,45 @@ def _read(self): self.varAttrs[('dateTime', metaDataName)]['units'] = 'seconds since 1993-01-01T00:00:00Z' # Make empty lists for the output vars - self.outdata[('latitude', metaDataName)] = np.array([], dtype=np.float64) - self.outdata[('longitude', metaDataName)] = np.array([], dtype=np.float64) + self.outdata[('latitude', metaDataName)] = np.array([], dtype=np.float32) + self.outdata[('longitude', metaDataName)] = np.array([], dtype=np.float32) self.outdata[('dateTime', metaDataName)] = np.array([], dtype=np.int64) for iodavar in obsvars: - self.outdata[self.varDict[iodavar]['valKey']] = np.array([], dtype=np.float64) - self.outdata[self.varDict[iodavar]['errKey']] = np.array([], dtype=np.float64) + self.outdata[self.varDict[iodavar]['valKey']] = np.array([], dtype=np.float32) + self.outdata[self.varDict[iodavar]['errKey']] = np.array([], dtype=np.float32) self.outdata[self.varDict[iodavar]['qcKey']] = np.array([], dtype=np.int32) # loop through input filenames for f in self.filenames: - ncd = nc.Dataset(f, 'r') - ncd_str = str(ncd) - if 'Terra' in ncd_str: - AttrData['platform'] = 'Terra' - elif 'Aqua' in ncd_str: - AttrData['platform'] = 'Aqua' - if 'MODIS' in ncd_str: - AttrData['sensor'] = 
'v.modis_terra'
+            hdf = SD(f, SDC.READ)

             # Get variables
-            modis_time = ncd.variables['Scan_Start_Time'][:].ravel()
-            modis_time = modis_time.astype('float64')
-            lats = ncd.variables['Latitude'][:].ravel()
+            modis_time = hdf.select('Scan_Start_Time')[:].ravel()
+            print(f"length of time var: {len(modis_time)}")
+            modis_time = modis_time.astype('float64')
+            lats = hdf.select('Latitude')[:].ravel()
             lats = lats.astype('float32')
-            lons = ncd.variables['Longitude'][:].ravel()
+            lons = hdf.select('Longitude')[:].ravel()
             lons = lons.astype('float32')
-            aod = ncd.variables['AOD_550_Dark_Target_Deep_Blue_Combined'][:].ravel()
-            land_sea_flag = ncd.variables['Land_sea_Flag'][:].ravel()
-            QC_flag = ncd.variables['Land_Ocean_Quality_Flag'][:].ravel()
+            aod = hdf.select('AOD_550_Dark_Target_Deep_Blue_Combined')[:].ravel()
+            aod = aod.astype('float64')
+            land_sea_flag = hdf.select('Land_sea_Flag')[:].ravel()
+            QC_flag = hdf.select('Land_Ocean_Quality_Flag')[:].ravel()
             QC_flag = QC_flag.astype('int32')
-            sol_zen = ncd.variables['Solar_Zenith'][:].ravel()
-            sen_zen = ncd.variables['Sensor_Zenith'][:].ravel()
-            unc_land = ncd.variables['Deep_Blue_Aerosol_Optical_Depth_550_Land_Estimated_Uncertainty'][:].ravel()
+            sol_zen = hdf.select('Solar_Zenith')[:].ravel()
+            sen_zen = hdf.select('Sensor_Zenith')[:].ravel()
+            unc_land = hdf.select('Deep_Blue_Aerosol_Optical_Depth_550_Land_Estimated_Uncertainty')[:].ravel()

             # Remove undefined values
             pos_index = np.where(aod > 0)
             lats = lats[pos_index]
             lons = lons[pos_index]
-            aod = aod[pos_index]
+            aod = aod[pos_index] * 1E-3  # see scale factor
             land_sea_flag = land_sea_flag[pos_index]
             QC_flag = QC_flag[pos_index]
             sol_zen = sol_zen[pos_index]
             sen_zen = sen_zen[pos_index]
-            unc_land = unc_land[pos_index]
+            unc_land = unc_land[pos_index] * 1E-3  # see scale factor
             modis_time = modis_time[pos_index]
             obs_time = np.full(len(modis_time), long_missing_value, dtype=np.int64)
             for n, t in enumerate(modis_time):
@@ -151,7 +156,7 @@ def main():
# get command line arguments - # Usage: python blah.py -i /path/to/obs/2021060801.nc /path/to/obs/2021060802.nc ... -t Analysis_time /path/to/obs/2021060823.nc + # Usage: python blah.py -i /path/to/obs/2021060801.nc /path/to/obs/2021060802.nc ... -p -t Analysis_time /path/to/obs/2021060823.nc # -o /path/to/ioda/20210608.nc # where the input obs could be for any desired interval to concatenated together. Analysis time is generally the midpoint of # analysis window. @@ -171,6 +176,10 @@ def main(): '-t', '--time', help="Observation time in global attributes (YYYYMMDDHH)", type=int, required=True) + required.add_argument( + '-p', '--platform', + help="AQUA or TERRA satellite?", + type=str, required=True) required.add_argument( '-o', '--output', help="path of IODA output file", @@ -184,7 +193,7 @@ def main(): obs_time = args.time # Read in the AOD data - aod_class = AOD(args.input, args.time) + aod_class = AOD(args.input, args.time, args.platform) # write everything out writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) writer.BuildIoda(aod_class.outdata, VarDims, aod_class.varAttrs, AttrData) diff --git a/src/compo/mopitt_co_nc2ioda.py b/src/compo/mopitt_co_nc2ioda.py index a88b7aed8..3d2d01972 100755 --- a/src/compo/mopitt_co_nc2ioda.py +++ b/src/compo/mopitt_co_nc2ioda.py @@ -27,23 +27,25 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] +varname_co = 'carbonmonoxideColumn' + +# This is apparently not used obsvars = { 'carbonmonoxide_total_column': 'carbon_monoxide_in_total_column', } AttrData = { 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), } DimDict = { } VarDims = { - 'carbon_monoxide_in_total_column': ['nlocs'], + varname_co: ['Location'], } # constants @@ -64,7 +66,7 @@ def __init__(self, filenames, time_range): def _read(self): # set up variable names for IODA - for iodavar in ['carbon_monoxide_in_total_column']: + for iodavar in [varname_co, ]: 
self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() @@ -73,7 +75,6 @@ def _read(self): self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude' self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'mol m-2' self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'mol m-2' - self.varAttrs[iodavar, iconv.OqcName()]['units'] = 'unitless' # loop through input filenames first = True for f in self.filenames: @@ -102,11 +103,12 @@ def _read(self): qa = dat.variables['RetrievalAnomalyDiagnostic'][:].sum(axis=1) # time data, we don't need precision beyond the second - inittime = datetime.strptime(StartDateTime, "%Y-%m-%dT%H:%M:%S.%fZ") - times = np.array([datetime.strftime( - inittime + timedelta(seconds=int(i)) - timedelta(seconds=int(secd[0])), - "%Y-%m-%dT%H:%M:%S")+"Z" for i in secd], dtype=object) - AttrData['date_time_string'] = times[0] + inittime = datetime.strptime(StartDateTime[:19], "%Y-%m-%dT%H:%M:%S") + time_units = 'seconds since ' + StartDateTime[:19] + 'Z' + self.varAttrs[('dateTime', 'MetaData')]['units'] = time_units + times = [int(i) - int(secd[0]) for i in secd] + times = np.array(times) + AttrData['datetimeReference'] = StartDateTime[:19] + 'Z' # get ak AttrData['averaging_kernel_levels'] = np.int32(nlevs) @@ -156,15 +158,17 @@ def _read(self): # date range to fit DA window date_start = datetime.strptime(self.time_range[0], "%Y%m%d%H") + time_offset1 = round((date_start - inittime).total_seconds()) date_end = datetime.strptime(self.time_range[1], "%Y%m%d%H") - date_list = [datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ") for date in times] - tsf = [(date_i >= date_start) & (date_i < date_end) for date_i in date_list] - - flg = np.logical_and(qaf, tsf) + time_offset2 = round((date_end - inittime).total_seconds()) + flg = [] + for n, t in enumerate(times): + if t >= time_offset1 and t < time_offset2 and qaf[n]: + 
flg.append(n)

             if first:
                 # add metadata variables
-                self.outdata[('datetime', 'MetaData')] = times[flg]
+                self.outdata[('dateTime', 'MetaData')] = times[flg].astype(np.int64)
                 self.outdata[('latitude', 'MetaData')] = lats[flg]
                 self.outdata[('longitude', 'MetaData')] = lons[flg]
                 self.outdata[('apriori_term', 'RtrvlAncData')] = ap_tc[flg]
@@ -179,44 +183,42 @@ def _read(self):
                 self.outdata[self.varDict[iodavar]['valKey']] = xr_tc[flg]
                 self.outdata[self.varDict[iodavar]['errKey']] = er_tc[flg]
-                self.outdata[self.varDict[iodavar]['qcKey']] = qa[flg]
+                self.outdata[self.varDict[iodavar]['qcKey']] = qa[flg].astype(np.int32)
             else:
-                self.outdata[('datetime', 'MetaData')] = np.concatenate((
-                    self.outdata[('datetime', 'MetaData')], times[flg]))
-                self.outdata[('latitude', 'MetaData')] = np.concatenate((
-                    self.outdata[('latitude', 'MetaData')], lats[flg]))
-                self.outdata[('longitude', 'MetaData')] = np.concatenate((
-                    self.outdata[('longitude', 'MetaData')], lons[flg]))
-                self.outdata[('apriori_term', 'RtrvlAncData')] = np.concatenate((
-                    self.outdata[('apriori_term', 'RtrvlAncData')], ap_tc[flg]))
+                self.outdata[('dateTime', 'MetaData')] = np.concatenate((
+                    self.outdata[('dateTime', 'MetaData')], times[flg].astype(np.int64)))
+                self.outdata[('latitude', 'MetaData')] = np.concatenate((
+                    self.outdata[('latitude', 'MetaData')], lats[flg]))
+                self.outdata[('longitude', 'MetaData')] = np.concatenate((
+                    self.outdata[('longitude', 'MetaData')], lons[flg]))
+                self.outdata[('apriori_term', 'RtrvlAncData')] = np.concatenate((
+                    self.outdata[('apriori_term', 'RtrvlAncData')], ap_tc[flg]))
             for k in range(nlevs):
                 varname_ak = ('averaging_kernel_level_'+str(k+1), 'RtrvlAncData')
                 self.outdata[varname_ak] = np.concatenate(
-                    (self.outdata[varname_ak], ak_tc_dimless[:, k][flg]))
+                    (self.outdata[varname_ak], ak_tc_dimless[:, k][flg]))
             # add top vertice in IODA file, here it is 0hPa but can be different
             # for other obs stream
             for k in range(nlevs+1):
                 varname_pr = ('pressure_level_'+str(k+1), 'RtrvlAncData')
                 self.outdata[varname_pr] = np.concatenate(
-                    (self.outdata[varname_pr], hPa2Pa * pr_gd[:, k][flg]))
+                    (self.outdata[varname_pr], hPa2Pa * pr_gd[:, k][flg]))
                 self.outdata[self.varDict[iodavar]['valKey']] = np.concatenate(
-                    (self.outdata[self.varDict[iodavar]['valKey']], xr_tc[flg]))
+                    (self.outdata[self.varDict[iodavar]['valKey']], xr_tc[flg]))
                 self.outdata[self.varDict[iodavar]['errKey']] = np.concatenate(
-                    (self.outdata[self.varDict[iodavar]['errKey']], er_tc[flg]))
+                    (self.outdata[self.varDict[iodavar]['errKey']], er_tc[flg]))
                 self.outdata[self.varDict[iodavar]['qcKey']] = np.concatenate(
-                    (self.outdata[self.varDict[iodavar]['qcKey']], qa[flg]))
+                    (self.outdata[self.varDict[iodavar]['qcKey']], qa[flg])).astype(np.int32)
             first = False

-        DimDict['nlocs'] = len(self.outdata[('datetime', 'MetaData')])
-        AttrData['nlocs'] = np.int32(DimDict['nlocs'])
+        DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')])
         for k in range(nlevs):
             varname = 'averaging_kernel_level_'+str(k+1)
             vkey = (varname, 'RtrvlAncData')
             self.varAttrs[vkey]['coordinates'] = 'longitude latitude'
-            self.varAttrs[vkey]['units'] = ''


 def main():
diff --git a/src/compo/omi_o3_nc2ioda.py b/src/compo/omi_o3_nc2ioda.py
index 778001ce3..41f757f07 100755
--- a/src/compo/omi_o3_nc2ioda.py
+++ b/src/compo/omi_o3_nc2ioda.py
@@ -24,38 +24,35 @@
 from orddicts import DefaultOrderedDict
 import ioda_conv_engines as iconv

-
-def is_bit_set(integer_value, bit_position):
-    return (integer_value & (1 << bit_position)) != 0
-
-
 # Global Dictionaries.
locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("dateTime", "integer"), + ("dateTime", "long"), ] +# Name to call the output ozone variable +varname_ozone = 'ozoneTotal' + +# Dictionary of variable name found in input file and corresponding output name +obsvars = { + 'integrated_layer_ozone_in_air': varname_ozone, +} + ioda2nc = {} ioda2nc['latitude'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/Latitude' ioda2nc['longitude'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/Longitude' ioda2nc['dateTime'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/Time' -ioda2nc['solar_zenith_angle'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/SolarZenithAngle' +ioda2nc['solarZenithAngle'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Geolocation Fields/SolarZenithAngle' ioda2nc['prior_o3'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/APrioriLayerO3' # ioda2nc['Layer_Efficiency'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/LayerEfficiency' ioda2nc['valKey'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/ColumnAmountO3' ioda2nc['quality_flag'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/QualityFlags' ioda2nc['algorithm_flag'] = 'HDFEOS/SWATHS/OMI Column Amount O3/Data Fields/AlgorithmFlags' - -obsvars = { - 'integrated_layer_ozone_in_air': 'integrated_layer_ozone_in_air', -} - AttrData = { 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), 'satellite': 'aura', 'sensor': 'omi', } @@ -64,9 +61,16 @@ def is_bit_set(integer_value, bit_position): } VarDims = { - 'integrated_layer_ozone_in_air': ['nlocs'], + varname_ozone: ['Location'], } +missing_value = nc.default_fillvals['f4'] +int_missing_value = nc.default_fillvals['i4'] + + +def is_bit_set(integer_value, bit_position): + return (integer_value & (1 << bit_position)) != 0 + class omi(object): def __init__(self, filenames, sTAI, eTAI, qcOn): @@ -77,15 +81,15 @@ def __init__(self, filenames, sTAI, eTAI, qcOn): self.startTAI = sTAI 
self.endTAI = eTAI self.qcOn = qcOn - self._setVarDict('integrated_layer_ozone_in_air') + self._setVarDict(varname_ozone) # initialize dictionary with empty list for MetaData variable to populate later. vars2output = list(ioda2nc.keys()) - vars2output.append('scan_position') + vars2output.append('scanPosition') for v in vars2output: if(v != 'valKey'): self.outdata[(v, 'MetaData')] = [] - self.outdata[self.varDict['integrated_layer_ozone_in_air']['valKey']] = [] + self.outdata[self.varDict[varname_ozone]['valKey']] = [] self._read() # set ioda variable keys @@ -95,11 +99,9 @@ def _setVarDict(self, iodavar): # set variable attributes for IODA def _setVarAttr(self, iodavar): self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' - missing_value = 9.96921e+36 - int_missing_value = -2147483647 self.varAttrs[iodavar, iconv.OvalName()]['_FillValue'] = missing_value varsToAddUnits = list(ioda2nc.keys()) - varsToAddUnits.append('scan_position') + varsToAddUnits.append('scanPosition') for v in varsToAddUnits: if(v != 'valKey'): vkey = (v, 'MetaData') @@ -109,8 +111,6 @@ def _setVarAttr(self, iodavar): self.varAttrs[vkey]['units'] = 'seconds since 1993-01-01T00:00:00Z' elif('angle' in v.lower()): self.varAttrs[vkey]['units'] = 'degrees' - elif('flag' in v.lower()): - self.varAttrs[vkey]['units'] = 'unitless' elif('prior' in v.lower()): self.varAttrs[vkey]['units'] = 'ppmv' self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'DU' @@ -137,7 +137,7 @@ def _just_flatten(self, d): scn_tmp = scn_tmp.astype('float32') tmp = tmp.astype(np.int64) dd[k] = tmp.flatten().tolist() - dd['scan_position'] = scn_tmp.flatten().tolist() + dd['scanPosition'] = scn_tmp.flatten().tolist() elif(k == 'Prior_O3'): dd[k] = d[k][:, :, 0].flatten().tolist() else: @@ -153,7 +153,7 @@ def _do_qc(self, d): # intialize dictonary of qc'd variables dd = {} flatVars = list(ioda2nc.keys()) - flatVars.append('scan_position') + flatVars.append('scanPosition') for v in flatVars: dd[v] = [] 
@@ -200,20 +200,20 @@ def _do_qc(self, d): if (six_set or eight_set or nine_set): continue # could simply this further with one if statement possibly more clever use of a bit masking. - dd['scan_position'].append(float(iscan+1)) + dd['scanPosition'].append(float(iscan+1)) for v in flatVars: if(v == 'dateTime'): dd[v].append(d[v][itime]) elif(v == 'prior_o3'): dd[v].append(d[v][itime, iscan, 0]) - elif(v != 'scan_position'): + elif(v != 'scanPosition'): dd[v].append(d[v][itime, iscan]) return dd def _read(self): # set up variable names for IODA - for iodavar in ['integrated_layer_ozone_in_air', ]: + for iodavar in [varname_ozone, ]: # self._setVarDict(var) self._setVarAttr(iodavar) # loop through input filenames @@ -233,12 +233,11 @@ def _read(self): for ncvar, iodavar in obsvars.items(): self.outdata[self.varDict[iodavar] ['valKey']].extend(d['valKey']) - DimDict['nlocs'] = len(self.outdata[('longitude', 'MetaData')]) - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = len(self.outdata[('longitude', 'MetaData')]) # add dummy air_pressure so UFO will know this is a total column ob, and not partial. 
- self.outdata[('air_pressure', 'MetaData')] = np.zeros( - DimDict['nlocs']).tolist() - self.varAttrs[('air_pressure', 'MetaData')]['units'] = 'Pa' + self.outdata[('pressure', 'MetaData')] = np.zeros( + DimDict['Location']).tolist() + self.varAttrs[('pressure', 'MetaData')]['units'] = 'Pa' for k in self.outdata.keys(): self.outdata[k] = np.asarray(self.outdata[k]) @@ -247,7 +246,7 @@ def _read(self): elif(self.outdata[k].dtype == 'int64' and k != ('dateTime', 'MetaData')): self.outdata[k] = self.outdata[k].astype('int32') elif(self.outdata[k].dtype == 'uint16' or self.outdata[k].dtype == 'uint8'): - self.outdata[k] = self.outdata[k].astype(int) + self.outdata[k] = self.outdata[k].astype('int32') self.outdata[('dateTime', 'MetaData')] = self.outdata[('dateTime', 'MetaData')].astype('int64') # ensure lon is 0-360 self.outdata[('longitude', 'MetaData')] = self.outdata[('longitude', 'MetaData')] % 360 diff --git a/src/compo/ompsnm_o3_nc2ioda.py b/src/compo/ompsnm_o3_nc2ioda.py index ca72c0785..16ae4d7d8 100755 --- a/src/compo/ompsnm_o3_nc2ioda.py +++ b/src/compo/ompsnm_o3_nc2ioda.py @@ -25,21 +25,27 @@ from orddicts import DefaultOrderedDict import ioda_conv_engines as iconv - -# Global Dictionaries. 
+# Dictionary of essential MetaData locationKeyList = [ ("latitude", "float"), ("longitude", "float"), ("dateTime", "long"), ] +# Name to call the output ozone variable +varname_ozone = 'ozoneTotal' + +# Dictionary of variable name found in input file and corresponding output name +obsvars = { + 'integrated_layer_ozone_in_air': varname_ozone, +} # dictionary to map things we're putting into ioda and taking out of instrument native format ioda2nc = {} ioda2nc['latitude'] = 'GeolocationData/Latitude' ioda2nc['longitude'] = 'GeolocationData/Longitude' ioda2nc['dateTime'] = 'GeolocationData/Time' -ioda2nc['solar_zenith_angle'] = 'GeolocationData/SolarZenithAngle' +ioda2nc['solarZenithAngle'] = 'GeolocationData/SolarZenithAngle' ioda2nc['valKey'] = 'ScienceData/ColumnAmountO3' ioda2nc['ground_pixel_quality'] = 'GeolocationData/GroundPixelQualityFlags' ioda2nc['quality_flags'] = 'ScienceData/QualityFlags' @@ -47,14 +53,8 @@ ioda2nc['measurement_quality_flags'] = 'ScienceData/MeasurementQualityFlags' ioda2nc['instrument_quality_flags'] = 'GeolocationData/InstrumentQualityFlags' - -obsvars = { - 'integrated_layer_ozone_in_air': 'integrated_layer_ozone_in_air', -} - AttrData = { 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), 'satellite': 'npp', 'sensor': 'ompsnm', } @@ -63,7 +63,7 @@ } VarDims = { - 'integrated_layer_ozone_in_air': ['nlocs'], + varname_ozone: ['Location'], } @@ -75,17 +75,17 @@ def __init__(self, filenames, sTAI, eTAI): self.varAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) self.startTAI = sTAI self.endTAI = eTAI - self._setVarDict('integrated_layer_ozone_in_air') + self._setVarDict(varname_ozone) vars2output = list(ioda2nc.keys()) - vars2output.append('scan_position') + vars2output.append('scanPosition') for v in vars2output: if(v != 'valKey'): self.outdata[(v, 'MetaData')] = [] - self.outdata[self.varDict['integrated_layer_ozone_in_air']['valKey']] = [] + self.outdata[self.varDict[varname_ozone]['valKey']] = [] - 
self._setVarDict('integrated_layer_ozone_in_air') - self.outdata[self.varDict['integrated_layer_ozone_in_air']['valKey']] = [] + self._setVarDict(varname_ozone) + self.outdata[self.varDict[varname_ozone]['valKey']] = [] self._read() @@ -98,7 +98,7 @@ def _setVarAttr(self, iodavar): self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude' varsToAddUnits = list(ioda2nc.keys()) - varsToAddUnits.append('scan_position') + varsToAddUnits.append('scanPosition') for v in varsToAddUnits: if(v != 'valKey'): vkey = (v, 'MetaData') @@ -108,15 +108,11 @@ def _setVarAttr(self, iodavar): self.varAttrs[vkey]['units'] = 'seconds since 1993-01-01T00:00:00Z' elif('angle' in v.lower()): self.varAttrs[vkey]['units'] = 'degrees' - elif('flag' in v.lower()): - self.varAttrs[vkey]['units'] = 'unitless' elif('prior' in v.lower()): self.varAttrs[vkey]['units'] = 'ppmv' - else: - self.varAttrs[vkey]['units'] = 'unitless' self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'DU' - vkey = ('air_pressure', 'MetaData') + vkey = ('pressure', 'MetaData') self.varAttrs[vkey]['units'] = 'Pa' # Read data needed from raw OMPSNM file. 
@@ -132,8 +128,8 @@ def _read_nc(self, filename): # mesh time and scan_position to get flattened array instead of using loops time_vec = d['dateTime'] scan_position_vec = np.arange(1, d['valKey'].shape[1]+1) - d['scan_position'], d['dateTime'] = np.meshgrid(scan_position_vec, time_vec) - d['scan_position'] = d['scan_position'].astype('float32') + d['scanPosition'], d['dateTime'] = np.meshgrid(scan_position_vec, time_vec) + d['scanPosition'] = d['scanPosition'].astype('float32') d['measurement_quality_flags'].mask = False d['instrument_quality_flags'].mask = False d['measurement_quality_flags'] = np.tile(d['measurement_quality_flags'], (scan_position_vec.shape[0], 1)).T @@ -144,7 +140,7 @@ def _read_nc(self, filename): def _read(self): # set up variable names for IODA - for iodavar in ['integrated_layer_ozone_in_air', ]: + for iodavar in [varname_ozone, ]: # self._setVarDict(var) self._setVarAttr(iodavar) # loop through input filenames @@ -160,7 +156,7 @@ def _read(self): # add dummy air_pressure so UFO will know this is a total column ob, and not partial. nloc = len(self.outdata[('dateTime', 'MetaData')]) - self.outdata[('air_pressure', 'MetaData')] = np.zeros(nloc).tolist() + self.outdata[('pressure', 'MetaData')] = np.zeros(nloc).tolist() for k in self.outdata.keys(): self.outdata[k] = np.asarray(self.outdata[k]) @@ -168,8 +164,7 @@ def _read(self): self.outdata[k] = self.outdata[k].astype('float32') elif(self.outdata[k].dtype == 'int64' and k != ('dateTime', 'MetaData')): self.outdata[k] = self.outdata[k].astype('int32') - DimDict['nlocs'] = self.outdata[('dateTime', 'MetaData')].shape[0] - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = nloc self.outdata[('dateTime', 'MetaData')] = self.outdata[('dateTime', 'MetaData')].astype(np.int64) self.outdata[('longitude', 'MetaData')] = self.outdata[('longitude', 'MetaData')] % 360 # end ompsnm object. 
diff --git a/src/gnssro/gnssro_bufr2ioda.py b/src/gnssro/gnssro_bufr2ioda.py index 3b681430c..ac17004f2 100644 --- a/src/gnssro/gnssro_bufr2ioda.py +++ b/src/gnssro/gnssro_bufr2ioda.py @@ -17,6 +17,7 @@ import os from pathlib import Path from itertools import repeat +import netCDF4 as nc IODA_CONV_PATH = Path(__file__).parent/"@SCRIPT_LIB_PATH@" if not IODA_CONV_PATH.is_dir(): @@ -30,13 +31,18 @@ # globals ioda_float_type = 'float32' ioda_int_type = 'int32' -float_missing_value = -1.0e+37 -int_missing_value = -2147483647 +float_missing_value = nc.default_fillvals['f4'] +int_missing_value = nc.default_fillvals['i4'] +long_missing_value = nc.default_fillvals['i8'] +string_missing_value = '_' + +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] @@ -68,42 +74,38 @@ def main(args): # prepare global attributes we want to output in the file, # in addition to the ones already loaded in from the input file GlobalAttrs = {} - GlobalAttrs['date_time_string'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") - date_time_int32 = np.array(int(args.date.strftime("%Y%m%d%H")), dtype='int32') - GlobalAttrs['date_time'] = date_time_int32.item() + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") GlobalAttrs['converter'] = os.path.basename(__file__) # pass parameters to the IODA writer VarDims = { - 'bending_angle': ['nlocs'], - 'refractivity': ['nlocs'] + 'bendingAngle': ['Location'], + 'atmosphericRefractivity': ['Location'] } # write them out - nlocs = obs_data[('bending_angle', 'ObsValue')].shape[0] - DimDict = {'nlocs': nlocs} + nlocs = obs_data[('bendingAngle', 'ObsValue')].shape[0] + DimDict = {'Location': nlocs} meta_data_types = def_meta_types() for k, v in meta_data_types.items(): locationKeyList.append((k, v)) writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) 
VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('bending_angle', 'ObsValue')]['units'] = 'Radians' - VarAttrs[('bending_angle', 'ObsError')]['units'] = 'Radians' - VarAttrs[('bending_angle', 'PreQC')]['units'] = 'unitless' - VarAttrs[('refractivity', 'ObsValue')]['units'] = 'N' - VarAttrs[('refractivity', 'ObsError')]['units'] = 'N' - VarAttrs[('refractivity', 'PreQC')]['units'] = 'unitless' - - VarAttrs[('bending_angle', 'ObsValue')]['_FillValue'] = float_missing_value - VarAttrs[('bending_angle', 'ObsError')]['_FillValue'] = float_missing_value - VarAttrs[('bending_angle', 'PreQC')]['_FillValue'] = int_missing_value - VarAttrs[('refractivity', 'ObsValue')]['_FillValue'] = float_missing_value - VarAttrs[('refractivity', 'ObsError')]['_FillValue'] = float_missing_value - VarAttrs[('refractivity', 'PreQC')]['_FillValue'] = int_missing_value + VarAttrs[('bendingAngle', 'ObsValue')]['units'] = 'Radians' + VarAttrs[('bendingAngle', 'ObsError')]['units'] = 'Radians' + VarAttrs[('atmosphericRefractivity', 'ObsValue')]['units'] = 'N' + VarAttrs[('atmosphericRefractivity', 'ObsError')]['units'] = 'N' + + VarAttrs[('bendingAngle', 'ObsValue')]['_FillValue'] = float_missing_value + VarAttrs[('bendingAngle', 'ObsError')]['_FillValue'] = float_missing_value + VarAttrs[('bendingAngle', 'PreQC')]['_FillValue'] = int_missing_value + VarAttrs[('atmosphericRefractivity', 'ObsValue')]['_FillValue'] = float_missing_value + VarAttrs[('atmosphericRefractivity', 'ObsError')]['_FillValue'] = float_missing_value + VarAttrs[('atmosphericRefractivity', 'PreQC')]['_FillValue'] = int_missing_value VarAttrs[('latitude', 'MetaData')]['_FillValue'] = float_missing_value VarAttrs[('longitude', 'MetaData')]['_FillValue'] = float_missing_value - VarAttrs[('altitude', 'MetaData')]['_FillValue'] = float_missing_value + VarAttrs[('height', 'MetaData')]['_FillValue'] = float_missing_value # final write to IODA file writer.BuildIoda(obs_data, VarDims, VarAttrs, GlobalAttrs) 
@@ -153,10 +155,13 @@ def get_meta_data(bufr): hour = codes_get(bufr, 'hour') minute = codes_get(bufr, 'minute') second = codes_get(bufr, 'second') # non-integer value + second = round(second) - # should really add seconds - dtg = ("%4i-%.2i-%.2iT%.2i:%.2i:00Z" % (year, month, day, hour, minute)) - profile_meta_data['datetime'] = datetime.strptime(dtg, "%Y-%m-%dT%H:%M:%SZ") + # get string date, translate to a datetime object, then offset from epoch + dtg = ("%4i-%.2i-%.2iT%.2i:%.2i:%.2iZ" % (year, month, day, hour, minute, second)) + this_datetime = datetime.strptime(dtg, "%Y-%m-%dT%H:%M:%SZ") + time_offset = round((this_datetime - epoch).total_seconds()) + profile_meta_data['dateTime'] = np.int64(time_offset) return profile_meta_data @@ -194,7 +199,7 @@ def get_obs_data(bufr, profile_meta_data, add_qc, record_number=None): i_bang_non_nominal = get_normalized_bit(profile_meta_data['qualityFlag'], bit_index=16-5) iasc = get_normalized_bit(profile_meta_data['qualityFlag'], bit_index=16-3) # add rising/setting (ascending/descending) bit - obs_data[('ascending_flag', 'MetaData')] = np.array(np.repeat(iasc, krepfac[0]), dtype=ioda_int_type) + obs_data[('satelliteAscendingFlag', 'MetaData')] = np.array(np.repeat(iasc, krepfac[0]), dtype=ioda_int_type) # print( " ... 
RO QC flags: %i %i %i %i" % (i_non_nominal, i_phase_non_nominal, i_bang_non_nominal, iasc) ) @@ -203,9 +208,9 @@ def get_obs_data(bufr, profile_meta_data, add_qc, record_number=None): return {} # value, ob_error, qc - obs_data[('bending_angle', "ObsValue")] = assign_values(bang) - obs_data[('bending_angle', "ObsError")] = assign_values(bang_err) - obs_data[('bending_angle', "PreQC")] = np.full(krepfac[0], 0, dtype=ioda_int_type) + obs_data[('bendingAngle', "ObsValue")] = assign_values(bang) + obs_data[('bendingAngle', "ObsError")] = assign_values(bang_err) + obs_data[('bendingAngle', "PreQC")] = np.full(krepfac[0], 0, dtype=ioda_int_type) # (geometric) height is read as integer but expected as float in output height = codes_get_array(bufr, 'height', ktype=float) @@ -216,31 +221,32 @@ def get_obs_data(bufr, profile_meta_data, add_qc, record_number=None): refrac_conf = codes_get_array(bufr, 'percentConfidence')[sum(krepfac[:1])+1:sum(krepfac[:2])+1] # value, ob_error, qc - obs_data[('refractivity', "ObsValue")] = assign_values(refrac) - obs_data[('refractivity', "ObsError")] = assign_values(refrac_err) - obs_data[('refractivity', "PreQC")] = np.full(krepfac[0], 0, dtype=ioda_int_type) + obs_data[('atmosphericRefractivity', "ObsValue")] = assign_values(refrac) + obs_data[('atmosphericRefractivity', "ObsError")] = assign_values(refrac_err) + obs_data[('atmosphericRefractivity', "PreQC")] = np.full(krepfac[0], 0, dtype=ioda_int_type) meta_data_types = def_meta_types() obs_data[('latitude', 'MetaData')] = assign_values(lats) obs_data[('longitude', 'MetaData')] = assign_values(lons) - obs_data[('impact_parameter', 'MetaData')] = assign_values(impact) - obs_data[('altitude', 'MetaData')] = assign_values(height) + obs_data[('impactParameterRO', 'MetaData')] = assign_values(impact) + obs_data[('height', 'MetaData')] = assign_values(height) for k, v in profile_meta_data.items(): - if type(v) is int: + if type(v) is np.int64: + obs_data[(k, 'MetaData')] = np.array(np.repeat(v, 
krepfac[0]), dtype=np.int64) + elif type(v) is int: obs_data[(k, 'MetaData')] = np.array(np.repeat(v, krepfac[0]), dtype=ioda_int_type) elif type(v) is float: obs_data[(k, 'MetaData')] = np.array(np.repeat(v, krepfac[0]), dtype=ioda_float_type) - else: # something else (datetime for instance) - string_array = np.repeat(v.strftime("%Y-%m-%dT%H:%M:%SZ"), krepfac[0]) - obs_data[(k, 'MetaData')] = string_array.astype(object) + else: # something else (what do we do with it) + print(f"Found neither float nor int, type={type(v)}; skipping") # set record number (multi file procesing will change this) if record_number is None: nrec = 1 else: nrec = record_number - obs_data[('record_number', 'MetaData')] = np.array(np.repeat(nrec, krepfac[0]), dtype=ioda_int_type) + obs_data[('sequenceNumber', 'MetaData')] = np.array(np.repeat(nrec, krepfac[0]), dtype=ioda_int_type) # get derived profiles geop = codes_get_array(bufr, 'geopotentialHeight')[:-1] @@ -250,10 +256,10 @@ def get_obs_data(bufr, profile_meta_data, add_qc, record_number=None): prof_conf = codes_get_array(bufr, 'percentConfidence')[sum(krepfac[:2])+1:sum(krepfac)+1] # Compute impact height - obs_data[('impact_height', 'MetaData')] = \ - obs_data[('impact_parameter', 'MetaData')] - \ - obs_data[('geoid_height_above_reference_ellipsoid', 'MetaData')] - \ - obs_data[('earth_radius_of_curvature', 'MetaData')] + obs_data[('impactHeightRO', 'MetaData')] = \ + obs_data[('impactParameterRO', 'MetaData')] - \ + obs_data[('geoidUndulation', 'MetaData')] - \ + obs_data[('earthRadiusCurvature', 'MetaData')] if add_qc: good = quality_control(profile_meta_data, height, lats, lons) @@ -272,8 +278,8 @@ def quality_control(profile_meta_data, heights, lats, lons): # bad radius or # large geoid undulation - if (profile_meta_data['earth_radius_of_curvature'] > 6450000.) or (profile_meta_data['earth_radius_of_curvature'] < 6250000.) 
or \ - (abs(profile_meta_data['geoid_height_above_reference_ellipsoid']) > 200): + if (profile_meta_data['earthRadiusCurvature'] > 6450000.) or (profile_meta_data['earthRadiusCurvature'] < 6250000.) or \ + (abs(profile_meta_data['geoidUndulation']) > 200): good = [] # bad profile return good @@ -283,15 +289,15 @@ def def_meta_data(): meta_data_keys = { "qualityFlag": 'radioOccultationDataQualityFlags', - "geoid_height_above_reference_ellipsoid": 'geoidUndulation', - "sensor_azimuth_angle": 'bearingOrAzimuth', - "time": 'timeIncrement', - "earth_radius_of_curvature": 'earthLocalRadiusOfCurvature', - "occulting_sat_id": 'satelliteIdentifier', - "occulting_sat_is": 'satelliteInstruments', - "process_center": 'centre', - "reference_sat_id": 'platformTransmitterIdNumber', - "gnss_sat_class": 'satelliteClassification', + "geoidUndulation": 'geoidUndulation', + "sensorAzimuthAngle": 'bearingOrAzimuth', + "timeIncrement": 'timeIncrement', + "earthRadiusCurvature": 'earthLocalRadiusOfCurvature', + "satelliteIdentifier": 'satelliteIdentifier', + "satelliteInstrument": 'satelliteInstruments', + "dataProviderOrigin": 'centre', + "satelliteTransmitterId": 'platformTransmitterIdNumber', + "satelliteConstellationRO": 'satelliteClassification', } return meta_data_keys @@ -302,18 +308,18 @@ def def_meta_types(): meta_data_types = { "latitude": "float", "longitude": "float", - "datetime": "string", - 'impact_parameter': 'float', - 'impact_height': 'float', + "dateTime": "long", + 'impactParameterRO': 'float', + 'impactHeightRO': 'float', 'height': 'float', "qualityFlag": 'integer', - "geoid_height_above_reference_ellipsoid": 'float', - "earth_radius_of_curvature": 'float', - "occulting_sat_id": 'integer', - "occulting_sat_is": 'integer', - "process_center": 'string', - "reference_sat_id": 'integer', - "gnss_sat_class": 'integer', + "geoidUndulation": 'float', + "earthRadiusCurvature": 'float', + "satelliteIdentifier": 'integer', + "satelliteInstrument": 'integer', + 
"dataProviderOrigin": 'string', + "satelliteTransmitterId": 'integer', + "satelliteConstellationRO": 'integer', } return meta_data_types @@ -347,11 +353,13 @@ def concat_obs_dict(obs_data, append_obs_data): else: if obs_data[gv_key].dtype == float: fill_data = np.repeat(float_missing_value, append_length, dtype=ioda_float_type) + elif obs_data[gv_key].dtype == np.int64: + fill_data = np.repeat(long_missing_value, append_length, dtype=np.int64) elif obs_data[gv_key].dtype == int: fill_data = np.repeat(int_missing_value, append_length, dtype=ioda_int_type) elif obs_data[gv_key].dtype == object: - # string type, extend with empty strings - fill_data = np.repeat("", append_length, dtype=object) + # string type, extend with string missing value + fill_data = np.repeat(string_missing_value, append_length, dtype=object) obs_data[gv_key] = np.append(obs_data[gv_key], fill_data) diff --git a/src/goes/goes.py b/src/goes/goes.py index b1095e901..cfe4ac1b0 100644 --- a/src/goes/goes.py +++ b/src/goes/goes.py @@ -2,8 +2,8 @@ # goes.py # # This class loads, calculates, filters, and makes accessible the variables and attributes required by the -# GoesConverter class for a single GOES-16 or GOES-17 LB1 ABI channel (1-16). The brightness temperature and reflectance -# factor calculations used in this class are derived from sections 3.4.1.2 and 3.4.1.3 in the +# GoesConverter class for a single GOES-16 or GOES-17 LB1 ABI channel (1-16). The brightness temperature and albedo +# calculations used in this class are derived from sections 3.4.1.2 and 3.4.1.3 in the # "GOES-R Advanced Baseline Imager (ABI) Algorithm Theoretical Basis Document For Cloud and Moisture Imagery Product # (CMIP)" Version 3.0 July 30, 2012 (https://www.star.nesdis.noaa.gov/goesr/docs/ATBD/Imagery.pdf). 
The calculations for # the propagation of standard error are from section 2.5.5 of the "NIST/SEMATECH e-Handbook of Statistical Methods" @@ -184,7 +184,7 @@ def _load_rad_data_array(self): def _create_obsvalue_rf_data_array(self): """ - Creates a local data array variable containing the calculated obsvalue reflectance factor data + Creates a local data array variable containing the calculated obsvalue albedo data after fill value filtering by the DQF flags. """ self._obsvalue_rf_data_array = self._rad_data_array * self._kappa0 @@ -199,7 +199,7 @@ def _create_obsvalue_bt_data_array(self): def _create_obserror_rf_data_array(self): """ - Creates a local data array variable containing the calculated obserror reflectance factor data + Creates a local data array variable containing the calculated obserror albedo data after fill value filtering by the DQF flags. """ sqrt_comp = np.power(self._kappa0, 2) * np.power(self._std_dev_radiance_value_of_valid_pixels, 2) @@ -225,7 +225,7 @@ def get_input_file_path(self): def get_obsvalue_rf_data_array(self): """ - Returns the obsvalue reflectance factor data array. + Returns the obsvalue albedo data array. """ return self._obsvalue_rf_data_array @@ -237,7 +237,7 @@ def get_obsvalue_bt_data_array(self): def get_obserror_rf_data_array(self): """ - Returns the obserror reflectance factor data array. + Returns the obserror albedo data array. 
""" return self._obserror_rf_data_array diff --git a/src/goes/goes_converter.py b/src/goes/goes_converter.py index c496db5e3..c858a6572 100755 --- a/src/goes/goes_converter.py +++ b/src/goes/goes_converter.py @@ -17,24 +17,24 @@ # /MetaData/dateTime # /MetaData/latitude -> units # /MetaData/longitude -> units -# /MetaData/elevation_angle -> units -# /MetaData/scan_angle -> units -# /MetaData/scan_position -# /MetaData/sensor_azimuth_angle -> units -# /MetaData/sensor_view_angle -> units -# /MetaData/sensor_zenith_angle -> units -# /MetaData/solar_azimuth_angle -> units -# /MetaData/solar_zenith_angle -> units -# /MetaData/sensor_channel -# /ObsError/reflectance_factor or /ObsError/brightness_temperature -# /ObsValue/reflectance_factor or /ObsValue/brightness_temperature -# /ObsError/brightness_temperature -> units -# /ObsValue/brightness_temperature -> units -# /PreQC/reflectance_factor or /PreQC/brightness_temperature -# /PreQC/reflectance_factor -> flag_values or /PreQC/brightness_temperature -> flag_values -# /PreQC/reflectance_factor -> flag_meanings or /PreQC/brightness_temperature -> flag_meanings -# /nchans -# /nlocs +# /MetaData/sensorElevationAngle -> units +# /MetaData/sensorScanAngle -> units +# /MetaData/sensorScanPosition +# /MetaData/sensorAzimuthAngle -> units +# /MetaData/sensorViewAngle -> units +# /MetaData/sensorZenithAngle -> units +# /MetaData/solarAzimuthAngle -> units +# /MetaData/solarZenithAngle -> units +# /MetaData/sensorChannelNumber +# /ObsError/albedo or /ObsError/brightnessTemperature +# /ObsValue/albedo or /ObsValue/brightnessTemperature +# /ObsError/brightnessTemperature -> units +# /ObsValue/brightnessTemperature -> units +# /PreQC/albedo or /PreQC/brightnessTemperature +# /PreQC/albedo -> flag_values or /PreQC/brightnessTemperature -> flag_values +# /PreQC/albedo -> flag_meanings or /PreQC/brightnessTemperature -> flag_meanings +# /Channel +# /Location # import os @@ -56,9 +56,9 @@ def __init__(self, input_file_paths, 
latlon_file_path, output_file_path_rf, outp Constructor input_file_paths - A list of the absolute paths to all 16 ABI channels from the same hour latlon_file_path - The path to an existing GoesLatLon file or if it does not exist the path to write the file - output_file_path_rf - The path to write the IODAv2 reflectance factor data file + output_file_path_rf - The path to write the IODAv2 albedo (reflectance factor) data file output_file_path_bt - The path to write the IODAv2 brightness temperature data file - include_rf - Boolean value indicating whether to create the reflectance factor output data file: False (default) + include_rf - Boolean value indicating whether to create the albedo output data file: False (default) resolution - The resolution in km: 8 (default), 4, 8, 16, 32, 64 """ self._input_file_paths = input_file_paths @@ -92,7 +92,7 @@ def _check_arguments(self): def _initialize(self): """ - Create two local dictionaries contained the Goes class instances for reflectance factor (ABI channels 1-6) + Create two local dictionaries containing the Goes class instances for albedo (ABI channels 1-6) and brightness temperature (ABI channels 7-16). This function also assigns the file path for a template GOES file from ABI channel 7. 
""" @@ -161,7 +161,7 @@ def _create_metadata_latitude_variable(self, output_dataset): output_dataset - A netCDF Dataset object """ latitude_data_array = self._latlon_dataset['MetaData'].variables['latitude'][:].real - output_dataset.createVariable('/MetaData/latitude', 'f4', 'nlocs', fill_value=-999) + output_dataset.createVariable('/MetaData/latitude', 'f4', 'Location', fill_value=-999) output_dataset['/MetaData/latitude'][:] = latitude_data_array output_dataset['/MetaData/latitude'].setncattr('units', 'degrees_north') @@ -171,72 +171,72 @@ def _create_metadata_longitude_variable(self, output_dataset): output_dataset - A netCDF4 Dataset object """ longitude_data_array = self._latlon_dataset['MetaData'].variables['longitude'][:].real - output_dataset.createVariable('/MetaData/longitude', 'f4', 'nlocs', fill_value=-999) + output_dataset.createVariable('/MetaData/longitude', 'f4', 'Location', fill_value=-999) output_dataset['/MetaData/longitude'][:] = longitude_data_array output_dataset['/MetaData/longitude'].setncattr('units', 'degrees_east') def _create_metadata_scan_angle_variable(self, output_dataset): """ - Creates the /MetaData/scan_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorScanAngle variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ - scan_angle_data_array = self._latlon_dataset['MetaData'].variables['scan_angle'][:].real - output_dataset.createVariable('/MetaData/scan_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/scan_angle'][:] = scan_angle_data_array - output_dataset['/MetaData/scan_angle'].setncattr('units', 'degrees') + scan_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorScanAngle'][:].real + output_dataset.createVariable('/MetaData/sensorScanAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorScanAngle'][:] = scan_angle_data_array + output_dataset['/MetaData/sensorScanAngle'].setncattr('units', 'degrees') def _create_metadata_elevation_angle_variable(self, output_dataset): """ - Creates the /MetaData/elevation_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorElevationAngle variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ - elevation_angle_data_array = self._latlon_dataset['MetaData'].variables['elevation_angle'][:].real - output_dataset.createVariable('/MetaData/elevation_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/elevation_angle'][:] = elevation_angle_data_array - output_dataset['/MetaData/elevation_angle'].setncattr('units', 'degrees') + elevation_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorElevationAngle'][:].real + output_dataset.createVariable('/MetaData/sensorElevationAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorElevationAngle'][:] = elevation_angle_data_array + output_dataset['/MetaData/sensorElevationAngle'].setncattr('units', 'degrees') def _create_metadata_scan_position_variable(self, output_dataset): """ - Creates the /MetaData/scan_position variable in an output netCDF4 dataset. + Creates the /MetaData/sensorScanPosition variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ - scan_position_data_array = self._latlon_dataset['MetaData'].variables['scan_position'][:].real - output_dataset.createVariable('/MetaData/scan_position', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/scan_position'][:] = scan_position_data_array + scan_position_data_array = self._latlon_dataset['MetaData'].variables['sensorScanPosition'][:].real + output_dataset.createVariable('/MetaData/sensorScanPosition', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorScanPosition'][:] = scan_position_data_array def _create_metadata_sensor_zenith_angle_variable(self, output_dataset): """ - Creates the /MetaData/sensor_zenith_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorZenithAngle variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ - sensor_zenith_angle_data_array = self._latlon_dataset['MetaData'].variables['sensor_zenith_angle'][:].real - output_dataset.createVariable('/MetaData/sensor_zenith_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/sensor_zenith_angle'][:] = sensor_zenith_angle_data_array - output_dataset['/MetaData/sensor_zenith_angle'].setncattr('units', 'degrees') + sensor_zenith_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorZenithAngle'][:].real + output_dataset.createVariable('/MetaData/sensorZenithAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorZenithAngle'][:] = sensor_zenith_angle_data_array + output_dataset['/MetaData/sensorZenithAngle'].setncattr('units', 'degrees') def _create_metadata_sensor_azimuth_angle_variable(self, output_dataset): """ - Creates the /MetaData/sensor_azimuth_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorAzimuthAngle variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ - sensor_azimuth_angle_data_array = self._latlon_dataset['MetaData'].variables['sensor_azimuth_angle'][:].real - output_dataset.createVariable('/MetaData/sensor_azimuth_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/sensor_azimuth_angle'][:] = sensor_azimuth_angle_data_array - output_dataset['/MetaData/sensor_azimuth_angle'].setncattr('units', 'degrees') + sensor_azimuth_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorAzimuthAngle'][:].real + output_dataset.createVariable('/MetaData/sensorAzimuthAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorAzimuthAngle'][:] = sensor_azimuth_angle_data_array + output_dataset['/MetaData/sensorAzimuthAngle'].setncattr('units', 'degrees') def _create_metadata_sensor_view_angle_variable(self, output_dataset): """ - Creates the /MetaData/sensor_view_angle variable in an output netCDF4 dataset. + Creates the /MetaData/sensorViewAngle variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ - sensor_view_angle_data_array = self._latlon_dataset['MetaData'].variables['sensor_view_angle'][:].real - output_dataset.createVariable('/MetaData/sensor_view_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/sensor_view_angle'][:] = sensor_view_angle_data_array - output_dataset['/MetaData/sensor_view_angle'].setncattr('units', 'degrees') + sensor_view_angle_data_array = self._latlon_dataset['MetaData'].variables['sensorViewAngle'][:].real + output_dataset.createVariable('/MetaData/sensorViewAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/sensorViewAngle'][:] = sensor_view_angle_data_array + output_dataset['/MetaData/sensorViewAngle'].setncattr('units', 'degrees') def _create_metadata_solar_zenith_angle_variable(self, output_dataset): """ - Creates the /MetaData/solar_zenith_angle variable in an output netCDF4 dataset. 
+ Creates the /MetaData/solarZenithAngle variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ dataset = Dataset(self._input_file_path_template, 'r') @@ -269,13 +269,13 @@ def _create_metadata_solar_zenith_angle_variable(self, output_dataset): dataset.close() dataset_latlon.close() solar_zenith_angle_data_array = np.nan_to_num(solar_zenith_angle_data_array, nan=-999) - output_dataset.createVariable('/MetaData/solar_zenith_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/solar_zenith_angle'][:] = solar_zenith_angle_data_array - output_dataset['/MetaData/solar_zenith_angle'].setncattr('units', 'degrees') + output_dataset.createVariable('/MetaData/solarZenithAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/solarZenithAngle'][:] = solar_zenith_angle_data_array + output_dataset['/MetaData/solarZenithAngle'].setncattr('units', 'degrees') def _create_metadata_solar_azimuth_angle_variable(self, output_dataset): """ - Creates the /MetaData/solar_azimuth_angle variable in an output netCDF4 dataset. + Creates the /MetaData/solarAzimuthAngle variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ dataset = Dataset(self._input_file_path_template, 'r') @@ -299,20 +299,20 @@ def _create_metadata_solar_azimuth_angle_variable(self, output_dataset): dataset.close() dataset_latlon.close() solar_azimuth_angle_data_array = np.nan_to_num(solar_azimuth_angle_data_array, nan=-999) - output_dataset.createVariable('/MetaData/solar_azimuth_angle', 'f4', 'nlocs', fill_value=-999) - output_dataset['/MetaData/solar_azimuth_angle'][:] = solar_azimuth_angle_data_array - output_dataset['/MetaData/solar_azimuth_angle'].setncattr('units', 'degrees') + output_dataset.createVariable('/MetaData/solarAzimuthAngle', 'f4', 'Location', fill_value=-999) + output_dataset['/MetaData/solarAzimuthAngle'][:] = solar_azimuth_angle_data_array + output_dataset['/MetaData/solarAzimuthAngle'].setncattr('units', 'degrees') - def _create_nlocs_dimension(self, output_dataset): + def _create_location_dimension(self, output_dataset): """ - Creates the nlocs dimension in an output netCDF4 dataset. + Creates the Location dimension in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ - nlocs = self._latlon_dataset.dimensions['nlocs'].size - output_dataset.createDimension('nlocs', nlocs) - output_dataset.createVariable('nlocs', 'i4', 'nlocs') - output_dataset.variables['nlocs'].setncattr('suggested_chunk_dim', nlocs) - output_dataset.variables['nlocs'][:] = np.arange(1, nlocs + 1, 1, dtype='int32') + Location = self._latlon_dataset.dimensions['Location'].size + output_dataset.createDimension('Location', Location) + output_dataset.createVariable('Location', 'i4', 'Location') + output_dataset.variables['Location'].setncattr('suggested_chunk_dim', round(Location*0.01)) + output_dataset.variables['Location'][:] = np.arange(1, Location + 1, 1, dtype='int32') @staticmethod def _create_groups(output_dataset): @@ -326,28 +326,28 @@ def _create_groups(output_dataset): output_dataset.createGroup('PreQC') @staticmethod - def _create_nchans_dimension(output_dataset, nchans): + def _create_channel_dimension(output_dataset, Channel): """ - Creates the nchans dimension in an output netCDF4 dataset. + Creates the Channel dimension in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object - nchans - An integer indicating the number of nchans: 6 (for reflectance factor) + Channel - An integer indicating the number of channels: 6 (for albedo) or 10 (for brightness temperature) """ - output_dataset.createDimension('nchans', nchans) - output_dataset.createVariable('nchans', 'i4', 'nchans') - if nchans == 6: - output_dataset.variables['nchans'][:] = [1, 2, 3, 4, 5, 6] - elif nchans == 10: - output_dataset.variables['nchans'][:] = [7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + output_dataset.createDimension('Channel', Channel) + output_dataset.createVariable('Channel', 'i4', 'Channel') + if Channel == 6: + output_dataset.variables['Channel'][:] = [1, 2, 3, 4, 5, 6] + elif Channel == 10: + output_dataset.variables['Channel'][:] = [7, 8, 9, 10, 11, 12, 13, 14, 15, 16] @staticmethod def _create_metadata_sensor_channel_variable(output_dataset): """ - Creates the /MetaData/sensor_channel variable in an output netCDF4 dataset. + Creates the /MetaData/sensorChannelNumber variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ - output_dataset.createVariable('/MetaData/sensor_channel', 'i4', 'nchans') - output_dataset['/MetaData/sensor_channel'][:] = output_dataset['nchans'][:] + output_dataset.createVariable('/MetaData/sensorChannelNumber', 'i4', 'Channel') + output_dataset['/MetaData/sensorChannelNumber'][:] = output_dataset['Channel'][:] @staticmethod def _create_root_group_attributes(output_dataset, resolution, platform_id): @@ -361,16 +361,16 @@ def _create_root_group_attributes(output_dataset, resolution, platform_id): output_dataset.setncattr('platform_identifier', platform_id) @staticmethod - def _get_nlocs(dataset): + def _get_Location(dataset): """ - Returns the nlocs dimension size for the provided netCDF4 Dataset. - dataset - the dataset to extract the nlocs size + Returns the Location dimension size for the provided netCDF4 Dataset. 
+ dataset - the dataset to extract the Location size """ - return dataset.dimensions['nlocs'].size + return dataset.dimensions['Location'].size - def _create_preqc_reflectance_factor_variable(self, output_dataset): + def _create_preqc_albedo_variable(self, output_dataset): """ - Creates the /PreQC/reflectance_factor variable variable and associated attributes in an output netCDF4 dataset. + Creates the /PreQC/albedo variable and associated attributes in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -382,16 +382,16 @@ def _create_preqc_reflectance_factor_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/PreQC/reflectance_factor', 'f4', ('nlocs', 'nchans'), fill_value=-999) - output_dataset['/PreQC/reflectance_factor'][:] = data_array - output_dataset['/PreQC/reflectance_factor'].setncattr('flag_values', '0,1,2,3') - output_dataset['/PreQC/reflectance_factor'].setncattr('flag_meanings', - 'good_pixel_qf,conditionally_usable_pixel_qf,' - 'out_of_range_pixel_qf,no_value_pixel_qf') + output_dataset.createVariable('/PreQC/albedo', 'i4', ('Location', 'Channel'), fill_value=-999) + output_dataset['/PreQC/albedo'][:] = data_array + output_dataset['/PreQC/albedo'].setncattr('flag_values', '0,1,2,3') + output_dataset['/PreQC/albedo'].setncattr('flag_meanings', + 'good_pixel_qf,conditionally_usable_pixel_qf,' + 'out_of_range_pixel_qf,no_value_pixel_qf') def _create_preqc_brightness_temperature_variable(self, output_dataset): """ - Creates the /PreQC/brightness_temperature variable and associated attributes in an output netCDF4 dataset. + Creates the /PreQC/brightnessTemperature variable and associated attributes in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -403,17 +403,17 @@ def _create_preqc_brightness_temperature_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/PreQC/brightness_temperature', 'f4', ('nlocs', 'nchans'), + output_dataset.createVariable('/PreQC/brightnessTemperature', 'i4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/PreQC/brightness_temperature'][:] = data_array - output_dataset['/PreQC/brightness_temperature'].setncattr('flag_values', '0,1,2,3') - output_dataset['/PreQC/brightness_temperature'].setncattr('flag_meanings', - 'good_pixel_qf,conditionally_usable_pixel_qf,' - 'out_of_range_pixel_qf,no_value_pixel_qf') + output_dataset['/PreQC/brightnessTemperature'][:] = data_array + output_dataset['/PreQC/brightnessTemperature'].setncattr('flag_values', '0,1,2,3') + output_dataset['/PreQC/brightnessTemperature'].setncattr('flag_meanings', + 'good_pixel_qf,conditionally_usable_pixel_qf,' + 'out_of_range_pixel_qf,no_value_pixel_qf') - def _create_obsvalue_reflectance_factor_variable(self, output_dataset): + def _create_obsvalue_albedo_variable(self, output_dataset): """ - Creates the /ObsValue/reflectance_factor variable in an output netCDF4 dataset. + Creates the /ObsValue/albedo variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -425,13 +425,13 @@ def _create_obsvalue_reflectance_factor_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/ObsValue/reflectance_factor', 'f4', ('nlocs', 'nchans'), + output_dataset.createVariable('/ObsValue/albedo', 'f4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/ObsValue/reflectance_factor'][:] = data_array + output_dataset['/ObsValue/albedo'][:] = data_array def _create_obsvalue_brightness_temperature_variable(self, output_dataset): """ - Creates the /ObsValue/brightness_temperature variable in an output netCDF4 dataset. + Creates the /ObsValue/brightnessTemperature variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -443,14 +443,14 @@ def _create_obsvalue_brightness_temperature_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/ObsValue/brightness_temperature', 'f4', ('nlocs', 'nchans'), + output_dataset.createVariable('/ObsValue/brightnessTemperature', 'f4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/ObsValue/brightness_temperature'][:] = data_array - output_dataset['/ObsValue/brightness_temperature'].setncattr('units', 'K') + output_dataset['/ObsValue/brightnessTemperature'][:] = data_array + output_dataset['/ObsValue/brightnessTemperature'].setncattr('units', 'K') - def _create_obserror_reflectance_factor_variable(self, output_dataset): + def _create_obserror_albedo_variable(self, output_dataset): """ - Creates the /ObsError/reflectance_factor variable in an output netCDF4 dataset. + Creates the /ObsError/albedo variable in an output netCDF4 dataset. 
output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -462,13 +462,13 @@ def _create_obserror_reflectance_factor_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/ObsError/reflectance_factor', 'f4', ('nlocs', 'nchans'), + output_dataset.createVariable('/ObsError/albedo', 'f4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/ObsError/reflectance_factor'][:] = data_array + output_dataset['/ObsError/albedo'][:] = data_array def _create_obserror_brightness_temperature_variable(self, output_dataset): """ - Creates the /ObsError/brightness_temperature variable in an output netCDF4 dataset. + Creates the /ObsError/brightnessTemperature variable in an output netCDF4 dataset. output_dataset - A netCDF4 Dataset object """ temp_dict = {} @@ -480,10 +480,10 @@ def _create_obserror_brightness_temperature_variable(self, output_dataset): data_array = temp_dict[0] for i in range(1, counter): data_array = np.column_stack((data_array, temp_dict[i])) - output_dataset.createVariable('/ObsError/brightness_temperature', 'f4', ('nlocs', 'nchans'), + output_dataset.createVariable('/ObsError/brightnessTemperature', 'f4', ('Location', 'Channel'), fill_value=-999) - output_dataset['/ObsError/brightness_temperature'][:] = data_array - output_dataset['/ObsError/brightness_temperature'].setncattr('units', 'K') + output_dataset['/ObsError/brightnessTemperature'][:] = data_array + output_dataset['/ObsError/brightnessTemperature'].setncattr('units', 'K') def _create_metadata_time_variable(self, output_dataset): """ @@ -495,9 +495,9 @@ def _create_metadata_time_variable(self, output_dataset): time_offset = round((self._start_date - epoch).total_seconds()) # seconds since epoch. 
start_date = self._start_date.strftime('%Y-%m-%dT%H:%M:%SZ') output_dataset.setncattr("date_time", start_date) - datetime_array = np.full(self._get_nlocs(output_dataset), np.int64(time_offset)) + datetime_array = np.full(self._get_Location(output_dataset), np.int64(time_offset)) - output_dataset.createVariable('/MetaData/dateTime', 'i8', 'nlocs') + output_dataset.createVariable('/MetaData/dateTime', 'i8', 'Location') output_dataset['/MetaData/dateTime'][:] = datetime_array output_dataset['/MetaData/dateTime'].setncattr('units', iso8601_string) @@ -513,7 +513,7 @@ def _load_all_goes(self): def convert(self): """ - Creates the reflectance factor (if include_rf is True) and brightness temperature IODAv2 data files. + Creates the albedo (if include_rf is True) and brightness temperature IODAv2 data files. This functions also checks for the existence and nadir change of the GoesLatLon data file. """ self._initialize() @@ -538,8 +538,8 @@ def _convert_bt(self): dataset = Dataset(self._output_file_path_bt, 'w') GoesConverter._create_groups(dataset) GoesConverter._create_root_group_attributes(dataset, self._resolution, self._platform_id) - GoesConverter._create_nchans_dimension(dataset, 10) - self._create_nlocs_dimension(dataset) + GoesConverter._create_channel_dimension(dataset, 10) + self._create_location_dimension(dataset) GoesConverter._create_metadata_sensor_channel_variable(dataset) self._create_metadata_latitude_variable(dataset) self._create_metadata_longitude_variable(dataset) @@ -559,13 +559,13 @@ def _convert_bt(self): def _convert_rf(self): """ - Creates the reflectance factor IODAv2 data file. + Creates the albedo IODAv2 data file. 
""" dataset = Dataset(self._output_file_path_rf, 'w') GoesConverter._create_groups(dataset) GoesConverter._create_root_group_attributes(dataset, self._resolution, self._platform_id) - GoesConverter._create_nchans_dimension(dataset, 6) - self._create_nlocs_dimension(dataset) + GoesConverter._create_channel_dimension(dataset, 6) + self._create_location_dimension(dataset) GoesConverter._create_metadata_sensor_channel_variable(dataset) self._create_metadata_latitude_variable(dataset) self._create_metadata_longitude_variable(dataset) @@ -578,7 +578,7 @@ def _convert_rf(self): self._create_metadata_solar_zenith_angle_variable(dataset) self._create_metadata_solar_azimuth_angle_variable(dataset) self._create_metadata_time_variable(dataset) - self._create_obsvalue_reflectance_factor_variable(dataset) - self._create_obserror_reflectance_factor_variable(dataset) - self._create_preqc_reflectance_factor_variable(dataset) + self._create_obsvalue_albedo_variable(dataset) + self._create_obserror_albedo_variable(dataset) + self._create_preqc_albedo_variable(dataset) dataset.close() diff --git a/src/goes/goes_latlon.py b/src/goes/goes_latlon.py index 6309eecdb..781055c02 100644 --- a/src/goes/goes_latlon.py +++ b/src/goes/goes_latlon.py @@ -13,17 +13,17 @@ # # /GROUP/VARIABLE -> ATTRIBUTE # -# /MetaData/elevation_angle -# /MetaData/scan_angle -# /MetaData/scan_position -# /MetaData/sensor_azimuth_angle -> units -# /MetaData/sensor_view_angle -> units -# /MetaData/sensor_zenith_angle -> units +# /MetaData/sensorElevationAngle +# /MetaData/sensorScanAngle +# /MetaData/sensorScanPosition +# /MetaData/sensorAzimuthAngle +# /MetaData/sensorViewAngle +# /MetaData/sensorZenithAngle # /MetaData/latitude # /MetaData/latitude -> lat_nadir # /MetaData/longitude # /MetaData/longitude -> lon_nadir -# /nlocs +# /Location # from netCDF4 import Dataset from numpy import ma @@ -192,31 +192,31 @@ def create(self): sensor_zenith_angle, sensor_azimuth_angle, sensor_view_angle = \ 
self._calc_sensor_zenith_azimuth_view_angles(latitude, longitude) latlon_dataset = Dataset(self._latlon_file_path, 'w') - nlocs = len(latitude) - latlon_dataset.createDimension('nlocs', nlocs) - latlon_dataset.createVariable('nlocs', 'i4', ('nlocs',)) - latlon_dataset.variables['nlocs'][:] = np.arange(1, nlocs + 1, 1, dtype='int32') + Location = len(latitude) + latlon_dataset.createDimension('Location', Location) + latlon_dataset.createVariable('Location', 'i4', ('Location',)) + latlon_dataset.variables['Location'][:] = np.arange(1, Location + 1, 1, dtype='int32') nonexistent_indices = len(self._lat_fill_value_index_array[0]) latlon_dataset.createDimension('nonexistent_indices', nonexistent_indices) latlon_dataset.createVariable('nonexistent_indices', 'i4', ('nonexistent_indices',)) latlon_dataset.variables['nonexistent_indices'][:] = np.array(self._lat_fill_value_index_array[0]) latlon_dataset.createGroup('MetaData') - latlon_dataset.createVariable('/MetaData/latitude', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/longitude', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/scan_angle', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/scan_position', 'i4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/elevation_angle', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/sensor_zenith_angle', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/sensor_azimuth_angle', 'f4', 'nlocs', fill_value=-999) - latlon_dataset.createVariable('/MetaData/sensor_view_angle', 'f4', 'nlocs', fill_value=-999) + latlon_dataset.createVariable('/MetaData/latitude', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/longitude', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorScanAngle', 'f4', 'Location', fill_value=-999) + 
latlon_dataset.createVariable('/MetaData/sensorScanPosition', 'i4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorElevationAngle', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorZenithAngle', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorAzimuthAngle', 'f4', 'Location', fill_value=-999) + latlon_dataset.createVariable('/MetaData/sensorViewAngle', 'f4', 'Location', fill_value=-999) latlon_dataset['/MetaData/latitude'][:] = latitude latlon_dataset['/MetaData/longitude'][:] = longitude - latlon_dataset['/MetaData/scan_angle'][:] = scan_angle - latlon_dataset['/MetaData/scan_position'][:] = scan_position - latlon_dataset['/MetaData/elevation_angle'][:] = elevation_angle - latlon_dataset['/MetaData/sensor_zenith_angle'][:] = sensor_zenith_angle - latlon_dataset['/MetaData/sensor_azimuth_angle'][:] = sensor_azimuth_angle - latlon_dataset['/MetaData/sensor_view_angle'][:] = sensor_view_angle + latlon_dataset['/MetaData/sensorScanAngle'][:] = scan_angle + latlon_dataset['/MetaData/sensorScanPosition'][:] = scan_position + latlon_dataset['/MetaData/sensorElevationAngle'][:] = elevation_angle + latlon_dataset['/MetaData/sensorZenithAngle'][:] = sensor_zenith_angle + latlon_dataset['/MetaData/sensorAzimuthAngle'][:] = sensor_azimuth_angle + latlon_dataset['/MetaData/sensorViewAngle'][:] = sensor_view_angle lat_nadir, lon_nadir = self._get_nadir_attributes() latlon_dataset['/MetaData/latitude'].setncattr('lat_nadir', lat_nadir) latlon_dataset['/MetaData/longitude'].setncattr('lon_nadir', lon_nadir) diff --git a/src/land/smap9km_ssm2ioda.py b/src/land/smap9km_ssm2ioda.py index b6b5ab41f..f89fd524a 100644 --- a/src/land/smap9km_ssm2ioda.py +++ b/src/land/smap9km_ssm2ioda.py @@ -22,11 +22,13 @@ from collections import defaultdict, OrderedDict from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" + locationKeyList = [ ("latitude", "float"), ("longitude", "float"), 
("depthBelowSoilSurface", "float"), - ("datetime", "string") + ("dateTime", "long") ] obsvars = { @@ -43,6 +45,10 @@ 'soilMoistureVolumetric': ['Location'], } +# Usual reference time for these data is 12UTC 1Jan2000 +iso8601_string = 'seconds since 2000-01-01T12:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class smap(object): def __init__(self, args): @@ -92,7 +98,7 @@ def _read(self): refsec = ncd.groups['Soil_Moisture_Retrieval_Data'].variables['tb_time_seconds'][:].ravel() deps = np.full_like(vals, self.assumedSoilDepth) - times = np.empty_like(vals, dtype=object) + times = np.empty_like(vals, dtype=np.int64) if self.mask: with np.errstate(invalid='ignore'): @@ -110,8 +116,6 @@ def _read(self): refsec = refsec[mask] times = times[mask] - # get datetime and reference time 12UTC 1Jan2000 - base_date = datetime(2000, 1, 1, 12, 0) vals = vals.astype('float32') lats = lats.astype('float32') lons = lons.astype('float32') @@ -124,11 +128,11 @@ def _read(self): ecoli = ecoli.astype('int32') for i in range(len(lons)): - dt = base_date + timedelta(seconds=int(refsec[i])) - times[i] = dt.strftime("%Y-%m-%dT%H:%M:%SZ") + times[i] = int(refsec[i]) errs[i] = 0.04 # add metadata variables self.outdata[('dateTime', 'MetaData')] = times + self.varAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string self.outdata[('latitude', 'MetaData')] = lats self.outdata[('longitude', 'MetaData')] = lons self.varAttrs[('latitude', 'MetaData')]['units'] = 'degree_north' @@ -136,7 +140,6 @@ def _read(self): self.outdata[('depthBelowSoilSurface', 'MetaData')] = deps self.varAttrs[('depthBelowSoilSurface', 'MetaData')]['units'] = 'm' self.outdata[('surfaceFlag', 'MetaData')] = sflg - self.varAttrs[('surfaceFlag', 'MetaData')]['units'] = 'unitless' self.outdata[('vegetationOpacity', 'MetaData')] = vegop self.outdata[('easeRowIndex', 'MetaData')] = erowi self.varAttrs[('easeRowIndex', 'MetaData')]['units'] = '1' diff --git a/src/land/smap_ssm2ioda.py 
b/src/land/smap_ssm2ioda.py index b477f23f0..d8d6447ae 100644 --- a/src/land/smap_ssm2ioda.py +++ b/src/land/smap_ssm2ioda.py @@ -22,11 +22,13 @@ from collections import defaultdict, OrderedDict from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" + locationKeyList = [ ("latitude", "float"), ("longitude", "float"), ("depthBelowSoilSurface", "float"), - ("datetime", "string") + ("dateTime", "long") ] obsvars = { @@ -43,6 +45,9 @@ 'soilMoistureVolumetric': ['Location'], } +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class smap(object): def __init__(self, args): @@ -87,7 +92,7 @@ def _read(self): qflg = ncd.groups['Soil_Moisture_Retrieval_Data'].variables['retrieval_qual_flag'][:].ravel() deps = np.full_like(vals, self.assumedSoilDepth) - times = np.empty_like(vals, dtype=object) + times = np.empty_like(vals, dtype=np.int64) if self.mask: with np.errstate(invalid='ignore'): @@ -104,7 +109,7 @@ def _read(self): str_split = self.filename.split("_") str_datetime = str_split[7] my_datetime = datetime.strptime(str_datetime, "%Y%m%dT%H%M%S") - base_datetime = my_datetime.strftime('%Y-%m-%dT%H:%M:%SZ') + time_offset = round((my_datetime - epoch).total_seconds()) vals = vals.astype('float32') lats = lats.astype('float32') lons = lons.astype('float32') @@ -124,10 +129,11 @@ def _read(self): else: qflg[i] = 1 - times[i] = base_datetime + times[i] = time_offset # add metadata variables self.outdata[('dateTime', 'MetaData')] = times + self.varAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string self.outdata[('latitude', 'MetaData')] = lats self.outdata[('longitude', 'MetaData')] = lons self.outdata[('depthBelowSoilSurface', 'MetaData')] = deps diff --git a/src/lib-python/orddicts.py b/src/lib-python/orddicts.py index 93461226f..5ac26b9fe 100644 --- a/src/lib-python/orddicts.py +++ b/src/lib-python/orddicts.py @@ -1,4 +1,8 @@ -from collections import OrderedDict, Callable +try: + from 
collections.abc import Callable +except ImportError: + from collections import Callable +from collections import OrderedDict class DefaultOrderedDict(OrderedDict): diff --git a/src/marine/amsr2_icec2ioda.py b/src/marine/amsr2_icec2ioda.py index ee1494f10..55ed56833 100755 --- a/src/marine/amsr2_icec2ioda.py +++ b/src/marine/amsr2_icec2ioda.py @@ -7,7 +7,7 @@ # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. # -import sys +import os, sys import argparse import numpy as np from datetime import datetime, timedelta @@ -22,17 +22,21 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" -vName = "sea_ice_area_fraction" +vName = "seaIceFraction" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = {} +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class iceconc(object): def __init__(self, filenames, date): @@ -71,7 +75,7 @@ def _read(self): lon = lon[mask] lat = lat[mask] icec = icec[mask] - icec_qc = icec_qc[mask] + icec_qc = icec_qc[mask].astype(np.int32) for i in range(len(lon)): # get date from filename @@ -80,7 +84,8 @@ def _read(self): date1 = datetime.strptime(datestart, "%Y-%m-%dT%H:%M:%S.%fZ") date2 = datetime.strptime(dateend, "%Y-%m-%dT%H:%M:%S.%fZ") avg = date1 + (date2 - date1) * 0.5 - locKey = lat[i], lon[i], avg.strftime("%Y-%m-%dT%H:%M:%SZ") + time_offset = round((avg - epoch).total_seconds()) + locKey = lat[i], lon[i], time_offset self.data[locKey][valKey] = icec[i] * 0.01 self.VarAttrs[locKey][valKey]['_FillValue'] = icec_FillValue self.VarAttrs[locKey][valKey]['units'] = icec_units @@ -117,7 +122,7 @@ def main(): fdate = datetime.strptime(args.date, '%Y%m%d%H') # VarDims = { - 'sea_ice_area_fraction': ['nlocs'], + 'seaIceFraction': ['Location'], } # Read in the Ice concentration @@ -126,9 +131,10 @@ def main(): # write them out ObsVars, nlocs = 
iconv.ExtractObsData(icec.data, locationKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': nlocs} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) + icec.VarAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string writer.BuildIoda(ObsVars, VarDims, icec.VarAttrs, GlobalAttrs) diff --git a/src/marine/avhrr_radiance2ioda.py b/src/marine/avhrr_radiance2ioda.py index a1f41f56c..a3ba60183 100755 --- a/src/marine/avhrr_radiance2ioda.py +++ b/src/marine/avhrr_radiance2ioda.py @@ -25,12 +25,12 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict -output_var_names = ["brightness_temperature"] +output_var_names = ["brightnessTemperature"] locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = {} @@ -43,6 +43,8 @@ chan_number = {3, 4, 5} +time_units = '' # Will fill in below. + def read_input(input_args): """ @@ -64,9 +66,14 @@ def read_input(input_args): print("Reading ", input_file) ncd = nc.Dataset(input_file, 'r') + global time_units # get the base time (should only have 1 or 2 time slots) time_base = ncd.variables['time'][:] - basetime = dateutil.parser.parse(ncd.variables['time'].units[-20:]) + time_units = ncd.variables['time'].units[-19:] + 'Z' + s = list(time_units) + s[10] = "T" + time_units = 'seconds since ' + "".join(s) + basetime = dateutil.parser.parse(ncd.variables['time'].units[-19:]) # get some of the global attributes that we are interested in @@ -138,9 +145,8 @@ def read_input(input_args): # create a string version of the date for each observation dates = [] for i in range(len(lons)): - obs_date = basetime + \ - timedelta(seconds=float(time[i]+3600*data_in['scan_line_time'][i])) # check later - dates.append(obs_date.strftime("%Y-%m-%dT%H:%M:%SZ")) + dates.append(round(3600*data_in['scan_line_time'][i])) + dates = np.array(dates, dtype=np.int64) # output values nchans = len(chan_number) @@ -162,19 +168,18 @@ def 
read_input(input_args): # allocate space for output depending on which variables are to be saved obs_data = {} - obs_data[('datetime', 'MetaData')] = np.empty(len(dates), dtype=object) - obs_data[('datetime', 'MetaData')][:] = dates + obs_data[('dateTime', 'MetaData')] = dates.astype(np.int64) obs_data[('latitude', 'MetaData')] = lats obs_data[('longitude', 'MetaData')] = lons - obs_data[('record_number', 'MetaData')] = data_in['scan_line_number'] - obs_data[('height_above_mean_sea_level', 'MetaData')] = 840*np.ones((obs_dim)) - obs_data[('sensor_azimuth_angle', 'MetaData')] = data_in['sensor_azimuth_angle'] - obs_data[('sensor_zenith_angle', 'MetaData')] = data_in['sensor_zenith_angle'] - obs_data[('solar_zenith_angle', 'MetaData')] = data_in['solar_zenith_angle'] - obs_data[('solar_azimuth_angle', 'MetaData')] = data_in['solar_azimuth_angle'] - obs_data[('scan_position', 'MetaData')] = data_in['scan_line_number'] - obs_data[output_var_names[0], global_config['oval_name']] = val_tb - obs_data[output_var_names[0], global_config['oerr_name']] = err + obs_data[('sequenceNumber', 'MetaData')] = data_in['scan_line_number'] + obs_data[('height', 'MetaData')] = 840*np.ones((obs_dim)).astype('float32') + obs_data[('sensorAzimuthAngle', 'MetaData')] = data_in['sensor_azimuth_angle'] + obs_data[('sensorZenithAngle', 'MetaData')] = data_in['sensor_zenith_angle'] + obs_data[('solarZenithAngle', 'MetaData')] = data_in['solar_zenith_angle'] + obs_data[('solarAzimuthAngle', 'MetaData')] = data_in['solar_azimuth_angle'] + obs_data[('scanPosition', 'MetaData')] = data_in['scan_line_number'] + obs_data[output_var_names[0], global_config['oval_name']] = val_tb.astype('float32') + obs_data[output_var_names[0], global_config['oerr_name']] = err.astype('float32') obs_data[output_var_names[0], global_config['opqc_name']] = qc.astype('int32') return (obs_data, GlobalAttrs) @@ -248,7 +253,7 @@ def main(): # prepare global attributes we want to output in the file, # in addition to the ones 
already loaded in from the input file - GlobalAttrs['date_time_string'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") GlobalAttrs['thinning'] = args.thin GlobalAttrs['converter'] = os.path.basename(__file__) @@ -259,24 +264,24 @@ def main(): # pass parameters to the IODA writer # (needed because we are bypassing ExtractObsData within BuildNetcdf) VarDims = { - 'brightness_temperature': ['nlocs', 'nchans'] + 'brightnessTemperature': ['Location', 'Channel'] } nchans = len(chan_number) nlocs = len(obs_data[('longitude', 'MetaData')]) DimDict = { - 'nlocs': nlocs, - 'nchans': list(chan_number) + 'Location': nlocs, + 'Channel': list(chan_number) } writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) - VarAttrs[('brightness_temperature', 'ObsValue')]['units'] = 'Kelvin' - VarAttrs[('brightness_temperature', 'ObsError')]['units'] = 'Kelvin' - VarAttrs[('brightness_temperature', 'PreQC')]['units'] = 'unitless' - VarAttrs[('brightness_temperature', 'ObsValue')]['_FillValue'] = 999 - VarAttrs[('brightness_temperature', 'ObsError')]['_FillValue'] = 999 - VarAttrs[('brightness_temperature', 'PreQC')]['_FillValue'] = 999 + VarAttrs[('dateTime', 'MetaData')]['units'] = time_units + VarAttrs[('brightnessTemperature', 'ObsValue')]['units'] = 'K' + VarAttrs[('brightnessTemperature', 'ObsError')]['units'] = 'K' + VarAttrs[('brightnessTemperature', 'ObsValue')]['_FillValue'] = 999 + VarAttrs[('brightnessTemperature', 'ObsError')]['_FillValue'] = 999 + VarAttrs[('brightnessTemperature', 'PreQC')]['_FillValue'] = 999 writer.BuildIoda(obs_data, VarDims, VarAttrs, GlobalAttrs) diff --git a/src/marine/copernicus_l3swh2ioda.py b/src/marine/copernicus_l3swh2ioda.py index 3e9732a4e..f35043ef6 100755 --- a/src/marine/copernicus_l3swh2ioda.py +++ b/src/marine/copernicus_l3swh2ioda.py @@ -28,23 +28,24 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + 
("dateTime", "long"), ] +time_units = 'seconds since 2000-01-01T00:00:00Z' + obsvars = { 'swh': 'swh', } AttrData = { 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), } DimDict = { } VarDims = { - 'swh': ['nlocs'], + 'swh': ['Location'], } @@ -62,9 +63,9 @@ def __init__(self, filename, factor): ncd.close() # set time stamp for all obs - self.datetime = np.empty_like(self.swh, dtype=object) + self.datetime = np.empty_like(self.swh, dtype=np.int64) for t in range(len(self.datetime)): - self.datetime[t] = datetime.utcfromtimestamp(self.time[t]+datetime(2000, 1, 1, 0, tzinfo=pytz.UTC).timestamp()).strftime("%Y-%m-%dT%H:%M:%SZ") + self.datetime[t] = self.time[t] # Remove observations out of sane bounds qci = np.where(self.swh > 0.0) @@ -89,7 +90,7 @@ def __init__(self, filename, factor): # Open input file and read relevant info def _read(self): # set up variable names for IODA - iodavar = 'sea_surface_wave_significant_height' + iodavar = 'waveHeightSignificant' self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() @@ -100,15 +101,15 @@ def _read(self): swh = copernicus(self.filename, self.factor) # map copernicus to ioda data structure - self.outdata[('datetime', 'MetaData')] = swh.datetime + self.outdata[('dateTime', 'MetaData')] = swh.datetime + self.var_mdata[('dateTime', 'MetaData')]['units'] = time_units self.outdata[('latitude', 'MetaData')] = swh.lats self.outdata[('longitude', 'MetaData')] = swh.lons self.outdata[self.varDict[iodavar]['valKey']] = swh.swh self.outdata[self.varDict[iodavar]['errKey']] = swh.err self.outdata[self.varDict[iodavar]['qcKey']] = np.zeros(swh.nlocs, dtype=np.int32) - DimDict['nlocs'] = swh.nlocs - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = swh.nlocs def main(): diff --git a/src/marine/copernicus_l4adt2ioda.py b/src/marine/copernicus_l4adt2ioda.py index 
30d0cb7e7..709f3d89c 100755 --- a/src/marine/copernicus_l4adt2ioda.py +++ b/src/marine/copernicus_l4adt2ioda.py @@ -26,7 +26,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] obsvars = { @@ -35,16 +35,18 @@ AttrData = { 'converter': os.path.basename(__file__), - 'nvars': np.int32(len(obsvars)), } DimDict = { } VarDims = { - 'adt': ['nlocs'], + 'adt': ['Location'], } +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class copernicus(object): def __init__(self, filename): @@ -58,7 +60,7 @@ def __init__(self, filename): self.lats = self.lats.ravel() self.adt = np.squeeze(ncd.variables['adt'][:]).ravel() self.err = np.squeeze(ncd.variables['err_sla'][:]).ravel() - self.date = ncd.getncattr('time_coverage_start') + self.time_coverage_start = ncd.getncattr('time_coverage_start') ncd.close() # Remove observations out of sane bounds @@ -68,9 +70,6 @@ def __init__(self, filename): self.lats = self.lats[qci] self.adt = self.adt[qci].astype(np.single) self.err = self.err[qci].astype(np.single) - # Same time stamp for all obs within 1 file - self.datetime = np.empty_like(self.adt, dtype=object) - self.datetime[:] = self.date class copernicus_l4adt2ioda(object): @@ -87,7 +86,7 @@ def __init__(self, filename, datetime=None): # Open input file and read relevant info def _read(self): # set up variable names for IODA - iodavar = 'absolute_dynamic_topography' + iodavar = 'absoluteDynamicTopography' self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName() self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName() self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName() @@ -97,20 +96,33 @@ def _read(self): # read input filename adt = copernicus(self.filename) # put the time at 00 between start and end coverage time - if self.datetime is not None: - ymdh = datetime.strptime(self.datetime, "%Y%m%d") - ymdhm = ymdh.strftime("%Y-%m-%dT%H:%M:%SZ") - 
adt.datetime[:] = ymdhm + if adt.time_coverage_start is not None: + this_datetime = datetime.strptime(adt.time_coverage_start[:-1], "%Y-%m-%dT%H:%M:%S") + time_offset = round((this_datetime - epoch).total_seconds()) + else: + try: + this_datetime = datetime.strptime(adt.datetime, "%Y-%m-%dT%H:%M:%SZ") + time_offset = round((this_datetime - epoch).total_seconds()) + except Exception: + print(f"ABORT, failure to find timestamp; check format," + " it should be like 2014-07-29T12:00:00Z whereas" + f" you entered {self.datetime}") + sys.exit() + + # Same time stamp for all obs within 1 file + adt.datetime = np.empty_like(adt.adt, dtype=np.int64) + adt.datetime[:] = time_offset + # map copernicus to ioda data structure - self.outdata[('datetime', 'MetaData')] = adt.datetime + self.outdata[('dateTime', 'MetaData')] = adt.datetime + self.var_mdata[('dateTime', 'MetaData')]['units'] = iso8601_string self.outdata[('latitude', 'MetaData')] = adt.lats self.outdata[('longitude', 'MetaData')] = adt.lons self.outdata[self.varDict[iodavar]['valKey']] = adt.adt self.outdata[self.varDict[iodavar]['errKey']] = adt.err self.outdata[self.varDict[iodavar]['qcKey']] = np.zeros(adt.nlocs, dtype=np.int32) - DimDict['nlocs'] = adt.nlocs - AttrData['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = adt.nlocs def main(): diff --git a/src/marine/nsidc_l4cdr_ice2ioda.py b/src/marine/nsidc_l4cdr_ice2ioda.py index 82ac05c39..9c854fffd 100755 --- a/src/marine/nsidc_l4cdr_ice2ioda.py +++ b/src/marine/nsidc_l4cdr_ice2ioda.py @@ -8,7 +8,7 @@ # from __future__ import print_function -import sys +import os, sys import argparse import netCDF4 as nc from datetime import datetime, timedelta @@ -24,6 +24,9 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class Observation(object): @@ -41,6 +44,7 @@ def _read(self): datein = ncd.variables['time'][:] + 0.5
reftime = dateutil.parser.parse(ncd.variables['time'].units[-20:]) obs_date = reftime + timedelta(days=float(datein)) + time_offset = round((obs_date - epoch).total_seconds()) data_in = {} input_vars = ( @@ -90,7 +94,7 @@ def _read(self): qc = qc[mask_thin] for i in range(len(lons)): - locKey = lats[i], lons[i], obs_date.strftime("%Y-%m-%dT%H:%M:%SZ") + locKey = lats[i], lons[i], time_offset self.data[locKey][valKey] = vals[i] self.VarAttrs[locKey][valKey]['_FillValue'] = vals_FillValue self.VarAttrs[locKey][valKey]['units'] = vals_units @@ -100,20 +104,20 @@ def _read(self): self.VarAttrs[locKey][errKey]['_FillValue'] = errs_FillValue self.VarAttrs[locKey][errKey]['units'] = errs_units self.data[locKey][qcKey] = 1 - self.VarAttrs[locKey][qcKey]['units'] = 'unitless' - -vName = "sea_ice_area_fraction" +vName = "seaIceFraction" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = { - 'odb_version': 1, + 'converter': os.path.basename(__file__), + 'ioda_version': 2, + 'odb_version': 1 } @@ -148,16 +152,17 @@ def main(): args = parser.parse_args() fdate = datetime.strptime(args.date, '%Y%m%d%H') VarDims = { - vName: ['nlocs'], + vName: ['Location'], } # Read in ice = Observation(args.input, args.thin, fdate) # write them out ObsVars, nlocs = iconv.ExtractObsData(ice.data, locationKeyList) - DimDict = {'nlocs': nlocs} + DimDict = {'Location': nlocs} writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) + ice.VarAttrs['dateTime', 'MetaData']['units'] = iso8601_string writer.BuildIoda(ObsVars, VarDims, ice.VarAttrs, GlobalAttrs) diff --git a/src/marine/pace_oc2ioda.py b/src/marine/pace_oc2ioda.py index 6262c5f63..2005dc462 100755 --- a/src/marine/pace_oc2ioda.py +++ b/src/marine/pace_oc2ioda.py @@ -26,8 +26,9 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict -output_var_names = [ - "mass_concentration_of_chlorophyll_in_sea_water"] +os.environ["TZ"] = "UTC" + 
+output_var_names = ["chlorophyllMassConcentration"] DimDict = {} @@ -38,7 +39,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string"), + ("dateTime", "long"), ] @@ -115,10 +116,9 @@ def read_input(input_args): data_in[v] = data_in[v][mask] # create a string version of the date for each observation - dates = [] + dates = np.empty(len(lons), dtype=np.int64) for i in range(len(lons)): - obs_date = basetime + timedelta(seconds=float(data_in['time'][i])) - dates.append(obs_date.strftime("%Y-%m-%dT%H:%M:%SZ")) + dates[i] = round(data_in['time'][i]) # allocate space for output depending on which variables are to be saved obs_dim = (len(lons)) @@ -129,8 +129,7 @@ def read_input(input_args): obs_data[(output_var_names[0], global_config['opqc_name'])] = np.zeros(obs_dim) # Add the metadata - obs_data[('datetime', 'MetaData')] = np.empty(len(dates), dtype=object) - obs_data[('datetime', 'MetaData')][:] = dates + obs_data[('dateTime', 'MetaData')] = dates obs_data[('latitude', 'MetaData')] = lats obs_data[('longitude', 'MetaData')] = lons @@ -139,7 +138,7 @@ def read_input(input_args): obs_data[output_var_names[0], global_config['oerr_name']] = data_in['chlor_a']*0.0 obs_data[output_var_names[0], global_config['opqc_name']] = data_in['l2_flags'] - return (obs_data, GlobalAttrs) + return (obs_data, basetime, GlobalAttrs) def main(): @@ -202,7 +201,7 @@ def main(): obs = pool.map(read_input, pool_inputs) # concatenate the data from the files - obs_data, GlobalAttrs = obs[0] + obs_data, basetime, GlobalAttrs = obs[0] for i in range(1, len(obs)): obs_data.update(obs[i][0]) # Get the nlocs @@ -210,19 +209,18 @@ def main(): # prepare global attributes we want to output in the file, # in addition to the ones already loaded in from the input file - GlobalAttrs['date_time_string'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") GlobalAttrs['thinning'] = args.thin 
GlobalAttrs['converter'] = os.path.basename(__file__) - DimDict['nlocs'] = nlocs - GlobalAttrs['nlocs'] = np.int32(DimDict['nlocs']) + DimDict['Location'] = nlocs - VarAttrs[output_var_names[0], global_config['oval_name']]['units'] = 'mg ^m-3' - VarAttrs[output_var_names[0], global_config['oerr_name']]['units'] = 'mg ^m-3' - VarAttrs[output_var_names[0], global_config['opqc_name']]['units'] = 'unitless' + VarAttrs['dateTime', 'MetaData']['units'] = 'seconds since ' + basetime.strftime("%Y-%m-%dT%H:%M:%SZ") + VarAttrs[output_var_names[0], global_config['oval_name']]['units'] = 'mg m-3' + VarAttrs[output_var_names[0], global_config['oerr_name']]['units'] = 'mg m-3' VarAttrs[output_var_names[0], global_config['oval_name']]['_FillValue'] = -32767. VarAttrs[output_var_names[0], global_config['oerr_name']]['_FillValue'] = -32767. VarAttrs[output_var_names[0], global_config['opqc_name']]['_FillValue'] = -32767 - VarDims["mass_concentration_of_chlorophyll_in_sea_water"] = ['nlocs'] + VarDims["chlorophyllMassConcentration"] = ['Location'] # setup the IODA writer writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) diff --git a/src/marine/pace_radiance2ioda.py b/src/marine/pace_radiance2ioda.py index 0d81cb2f1..6de12f3a0 100755 --- a/src/marine/pace_radiance2ioda.py +++ b/src/marine/pace_radiance2ioda.py @@ -25,16 +25,21 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" +# The setting of basetime is temporary as it will get modified in the data reading routine. 
+basetime = datetime.fromisoformat('1970-01-01T00:00:00') + output_var_names = ["radiance"] locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("datetime", "string") + ("dateTime", "long") ] GlobalAttrs = { 'odb_version': 1, + 'converter': os.path.basename(__file__) } VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) @@ -43,7 +48,7 @@ VarDims = {} -chan_number = range(1, 250) # we havew 120 Blue band 120 Red band and 9 SWRI +chan_number = range(1, 250) # we have 120 Blue band 120 Red band and 9 SWRI def read_input(input_args): @@ -66,6 +71,7 @@ def read_input(input_args): ncd = nc.Dataset(input_file, 'r') # get the base time (should only have 1 or 2 time slots) time_base = ncd.groups['scan_line_attributes'].variables['time'][:] + global basetime # We wish to overrride the initial setting of basetime. basetime = dateutil.parser.parse(ncd.groups['scan_line_attributes'].variables['time'].units[-20:]) # Determine the lat/lon grid. @@ -123,10 +129,9 @@ def read_input(input_args): lats = lats[mask] # create a string version of the date for each observation - dates = [] + dates = np.empty(len(lons), dtype=np.int64) for i in range(len(lons)): - obs_date = basetime + timedelta(seconds=float(time[i])) - dates.append(obs_date.strftime("%Y-%m-%dT%H:%M:%SZ")) + dates[i] = round(time[i]) # output values nchans = len(chan_number) @@ -147,18 +152,16 @@ def read_input(input_args): # allocate space for output depending on which variables are to be saved obs_data = {} - obs_data[('datetime', 'MetaData')] = np.empty(len(dates), dtype=object) - obs_data[('datetime', 'MetaData')][:] = dates + obs_data[('dateTime', 'MetaData')] = dates obs_data[('latitude', 'MetaData')] = lats obs_data[('longitude', 'MetaData')] = lons - obs_data[('time', 'MetaData')] = time.astype('float32') - obs_data[('height_above_mean_sea_level', 'MetaData')] = np.zeros((obs_dim), dtype=np.float32) - obs_data[('sensor_azimuth_angle', 'MetaData')] = data_in['sensor_azimuth'] - 
obs_data[('sensor_zenith_angle', 'MetaData')] = data_in['sensor_zenith'] - obs_data[('sensor_view_angle', 'MetaData')] = data_in['sensor_zenith'] - obs_data[('solar_zenith_angle', 'MetaData')] = data_in['solar_zenith'] - obs_data[('solar_azimuth_angle', 'MetaData')] = data_in['solar_azimuth'] - obs_data[('sensor_band_central_radiation_wavenumber', 'VarMetaData')] = wavelength.astype('float32') + obs_data[('height', 'MetaData')] = np.zeros((obs_dim), dtype=np.float32) + obs_data[('sensorAzimuthAngle', 'MetaData')] = data_in['sensor_azimuth'].astype('float32') + obs_data[('sensorZenithAngle', 'MetaData')] = data_in['sensor_zenith'].astype('float32') + obs_data[('sensorViewAngle', 'MetaData')] = data_in['sensor_zenith'].astype('float32') + obs_data[('solarZenithAngle', 'MetaData')] = data_in['solar_zenith'].astype('float32') + obs_data[('solarAzimuthAngle', 'MetaData')] = data_in['solar_azimuth'].astype('float32') + obs_data[('sensorCentralWavenumber', 'MetaData')] = wavelength.astype('float32') obs_data[output_var_names[0], global_config['oval_name']] = val_radiance.astype('float32') obs_data[output_var_names[0], global_config['oerr_name']] = err.astype('float32') obs_data[output_var_names[0], global_config['opqc_name']] = qc.astype('int32') @@ -221,9 +224,8 @@ def main(): # prepare global attributes we want to output in the file, # in addition to the ones already loaded in from the input file - GlobalAttrs['date_time_string'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") + GlobalAttrs['datetimeReference'] = args.date.strftime("%Y-%m-%dT%H:%M:%SZ") GlobalAttrs['thinning'] = args.thin - GlobalAttrs['converter'] = os.path.basename(__file__) # determine which variables we are going to output selected_names = [] @@ -232,25 +234,22 @@ def main(): # pass parameters to the IODA writer # (needed because we are bypassing ExtractObsData within BuildNetcdf) VarDims = { - 'radiance': ['nlocs', 'nchans'], - 'sensor_band_central_radiation_wavenumber': ['nchans'] + 'radiance': 
['Location', 'Channel'], + 'sensorCentralWavenumber': ['Channel'] } nchans = len(chan_number) nlocs = len(obs_data[('longitude', 'MetaData')]) - ndatetime = np.zeros((20), dtype=np.float32) DimDict = { - 'nlocs': nlocs, - 'nchans': list(chan_number), - 'nvars': list(chan_number), - 'ndatetime': list(ndatetime) + 'Location': nlocs, + 'Channel': list(chan_number), } writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) - VarAttrs[('radiance', 'ObsValue')]['units'] = 'W m^-2 um^-1 sr^-1' - VarAttrs[('radiance', 'ObsError')]['units'] = 'W m^-2 um^-1 sr^-1' - VarAttrs[('radiance', 'PreQC')]['units'] = 'unitless' + VarAttrs[('dateTime', 'MetaData')]['units'] = 'seconds since ' + basetime.strftime("%Y-%m-%dT%H:%M:%SZ") + VarAttrs[('radiance', 'ObsValue')]['units'] = 'W m-2 um-1 sr-1' + VarAttrs[('radiance', 'ObsError')]['units'] = 'W m-2 um-1 sr-1' VarAttrs[('radiance', 'ObsValue')]['_FillValue'] = -32767 VarAttrs[('radiance', 'ObsError')]['_FillValue'] = 999 VarAttrs[('radiance', 'PreQC')]['_FillValue'] = 999 diff --git a/src/marine/smap_sss2ioda.py b/src/marine/smap_sss2ioda.py index 8328e7cf6..ef8524f7b 100755 --- a/src/marine/smap_sss2ioda.py +++ b/src/marine/smap_sss2ioda.py @@ -8,6 +8,7 @@ # import sys +import os import argparse import numpy as np from datetime import datetime, timedelta @@ -24,17 +25,21 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" vName = "seaSurfaceSalinity" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("dateTime", "string") + ("dateTime", "long") ] GlobalAttrs = {} +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class Salinity(object): def __init__(self, filenames, date): @@ -95,7 +100,7 @@ def _read(self): data = {} for v in source_var_name: if v == 'sss_qc': - data[v] = ncd.variables[source_var_name[v]][:].flatten().astype(int) + data[v] = 
ncd.variables[source_var_name[v]][:].flatten().astype(np.int32) else: data[v] = ncd.variables[source_var_name[v]][:].flatten() @@ -114,8 +119,8 @@ def _read(self): # for each observation for i in range(len(data['time'])): obs_date = basetime + timedelta(seconds=float(data['time'][i])) - locKey = data['lat'][i], data['lon'][i], obs_date.strftime( - "%Y-%m-%dT%H:%M:%SZ") + time_offset = round((obs_date - epoch).total_seconds()) + locKey = data['lat'][i], data['lon'][i], time_offset self.data[locKey][valKey] = data['sss'][i] # if source == 'JPL': #RTOFS-DA # if data['sss_qc'][i] <= 4: @@ -167,8 +172,9 @@ def main(): writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['units'] = '1' - VarAttrs[('seaSurfaceSalinity', 'ObsError')]['units'] = '1' + VarAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string + VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['units'] = 'PSU' + VarAttrs[('seaSurfaceSalinity', 'ObsError')]['units'] = 'PSU' VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['_FillValue'] = 999 VarAttrs[('seaSurfaceSalinity', 'ObsError')]['_FillValue'] = 999 VarAttrs[('seaSurfaceSalinity', 'PreQC')]['_FillValue'] = 999 diff --git a/src/marine/smos_sss2ioda.py b/src/marine/smos_sss2ioda.py index e6f3f92f2..781d00494 100755 --- a/src/marine/smos_sss2ioda.py +++ b/src/marine/smos_sss2ioda.py @@ -8,6 +8,7 @@ # import sys +import os import argparse import numpy as np from datetime import datetime, timedelta @@ -22,17 +23,21 @@ import ioda_conv_engines as iconv from orddicts import DefaultOrderedDict +os.environ["TZ"] = "UTC" vName = "seaSurfaceSalinity" locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("dateTime", "string") + ("dateTime", "long") ] GlobalAttrs = {} +iso8601_string = 'seconds since 1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[14:-1]) + class Salinity(object): def __init__(self, filenames, 
date): @@ -63,7 +68,7 @@ def _read(self): sss = ncd.variables['SSS_corr'][:] sss_err = ncd.variables['Sigma_SSS_corr'][:] sss_qc = ncd.variables['Dg_quality_SSS_corr'][:] - sss_qc = sss_qc.astype(int) + sss_qc = sss_qc.astype(np.int32) mask = np.logical_not(sss.mask) lon = lon[mask] @@ -84,11 +89,9 @@ def _read(self): MM1 = f[n+19+11:n+19+13] SS1 = f[n+19+13:n+19+15] # - seconds = (datetime.strptime(date1+HH1+MM1+SS1, '%Y%m%d%H%M%S') - datetime.strptime( - date1, '%Y%m%d')).total_seconds() - basetime = datetime.strptime(date1, '%Y%m%d') - obs_date = basetime + timedelta(seconds=int(seconds)) - locKey = lat[i], lon[i], obs_date.strftime("%Y-%m-%dT%H:%M:%SZ") + this_dt = datetime.strptime(date1+HH1+MM1+SS1, '%Y%m%d%H%M%S') + time_offset = round((this_dt - epoch).total_seconds()) + locKey = lat[i], lon[i], time_offset self.data[locKey][valKey] = sss[i] self.data[locKey][errKey] = sss_err[i] self.data[locKey][qcKey] = sss_qc[i] @@ -132,8 +135,9 @@ def main(): writer = iconv.IodaWriter(args.output, locationKeyList, DimDict) VarAttrs = DefaultOrderedDict(lambda: DefaultOrderedDict(dict)) - VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['units'] = '1' - VarAttrs[('seaSurfaceSalinity', 'ObsError')]['units'] = '1' + VarAttrs[('dateTime', 'MetaData')]['units'] = iso8601_string + VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['units'] = 'PSU' + VarAttrs[('seaSurfaceSalinity', 'ObsError')]['units'] = 'PSU' VarAttrs[('seaSurfaceSalinity', 'ObsValue')]['_FillValue'] = 999 VarAttrs[('seaSurfaceSalinity', 'ObsError')]['_FillValue'] = 999 VarAttrs[('seaSurfaceSalinity', 'PreQC')]['_FillValue'] = 999 diff --git a/src/marine/viirs_modis_l2_oc2ioda.py b/src/marine/viirs_modis_l2_oc2ioda.py index be61c94de..2254977e5 100755 --- a/src/marine/viirs_modis_l2_oc2ioda.py +++ b/src/marine/viirs_modis_l2_oc2ioda.py @@ -39,7 +39,7 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("dateTime", "string"), + ("dateTime", "long"), ] @@ -92,6 +92,7 @@ def read_input(input_args): 
time = (np.repeat(sla.variables['msec'][:].ravel(), pixels_per_line).ravel() - sla.variables['msec'][0])/1000.0 data_in['time'] = time[mask] + time_units = basetime.strftime("%Y-%m-%dT%H:%M:%SZ") # load in all the other data and apply the missing value mask input_vars = ('poc', 'chlor_a') @@ -116,8 +117,7 @@ def read_input(input_args): # create a string version of the date for each observation dates = [] for i in range(len(lons)): - obs_date = basetime + timedelta(seconds=float(data_in['time'][i])) - dates.append(obs_date.strftime("%Y-%m-%dT%H:%M:%SZ")) + dates.append(np.int64(data_in['time'][i])) # allocate space for output depending on which variables are to be saved obs_dim = (len(lons)) @@ -138,7 +138,7 @@ def read_input(input_args): np.zeros(obs_dim) # Add the metadata - obs_data[('dateTime', 'MetaData')] = np.empty(len(dates), dtype=object) + obs_data[('dateTime', 'MetaData')] = np.empty(len(dates), dtype=np.int64) obs_data[('dateTime', 'MetaData')][:] = dates obs_data[('latitude', 'MetaData')] = lats obs_data[('longitude', 'MetaData')] = lons @@ -158,7 +158,7 @@ def read_input(input_args): obs_data[output_var_names[1], global_config['opqc_name']] = \ data_in['l2_flags'] - return (obs_data, GlobalAttrs) + return (obs_data, GlobalAttrs, time_units) def main(): @@ -236,7 +236,7 @@ def main(): obs = pool.map(read_input, pool_inputs) # concatenate the data from the files - obs_data, GlobalAttrs = obs[0] + obs_data, GlobalAttrs, time_units = obs[0] for i in range(1, len(obs)): obs_data.update(obs[i][0]) # Get the Location @@ -249,6 +249,10 @@ def main(): GlobalAttrs['converter'] = os.path.basename(__file__) DimDict['Location'] = Location + VarAttrs[('dateTime', 'MetaData')]['units'] = time_units + VarAttrs[('latitude', 'MetaData')]['units'] = 'degrees_north' + VarAttrs[('longitude', 'MetaData')]['units'] = 'degrees_east' + # determine which variables we are going to output if args.poc: VarAttrs[output_var_names[0], global_config['oval_name']]['units'] = \ diff 
--git a/src/marine/viirs_modis_l3_oc2ioda.py b/src/marine/viirs_modis_l3_oc2ioda.py index 13c492307..84a45c10a 100755 --- a/src/marine/viirs_modis_l3_oc2ioda.py +++ b/src/marine/viirs_modis_l3_oc2ioda.py @@ -38,11 +38,15 @@ locationKeyList = [ ("latitude", "float"), ("longitude", "float"), - ("dateTime", "string") + ("dateTime", "long") ] GlobalAttrs = {} +# Prepare dateTime info +iso8601_string = '1970-01-01T00:00:00Z' +epoch = datetime.fromisoformat(iso8601_string[:-1]) + class OCL3(object): @@ -71,10 +75,13 @@ def _read(self): GlobalAttrs['description'] = str(ncd.getncattr('processing_level')+' processing') timevar = ncd.getncattr('time_coverage_start') - obstime = timevar[:19]+'Z' + this_time = datetime.fromisoformat(timevar[:19]) + obstime = np.int64(round((this_time - epoch).total_seconds())) ncd.close() + # Convert obstime from string to seconds since the epoch (1970-01-01T00:00:00Z) + valKey = vName['chlor_a'], iconv.OvalName() errKey = vName['chlor_a'], iconv.OerrName() qcKey = vName['chlor_a'], iconv.OqcName() @@ -85,6 +92,10 @@ def _read(self): self.VarAttrs[vName['chlor_a'], iconv.OvalName()]['units'] = 'mg m^-3' self.VarAttrs[vName['chlor_a'], iconv.OerrName()]['units'] = 'mg m^-3' + self.VarAttrs[('dateTime', 'MetaData')]['units'] = 'seconds since ' + iso8601_string + self.VarAttrs[('latitude', 'MetaData')]['units'] = 'degrees_north' + self.VarAttrs[('longitude', 'MetaData')]['units'] = 'degrees_east' + # apply thinning mask if self.thin > 0.0: mask_thin = np.random.uniform(size=len(lons)) > self.thin diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index 7d14bdd0e..d1d7869f8 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -920,7 +920,7 @@ ecbuild_add_test( TARGET test_${PROJECT_NAME}_gsidiag_conv_uv -o testrun/ -t conv -p satwind" - satwind_obs_2018041500.nc4 ${IODA_CONV_COMP_TOL_ZERO}) + satwind_obs_2018041500.nc4 ${IODA_CONV_COMP_TOL}) ecbuild_add_test( TARGET test_${PROJECT_NAME}_gsidiag_conv TYPE SCRIPT @@ -933,7 +933,7 @@ ecbuild_add_test( TARGET
test_${PROJECT_NAME}_gsidiag_conv -o testrun/ -t conv -p sfc" - sfc_tv_obs_2018041500.nc4 ${IODA_CONV_COMP_TOL_ZERO}) + sfc_tv_obs_2018041500.nc4 ${IODA_CONV_COMP_TOL}) ecbuild_add_test( TARGET test_${PROJECT_NAME}_gsidiag_rad ENVIRONMENT "PYTHONPATH=${IODACONV_PYTHONPATH}" @@ -945,7 +945,7 @@ ecbuild_add_test( TARGET test_${PROJECT_NAME}_gsidiag_rad -i testinput/gsidiag_amsua_aqua_radiance_test.nc -o testrun/ -t rad" - amsua_aqua_obs_2018041500.nc4 ${IODA_CONV_COMP_TOL_ZERO}) + amsua_aqua_obs_2018041500.nc4 ${IODA_CONV_COMP_TOL}) #=============================================================================== # WRFDA ncdiag converter diff --git a/test/testoutput/amsr2_icec_l2p.nc b/test/testoutput/amsr2_icec_l2p.nc index 5ac443805..8a5f7e0b6 100644 --- a/test/testoutput/amsr2_icec_l2p.nc +++ b/test/testoutput/amsr2_icec_l2p.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:36c2d2b5e4f2719aef5749f6e1b5383c5431b4cc0ecab3d87e2f527f2d76fac2 -size 16951 +oid sha256:6bd6f5dbf4d59eedae94f4172b67df2fdca2c2226c655c668efd773836c9cf89 +size 12854 diff --git a/test/testoutput/amsua_aqua_obs_2018041500.nc4 b/test/testoutput/amsua_aqua_obs_2018041500.nc4 index b83df1760..84c7624f4 100644 --- a/test/testoutput/amsua_aqua_obs_2018041500.nc4 +++ b/test/testoutput/amsua_aqua_obs_2018041500.nc4 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:cb8097f427d5aa946bb4758469dc51d10437ded6d88d75a191d40d5fe04ae5bb -size 60170 +oid sha256:b244a4cf8e80ea1b6514c7ff69accc85733a1b0f374afefb9c0a0d9cee16bd62 +size 60622 diff --git a/test/testoutput/avhrr_radiance.nc b/test/testoutput/avhrr_radiance.nc index 736a57150..a1b7d07e5 100644 --- a/test/testoutput/avhrr_radiance.nc +++ b/test/testoutput/avhrr_radiance.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2b3c944bc34d2a9afff10cfa9764d0a924eac1b9dca90755099bc66d9e2da0ce -size 26734 +oid sha256:b9fc26d9574374e1f27eea0c64c292cb211186186bf0ce6d3009373c533de805 +size 22687 diff 
--git a/test/testoutput/gnssro_cosmic2_2021080212.nc4 b/test/testoutput/gnssro_cosmic2_2021080212.nc4 index ec2663a6f..1b8fa23e2 100644 --- a/test/testoutput/gnssro_cosmic2_2021080212.nc4 +++ b/test/testoutput/gnssro_cosmic2_2021080212.nc4 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:1e60ddf5b10dbbaf12fb5d68d0bdd8cbf2f22defd13fa88678ac86df78641fe4 -size 52741 +oid sha256:0fcdddf701ed0141e83398632efb62a6106635ad05bb8fbbf8fb9285f4ed357e +size 36240 diff --git a/test/testoutput/ioda_dt_global_twosat_phy_l4_20190101_vDT2021.nc b/test/testoutput/ioda_dt_global_twosat_phy_l4_20190101_vDT2021.nc index f0429273b..b237ec98e 100644 --- a/test/testoutput/ioda_dt_global_twosat_phy_l4_20190101_vDT2021.nc +++ b/test/testoutput/ioda_dt_global_twosat_phy_l4_20190101_vDT2021.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:22c34ec032f3d07eec9537d04ccaa244e492c11f3b54fd279a1686a49d0f9b10 -size 210989 +oid sha256:9110199c196fef3566a0ac706c14617af020d293ff3124fbd8ba48ba36061f6a +size 53684 diff --git a/test/testoutput/ioda_global_vavh_l3_rt_s3a_20210930T18.nc b/test/testoutput/ioda_global_vavh_l3_rt_s3a_20210930T18.nc index 13af5c3b5..5d8d66bb2 100644 --- a/test/testoutput/ioda_global_vavh_l3_rt_s3a_20210930T18.nc +++ b/test/testoutput/ioda_global_vavh_l3_rt_s3a_20210930T18.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7ecf547e32167c57a48f165dcde0e82b1673ffde255c04619468f07d28eca10e -size 13172 +oid sha256:ef8cae7c44a66142e6a17e15db5e19ffc5e5d9b70cd66154dece8279f15f43b7 +size 13182 diff --git a/test/testoutput/mls_o3_l2.nc b/test/testoutput/mls_o3_l2.nc index 9b9bb1a25..fecb75dbf 100644 --- a/test/testoutput/mls_o3_l2.nc +++ b/test/testoutput/mls_o3_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0c10b89fb04865b64292e7e5538feedbbce3df9af95e76584d2c5887ff52d440 -size 26380 +oid sha256:2d8bbbe99ecb90e45f863fa604d0bbacdd9574203b4e3eb011d305cbb3d4f68c +size 26857 diff --git 
a/test/testoutput/modis_aod.nc b/test/testoutput/modis_aod.nc index 205ac3f1e..98455d5aa 100644 --- a/test/testoutput/modis_aod.nc +++ b/test/testoutput/modis_aod.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:352eb57370ab66934ed521f799e84092831daeded79257d6c83cb909b86cf490 -size 18899 +oid sha256:ab9e4bfedbcdbf9f9e6dcda9dd45a523859eba33440fab096fa59effa42f783c +size 13678 diff --git a/test/testoutput/modis_aqua_oc_l2.nc b/test/testoutput/modis_aqua_oc_l2.nc index c275c3251..39bc32838 100644 --- a/test/testoutput/modis_aqua_oc_l2.nc +++ b/test/testoutput/modis_aqua_oc_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7dbdf0b1d77d8db048b7db604d4d6f1f770b701385c8c24513a531cd6ac4ba2f +oid sha256:423584a36ddc82ce26cf1664f3786c3ab81bd526a74b5b6679bd9b12aa704c11 size 85990 diff --git a/test/testoutput/mopitt_co.nc b/test/testoutput/mopitt_co.nc index 33cccafeb..d4c5f64d4 100644 --- a/test/testoutput/mopitt_co.nc +++ b/test/testoutput/mopitt_co.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:5a5703deff298e440fb7f4349a4d8585335d16b3d9b9e0e141f4927ac3f42ab4 -size 171846 +oid sha256:9f96d9d4b64d456cabd23e2d874e97af2f0d0351d8b29e6811c6217f9a75dac0 +size 108524 diff --git a/test/testoutput/nsidc_l4_icec.nc b/test/testoutput/nsidc_l4_icec.nc index 3960e6353..fde521036 100644 --- a/test/testoutput/nsidc_l4_icec.nc +++ b/test/testoutput/nsidc_l4_icec.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:401d3b02f5287dd6eb7a8d2904770385fece53f499720183e414a5cfa3035695 -size 133049 +oid sha256:27db502e26407438741a7bc4f49c87f4d17eb5db759903d3899515a1dfe4200b +size 37020 diff --git a/test/testoutput/omi_o3_l2.nc b/test/testoutput/omi_o3_l2.nc index a651a6488..6e1bb0871 100644 --- a/test/testoutput/omi_o3_l2.nc +++ b/test/testoutput/omi_o3_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2b5965a3f258eb901fe23c21f542189dde2d759d40e16a682b6f81e670aa6951 -size 
838859 +oid sha256:3a79918b72a3e9323e211594b8f18e0468908996fb90ebf79e2b304c927d5233 +size 995019 diff --git a/test/testoutput/ompsnm_o3_l2.nc b/test/testoutput/ompsnm_o3_l2.nc index f0cce6a4e..9fab1fd71 100644 --- a/test/testoutput/ompsnm_o3_l2.nc +++ b/test/testoutput/ompsnm_o3_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:51ec919979f11174f267aa001eb9d4854071bae6c0c991c27daf9b818b5f4051 -size 296953 +oid sha256:39b73db7a1054855f3ff1f1c66ba4c865fed2f9b18212bcdcf6189f0226d976f +size 376457 diff --git a/test/testoutput/pace_oc_l2.nc b/test/testoutput/pace_oc_l2.nc index 41cc206d4..cd2fbb3d6 100644 --- a/test/testoutput/pace_oc_l2.nc +++ b/test/testoutput/pace_oc_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:5e4cba5c84d5ce0a23eaec05691d93cdb4ecde72d9dc20dc58a879cd8cf82f27 -size 13545 +oid sha256:e52c9eeb6d32a6bf21b218ffff977275029efbd5c8d40e47a7f1842237c85458 +size 13459 diff --git a/test/testoutput/pace_radiance_L1B.nc b/test/testoutput/pace_radiance_L1B.nc index 2ba8e349d..f61b90029 100644 --- a/test/testoutput/pace_radiance_L1B.nc +++ b/test/testoutput/pace_radiance_L1B.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:ca2da8d7caae002d9e6e3f4d571341430daf063ff5aec6aa0b4d242932731205 -size 92888 +oid sha256:e2c50c0df780aeb53522b96429286f6a5b8475868c626d1c1483607a127e80d3 +size 83451 diff --git a/test/testoutput/smap9km_ssm.nc b/test/testoutput/smap9km_ssm.nc index a67c92c0d..4b2e4404d 100644 --- a/test/testoutput/smap9km_ssm.nc +++ b/test/testoutput/smap9km_ssm.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:e32b9f90c6379fdbc38217fa7bb333744fe8337072ec0b86eb9f612b66aa7167 -size 387358 +oid sha256:7ef3865d0151e7bc2f7de5d9c0debb6fd1d5cb0f5c93da42c2436b3e893ee6b9 +size 386924 diff --git a/test/testoutput/smap_ssm.nc b/test/testoutput/smap_ssm.nc index 0cf3155c2..fc9aefbac 100644 --- a/test/testoutput/smap_ssm.nc +++ b/test/testoutput/smap_ssm.nc @@ -1,3 +1,3 
@@ version https://git-lfs.github.com/spec/v1 -oid sha256:7d1e86b3d45cd7cb8a1a4102e25433465bd5ce44b75b0e98881350b36370e831 -size 98819 +oid sha256:0232524f086e41464c097f7092772c4e9d5eaa5e3b3f4d16d7d5a118c1dc53d5 +size 99937 diff --git a/test/testoutput/smap_sss_rss.nc b/test/testoutput/smap_sss_rss.nc index 1e2fcb4ae..d9bbf16cf 100644 --- a/test/testoutput/smap_sss_rss.nc +++ b/test/testoutput/smap_sss_rss.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:1dc762170d2408901c0b9508d5bbc3ba32ad77b013b0dc742bb5130a74589bed +oid sha256:40659647fd83b477fd81ba3d8749f0a6305bc65b126aae08cc0b14c271b81cc1 size 13021 diff --git a/test/testoutput/smos_sss_l2.nc b/test/testoutput/smos_sss_l2.nc index 0b31ccac2..d81c12ce1 100644 --- a/test/testoutput/smos_sss_l2.nc +++ b/test/testoutput/smos_sss_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:332eb19851d146f15eece3da08ee821365da3b656e53d21bea626d81e7a6b8de +oid sha256:ca857e0f7bafa9350582c79981c3c7f1448ccfb0cad17084adce4a11988d66ff size 29183 diff --git a/test/testoutput/viirs_jpss1_oc_l2.nc b/test/testoutput/viirs_jpss1_oc_l2.nc index 98a8c567c..773730a20 100644 --- a/test/testoutput/viirs_jpss1_oc_l2.nc +++ b/test/testoutput/viirs_jpss1_oc_l2.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:cc0ef239ca1105a06b70e42451c948788f6c7d3c266f00140eb0beefcbc894af +oid sha256:7a4c3a2d8a349d1327d0b71e7a0a892020bf242202a917c9c872cd76320d9197 size 139085 diff --git a/test/testoutput/viirs_jpss1_oc_l3.nc b/test/testoutput/viirs_jpss1_oc_l3.nc index dcebc78b1..e1aeb7090 100644 --- a/test/testoutput/viirs_jpss1_oc_l3.nc +++ b/test/testoutput/viirs_jpss1_oc_l3.nc @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:aaac62c3c3f0ca59d29b3e2889bc16af245f61e57c11881b22b2b93238b87c75 +oid sha256:af4b39af59916a755248db68985c871cfa231175aeed0598e26bdbbc830a2b45 size 13387