#!/usr/bin/env python
"""Scan a VIIRS Science RDR and record per-scan, per-APID packet byte totals.

Writes ``<rdr_basename>.size.nc`` containing:

* dimension ``scan`` -- one entry per scan start time observed
* dimension ``apid`` -- one entry per APID observed
* variable  ``time(scan)``       -- scan start times (IET), ascending
* variable  ``apid(apid)``       -- APID numbers, ascending
* variable  ``size(apid, scan)`` -- total packet bytes for that APID/scan,
  ``-999`` where the APID produced no packets during the scan
"""
import os
import argparse

import numpy as np
import netCDF4

from edosl0util import jpssrdr

# Fill value for (apid, scan) cells that received no packets.  Used both as
# the NetCDF variable fill_value and when building the size matrix so the
# two can never drift apart.
MISSING = -999


def write_netcdf_file(destpath, scans, apids, sizes):
    """Write per-scan/per-APID packet sizes to a NetCDF file.

    :param destpath: path of the NetCDF file to create (overwritten)
    :param scans: iterable of scan start times (IET)
    :param apids: iterable of APID numbers
    :param sizes: dict mapping ``(apid, scan)`` -> total packet bytes
    """
    # The callers pass sets, whose iteration order is arbitrary; sort so the
    # time axis is ascending and the file is deterministic across runs.
    scans = sorted(scans)
    apids = sorted(apids)

    dataset = netCDF4.Dataset(destpath, 'w')
    # NOTE(review): the intra-line column alignment of this table was not
    # recoverable from the patch; content reproduced verbatim.
    dataset.bands = """Band APID Day
==============
M1 804 X
M2 803 X
M3 802 X
M4 801 X
M5 800 X
M6 805 X
M7 806
M8 809
M9 807 X
M10 808
M11 810 X
M12 812
M13 811
M14 816
M15 815
M16 814

I1 818 X
I2 819 X
I3 820 X
I4 813
I5 817

DNB 821
CAL 825
ENGR 826"""
    dataset.createDimension('scan', size=len(scans))
    dataset.createDimension('apid', size=len(apids))
    dataset.createVariable('time', 'u8', ('scan',))
    dataset.createVariable('size', 'i4', ('apid', 'scan',), fill_value=MISSING)
    dataset.createVariable('apid', 'u2', ('apid',))
    dataset['time'][:] = np.array(scans)
    dataset['time'].description = 'Scan start time (IET)'
    dataset['apid'][:] = np.array(apids)
    dataset['size'][:] = _size_matrix(scans, apids, sizes)
    dataset.close()


def _size_matrix(scans, apids, sizes):
    """Build the (apid, scan) integer matrix of packet byte totals.

    Cells with no entry in *sizes* are set to ``MISSING``.  Building with
    ``np.full(..., dtype='i4')`` matches the NetCDF variable's dtype directly
    instead of going through a float64 intermediate.
    """
    dat = np.full((len(apids), len(scans)), MISSING, dtype='i4')
    for sidx, scan in enumerate(scans):
        for aidx, apid in enumerate(apids):
            dat[aidx, sidx] = sizes.get((apid, scan), MISSING)
    return dat


def read_data_from_rdr(filepath):
    """Accumulate packet byte totals per (apid, scan) from an RDR file.

    :param filepath: path to a JPSS RDR file
    :returns: ``(scans, apids, sizes)`` where *scans* and *apids* are sets of
        the observed scan times and APIDs, and *sizes* maps
        ``(apid, scan)`` -> total packet bytes.
    """
    # XXX: just get the VIIRS Science RDR for now
    rdr = jpssrdr.rdr_datasets(filepath)['science'][0]
    scans = set()
    apids = set()
    sizes = {}
    for tracker, packet in rdr.packets():
        key = (packet.apid, tracker.obs_time)
        # Sum all packet sizes for this APID within this scan; get() with a
        # default replaces the membership-test-then-assign double lookup.
        sizes[key] = sizes.get(key, 0) + tracker.size
        scans.add(tracker.obs_time)
        apids.add(packet.apid)
    return scans, apids, sizes


if __name__ == '__main__':
    # NOTE(review): the parser-construction line fell outside the patch's
    # visible context; argparse is assumed from the .add_argument usage.
    parser = argparse.ArgumentParser()
    parser.add_argument('viirs_rdr')
    args = parser.parse_args()
    destpath = os.path.basename(args.viirs_rdr) + '.size.nc'
    scans, apids, sizes = read_data_from_rdr(args.viirs_rdr)
    write_netcdf_file(destpath, scans, apids, sizes)