Skip to content
Snippets Groups Projects
Commit e1cc9c69 authored by Bruce Flynn's avatar Bruce Flynn
Browse files

Take RDRs on stdin, fix offset issue.

parent 2a544403
No related branches found
No related tags found
No related merge requests found
...@@ -40,12 +40,11 @@ band_map = { ...@@ -40,12 +40,11 @@ band_map = {
apids = sorted(band_map.keys()) apids = sorted(band_map.keys())
def utc2tai(dt):
    """Convert a datetime to TAI microseconds since the VIIRS epoch.

    :param dt: a naive ``datetime`` interpreted as UTC
    :returns: integer microseconds (Grain returns seconds; 1000**2 scales
        to microseconds, matching ``tracker.obs_time`` units)
    """
    seconds = grain.Grain().utc2tai(dt, grain.VIIRS_EPOCH)
    return seconds * 1000 ** 2
def write_netcdf_file(destpath, scans, sizes): def create_netcdf_file(destpath):
dataset = netCDF4.Dataset(destpath, 'w') dataset = netCDF4.Dataset(destpath, 'w')
dataset.bands = """Band APID Day dataset.bands = """Band APID Day
============== ==============
...@@ -78,55 +77,67 @@ ENGR 826""" ...@@ -78,55 +77,67 @@ ENGR 826"""
dataset.createDimension('scan_time', None) dataset.createDimension('scan_time', None)
dataset.createDimension('apid', size=len(apids)) dataset.createDimension('apid', size=len(apids))
dataset.createVariable('time', 'u8', ('scan_time',)) dataset.createVariable('time', 'u8', ('scan_time',))
dataset.createVariable('size', 'i4', ('apid', 'scan_time',), fill_value=-999) dataset.createVariable(
'size', 'i4', ('apid', 'scan_time',),
fill_value=-999, chunksizes=[len(apids), 1024])
dataset.createVariable('apid', 'u2', ('apid',)) dataset.createVariable('apid', 'u2', ('apid',))
dataset['time'][:] = np.array(list(scans))
dataset['apid'][:] = np.array(list(apids)) dataset['apid'][:] = np.array(list(apids))
dataset['size'][:] = sizes
dataset.close() return dataset
def read_data_from_rdr(filepath, start, end):
    """Accumulate per-scan packet byte counts from a VIIRS RDR file.

    :param filepath: path to an RDR file readable by ``jpssrdr``
    :param start: window start (inclusive), same units as
        ``tracker.obs_time`` (presumably IET microseconds — TODO confirm)
    :param end: window end (exclusive)
    :returns: ``(times, sizes)`` where ``times`` is a sorted list of scan
        times seen in the window and ``sizes`` maps ``(apid, time)`` to the
        total packet bytes for that APID in that scan
    """
    # XXX: just get the VIIRS Science RDR for now
    rdr = jpssrdr.rdr_datasets(filepath)['science'][0]
    # defaultdict(int) is the idiomatic zero-initialized counter
    sizes = defaultdict(int)
    times = set()
    for tracker, packet in rdr.packets():
        apid = packet.apid
        time = tracker.obs_time
        size = tracker.size
        # offset == -1 marks a tracker entry with no associated packet data
        if tracker.offset == -1:
            continue
        # skip data outside requested window or APIDs not in the band map
        if time < start or time >= end or apid not in apids:
            continue
        sizes[apid, time] += size
        times.add(time)
    return sorted(times), sizes
if __name__ == '__main__': if __name__ == '__main__':
import argparse import argparse, sys
parser = argparse.ArgumentParser(description=__doc__) parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('start', type=utc2tai) datetype = lambda v: datetime.strptime(v, '%Y-%m-%d %H:%M:%S')
parser.add_argument('end', type=utc2tai) parser.add_argument('start', type=datetype)
parser.add_argument('rdrs', nargs='+') parser.add_argument('end', type=datetype)
parser.add_argument('rdrs', type=argparse.FileType('r'), default=sys.stdin)
args = parser.parse_args() args = parser.parse_args()
scan_times = set() start, end = utc2tai(args.start), utc2tai(args.end)
sizes = np.ones((len(apids), 86400 / 1.7864)) * float('nan') destpath = args.start.strftime('viirs_scanbytes_d%Y%m%d_t%H%M%S.nc')
for filepath in args.rdrs: dataset = create_netcdf_file(destpath)
for filepath in [l.strip() for l in args.rdrs]:
print "reading", filepath print "reading", filepath
times, _, dat = read_data_from_rdr(filepath, args.start, args.end) times, sizes = read_data_from_rdr(filepath, start, end)
dat = np.ones((len(apids), len(times))) * -999
for tidx, time in enumerate(times): for tidx, time in enumerate(times):
for aidx, apid in enumerate(apids): for aidx, apid in enumerate(apids):
sizes[aidx][tidx] = dat[apid, time] dat[aidx][tidx] = sizes[apid, time]
destpath = os.path.basename(args.rdrs[0]) + '.scanbytes.nc' var = dataset['time']
print "writing", destpath num_times = var.shape[0]
write_netcdf_file(destpath, scan_times, sizes) print 'time before', var.shape,
var[num_times:] = np.array(list(times))
print 'after', var.shape
var = dataset['size']
print 'size before', var.shape,
var[:,num_times:] = dat
print 'after', var.shape
dataset.close()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment