Skip to content
Snippets Groups Projects
Commit 05577839 authored by Bruce Flynn's avatar Bruce Flynn
Browse files

Use common struct

parent d02b7a2b
No related branches found
No related tags found
No related merge requests found
......@@ -15,20 +15,15 @@ from collections import namedtuple
import numpy as np
from h5py import File as H5File
LOG = logging.getLogger(__name__)
class BaseStruct(c.BigEndianStructure):
_pack_ = 1
_fields_ = []
from edosl0util.headers import BaseStruct
def __repr__(self):
attrs = ' '.join('%s=%s' % (f[0], getattr(self, f[0])) for f in self._fields_)
return '<{} {}>'.format(self.__class__.__name__, attrs)
LOG = logging.getLogger(__name__)
class StaticHeader(BaseStruct):
"""
Common RDR static header.
"""
_fields_ = [
('satellite', c.c_char * 4),
('sensor', c.c_char * 16),
......@@ -44,7 +39,9 @@ class StaticHeader(BaseStruct):
class Apid(BaseStruct):
"""
Entry in the ApidList storage area.
"""
_fields_ = [
('name', c.c_char * 16),
('value', c.c_uint32),
......@@ -55,7 +52,9 @@ class Apid(BaseStruct):
class PacketTracker(BaseStruct):
"""
Entry in the PacketTracker storage area.
"""
_fields_ = [
('obs_time', c.c_int64),
('sequence_number', c.c_int32),
......@@ -109,10 +108,9 @@ class CommonRdr(namedtuple('CommonRdr', ('buf', 'header', 'apids'))):
def _packets_for_apid(buf, header, apid):
# we cache packet classes by their size
_pkt_impl_cache = {}
"""
Generate tuples of (PacketTracker, Packet)
"""
t_off = header.pkt_tracker_offset + apid.pkt_tracker_start_idx * c.sizeof(PacketTracker)
for idx in range(apid.pkts_received):
tracker = PacketTracker.from_buffer(buf, t_off)
......@@ -130,7 +128,7 @@ def _packets_for_apid(buf, header, apid):
def _read_apid_list(header, buf):
"""
Return a generator that yields `Apid`s
Generate Apid-s
"""
offset = header.apid_list_offset
for idx in range(header.num_apids):
......@@ -140,8 +138,7 @@ def _read_apid_list(header, buf):
def read_common_rdrs(sensor, filepath):
"""
Return a generator that yields `CommonRdr` for each dataset provided by
`read_rdr_datasets`.
Generate CommonRdr-s for each dataset(granule) in `filelpath`
"""
for buf in read_rdr_datasets(sensor, filepath):
header = StaticHeader.from_buffer(buf)
......@@ -151,8 +148,8 @@ def read_common_rdrs(sensor, filepath):
def read_rdr_datasets(sensor, filepath):
"""
Return a generator that yields bytearrays for each RawApplicationPackets
dataset in numerical order.
Generate byte arrays of granule RawApplicationPackets in granule number
order.
"""
sensor = sensor.upper()
fobj = H5File(filepath)
......@@ -168,7 +165,7 @@ def read_rdr_datasets(sensor, filepath):
def sort_packets_by_obs_time(packets):
    """
    Return a new list of `Packet`s sorted ascending by their
    ``tracker.obs_time`` (IET, per the PacketTracker struct).

    :param packets: iterable of objects exposing ``.tracker.obs_time``
    :returns: a new sorted list; the input is NOT modified
        (``sorted`` builds a fresh list, so the earlier docstring's
        "in-place" claim was wrong).
    """
    return sorted(packets, key=lambda p: p.tracker.obs_time)
......@@ -218,13 +215,6 @@ if __name__ == '__main__':
parser.add_argument('-v', action='store_true')
parser.add_argument('-o', '--output')
parser.add_argument('-f', '--skipfill', action='store_true')
"""
def interval(val):
fmt = '%Y-%m-%d %H:%M:%S'
start, end = val.split(',')
return datetime.strptime(start, fmt), datetime.strptime(end, fmt)
parser.add_argument('-t', '--trunc', type=interval)
"""
parser.add_argument('sensor', choices=('viirs', 'atms', 'cris'))
parser.add_argument('rdr')
args = parser.parse_args()
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment