diff --git a/edosl0util/cli/crinfo.py b/edosl0util/cli/crinfo.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a53ce592a69303983f843e3dff62198e0ddc7f8
--- /dev/null
+++ b/edosl0util/cli/crinfo.py
@@ -0,0 +1,20 @@
+import logging
+from pprint import pprint
+
+from edosl0util.cli import util
+from edosl0util import crio
+
+LOG = logging
+
+
+def main():
+    parser = util.default_parser()
+    parser.add_argument('filepath')
+    args = parser.parse_args()
+    util.configure_logging(args)
+
+    pprint(crio.read(args.filepath))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/edosl0util/headers.py b/edosl0util/headers.py
index 2eb2ff8e55cffc461bbf6705ab097dc5dfcef112..e145e58f4c8886a286f048043c8e5801998d33b8 100644
--- a/edosl0util/headers.py
+++ b/edosl0util/headers.py
@@ -63,6 +63,9 @@ class Timecode(BaseStruct):
     """
     Secondary header timecode baseclass.
     """
+    def __repr__(self):
+        return str(self.asdatetime())
+
     def astimestamp(self):
         raise NotImplementedError()
 
diff --git a/scripts/viirs_scan_bytes.py b/scripts/viirs_scan_bytes.py
index b6143799cabacbb2e41efabd47e09c55b2008056..9d3ea84211ea40df3cab41074056907a5b2bfdf6 100755
--- a/scripts/viirs_scan_bytes.py
+++ b/scripts/viirs_scan_bytes.py
@@ -1,60 +1,142 @@
-#!/usr/bin/env python
+#!/home/brucef/code/PeateScience/local/dist/edosl0/0.1/env/bin/python
 """
 Write a NetCDF file containing size in bytes of VIIRS scans.
 """
 import os
-import numpy as np
+from collections import defaultdict
+from datetime import datetime
+
 import netCDF4
+import numpy as np
 from edosl0util import jpssrdr
-from edosl0util.headers import GROUP_CONTINUING, GROUP_LAST
+from grain import grain
+
+
+band_map = {
+    800: 'M5',
+    801: 'M4',
+    802: 'M3',
+    803: 'M2',
+    804: 'M1',
+    805: 'M6',
+    806: 'M7',
+    807: 'M9',
+    808: 'M10',
+    809: 'M8',
+    810: 'M11',
+    811: 'M13',
+    812: 'M12',
+    813: 'I4',
+    814: 'M16',
+    815: 'M15',
+    816: 'M14',
+    817: 'I5',
+    818: 'I1',
+    819: 'I2',
+    820: 'I3',
+    821: 'DNB',
+    825: 'CAL',
+    826: 'ENGR'}
+apids = sorted(band_map.keys())
 
 
-def write_netcdf_file(destpath, scans, sizes):
+def utc2tai(dt):
+    return grain.Grain().utc2tai(dt, grain.VIIRS_EPOCH) * 1000**2
+
+
+def create_netcdf_file(destpath):
     dataset = netCDF4.Dataset(destpath, 'w')
-    dataset.createDimension('scan', size=len(scans))
-    dataset.createDimension('apid', size=len(sizes))
-    dataset.createVariable('time', 'u8', ('scan',))
-    dataset.createVariable('size', 'u8', ('apid', 'scan',))
+    dataset.bands = """Band APID  Day
+==============
+M1   804   X
+M2   803   X
+M3   802   X
+M4   801   X
+M5   800   X
+M6   805   X
+M7   806
+M8   809
+M9   807   X
+M10  808
+M11  810   X
+M12  812
+M13  811
+M14  816
+M15  815
+M16  814
+
+I1   818   X
+I2   819   X
+I3   820   X
+I4   813
+I5   817
+
+DNB  821
+CAL  825
+ENGR 826"""
+    dataset.createDimension('scan_time', None)
+    dataset.createDimension('apid', size=len(apids))
+    dataset.createVariable('time', 'u8', ('scan_time',))
+    dataset.createVariable(
+        'size', 'i4', ('apid', 'scan_time',),
+        fill_value=-999, chunksizes=[len(apids), 1024])
     dataset.createVariable('apid', 'u2', ('apid',))
 
-    dataset['time'][:] = np.array(list(scans))
-    apids = sorted(sizes.keys())
-    # assert len(apids) == 16, "Expected 16 apids, got {}".format(apids)
-    dat = np.array([sizes[a] for a in apids])
-    dataset['size'][:] = dat
-    dataset['apid'][:] = np.array(apids)
-    dataset.close()
+    dataset['apid'][:] = np.array(list(apids))
 
+    return dataset
 
-def read_data_from_rdr(filepath):
+
+def read_data_from_rdr(filepath, start, end):
     # XXX: just get the VIIRS Science RDR for now
     rdr = jpssrdr.rdr_datasets(filepath)['science'][0]
-    scans = set()
-    sizes = {}
+    sizes = defaultdict(lambda: 0)
+    times = set()
     for tracker, packet in rdr.packets():
         apid = packet.apid
-        scan = tracker.obs_time
+        time = tracker.obs_time
         size = tracker.size
+        if tracker.offset == -1:
+            continue
+        # skip packets outside the requested time window or with unknown APIDs
+        if time < start or time >= end or apid not in apids:
+            continue
 
-        if apid not in sizes:
-            sizes[apid] = []
-
-        # Sum up sizes for packet groups
-        if packet.sequence_grouping in (GROUP_CONTINUING, GROUP_LAST):
-            sizes[apid][-1] += size
-        else:
-            sizes[apid].append(size)
-
-        scans.add(scan)
+        sizes[apid, time] += size
+        times.add(time)
 
-    return scans, sizes
+    return sorted(times), sizes
 
 
 if __name__ == '__main__':
-    import argparse
+    import argparse, sys
     parser = argparse.ArgumentParser(description=__doc__)
-    parser.add_argument('viirs_rdr')
+    datetype = lambda v: datetime.strptime(v, '%Y-%m-%d %H:%M:%S')
+    parser.add_argument('start', type=datetype)
+    parser.add_argument('end', type=datetype)
+    parser.add_argument('rdrs', type=argparse.FileType('r'), default=sys.stdin)
     args = parser.parse_args()
-    destpath = os.path.basename(args.viirs_rdr) + '.size.nc'
-    scans, sizes = read_data_from_rdr(args.viirs_rdr)
-    write_netcdf_file(destpath, scans, sizes)
+
+    start, end = utc2tai(args.start), utc2tai(args.end)
+    destpath = args.start.strftime('viirs_scanbytes_d%Y%m%d_t%H%M%S.nc')
+    dataset = create_netcdf_file(destpath)
+
+    rdrfiles = sorted([l.strip() for l in args.rdrs],
+                      key=os.path.basename)
+    for filepath in rdrfiles:
+        print "reading", filepath
+        times, sizes = read_data_from_rdr(filepath, start, end)
+
+        dat = np.ones((len(apids), len(times))) * -999
+        for tidx, time in enumerate(times):
+            for aidx, apid in enumerate(apids):
+                dat[aidx][tidx] = sizes[apid, time]
+
+        var = dataset['time']
+        num_times = var.shape[0]
+        var[num_times:] = np.array(list(times))
+
+        var = dataset['size']
+        var[:,num_times:] = dat
+
+    dataset.close()
diff --git a/setup.py b/setup.py
index 529ee9d48441853f6a8aa356a4999cd41247b347..db6743cbf57c68af71c91bbd3bd934ca8c560788 100644
--- a/setup.py
+++ b/setup.py
@@ -5,10 +5,11 @@ setup(
     author='Bruce Flynn',
     author_email='brucef@ssec.wisc.edu',
     description='Utilities for working with EDOS L0 PDS files',
-    version='0.7',
+    version='0.8',
     zip_safe=False,
     packages=find_packages(),
     pyver=True,
+    include_package_data=True,
     dependency_links=['https://sips.ssec.wisc.edu/eggs/packages'],
     setup_requires=[
         'PyVer',
@@ -21,6 +22,7 @@ setup(
     edosl0split = edosl0util.cli.split:main
     edosl0trunc = edosl0util.cli.trunc:main
     edosl0info = edosl0util.cli.info:main
+    edosl0crinfo = edosl0util.cli.crinfo:main
     edosl0merge = edosl0util.cli.merge:main
     edosl0crgen = edosl0util.cli.crgen:main
     rdr2l0 = edosl0util.cli.rdr2l0:main