diff --git a/README.md b/README.md
index 02580e3d914a86e66d202bc6fe824d78d97a18d4..b8d0d78b4e1a1a9db6a5d671875132ad33428652 100644
--- a/README.md
+++ b/README.md
@@ -63,6 +63,28 @@ pip install -r requirements.txt
 
 # LightningCast Options/Configuration
 
+## Required Channels / Bands
+
+Channels/bands required for prediction:
+
+```
+ABI: 02, 05, 13, 15
+AHI: 03, 05, 13, 15
+```
+
+Extra channels/bands needed for image creation:
+
+ABI:
+```
+	DayLandCloud: 03
+	IR-only Cloud Phase: 11, 14
+```
+AHI:
+```
+	DayLandCloud: 04
+	IR-only Cloud Phase: 11, 14
+```
+
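+If any required channel/band file is missing, LightningCast logs a critical error and exits. The extra
+channels are only needed when images are requested; without Channels/Bands 11 and 14, image creation
+falls back to a Channel/Band 13 (IR-only) image.
+
+To sanity-check a local data directory before running, a minimal sketch like the one below can confirm
+that the required ABI bands are present. It assumes standard ABI L1b file naming, where the band number
+appears as, e.g., `C02` in the filename (adjust the pattern for AHI or other naming conventions):
+
+```
+import glob
+import os
+
+datadir = "/path/to/abi/l1b"  # hypothetical directory of level 1 files
+required_abi = ["02", "05", "13", "15"]
+
+# A band is considered missing if no file in datadir contains its C## tag.
+missing = [band for band in required_abi
+           if not glob.glob(os.path.join(datadir, f"*C{band}_*"))]
+
+if missing:
+    print("Missing ABI band(s): " + ", ".join(missing))
+else:
+    print("All required ABI bands found.")
+```
+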
 ## Required argument(s)
 Lightningcast requires only one argument: `input_file`. This should be a path to a single level 1 file in the same directory as all other required files. Otherwise, it should be a path to a text file where each new line is a path to a single level 1 file.
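+
+For example, a file list might look like this (hypothetical paths):
+
+```
+/data/abi/OR_ABI-L1b-RadC-M6C13_G16_s20241821501171_e20241821503544_c20241821503587.nc
+/data/abi/OR_ABI-L1b-RadC-M6C13_G16_s20241821506171_e20241821508544_c20241821508587.nc
+```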
 
diff --git a/lightningcast/check_products.py b/lightningcast/check_products.py
index e20d01af2540f19215962630a9e883054cc5b7e8..470f3c7743bf251fe6cab80f9f18c8d6e1ab6134 100644
--- a/lightningcast/check_products.py
+++ b/lightningcast/check_products.py
@@ -74,15 +74,24 @@ while True:
                 if 'json' in product:
                     json_prods = glob.glob(f'{product}/*')
                     for json_prod in json_prods:
-                        latest_file = sorted(glob.glob(f'{json_prod}/*'))[-1]
+                        try:
+                            latest_file = sorted(glob.glob(f'{json_prod}/*'))[-1]
+                        except IndexError:
+                            body += f"No files found for {json_prod}!\n"
+                        else:
+                            mtime = os.path.getmtime(latest_file)
+                            if mtime < old_time:
+                                body += f"{latest_file} is more than {time_thresh} minutes old.\n"
                 else:
-                    latest_file = sorted(glob.glob(f'{product}/*'))[-1]
-
-                #mtime_dt = datetime.fromtimestamp(os.path.getmtime(latest_file)) 
-                mtime = os.path.getmtime(latest_file)
-      
-                if mtime < old_time:
-                    body += f"{latest_file} is more than {time_thresh} minutes old.\n"
+                    try:
+                        latest_file = sorted(glob.glob(f'{product}/*'))[-1]
+                    except IndexError:
+                        body += f"No files found for {product}!\n"
+                    else:
+                        #mtime_dt = datetime.fromtimestamp(os.path.getmtime(latest_file)) 
+                        mtime = os.path.getmtime(latest_file)
+                        if mtime < old_time:
+                            body += f"{latest_file} is more than {time_thresh} minutes old.\n"
 
         body += '\n' 
 
diff --git a/lightningcast/cleanup.py b/lightningcast/cleanup.py
index 3e7ad4c4beea7b994c83705ba3407a2fe7a93436..cffa7b232e62cddd233daf8a2987dfdaeca686e6 100755
--- a/lightningcast/cleanup.py
+++ b/lightningcast/cleanup.py
@@ -1,3 +1,7 @@
+"""
+This program runs indefinitely and removes old data.
+"""
+
 import os, sys
 import numpy as np
 import time
diff --git a/lightningcast/glm/aggregate_glm_grids.py b/lightningcast/glm/aggregate_glm_grids.py
index 30759cd1d55cfa12ca3f81cbceb61c3481be2ca0..7a233ca89a8d4a2c4ec49dcf3e46db21fbb8a101 100755
--- a/lightningcast/glm/aggregate_glm_grids.py
+++ b/lightningcast/glm/aggregate_glm_grids.py
@@ -39,26 +39,23 @@ def main():
     parser = argparse.ArgumentParser(description=parse_desc)
     parser.add_argument(
         "lastdatetime",
-        help="Datetime of latest file to include in aggregation. E.g., 20181621515 -- yyyyjjjhhmm",
+        help="Datetime of latest file to include in aggregation. E.g., 201806211515 -- yyyymmddhhmm",
         type=str,
-        nargs=1,
     )
     parser.add_argument(
-        "datadir",
-        help="directory of netcdf files. E.g., /data/GLM . Subdirs assumed (e.g.): /data/GLM/2018/Jun/25/",
+        "datapatt",
+        help="Input data pattern E.g., /data/GLM/%%Y/%%b/%%d/OR_GLM-L2-*s%%Y%%j%%H%%M*.nc*",
         type=str,
-        nargs=1,
     )
     parser.add_argument(
         "-a",
         "--accumPeriod",
         help="Number of minutes to accumulate. Default = 5.",
-        default=[5],
+        default=5,
         type=int,
-        nargs=1,
     )
     parser.add_argument(
-        "-o", "--outdir", help="Output directory. Default = ${PWD}", type=str, nargs=1
+        "-o", "--outdir", help="Output directory. Default = ${PWD}", type=str
     )
     parser.add_argument(
         "-gt",
@@ -73,29 +70,25 @@ def main():
         action="store_true",
     )
     parser.add_argument(
-        "-l", "--logfile", help="print to this logfile", default=[None], nargs=1
+        "-l", "--logfile", help="print to this logfile", default=None,
     )
     parser.add_argument(
         "-ru",
         "--re_upload",
         help="Full path to re_upload executable. Will execute after GTiff is written.",
-        default=[None],
+        default=None,
         type=str,
-        nargs=1,
     )
 
     args = parser.parse_args()
 
-    if not args.datadir:
-        datadir = "None"
-    else:
-        datadir = args.datadir[0]
     if not args.outdir:
         outdir = os.environ["PWD"] + "/"
     else:
-        outdir = args.outdir[0]
+        outdir = args.outdir
 
-    logDict = logging_def(logfile=args.logfile[0])
+    logDict = logging_def(logfile=args.logfile)
     dictConfig(logDict)
     # send stderr to logger
     stderr_logger = logging.getLogger("STDERR")
@@ -103,12 +96,12 @@ def main():
     sys.stderr = sl
 
     status = aggregate_glm_grids(
-        args.lastdatetime[0],
-        args.datadir[0],
-        accumPeriod=args.accumPeriod[0],
+        args.lastdatetime,
+        args.datapatt,
+        accumPeriod=args.accumPeriod,
         outdir=outdir,
         geotiff=args.geotiff,
-        re_upload=args.re_upload[0],
+        re_upload=args.re_upload,
         gzip=args.gzip_prods,
     )
 
@@ -117,7 +110,7 @@ def main():
 #################################################################################################################################################
 def aggregate_glm_grids(
     lastdatetime,
-    datadir,
+    datapatt,
     accumPeriod=5,
     outdir=os.environ["PWD"] + "/",
     geotiff=False,
@@ -125,33 +118,39 @@ def aggregate_glm_grids(
     gzip=False,
 ):
 
-    if datadir[-1] != "/":
-        datadir += "/"
 
     logging.info("Process started")
 
     try:
-        last_dt = datetime.strptime(lastdatetime, "%Y%j%H%M")
+        last_dt = datetime.strptime(lastdatetime, "%Y%m%d%H%M")
     except ValueError as err:
         logging.error(traceback.format_exc())
         return False
 
     # aggregate data for the accumPeriod
     accumMins = 0
+
+    # dict for aggregated vars
+    vars_agg = {}
+
     for tt in range(accumPeriod):
         curr_dt = last_dt - timedelta(minutes=tt)
         if tt == 0:
             ts_dt = curr_dt
 
-        filepattern = datadir + curr_dt.strftime("%Y/%b/%d/OR_GLM-L2-*s%Y%j%H%M*.nc*")
+        filepattern = curr_dt.strftime(datapatt)
 
         ncfile = np.sort(glob(filepattern))
         if len(ncfile):
+
+            # dict for THIS time only vars
+            vars_this = {}
+
             gzip = False
             logging.info(f"received {ncfile[0]}")
-            if (
-                tt == 0
-            ):  # get end datetime of first file in accumulation, since we're working back in time.
+
+            # get end datetime of first file in accumulation, since we're working back in time.
+            if tt == 0:
                 ts_dt = datetime.strptime(
                     os.path.basename(ncfile[0]).split("_e")[1].split("_")[0][0:13],
                     "%Y%j%H%M%S",
@@ -167,89 +166,49 @@ def aggregate_glm_grids(
                 gzip = True
                 nc = Dataset(ncfile, "r")
 
-            flash_extent_density_TMP = nc.variables["flash_extent_density"][:]
-
-            average_area_perflash = nc.variables["average_flash_area"][:]
-            average_area_TMP = np.multiply(
-                average_area_perflash, flash_extent_density_TMP
-            )
-
-            group_extent_density_TMP = nc.variables["group_extent_density"][:]
-
-            average_area_pergroup = nc.variables["average_group_area"][:]
-            average_garea_TMP = np.multiply(
-                average_area_pergroup, group_extent_density_TMP
-            )
-
-            flash_centroid_density_TMP = nc.variables["flash_centroid_density"][:]
-
-            total_energy_TMP = (
-                nc.variables["total_energy"][:] * 1e6
-            )  # in nJ...put into fJ
-
-            group_centroid_density_TMP = nc.variables["group_centroid_density"][:]
+            # Get the variables / attributes that don't change for each time of the accumulation
+            # Grab them from the first available file (vars_agg is still empty at that point),
+            # since the file at the latest minute may be missing.
+            if not vars_agg:
+                # Assuming variable names are constant for all files in the aggregation
+                native_variable_names = [item for item in nc.variables.keys()
+                                         if item not in ('nominal_satellite_subpoint_lat',
+                                                         'nominal_satellite_subpoint_lon',
+                                                         'DQF')
+                ]
+                variable_names = [item.lower() for item in native_variable_names]
 
-            # first see if the vars exist (i.e., it's the first iteration if they do not)
-            try:
-                flash_extent_density
-            except NameError:
-                # if they do not exist, copy fields
-                flash_extent_density = np.copy(flash_extent_density_TMP)
-                total_energy = np.copy(total_energy_TMP)
-                average_area_AGG = np.copy(average_area_TMP)
-                average_garea_AGG = np.copy(average_garea_TMP)
-                flash_centroid_density = np.copy(flash_centroid_density_TMP)
-                group_extent_density = np.copy(group_extent_density_TMP)
-                group_centroid_density = np.copy(group_centroid_density_TMP)
-                ydim = nc.dimensions["y"].size
-                xdim = nc.dimensions["x"].size
-                min_flash_area_grids = np.zeros((accumPeriod, ydim, xdim))
-                min_flash_area_grids[tt] = nc.variables["minimum_flash_area"][:]
-                # and grab x and y
-                x = np.array(nc["x"][:])
-                y = np.array(nc["y"][:])
+                x = nc.variables['x'][:]
+                y = nc.variables['y'][:]
+                nx = len(x)
+                ny = len(y)
                 gip = nc["goes_imager_projection"][:]
-            else:
-                # if they do exist, just aggregate fields
-                try:
-                    flash_extent_density += flash_extent_density_TMP
-                    total_energy += total_energy_TMP
-                    average_area_AGG += average_area_TMP
-                    average_garea_AGG += average_garea_TMP
-                    flash_centroid_density += flash_centroid_density_TMP
-                    group_extent_density += group_extent_density_TMP
-                    group_centroid_density += group_centroid_density_TMP
-                    min_flash_area_grids[tt] = nc.variables["minimum_flash_area"][:]
-                except ValueError as err:
-                    logging.error(traceback.format_exc())
-                    nc.close()
-                    return False
-
-            # get attributes
-            try:
-                vatts
-                gatts
-            except NameError:
-                vatts = collections.OrderedDict()
-                vatts["x"] = {}
-                vatts["y"] = {}
-                vatts["goes_imager_projection"] = {}
-                vatts["flash_extent_density"] = {}
-                vatts["flash_centroid_density"] = {}
-                vatts["total_energy"] = {}
-                vatts["average_flash_area"] = {}
-                vatts["group_extent_density"] = {}
-                vatts["group_centroid_density"] = {}
-                vatts["average_group_area"] = {}
-                vatts["minimum_flash_area"] = {}
-                for var in vatts:
+            
+                vatts = collections.OrderedDict() 
+                for vn in variable_names:
+                    vatts[vn] = {}
+                for var in native_variable_names:
                     for att in nc.variables[var].ncattrs():
-                        vatts[var][att] = nc.variables[var].getncattr(att)
-                vatts["total_energy"]["units"] = "fJ"
-                vatts["total_energy"]["scale_factor"] = 1
-                vatts["total_energy"]["add_offset"] = 0
-                vatts["average_flash_area"]["units"] = "km^2"
-                vatts["average_group_area"]["units"] = "km^2"
+                        vatts[var.lower()][att] = nc.variables[var].getncattr(att)
+                vname = "total_energy"
+                if vname in vatts:
+                    vatts[vname]["units"] = "fJ"
+                    vatts[vname]["scale_factor"] = 1
+                    vatts[vname]["add_offset"] = 0
+                vname = "total_optical_energy"
+                if vname in vatts:
+                    vatts[vname]["units"] = "fJ"
+                    vatts[vname]["scale_factor"] = 1
+                    vatts[vname]["add_offset"] = 0
+                if "average_flash_area" in vatts:
+                    vatts["average_flash_area"]["units"] = "km^2"
+                if "average_group_area" in vatts:
+                    vatts["average_group_area"]["units"] = "km^2"
+
+                # Now that vatts are all set, remove 'x', 'y', and 'goes_imager_projection'
+                # We only care about the 2D grids now.
+                native_variable_names = [item for item in native_variable_names
+                                         if item not in ('x', 'y', 'goes_imager_projection')]
+                variable_names = [item for item in variable_names
+                                  if item not in ('x', 'y', 'goes_imager_projection')] 
 
                 # global atts
                 gatts = collections.OrderedDict()
@@ -259,24 +218,70 @@ def aggregate_glm_grids(
                 ]
                 gatts["orbital_slot"] = getattr(nc, "orbital_slot")
                 gatts["platform_ID"] = getattr(nc, "platform_ID")
-                gatts["scene_id"] = gatts["orbital_slot"][5] + getattr(
-                    nc, "scene_id"
-                )  # gatts['orbital_slot'][5] is "E" or "W"
+                # gatts['orbital_slot'][5] is "E" or "W"
+                gatts["scene_id"] = gatts["orbital_slot"][5] + getattr(nc, "scene_id")
                 gatts["time_coverage_end"] = getattr(nc, "time_coverage_end")
-
-            # do every time
+            # do every time, because we're working backwards
             gatts["time_coverage_start"] = getattr(nc, "time_coverage_start")
 
+            # Get the data for the variables that change for each minute of the aggregation
+            for vname in native_variable_names:
+                vars_this[vname.lower()] = nc.variables[vname][:]
+            
+            # Perform average_flash_area computation
+            if 'average_flash_area' in variable_names:
+                if 'flash_extent_density' in variable_names:
+                    # This technically has units = km^2 * flashes. It gets divided by aggregated FED at the end.
+                    vars_this['average_flash_area'] = np.multiply(
+                        vars_this['average_flash_area'], vars_this['flash_extent_density']
+                     )
+                else:
+                    logging.warning("Can't properly compute average_flash_area b/c there is no 'flash_extent_density")
+                    del vars_this['average_flash_area']
+            # Perform average_group_area computation
+            if 'average_group_area' in variable_names:
+                if 'group_extent_density' in variable_names:
+                    # This technically has units = km^2 * groups. It gets divided by aggregated GED at the end.
+                    vars_this['average_group_area'] = np.multiply(
+                        vars_this['average_group_area'], vars_this['group_extent_density']
+                     )
+                else:
+                    logging.warning("Can't properly compute average_group_area b/c there is no 'group_extent_density")
+                    del vars_this['average_group_area']
+
+            # Convert energy to femtoJoules
+            if 'total_energy' in variable_names:
+                vars_this['total_energy'] *= 1e6 # from nJ to fJ
+            if 'total_optical_energy' in variable_names:
+                vars_this['total_optical_energy'] *= 1e6 # from nJ to fJ
+
+            for var in vars_this:
+                if var in vars_agg:
+                    if var == 'minimum_flash_area':
+                        flash_areas[tt] = vars_this[var]
+                    else:
+                        vars_agg[var] += vars_this[var]
+                else:
+                    vars_agg[var] = np.copy(vars_this[var])
+                    if var == 'minimum_flash_area':
+                        # Container for flash areas
+                        flash_areas = np.zeros((accumPeriod, ny, nx), dtype=np.float32)
+                        # Insert the 0th grid
+                        flash_areas[tt] = vars_this[var]
+
+
             # close ncfile
             nc.close()
             if gzip:
-                sss = subprocess.Popen(
-                    ["gzip", "-f", ncfile]
-                )  # ==will run in background
+                # will run in background
+                sss = subprocess.Popen(["gzip", "-f", ncfile])
+
 
     if accumMins == 0:
+        logging.error("Couldn't aggregate anything")
         return False
-    gatts["aggregation"] = str(accumMins) + " min"
+
+    gatts["aggregation"] = f"{accumMins} min"
     gatts["dataset_name"] = "GLM_gridded_data"
 
     for var in [
@@ -285,75 +290,60 @@ def aggregate_glm_grids(
         "group_extent_density",
         "group_centroid_density",
     ]:
-        attparts = vatts[var]["units"].split(" ")
-        attparts[-2] = str(accumMins)
-        vatts[var]["units"] = (" ").join(attparts)
+        if var in vatts:
+            if "units" in vatts[var]:
+                attparts = vatts[var]["units"].split(" ")
+                attparts[-2] = str(accumMins)
+                vatts[var]["units"] = (" ").join(attparts)
+            if "valid_min" in vatts[var]:
+                del vatts[var]["valid_min"]
+            if "valid_max" in vatts[var]:
+                del vatts[var]["valid_max"]
 
     # Make final min area
-    fillv = 999999
-    min_flash_area_grids[min_flash_area_grids == 0] = fillv
-    minimum_flash_area = np.min(min_flash_area_grids, axis=0)
-    minimum_flash_area[minimum_flash_area == fillv] = 0
+    if 'minimum_flash_area' in variable_names:
+        fillv = 999999
+        bad_ind = np.where(flash_areas == 0)
+        flash_areas[bad_ind] = fillv
+        vars_agg['minimum_flash_area'] = np.min(flash_areas, axis=0)
+        # Reset empty pixels to 0.
+        vars_agg['minimum_flash_area'][vars_agg['minimum_flash_area'] == fillv] = 0
+        
 
     # Make average areas.
     # These steps are necessary so we don't divide by 0 or some small fraction
-    average_flash_area = np.zeros((np.shape(average_area_AGG)))
-    good_ind = np.where(flash_extent_density >= 1)
-    average_flash_area[good_ind] = (
-        average_area_AGG[good_ind] / flash_extent_density[good_ind]
-    )
-    average_flash_area = average_flash_area.astype("f4")
-
-    average_group_area = np.zeros((np.shape(average_garea_AGG)))
-    good_ind = np.where(group_extent_density >= 1)
-    average_group_area[good_ind] = (
-        average_garea_AGG[good_ind] / group_extent_density[good_ind]
-    )
-    average_group_area = average_group_area.astype("f4")
-
-    ny, nx = np.shape(total_energy)
-
-    # write out data into one file
-    # first pack the data using scale_factor and add_offset
+    vname = 'average_flash_area'
+    if vname in vars_agg and 'flash_extent_density' in vars_agg:
+        # Divide only where aggregated FED >= 1; elsewhere the average area is left at 0.
+        tmp_grid = np.zeros(np.shape(vars_agg[vname]), dtype=np.float32)
+        good_ind = np.where(vars_agg['flash_extent_density'] >= 1)
+        tmp_grid[good_ind] = vars_agg[vname][good_ind] / vars_agg['flash_extent_density'][good_ind]
+        vars_agg[vname] = tmp_grid
+    vname = 'average_group_area'
+    if vname in vars_agg and 'group_extent_density' in vars_agg:
+        tmp_grid = np.zeros(np.shape(vars_agg[vname]), dtype=np.float32)
+        good_ind = np.where(vars_agg['group_extent_density'] >= 1)
+        tmp_grid[good_ind] = vars_agg[vname][good_ind] / vars_agg['group_extent_density'][good_ind]
+        vars_agg[vname] = tmp_grid
+
+    # This is for geotiff, before scaling for netCDF
+    if geotiff:
+        flash_extent_density = np.copy(vars_agg['flash_extent_density'])
+
+    # Write out data into one file
+    # First pack the data using scale_factor and add_offset
+    for var in vars_agg:
+        if "add_offset" in vatts[var] and "scale_factor" in vatts[var]:
+            vars_agg[var] = ((vars_agg[var] - vatts[var]["add_offset"]) / vatts[var]["scale_factor"]).astype(
+                np.int16
+            )
     xpacked = ((x - vatts["x"]["add_offset"]) / vatts["x"]["scale_factor"]).astype(
         np.int16
     )
     ypacked = ((y - vatts["y"]["add_offset"]) / vatts["y"]["scale_factor"]).astype(
         np.int16
     )
-    FEDpacked = (
-        (flash_extent_density - vatts["flash_extent_density"]["add_offset"])
-        / vatts["flash_extent_density"]["scale_factor"]
-    ).astype(np.int16)
-    FCDpacked = (
-        (flash_centroid_density - vatts["flash_centroid_density"]["add_offset"])
-        / vatts["flash_centroid_density"]["scale_factor"]
-    ).astype(np.int16)
-    TOEpacked = (
-        (total_energy - vatts["total_energy"]["add_offset"])
-        / vatts["total_energy"]["scale_factor"]
-    ).astype(np.int16)
-    AFApacked = (
-        (average_flash_area - vatts["average_flash_area"]["add_offset"])
-        / vatts["average_flash_area"]["scale_factor"]
-    ).astype(np.int16)
-    GEDpacked = (
-        (group_extent_density - vatts["group_extent_density"]["add_offset"])
-        / vatts["group_extent_density"]["scale_factor"]
-    ).astype(np.int16)
-    GCDpacked = (
-        (group_centroid_density - vatts["group_centroid_density"]["add_offset"])
-        / vatts["group_centroid_density"]["scale_factor"]
-    ).astype(np.int16)
-    AGApacked = (
-        (average_group_area - vatts["average_group_area"]["add_offset"])
-        / vatts["average_group_area"]["scale_factor"]
-    ).astype(np.int16)
-    MFApacked = (
-        (minimum_flash_area - vatts["minimum_flash_area"]["add_offset"])
-        / vatts["minimum_flash_area"]["scale_factor"]
-    ).astype(np.int16)
-
+    
+    # Prepare the dims and datasets dictionary for writing to netcdf
     dims = collections.OrderedDict()
     dims["y"] = ny
     dims["x"] = nx
@@ -365,47 +355,13 @@ def aggregate_glm_grids(
         "dims": (),
         "atts": vatts["goes_imager_projection"],
     }
-    datasets["flash_extent_density"] = {
-        "data": FEDpacked,
-        "dims": ("y", "x"),
-        "atts": vatts["flash_extent_density"],
-    }
-    datasets["flash_centroid_density"] = {
-        "data": FCDpacked,
-        "dims": ("y", "x"),
-        "atts": vatts["flash_centroid_density"],
-    }
-    datasets["total_energy"] = {
-        "data": TOEpacked,
-        "dims": ("y", "x"),
-        "atts": vatts["total_energy"],
-    }
-    datasets["average_flash_area"] = {
-        "data": AFApacked,
-        "dims": ("y", "x"),
-        "atts": vatts["average_flash_area"],
-    }
-    datasets["group_extent_density"] = {
-        "data": GEDpacked,
-        "dims": ("y", "x"),
-        "atts": vatts["group_extent_density"],
-    }
-    datasets["group_centroid_density"] = {
-        "data": GCDpacked,
-        "dims": ("y", "x"),
-        "atts": vatts["group_centroid_density"],
-    }
-    datasets["average_group_area"] = {
-        "data": AGApacked,
-        "dims": ("y", "x"),
-        "atts": vatts["average_group_area"],
-    }
-    datasets["minimum_flash_area"] = {
-        "data": MFApacked,
-        "dims": ("y", "x"),
-        "atts": vatts["minimum_flash_area"],
-    }
-
+    for var in vars_agg:
+        datasets[var] = {
+            "data": vars_agg[var],
+            "dims": ("y", "x"),
+            "atts": vatts[var],
+        }
+    
     grid_outdir = outdir + "/agg/"
 
     timestamp = ts_dt.strftime("%Y%m%d-%H%M%S")
@@ -422,7 +378,7 @@ def aggregate_glm_grids(
 
     if geotiff:
         gtiff_outdir = f"{outdir}/gtiff/"
-        utils.mkdir_p(gtiff_outdir)
+        os.makedirs(gtiff_outdir, exist_ok=True)
         logging.info("Making geotiff...")
         cmap = plt.get_cmap("jet")
         colors = cmap(np.arange(0, cmap.N))  # colors.shape = (256,4) rgbas
@@ -505,7 +461,7 @@ def aggregate_glm_grids(
         return True
 
 if __name__ == "__main__":
-    try:
-        main()
-    except Exception as e:
-        print(str(e))
\ No newline at end of file
+    main()
diff --git a/lightningcast/glm/download_glmf.py b/lightningcast/glm/download_glmf.py
new file mode 100755
index 0000000000000000000000000000000000000000..5e030dfa2538202629fbb80f9f5361ccdf78b56f
--- /dev/null
+++ b/lightningcast/glm/download_glmf.py
@@ -0,0 +1,33 @@
+import glob
+import sys
+import os
+import subprocess
+from datetime import datetime
+
+files = ['20230905-09.txt','20231005-09.txt','20231105-09.txt',
+         '20231205-09.txt','20240105-09.txt','20240205-09.txt']
+
+basedir = '/ships22/grain/jcintineo/GLM/ainpp/south_america/FD_1min/'
+
+for file in files:
+
+    of = open(file,'r')
+    glmfiles = of.readlines()
+    glmfiles = [fff.strip() for fff in glmfiles]
+    of.close()
+
+    for glmf in glmfiles:
+        basefile = os.path.basename(glmf)
+        dt = datetime.strptime(basefile.split('_s')[1][0:7],'%Y%j')
+        outdir = f'{basedir}/{dt.strftime("%Y/%m/%d")}'
+        os.makedirs(outdir, exist_ok=True)
+        # Credentials come from the environment so they are not hardcoded in the repo.
+        # GLMF_USER / GLMF_PASSWORD are assumed environment-variable names.
+        subprocess.run(['wget',
+                        glmf,
+                        '--user',
+                        os.environ['GLMF_USER'],
+                        '--password',
+                        os.environ['GLMF_PASSWORD'],
+                        '-O',
+                        f'{outdir}/{basefile}']
+        )
+        #sys.exit()
diff --git a/lightningcast/glm/drive_make_GLM_grids.py b/lightningcast/glm/drive_make_GLM_grids.py
index 20aee0ace7d3f8a596c390f5384057a6bb76974a..4391f91424ea542241d98bbc477e617142d9186a 100755
--- a/lightningcast/glm/drive_make_GLM_grids.py
+++ b/lightningcast/glm/drive_make_GLM_grids.py
@@ -252,10 +252,10 @@ def drive_make_GLM_grids(
                     else:
                         gzip = False  # True
                         outroot = f"{outdir}/{oneMinAgo.strftime('%Y%m%d')}/GLM/{slot}/"
-
+
                     status = aggregate_glm_grids(
-                        oneMinAgo.strftime("%Y%j%H%M"),
-                        raw_outdir,
+                        oneMinAgo.strftime("%Y%m%d%H%M"),
+                        f"{raw_outdir}/%Y/%b/%d/OR*_s%Y%j%H%M*.nc",
                         outdir=outroot,
                         gzip=False,
                         re_upload=re_upload,
diff --git a/lightningcast/glm/drive_make_GLM_grids_offline.py b/lightningcast/glm/drive_make_GLM_grids_offline.py
index 3893eb9aab47d035a511183dc1e7a4d2b9055627..66088c03bcc5a919e4ce3ce0cd33dd918e033951 100755
--- a/lightningcast/glm/drive_make_GLM_grids_offline.py
+++ b/lightningcast/glm/drive_make_GLM_grids_offline.py
@@ -5,14 +5,26 @@ import shutil
 import glob
 
 
-outdir = "/ships19/grain/jcintineo/GLM/goes_west/ussamoa/"
-
-startdt = dt = datetime(2024, 4, 18, 12, 55)
-enddt = datetime(2024, 6, 1, 0, 0)  # startdt + timedelta(days=1)
+outdir = "/ships22/grain/jcintineo/GLM/ainpp/south_america"
+sat = 'goes_east'
+
+if sat == 'goes_east':
+    satnum = 'goes16'
+    satpos = 'east'
+elif sat == 'goes_west':
+    satnum = 'goes18'
+    satpos = 'west'
+else:
+    print(f'Unsupported satellite, {sat}')
+    sys.exit()
+ 
+
+startdt = dt = datetime(2023, 9, 5, 0, 0)
+enddt = datetime(2023, 9, 5, 0, 1)  # startdt + timedelta(days=1)
 
 while startdt <= dt < enddt:
     pattern = dt.strftime(
-        "/arcdata/goes/grb/goes18/%Y/%Y_%m_%d_%j/glm/L2/LCFA/*s%Y%j%H%M*"
+        f"/arcdata/goes/grb/{satnum}/%Y/%Y_%m_%d_%j/glm/L2/LCFA/*s%Y%j%H%M*"
     )
     remotefiles = glob.glob(pattern)
 
@@ -21,12 +33,12 @@ while startdt <= dt < enddt:
         #cmd = (
         #    "python glmtools/examples/grid/make_GLM_grids.py -o "
         #    + outdir
-        #    + "/{start_time:%Y/%b/%d}/{dataset_name} --fixed_grid --split_events --goes_position west --goes_sector conus --dx 2.0 --dy 2.0"
+        #    + f"/{{start_time:%Y/%b/%d}}/{{dataset_name}} --fixed_grid --split_events --goes_position {satpos} --goes_sector conus --dx 2.0 --dy 2.0"
         #)
 
         cmd ="python glmtools/examples/grid/make_GLM_grids.py " +\
-            f"-o {outdir}/{{start_time:%Y/%b/%d}}/{{dataset_name}} --fixed_grid --split_events --goes_position west --dx=2.0 --dy=2.0 " +\
-            "--ctr_lat -13 --ctr_lon -167.5 --width 2500 --height 1800"
+            f"-o {outdir}/{{start_time:%Y/%b/%d}}/{{dataset_name}} --fixed_grid --split_events --goes_position {satpos} --dx=2.0 --dy=2.0 " +\
+            "--ctr_lat -24.5 --ctr_lon -58 --width 5000 --height 6100"
 
         for rf in remotefiles:
             shutil.copy(rf, f"{os.environ['PWD']}/")
diff --git a/lightningcast/glm/plot_test.py b/lightningcast/glm/plot_test.py
new file mode 100755
index 0000000000000000000000000000000000000000..5181dba9601f1a934f4a10189a0eede52baf929f
--- /dev/null
+++ b/lightningcast/glm/plot_test.py
@@ -0,0 +1,47 @@
+import sys,os
+
+import inspect
+
+current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
+target_dir = os.path.dirname(os.path.dirname(current_dir))
+sys.path.insert(0, target_dir)
+
+from lightningcast import utils
+import cartopy.crs as ccrs
+import pyresample
+import netCDF4
+import matplotlib.pyplot as plt
+import numpy as np
+
+f='/ships22/grain/jcintineo/GLM/ainpp/south_america/2023/Sep/05/OR_GLM-L2-GLMM1-M3_G16_s20232480000000_e20232480001000_c20242011329130.nc'
+
+nc = netCDF4.Dataset(f,'r')
+gip = nc.variables['goes_imager_projection']
+sat_height = gip.perspective_point_height
+lon_0 = gip.longitude_of_projection_origin
+x = nc.variables['x'][:] * sat_height
+y = nc.variables['y'][:] * sat_height 
+nc.close()
+
+extent = [ x.min(), x.max(), y.min(), y.max() ]
+print(extent)
+
+# Define the geostationary projection and create a figure with a single GeoAxes
+projection = ccrs.Geostationary(central_longitude=lon_0, sweep_axis="x", satellite_height=sat_height)
+fig, ax = plt.subplots(figsize=(7, 10), subplot_kw={"projection": projection})
+
+ax.set_extent(extent, crs=projection)
+
+# Add coastlines (optional)
+ax.coastlines(resolution='50m')  # Adjust resolution for coastline detail
+
+# Display the map
+plt.show()
+
+#geos_def, gny, gnx = utils.get_area_definition(f)
+
+#lons = np.
+#new_def = geosGrid = pyresample.geometry.GridDefinition(lons=lons, lats=lats)
diff --git a/lightningcast/lightningcast_yaml_config.py b/lightningcast/lightningcast_yaml_config.py
index c8c26ecc0509a24bea4445a3bbd1b4e6e46d2ae9..85540de54797c434b1b72e0aab96566d8dafd458 100644
--- a/lightningcast/lightningcast_yaml_config.py
+++ b/lightningcast/lightningcast_yaml_config.py
@@ -1,3 +1,7 @@
+"""
+This program configures the YAML options from a cascade of input files.
+"""
+
 from yaml_cascade import YamlCascade
 from typing import Mapping, Tuple, Union, Any, Optional
 from pathlib import Path
diff --git a/lightningcast/logging_def.py b/lightningcast/logging_def.py
index 107d47694bc80af6ae7f7c482720aa0b7000e398..00e099fde7125dff33da1c746d3740a35dbcf0fe 100755
--- a/lightningcast/logging_def.py
+++ b/lightningcast/logging_def.py
@@ -1,3 +1,7 @@
+"""
+This file contains options and objects for logging application output.
+"""
+
 import logging
 from logging.config import dictConfig
 
diff --git a/lightningcast/make_locations_RE_file.py b/lightningcast/make_locations_RE_file.py
index ffd167d5dc0115422e0feaed1946554e69cfdeec..fb04a2ba9cd8881ec55c8f947b59ae94e6558403 100644
--- a/lightningcast/make_locations_RE_file.py
+++ b/lightningcast/make_locations_RE_file.py
@@ -1,3 +1,8 @@
+"""
+This program reads delimiter-separated location files and creates
+GeoJSONs of those locations for upload and use in RealEarth.
+"""
+
 import json
 import os, sys
 
diff --git a/lightningcast/parse_TAFs.py b/lightningcast/parse_TAFs.py
index fbb06bfd055f287bbb002898e3cfb0448518f2e7..02d997575fff620c4ed831997e7578f05fe29a19 100644
--- a/lightningcast/parse_TAFs.py
+++ b/lightningcast/parse_TAFs.py
@@ -1,3 +1,6 @@
+"""
+This program parses terminal aerodrome forecast (TAF) sites from a CSV file.
+"""
 import pandas as pd
 
 csv = "static/TAF_Sites.csv"
diff --git a/lightningcast/pltg_gr_placefile.py b/lightningcast/pltg_gr_placefile.py
index 607a4f46a5f93f43f2d4e6b9be2ab1dc947a040d..338fba1512b5ce76f9c76fff288f4bbc5ac7adae 100755
--- a/lightningcast/pltg_gr_placefile.py
+++ b/lightningcast/pltg_gr_placefile.py
@@ -1,3 +1,6 @@
+"""
+This program creates vector-based "placefiles" for use in the GRLevelX software.
+"""
 import sys
 from glob import glob
 import subprocess
diff --git a/lightningcast/predict_from_yaml.py b/lightningcast/predict_from_yaml.py
index 8a95b5c26679c3acae5d023e1e780e2e8e7bfcdc..6d7f3323a7e9b8482fd4fb85f0bdd149af18a7db 100644
--- a/lightningcast/predict_from_yaml.py
+++ b/lightningcast/predict_from_yaml.py
@@ -1,3 +1,7 @@
+"""
+This program uses the YAML cascade methodology to run predict_ltg.
+"""
+
 import argparse
 import logging
 from logging.config import dictConfig
@@ -98,6 +102,7 @@ if __name__ == "__main__":
 
     from lightningcast.predict_ltg import (
         predict_ltg,
+        set_num_threads,
     )  # This needs to be after env variables are set. When we switch to doing config differently it can maybe be moved to the top.
 
     logDict = logging_def(logfile=None)
@@ -131,6 +136,8 @@ if __name__ == "__main__":
     assert os.path.isfile(model_config_file)
     assert os.path.isfile(model)
 
+    set_num_threads(config.get("num_threads", None))
+
     predict_ltg(
         args.input_file,
         model,
diff --git a/lightningcast/predict_ltg.py b/lightningcast/predict_ltg.py
index 786552e3a1b7543239865c0918169e61a8401c77..ece2f95f741e886eb87396e141442c14fc63f5de 100755
--- a/lightningcast/predict_ltg.py
+++ b/lightningcast/predict_ltg.py
@@ -173,10 +173,11 @@ def get_all_paths(contour_img):
                 all_verts.append(path.vertices)
                 all_codes.append(path.codes)
 
-            newpath = matplotlib.path.Path(vertices=np.concatenate(all_verts),
-                                           codes=np.concatenate(all_codes))
-            all_paths.append(newpath)
-
+            if len(all_verts):
+                newpath = matplotlib.path.Path(vertices=np.concatenate(all_verts),
+                                               codes=np.concatenate(all_codes))
+                all_paths.append(newpath)
+
     return all_paths
 
 #################################################################################################################################
@@ -978,9 +979,13 @@ def save_netcdf(
     #----------------------------------------------------------
     # addtional required quality information
     #----------------------------------------------------------
-    percentage_optimal_retrievals = 100.0*total_number_good_retrievals/float(total_number_attempted_retrievals)
-    percentage_sub_optimal_retrievals = 100.0*total_number_sub_optimal_retrievals/float(total_number_attempted_retrievals)
-    percentage_bad_retrievals = 100.0*total_number_unusable_retrievals/float(total_number_attempted_retrievals)
+    try:
+        percentage_optimal_retrievals = 100.0*total_number_good_retrievals/float(total_number_attempted_retrievals)
+        percentage_sub_optimal_retrievals = 100.0*total_number_sub_optimal_retrievals/float(total_number_attempted_retrievals)
+        percentage_bad_retrievals = 100.0*total_number_unusable_retrievals/float(total_number_attempted_retrievals)
+    except ZeroDivisionError:
+        # This could mean the sector is outside of the region we remap to.
+        percentage_optimal_retrievals = percentage_sub_optimal_retrievals = percentage_bad_retrievals = -1
 
     quality_information = np.ubyte(255)
     dataset["quality_information"] = {
@@ -2311,7 +2316,7 @@ def sector_shift(idict, plax_hgt, lon_0):
 
 
 #################################################################################################################################
-def write_image(scn, dt, new_preds, projx, projy, idict, meta):
+def write_image(scn, dt, new_preds, projx, projy, idict, meta, files_for_ir_only_cloud_phase):
 
     # Some shapefiles we may need
     country_borders = idict["country_borders"]
@@ -2477,7 +2482,7 @@ def write_image(scn, dt, new_preds, projx, projy, idict, meta):
             elif idict["make_img"] == 5:
                 composite = "ir_sandwich"
                 alpha = 1
-            scn.load([composite])  # ;scn.load(['IRCloudPhaseFC'])
+            scn.load([composite])
             resamp = scn.resample(scn.finest_area(), resampler="native")
             rgb = np.transpose(
                 get_enhanced_image(resamp[composite]).data.values, axes=(1, 2, 0)
@@ -2532,18 +2537,26 @@ def write_image(scn, dt, new_preds, projx, projy, idict, meta):
    #     irdata = scn[idict['irCH']].compute().data
    #     ax.imshow(irdata, transform=crs, extent=crs.bounds, vmin=180, vmax=310, cmap=plt.get_cmap('Greys')) #irw_ctc()
 
-        # IRCloudPhaseFC
-        composite = "IRCloudPhaseFC"
-        alpha = 0.8
-        scn.load([composite])
-        resamp = scn.resample(scn.finest_area(), resampler="native")
-        rgb = np.transpose(
-            get_enhanced_image(resamp[composite]).data.values, axes=(1, 2, 0)
-        )
-        rgb_shape = rgb.shape
-        alpha_layer = np.full((rgb_shape[0], rgb_shape[1], 1), alpha)
-        rgb = np.concatenate((rgb, alpha_layer), axis=-1)
-        ax.imshow(rgb, transform=crs, extent=crs.bounds)
+        logging.warning("Input timestamp has solar zenith > 85 so defaulting to making image from IR only.")
+
+
+        if files_for_ir_only_cloud_phase:
+            # IRCloudPhaseFC
+            alpha = 0.8
+            composite = "IRCloudPhaseFC"
+            scn.load([composite])
+            resamp = scn.resample(scn.finest_area(), resampler="native")
+            rgb = np.transpose(
+                get_enhanced_image(resamp[composite]).data.values, axes=(1, 2, 0)
+            )
+            rgb_shape = rgb.shape
+            alpha_layer = np.full((rgb_shape[0], rgb_shape[1], 1), alpha)
+            rgb = np.concatenate((rgb, alpha_layer), axis=-1)
+            ax.imshow(rgb, transform=crs, extent=crs.bounds)
+        else:
+            logging.warning("Channel / Band 11 and 14 missing so creating IR image from Channel / Band 13 only.")
+            irdata = scn[idict['irCH']].compute().data
+            ax.imshow(irdata, transform=crs, extent=crs.bounds, vmin=180, vmax=310, cmap=plt.get_cmap('Greys')) #irw_ctc()
 
         if idict["plotGLM"]:
             glmimg = ax.imshow(
@@ -2568,23 +2581,26 @@ def write_image(scn, dt, new_preds, projx, projy, idict, meta):
             cbar.ax.tick_params(labelsize=fs)
 
     # ENI lightning for non-GOES imagers
+    eni_pts = None # default val
     if idict["imgr"] in idict["nongoes"] or idict["plot_engln"]:
         eni_ms = 6
-        try:
-            eni_lats, eni_lons = get_eni_data(
-                dt, idict["irlats"], idict["irlons"], eni_path=idict["eni_path"]
-            )
-        except RuntimeError:
-            pass
-        else:
-            eni_pts = ax.plot(
-                eni_lats,
-                eni_lons,
-                "o",
-                color="black",
-                markersize=eni_ms,
-                transform=ccrs.PlateCarree(),
-            )
+        if idict["eni_path"]:
+            try:
+                eni_lats, eni_lons = get_eni_data(
+                    dt, idict["irlats"], idict["irlons"], eni_path=idict["eni_path"]
+                )
+            except RuntimeError:
+                pass
+            else:
+                eni_pts = ax.plot(
+                    eni_lats,
+                    eni_lons,
+                    "o",
+                    color="black",
+                    markersize=eni_ms,
+                    transform=ccrs.PlateCarree(),
+                )
+
 
     # Maps and shapefiles
     if idict["imgr"] == "ABI":
@@ -2768,7 +2784,7 @@ def write_image(scn, dt, new_preds, projx, projy, idict, meta):
             )
 
             # Legend for ENI
-            if idict["imgr"] in idict["nongoes"] or idict["plot_engln"]:
+            if eni_pts and (idict["imgr"] in idict["nongoes"] or idict["plot_engln"]):
                 leg3 = ax.legend(
                     [
                         Line2D(
@@ -3023,6 +3039,8 @@ def predict_ltg(
 
         tstart = time.time()
 
         # --------------------------------------------------
         #  Create, load, and crop scene
         # --------------------------------------------------
@@ -3039,6 +3057,25 @@ def predict_ltg(
         if not datadir:
             datadir = "./"
 
+
+        (valid_req_chan_bands,
+         problem_chan_band,
+         optional_bands_check) = utils.check_chan_bands(
+            imgr,
+            datadir,
+            dt,
+            sector,
+            required_abi=["02", "05", "13", "15"],
+            required_ahi=["03", "05", "13", "15"],
+            optional_abi=["03", "11", "14"],
+            optional_ahi=["04", "11", "14"],
+        )
+
+        if not valid_req_chan_bands:
+            logging.critical(f"Missing 1 or more files for: {problem_chan_band}")
+            sys.exit(1)
+
+        if make_img == 1 and not optional_bands_check[0]:
+            logging.critical(
+                "For making DayLandCloud images we require an extra channel/band "
+                "(03 for ABI and 04 for AHI). A needed file appears to be missing."
+            )
+            sys.exit(1)
+
+        # Channels/Bands 11 and 14 are needed for the IR-only cloud phase composite.
+        files_for_ir_only_cloud_phase = bool(optional_bands_check[1] and optional_bands_check[2])
+
         if imgr == "ABI":
             abi_filenames = glob.glob(
                 dt.strftime(f"{datadir}/OR*_*-{sector}-*_s%Y%j%H%M%S*nc")
@@ -3732,7 +3769,7 @@ def predict_ltg(
                 filenames.append(abi_filenames[indices[0]])
 
             if make_img:  # make an image
-                write_image(scn, dt, new_preds, projx, projy, idict, meta)
+                write_image(scn, dt, new_preds, projx, projy, idict, meta, files_for_ir_only_cloud_phase)
 
         # clean up
         try:
@@ -3769,6 +3806,16 @@ def predict_ltg(
     logging.info("Process ended.")
 
 
+def set_num_threads(num_threads):
+    if num_threads and num_threads > 0:
+        os.environ["OMP_NUM_THREADS"] = f"{num_threads}"
+        os.environ["TF_NUM_INTRAOP_THREADS"] = f"{num_threads}"
+        os.environ["TF_NUM_INTEROP_THREADS"] = f"{num_threads}"
+        tf.config.threading.set_inter_op_parallelism_threads(num_threads)
+        tf.config.threading.set_intra_op_parallelism_threads(num_threads)
+        tf.config.set_soft_device_placement(enabled=False)
+
+
 ########################################################################################################
 if __name__ == "__main__":
 
@@ -3901,15 +3948,17 @@ if __name__ == "__main__":
     )
     parser.add_argument(
         "--awips_nc",
-        help="Write a netcdf of the LC probs in AWIPS-compatible format. 1 = w/o plax correction; 2 = w/ plax correction; \
-                      3 = save both plax-corrected and uncorrected grids. Default = 0 (i.e., don't write netcdf).",
+        help="Write a netcdf of the LC probs in AWIPS-compatible format. 1 = w/o parallax correction; 2 = w/ parallax correction \
+                      using a constant cloud height of --plax_hgt meters; 3 = save both plax-corrected and uncorrected grids. \
+                      Default = 0 (i.e., don't write netcdf).",
         default=0,
         type=int,
     )
     parser.add_argument(
         "--netcdf",
-        help="Write a netcdf of the LC probs in native geostationary format. 1 = w/o plax correction; 2 = w/ plax correction; \
-                      3 = save both plax-corrected and uncorrected grids. Default = 0 (i.e., don't write netcdf).",
+        help="Write a netcdf of the LC probs in native geostationary format. 1 = w/o parallax correction; 2 = w/ parallax correction \
+                      using a constant cloud height of --plax_hgt meters; 3 = save both plax-corrected and uncorrected grids. \
+                      Default = 0 (i.e., don't write netcdf).",
         default=0,
         type=int,
     )
@@ -4017,9 +4066,10 @@ if __name__ == "__main__":
     parser.add_argument(
         "-ph",
         "--plax_hgt",
-        help="(Only used with --make_json). Height above the earth's surface for parallax correction, \
-                      in meters. We will use this constant height to do a parallax correction. Default = 0, which means no correction. \
-                      If > 0, a parallax-corrected file will be written out, as well as the un-parallax-corrected geoJSON.",
+        help="Height above the earth's surface for parallax correction, in meters. \
+                      We will use this constant height to do a parallax correction. Default = 0, which means no correction. \
+                      If > 0, a parallax-corrected field will be written out in the netCDF (if --awips_nc ≥ 2 or --netcdf ≥ 2), \
+                      or a parallax-corrected geoJSON will be written out in addition to the non-corrected geoJSON (if --make_json).",
         default=0,
         type=float,
     )
@@ -4030,8 +4080,17 @@ if __name__ == "__main__":
         action="store_true",
     )
 
+    parser.add_argument(
+        "--num_threads",
+        help="Limit the number of threads that lightnigncast can use.",
+        type=int,
+        default=None,
+    )
+
     args = parser.parse_args()
 
+    set_num_threads(args.num_threads)
+
     logDict = logging_def(logfile=None)
     # do this regardless of infinite/offline, so we get formatted stdout messages
     dictConfig(logDict)
diff --git a/lightningcast/quality_control.py b/lightningcast/quality_control.py
deleted file mode 100644
index c14764d5e55f66890e1afe44d0f85d5718aa5fb1..0000000000000000000000000000000000000000
--- a/lightningcast/quality_control.py
+++ /dev/null
@@ -1,111 +0,0 @@
-import zarr
-import numpy as np
-import os, sys
-from datetime import datetime, timedelta, timezone
-
-ds = zarr.open("/home/jcintineo/pltgALL.zarr", "r")
-
-abi_date = ds.ABI_date[:]
-abi_time = ds.ABI_time[:]
-
-epoch0 = datetime(1970, 1, 1, 0, 0)
-
-dts = np.array(
-    [
-        datetime.strptime(str(dd) + str(tt).zfill(4), "%Y%m%d%H%M")
-        for dd, tt in zip(abi_date, abi_time)
-    ]
-)
-
-chunkind = np.arange(0, ds.CH13.shape[0], 20000)
-
-# trueind = np.arange(ds.CH13.shape[0])
-
-for ll in range(len(chunkind)):
-    print(chunkind[ll])
-    try:
-        ch13 = ds.CH13[chunkind[ll] : chunkind[ll + 1]]
-        ch02 = ds.CH02[chunkind[ll] : chunkind[ll + 1]]
-        ch05 = ds.CH05[chunkind[ll] : chunkind[ll + 1]]
-        ch06 = ds.CH06[chunkind[ll] : chunkind[ll + 1]]
-        lat = ds.lat[chunkind[ll] : chunkind[ll + 1]]
-    except IndexError:
-        ch13 = ds.CH13[chunkind[ll] :]
-        ch02 = ds.CH02[chunkind[ll] :]
-        ch05 = ds.CH05[chunkind[ll] :]
-        ch06 = ds.CH06[chunkind[ll] :]
-        lat = ds.lat[chunkind[ll] :]
-
-    uniq = np.array([])
-    ind0 = np.where(np.abs(ch02) > 2)
-    if len(ind0[0]):
-        uniq = np.concatenate((uniq, np.unique(ind0[0])))
-    ind1 = np.where(np.abs(ch05) > 2)
-    if len(ind1[0]):
-        uniq = np.concatenate((uniq, np.unique(ind1[0])))
-    ind2 = np.where(np.abs(ch06) > 2)
-    if len(ind2[0]):
-        uniq = np.concatenate((uniq, np.unique(ind2[0])))
-    ind3 = np.where(np.abs(ch13) > 500)
-    if len(ind3[0]):
-        uniq = np.concatenate((uniq, np.unique(ind3[0])))
-    ind4 = np.where(lat < 27.5)
-    if len(ind4[0]):
-        uniq = np.concatenate(
-            (uniq, np.unique(ind4[0]))
-        )  # eliminating any potential "Bermuda strobes"
-
-    new_data = {}
-    if len(uniq):
-        uniq = np.unique(uniq)
-
-        # now remove bad samples
-        for key in ds.keys():
-            try:
-                new_data[key] = np.delete(
-                    ds[key][chunkind[ll] : chunkind[ll + 1]], uniq, axis=0
-                )
-            except IndexError:
-                new_data[key] = np.delete(ds[key][chunkind[ll] :], uniq, axis=0)
-
-        print("deleting", len(uniq), "elements")
-
-    else:  # just copy data
-        for key in ds.keys():
-            try:
-                new_data[key] = ds[key][chunkind[ll] : chunkind[ll + 1]]
-            except IndexError:
-                new_data[key] = ds[key][chunkind[ll] :]
-
-    # now write out new zarr
-    # write zarr
-    zname = "/home/jcintineo/pltgALL_new.zarr"
-    ds_exists = True if (os.path.isdir(zname)) else False
-    if not (ds_exists):
-        root = zarr.open(zname, mode="a")
-    chunksize = 300
-
-    for key in ds.keys():
-        # set chunksizes
-        if len(ds[key].shape) == 1:
-            chunks = (chunksize,)
-        else:
-            chunks = (chunksize, ds[key].shape[1], ds[key].shape[2], 1)
-
-        # create data
-        if ds_exists:  # just append along time axis
-            newarr = zarr.zeros(
-                new_data[key].shape, chunks=chunks, dtype=new_data[key].dtype
-            )
-            newarr[:] = new_data[key]
-            root[key].append(newarr, axis=0)
-        else:  # start of new data array in group
-            newarr = root.zeros(
-                key, shape=new_data[key].shape, chunks=chunks, dtype=new_data[key].dtype
-            )
-            newarr[:] = new_data[key]
-            # set attributes
-            for att in ds[key].attrs:
-                newarr.attrs[att] = ds[key].attrs[att]
-
-    print("-----")
diff --git a/lightningcast/re_upload b/lightningcast/re_upload
deleted file mode 100644
index 05835ac9bc441b6c60634ab11ba2745b7bed83b7..0000000000000000000000000000000000000000
--- a/lightningcast/re_upload
+++ /dev/null
@@ -1,535 +0,0 @@
-#!/usr/bin/env bash
-################################################################################
-# DO NOT ALTER THIS LINE
-VERSION=59
-################################################################################
-
-SERVER="https://realearth.ssec.wisc.edu"
-KEY=
-URI=
-
-IS_HELP=0
-if [ -n "$1" -a "$1" = "-h" ]; then
-        IS_HELP=1
-fi
-CHECK_SH=`basename ${SHELL}`
-if [ ${IS_HELP} -eq 0 -a \
-     "${CHECK_SH}" != "sh" -a \
-     "${CHECK_SH}" != "ash" -a \
-     "${CHECK_SH}" != "bash" ]; then
-	echo "Error: Bad interpreter: \"${CHECK_SH}\""
-	echo "Please modify the first line of this script to use sh, ash, or bash"
-	echo "Or call explicity with:"
-	echo "	/bin/sh $0 $@"
-	exit 6
-fi
-
-CURL=`which curl 2>/dev/null`
-if [ -z "${CURL}" ]; then
-	echo "Error: Cannot find \"curl\""
-	exit 5
-fi
-CHECK_CURL=`${CURL} --version |head -1 |awk '{print $2}' |awk -F. '{print $1}'`
-if [ -z "${CHECK_CURL}" -o ${CHECK_CURL} -lt 7 ]; then
-	echo "Error: \"curl\" version must be 7+"
-	exit 5
-fi
-
-BASE64=`which base64 2>/dev/null`
-TIMEOUT_BIN=`which timeout 2>/dev/null`
-TIMEOUT=
-if [ -n "${TIMEOUT_BIN}" ]; then
-	TIMEOUT="${TIMEOUT_BIN} -s 9 600"
-fi
-TMPDIR=/tmp/re_upload.$$
-
-showHelp() {
-	SELF=`basename $0`
-	echo ""
-	echo "$0 [-huojtcg1dxv] [-f [hours]] [-p [part]] [-k key|file] [-l \"URI\"] [-s server:port] file [name] [date] [time]"
-	echo ""
-	echo "   -h: Show help"
-	echo "   -u: Check for update"
-	echo "       Version: ${VERSION}"
-	echo ""
-	echo "   -p: Designate file as part of a larger product (part # optional)"
-	echo "   -f: Force overwrite if there is a conflict (hour range optional)"
-	echo "   -o: Convert to COG (requires GDAL tools)"
-	echo "   -j: Use JPEG compression (requires GDAL tools)"
-	echo "   -t: Do not timeout"
-	echo ""
-	echo "   -k: Specify the upload key or file"
-	echo "   -l: Specify a URI for download of the original data"
-	echo ""
-	echo "   -c: Add random sleep to mitigate concurrent uploads (eg. cron jobs)"
-	echo "   -g: Send through public gateway"
-	echo "   -s: Specify the server and port"
-	echo "       Default: ${SERVER}"
-	echo ""
-	echo "   -1: Do not retry on failure"
-	echo "   -d: Delete file on successful upload"
-	echo "   -x: Print target server and port (do not upload)"
-	echo "   -v: Be verbose"
-	echo ""
-	echo " file: Path to file"
-	echo "       Format: /path/to/[name]_[YYYYMMDD]_[HHMMSS].???"
-	echo ""
-	echo " name: Specify the product name"
-	echo "       Required when the file name does not contain [name]"
-	echo "       Format: Cannot contain '_'"
-	echo " date: Specify the date"
-	echo "       Required when the file name does not contain [date]"
-	echo "       Format: YYYYMMDD"
-	echo " time: Specify the time"
-	echo "       Required when the file name does not contain [time]"
-	echo "       Format: HHMMSS"
-	echo ""
-}
-
-checkUpdate() {
-	DL_URL="${SERVER}/upload/re_upload"
-	URL="$DL_URL?version=${VERSION}"
-	VERSION_CHECK=`${CURL} -L --insecure -s ${URL}`
-	if [ -n "${VERSION_CHECK}" ]; then
-		echo "A new version is available at:"
-		echo "    ${DL_URL}"
-		echo "Download with:"
-		echo "    ${CURL} -L ${DL_URL} -o $0"
-		if [ -n "${KEY}" ]; then
-			echo "WARNING: Custom upload key must be set manually:"
-			echo "    KEY=${KEY}"
-		fi
-		if [ -n "${URI}" ]; then
-			echo "WARNING: Custom upload URI must be set manually:"
-			echo "    URI=${URI}"
-		fi
-	else
-		echo "This version is up to date"
-	fi
-}
-
-# Cleanup
-cleanup() {
-	rm -Rf "${TMPDIR}"
-	if [ ${RE_DELETE} -ne 0 ]; then
-		if [ ${RE_VERBOSE} -ne 0 ]; then
-			echo "Deleting ${RE_FILE_NAME}"
-		fi
-		rm -f ${RE_FILE}
-	fi
-}
-trap cleanup SIGHUP SIGINT SIGTERM
-
-# Get the options from the command line
-RE_ONCE=0
-RE_FORCE=0
-RE_PART=0
-RE_SLEEP=0
-RE_PUBLIC=0
-RE_VERBOSE=0
-RE_PRINT=0
-RE_COG=0
-RE_JPEG=0
-RE_DELETE=0
-if [ -z "$1" ]; then
-	showHelp
-	exit 1
-fi
-while getopts 1hufpojtcrgdxvk:l:s: o; do
-	case "$o" in
-	1)	RE_ONCE=1
-		;;
-	h)	showHelp
-		exit 1
-		;;
-	u)	checkUpdate
-		exit 1
-		;;
-        f)      eval NEXTIND=\${$OPTIND}
-                RE_GREP=$(echo "${NEXTIND}" |grep -Ec ^[0-9]+$)
-                if [ -n "${NEXTIND}" -a ${RE_GREP} -ne 0 -a ! -f "${NEXTIND}" ]; then
-                        OPTIND=$((OPTIND + 1))
-                        RE_FORCE=${NEXTIND}
-                else
-                        RE_FORCE=-1
-                fi
-                ;;
-	p)	eval NEXTIND=\${$OPTIND}
-                RE_GREP=$(echo "${NEXTIND}" |grep -Ec ^[0-9]+$)
-                if [ -n "${NEXTIND}" -a ${RE_GREP} -ne 0 -a ! -f "${NEXTIND}" ]; then
-			OPTIND=$((OPTIND + 1))
-			RE_PART=${NEXTIND}
-		else
-			RE_PART=-1
-		fi
-		;;
-	c)	RE_SLEEP=1
-		;;
-	r)	RE_PUBLIC=1
-		;;
-	g)	RE_PUBLIC=1
-		;;
-	v)	RE_VERBOSE=1
-		;;
-	x)	RE_PRINT=1
-		;;
-	o)	RE_COG=1
-		;;
-	j)	RE_JPEG=1
-		;;
-	d)	RE_DELETE=1
-		;;
-	t)	TIMEOUT=
-		;;
-	k)	KEY=${OPTARG}
-		;;
-	l)	URI=${OPTARG}
-		if [ -z "${BASE64}" ]; then
-			echo "Error: Cannot find \"base64\""
-			exit 5
-		fi
-		;;
-	s)	SERVER=${OPTARG}
-		;;
-	*)	showHelp
-		exit 1
-		;;
-	esac
-done
-shift $((${OPTIND} -1))
-
-# Insert http:// if not part of SERVER
-RE_GREP=$(echo "${SERVER}" |grep -Ec ^http)
-if [ ${RE_GREP} -eq 0 ]; then
-	SERVER="http://${SERVER}"
-fi
-SERVER_GW="${SERVER}"
-
-# Set our variables
-RE_FILE=$1
-RE_NAME=$2
-RE_DATE=$3
-RE_TIME=$4
-
-# Does the file exist?
-if [ ! -f "${RE_FILE}" -a ${RE_PRINT} -eq 0 ]; then
-	echo "ERROR: Could not find file: ${RE_FILE}"
-	exit 4
-fi
-
-# Is the key a file?
-if [ -n "${KEY}" -a -f "${KEY}" ]; then
-	KEY=`cat "${KEY}"`
-fi
-
-# Set the defaults for sending
-RE_FILE_DIR=`dirname "${RE_FILE}"`
-RE_FILE_NAME=`basename "${RE_FILE}"`
-RE_FILE_PARTS=`echo "${RE_FILE_NAME}" |awk -F. '{print $1}'`
-
-# Verify the product name
-CHECK_NAME=${RE_NAME}
-if [ -z "${CHECK_NAME}" ]; then
-	CHECK_NAME=`echo ${RE_FILE_PARTS} |awk -F_ '{print $1}'`
-fi
-if [ -n "${CHECK_NAME}" ]; then
-	match=`expr "${CHECK_NAME}" : '\([a-zA-Z0-9\-]\{1,\}\)'`
-	if [ "$match" != "${CHECK_NAME}" ]; then
-		echo ""
-		echo "ERROR: Invalid product name"
-		showHelp
-		exit 4
-	fi
-fi
-
-# Verify the product date
-CHECK_DATE=${RE_DATE}
-if [ -z "${CHECK_DATE}" ]; then
-	CHECK_DATE=`echo ${RE_FILE_PARTS} |awk -F_ '{print $2}'`
-fi
-if [ -n "${CHECK_DATE}" ]; then
-	match=`expr "${CHECK_DATE}" : '\([0-9]\{7,8\}\)'`
-	if [ "$match" != "${CHECK_DATE}" ]; then
-		echo ""
-		echo "ERROR: Invalid product date"
-		showHelp
-		exit 4
-	fi
-fi
-
-# Verify the product time
-CHECK_TIME=${RE_TIME}
-if [ -z "${CHECK_TIME}" ]; then
-	CHECK_TIME=`echo ${RE_FILE_PARTS} |awk -F_ '{print $3}'`
-fi
-if [ -n "${CHECK_TIME}" ]; then
-	match=`expr "${CHECK_TIME}" : '\([0-9]\{6\}\)'`
-	if [ "$match" != "${CHECK_TIME}" ]; then
-		echo ""
-		echo "ERROR: Invalid product time"
-		showHelp
-		exit 4
-	fi
-fi
-
-# Get the direct upload name (unless -g was specified)
-if [ ${RE_PUBLIC} -eq 0 ]; then
-	SERVER_DIRECT=
-	if [ -n "${RE_NAME}" ]; then
-		SERVER_DIRECT=`${CURL} -L --insecure -s ${SERVER}/upload/re_upload?name=${RE_NAME}`
-	else
-		SERVER_DIRECT=`${CURL} -L --insecure -s ${SERVER}/upload/re_upload?file=${RE_FILE_NAME}`
-	fi
-	if [ -n "${SERVER_DIRECT}" ]; then
-		SERVER=${SERVER_DIRECT}
-		RE_GREP=$(echo "${SERVER}" |grep -Ec ^http)
-		if [ ${RE_GREP} -eq 0 ]; then
-			SERVER="http://${SERVER}"
-		fi
-	else
-		if [ ${RE_VERBOSE} -ne 0 ]; then
-			echo "WARNING: Could not determine the direct URL for proxy upload"
-		fi
-	fi
-
-	# Test the direct upload
-	if [ ${RE_VERBOSE} -ne 0 ]; then
-		echo "Testing direct connection to ${SERVER}..."
-	fi
-	SERVER_TEST=`${CURL} -L --max-time 5 --insecure -s ${SERVER}/api/version`
-	RE_GREP=$(echo "${SERVER_TEST}" |grep -Ec ^[\.0-9]+$)
-	if [ -z "${SERVER_TEST}" -o ${RE_GREP} -eq 0 ]; then
-		if [ ${RE_VERBOSE} -ne 0 ]; then
-			echo "WARNING: Could not connect directly, using gateway"
-		fi
-		SERVER="${SERVER_GW}"
-	fi
-fi
-
-# Print
-if [ ${RE_PRINT} -ne 0 ]; then
-	echo ""
-	echo "   File: ${RE_FILE}"
-	echo ""
-	echo "Product: ${CHECK_NAME}"
-	echo "   Date: ${CHECK_DATE}"
-	echo "   Time: ${CHECK_TIME}"
-	echo ""
-	echo " Target: ${SERVER}"
-	echo ""
-	exit 0
-fi
-
-# Sleep up to 15 seconds if asked to
-if [ ${RE_SLEEP} -ne 0 ]; then
-	SLEEP=$((${RANDOM} * 15 / 32767))
-	echo "Sleeping for ${SLEEP} seconds..."
-	sleep ${SLEEP}
-fi
-
-# See if we can use translate
-if [ ${RE_COG} -ne 0 -o ${RE_JPEG} -ne 0 ]; then
-	GDAL_TRANSLATE=`which gdal_translate 2>/dev/null`
-	if [ -z "${GDAL_TRANSLATE}" ]; then
-		echo "Warning: Cannot find \"gdal_translate\", COG and JPEG compression disabled"
-		RE_COG=0
-		RE_JPEG=0
-	fi
-	RE_GREP=$(echo "${RE_FILE_NAME}" |grep -Ec \.tif$)
-	if [ ${RE_GREP} -eq 0 ]; then
-		echo "Warning: COG and JPEG compression only applies to GeoTIFFs"
-		RE_COG=0
-		RE_JPEG=0
-	fi
-fi
-
-# Did we ask for COG?
-if [ ${RE_COG} -ne 0 ]; then
-        if [ ${RE_VERBOSE} -ne 0 ]; then
-                echo "Converting to COG..."
-        fi
-        mkdir -p ${TMPDIR}
-        if [ ${RE_VERBOSE} -ne 0 ]; then
-		echo "Using ${TMPDIR}"
-        	echo "gdal_translate \"${RE_FILE}\" \"${TMPDIR}/${RE_FILE_NAME}\" -of COG -co \"COMPRESS=DEFLATE\""
-        	gdal_translate "${RE_FILE}" "${TMPDIR}/${RE_FILE_NAME}" -of COG -co "COMPRESS=DEFLATE"
-	else
-        	gdal_translate "${RE_FILE}" "${TMPDIR}/${RE_FILE_NAME}" -of COG -co "COMPRESS=DEFLATE" >/dev/null 2>&1
-	fi
-        if [ -f "${TMPDIR}/${RE_FILE_NAME}" ]; then
-                RE_FILE_DIR=${TMPDIR}
-        else
-                echo "Warning: Failed to convert GeoTIFF to COG"
-        fi
-fi
-
-# Did we ask for compression?
-if [ ${RE_JPEG} -ne 0 ]; then
-	if [ ${RE_VERBOSE} -ne 0 ]; then
-		echo "Compressing w/JPEG..."
-	fi
-	mkdir -p ${TMPDIR}
-        if [ ${RE_VERBOSE} -ne 0 ]; then
-		echo "Using ${TMPDIR}"
-		echo "gdal_translate \"${RE_FILE}\" \"${TMPDIR}/${RE_FILE_NAME}\" -co \"COMPRESS=JPEG\" -co \"JPEG_QUALITY=90\" -co \"TILED=YES\""
-		gdal_translate "${RE_FILE}" "${TMPDIR}/${RE_FILE_NAME}" -co "COMPRESS=JPEG" -co "JPEG_QUALITY=90" -co "TILED=YES"
-	else
-		gdal_translate "${RE_FILE}" "${TMPDIR}/${RE_FILE_NAME}" -co "COMPRESS=JPEG" -co "JPEG_QUALITY=90" -co "TILED=YES" >/dev/null 2>&1
-	fi
-	if [ -f "${TMPDIR}/${RE_NEW_NAME}" ]; then
-		RE_FILE_DIR=${TMPDIR}
-	else
-		echo "Warning: Failed to compress GeoTIFF"
-	fi
-fi
-
-# Change to the dir with the file
-cd "${RE_FILE_DIR}"
-echo "Connecting to ${SERVER}..."
-
-# Check if the server is ready to receive the file
-if [ ${RE_VERBOSE} -ne 0 ]; then
-	echo "Checking upload availability"
-fi
-BYTES=`/bin/ls -Lln "${RE_FILE_NAME}" |awk '{print $5}'`
-COMMAND="${CURL} --connect-timeout 15 -L --insecure -s ${SERVER}/upload/re_upload?bytes=${BYTES}"
-
-if [ ${RE_VERBOSE} -ne 0 ]; then
-	echo "Running: ${COMMAND}"
-fi
-SUCCESS=`${COMMAND} -o - 2>/dev/null |head -1`
-if [ -z "${SUCCESS}" ]; then
-        echo "  Server cannot be reached at this time, try again later"
-        exit 3
-fi
-if [ "${SUCCESS}" -eq "${SUCCESS}" ] 2>/dev/null; then
-	if [ "${SUCCESS}" = "-1" ]; then
-		echo "  Server cannot accept the file, it is too large!"
-		cleanup
-		exit 3
-	fi
-	if [ "${SUCCESS}" = "2" ]; then
-		echo "  Server has already received a file with this signature, use -f to force upload"
-		cleanup
-		exit 2
-	fi
-	if [ "${SUCCESS}" = "3" ]; then
-		echo "  Server is currently ingesting a file with this name, use -f to force upload"
-		cleanup
-		exit 3
-	fi
-	while [ "${SUCCESS}" != "1" ]; do
-		if [ ${RE_ONCE} -ne 0 ]; then
-			echo "  Server cannot accept the file at this time, try again later"
-			exit 3
-		fi
-		SLEEP=$((${RANDOM} * 5 / 32767 + 10));
-		echo "  Server cannot accept the file at this time, trying again in ${SLEEP} seconds..."
-		sleep ${SLEEP}
-		SUCCESS=`${COMMAND} -o - |head -1`
-	done
-else
-	if [ ${RE_VERBOSE} -ne 0 ]; then
-		echo "  Server does not understand file size check, continuing..."
-	fi
-fi
-
-# Send the file
-echo "Sending ${RE_FILE_NAME} (${BYTES} bytes)"
-COMMAND="${TIMEOUT} ${CURL} -L --max-time 600 --write-out %{http_code} --silent --fail --insecure -o /dev/null ${SERVER}/upload/ -F file=@${RE_FILE_NAME}"
-if [ -n "${RE_NAME}" ]; then
-	COMMAND="${COMMAND} -F name=${RE_NAME}"
-	echo "  Name: ${RE_NAME}"
-fi
-if [ -n "${RE_DATE}" ]; then
-	COMMAND="${COMMAND} -F date=${RE_DATE}"
-	echo "  Date: ${RE_DATE}"
-fi
-if [ -n "${RE_TIME}" ]; then
-	COMMAND="${COMMAND} -F time=${RE_TIME}"
-	echo "  Time: ${RE_TIME}"
-fi
-if [ ${RE_PART} -ne 0 ]; then
-	COMMAND="${COMMAND} -F part=${RE_PART}"
-	if [ ${RE_PART} -gt 0 ]; then
-		echo "  Part: ${RE_PART}"
-	else
-		echo "  Part"
-	fi
-fi
-if [ ${RE_FORCE} -ne 0 ]; then
-        COMMAND="${COMMAND} -F force=1"
-        if [ ${RE_FORCE} -gt 0 ]; then
-        	COMMAND="${COMMAND} -F cast=${RE_FORCE}"
-                echo "  Force: ${RE_FORCE}h"
-        else
-                echo "  Force"
-        fi
-fi
-if [ ${RE_COG} -ne 0 ]; then
-	echo "  COG format"
-fi
-if [ ${RE_JPEG} -ne 0 ]; then
-	echo "  JPEG compressed"
-fi
-if [ -n "${KEY}" ]; then
-	COMMAND="${COMMAND} -F key=${KEY}"
-fi
-if [ -n "${URI}" ]; then
-	B64URI=`echo "${URI}" |${BASE64} |tr -d '\n'`
-	COMMAND="${COMMAND} -F uri=\"${B64URI}\" -F uri64=true"
-fi
-
-# Retry a few times...
-RETRIES=3
-if [ ${RE_ONCE} -ne 0 ]; then
-	RETRIES=0
-fi
-if [ ${RE_VERBOSE} -ne 0 ]; then
-	echo "Running: ${COMMAND}"
-fi
-CODE=$(${COMMAND} |head -n 1 |sed -e 's/.*\s//')
-LASTEXIT=$?
-if [ $((${CODE}+0)) -ge 400 ]; then
-	LASTEXIT=${CODE}
-fi
-while [ ${LASTEXIT} -ne 0 -a ${RETRIES} -gt 0 ]; do
-	echo "Curl command failed: ${LASTEXIT}, HTTP code: ${CODE}"
-	if [ ${CODE} -ge 400 -a ${CODE} -lt 500 ]; then
-		if [ ${CODE} -eq 400 ]; then
-			echo "Invalid filename"
-			break;
-		elif [ ${CODE} -eq 401 ]; then
-			echo "Authorization failed"
-			break;
-		elif [ ${CODE} -eq 409 ]; then
-			echo "Conflict"
-			break;
-		fi
-		echo "Client error"
-		break;
-	fi
-	SLEEP=$((${RANDOM} * 30 / 32767))
-	echo "Sleeping for ${SLEEP} seconds..."
-	sleep ${SLEEP}
-	echo "Trying again..."
-	CODE=$(${COMMAND} |head -n 1 |sed -e 's/.*\s//')
-	LASTEXIT=$?
-	if [ $((${CODE}+0)) -ge 400 ]; then
-		LASTEXIT=${CODE}
-	fi
-	RETRIES=$((${RETRIES} - 1))
-done
-if [ ${RE_VERBOSE} -ne 0 ]; then
-	echo "HTTP code: ${CODE}"
-	echo "CURL exit: ${LASTEXIT}"
-fi
-if [ ${LASTEXIT} -eq 0 ]; then
-	echo "Done"
-else
-	echo "Giving up"
-fi
-
-cleanup
-
-exit ${LASTEXIT}
diff --git a/lightningcast/record_dss_events.py b/lightningcast/record_dss_events.py
index 2172dcc2490cd46811ba5171a81c1255728bc4ba..3fa901c8c52d7ea6df4486913fb58e0df9d9a1ac 100644
--- a/lightningcast/record_dss_events.py
+++ b/lightningcast/record_dss_events.py
@@ -1,3 +1,7 @@
+"""
+This program runs infinitely and records new on-demand DSS events to a CSV file.
+"""
+
 import os, sys
 from datetime import datetime, timedelta
 import logging
@@ -15,7 +19,6 @@ import re
 import pandas as pd
 import urllib
 
-# This program runs infinitely and records new on-demand DSS events to a CSV file.
 
 # Logging info
 logDict = logging_def(logfile=None)
diff --git a/lightningcast/sat_config.py b/lightningcast/sat_config.py
index 2ef299c3374ebef963d77a7ca633afe3e2612e40..ca303382daff281cbf506e67a21a4a3290aff750 100644
--- a/lightningcast/sat_config.py
+++ b/lightningcast/sat_config.py
@@ -1,3 +1,8 @@
+"""
+Configuration items used when yaml files are not provided.
+These are in addition to the command-line options.
+"""
+
 def sat_config():
 
     sat_info = {}
diff --git a/lightningcast/sat_zen_angle.py b/lightningcast/sat_zen_angle.py
index 2086fba93a422942d2549508dd11c46dc0824911..c113cb09a255fd9d90f43ba6a7b75ee39f2de2c9 100755
--- a/lightningcast/sat_zen_angle.py
+++ b/lightningcast/sat_zen_angle.py
@@ -1,5 +1,9 @@
-import numpy as np
+"""
+This program computes the satellite zenith angle
+given latitude, longitude, and satellite central longitude.
+"""
 
+import numpy as np
 
 def sat_zen_angle(xlat, xlon, satlat=0, satlon=-75.0):
 
diff --git a/lightningcast/static/ABIcmaps.py b/lightningcast/static/ABIcmaps.py
index c522f7f222463ff174f86c71c0de5ace2dfbd5d1..da5bae1d9a78aa620c1e2f46c60846558df277a2 100644
--- a/lightningcast/static/ABIcmaps.py
+++ b/lightningcast/static/ABIcmaps.py
@@ -1,3 +1,7 @@
+"""
+This file contains colormaps for infrared satellite channels.
+"""
+
 import matplotlib.colors as mcolors
 
 
diff --git a/lightningcast/static/stadiums-geocoded.csv b/lightningcast/static/stadiums-geocoded.csv
index 14b02831a6fe687f2945e87819515fc26a4ad16a..e0a7f238efa61cc37f298b9defd1ae352357d0d2 100644
--- a/lightningcast/static/stadiums-geocoded.csv
+++ b/lightningcast/static/stadiums-geocoded.csv
@@ -255,3 +255,4 @@ Arthur J. Rooney Athletic Field,Pittsburgh,Pennsylvania,Duquesne Dukes,Northeast
 LeGrand Stadium,San Angelo,Texas,Angelo State Rams,,,,,,31.43506,-100.45788
 Bristol Motor Speedway,Bristol,Tennessee,,,,,,,36.5157,-82.2570
 Francis Scott Key Bridge,Baltimore,Maryland,,,,,,,39.21748,-76.52815
+Indianapolis Motor Speedway,Indianapolis,Indiana,,,,,,,39.79556,-86.23450
diff --git a/lightningcast/tf_models.py b/lightningcast/tf_models.py
index a85f5f1c963de6153ac9141819600f114b80c3c8..1f896a33d85fcc4ed64e25d4923e289953bad2cd 100755
--- a/lightningcast/tf_models.py
+++ b/lightningcast/tf_models.py
@@ -1,3 +1,7 @@
+"""
+A collection of tensorflow model definitions and metrics.
+"""
+
 import numpy as np
 import logging
 from logging.config import dictConfig
diff --git a/lightningcast/utils.py b/lightningcast/utils.py
index cf3c2c1396d74d17ca6bf224b0ac098f5c829f26..d155ce315c4b09716a28d611e056546aebd3e2aa 100644
--- a/lightningcast/utils.py
+++ b/lightningcast/utils.py
@@ -981,4 +981,72 @@ def get_metadata_from_filename(file):
                              "naming conventions.")
             sys.exit(1)
 
-    return imgr,  satname, sector, timestamp
\ No newline at end of file
+    return imgr, satname, sector, timestamp
+
+
+def verify_correct_num_him_files_him(filelist, chan_band):
+    """Return True if all Himawari segment files for the given band are present."""
+    sub_filelist = [f for f in filelist if chan_band in f]
+    if not sub_filelist:
+        return False
+    # The expected number of segments is encoded near the end of standard HSD file
+    # names (e.g., "...B13_FLDK_R20_S0110.DAT" means segment 01 of 10).
+    expected_count = int(sub_filelist[0][-6:-4])
+    return len(sub_filelist) == expected_count
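+
+# Illustrative check (hypothetical file list, assuming uncompressed HSD names such as
+# "HS_H09_20240601_0300_B13_FLDK_R20_S0110.DAT", i.e. segment 01 of 10):
+#   verify_correct_num_him_files_him(datafiles, "_B13_") returns True only when all
+#   10 band-13 segment files for the scan are present in datafiles.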
+
+# Returns 3 values:
+#   1. True/False: whether all required files are present for the required bands.
+#   2. A string of the problem bands (empty if value 1 is True).
+#   3. A list of booleans indicating whether each optional band is present.
+def check_chan_bands(imgr, datadir, dt, sector, required_abi=["02", "05", "13", "15"], required_ahi=["03", "05", "13", "15"], optional_abi=["03", "11", "14"], optional_ahi=["04", "11", "14"]):
+    if imgr=="ABI":
+        optional_chan_bands=optional_abi
+        required_chan_bands=required_abi
+        datafiles=glob.glob(
+                dt.strftime(f"{datadir}/OR*_*-{sector}-*_s%Y%j%H%M%S*nc")
+            )
+    elif imgr=="AHI":
+        optional_chan_bands=optional_ahi
+        required_chan_bands=required_ahi
+        if sector == "JP":
+            mss = dt.strftime("%M%S")[1:]
+            if mss == "000":
+                mm = "01"
+            elif mss == "230":
+                mm = "02"
+            elif mss == "500":
+                mm = "03"
+            else:
+                mm = "04"
+
+            datafiles = glob.glob(dt.strftime(f"{datadir}/*%Y%m%d_%H00_B*_{sector}{mm}*"))
+
+        else:  # FLDK
+            datafiles = glob.glob(dt.strftime(f"{datadir}/*%Y%m%d_%H%M_B*_{sector}*"))
+    else:
+        raise("Only ABI and AHI are supported at this time. Make sure your file names use standard "
+                         "naming conventions.")
+
+    valid_req_chan_bands = True
+    problem_chan_band = ""
+    joined_file_string = "".join(datafiles)
+    for chan_band in required_chan_bands:
+        if imgr == "ABI":
+            if f"C{chan_band}_" not in joined_file_string:
+                valid_req_chan_bands = False
+                problem_chan_band += f"C{chan_band} "
+        elif imgr == "AHI":
+            if not verify_correct_num_him_files_him(datafiles, f"_B{chan_band}_"):
+                valid_req_chan_bands = False
+                problem_chan_band += f"B{chan_band} "
+
+    opt_return_val = []
+    for chan_band in optional_chan_bands:
+        if imgr == "ABI":
+            opt_return_val.append(f"C{chan_band}_" in joined_file_string)
+        elif imgr == "AHI":
+            opt_return_val.append(verify_correct_num_him_files_him(datafiles, f"_B{chan_band}_"))
+
+    return valid_req_chan_bands, problem_chan_band, opt_return_val
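+
+# Minimal usage sketch (hypothetical directory, sector, and scan time; dt must match
+# the scan-start time encoded in the level-1 file names):
+#
+#   from datetime import datetime
+#   ok, missing, have_optional = check_chan_bands(
+#       "ABI", "/data/goes-east/l1b", datetime(2024, 6, 1, 18, 1, 17), "RadC"
+#   )
+#   if not ok:
+#       print(f"Missing required ABI channels: {missing}")
+#   # have_optional lines up with optional_abi, e.g. [True, False, True] means
+#   # C03 and C14 were found but C11 was not.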
+
+