diff --git a/gridded_glm/libexec/_make_glm_grids.py b/gridded_glm/libexec/_make_glm_grids.py
index 7103cc7ad31e730ebfe72ba8e507eba9339ab437..1dac45be5d16ddc1990cfae05cd6250a3a27711f 100644
--- a/gridded_glm/libexec/_make_glm_grids.py
+++ b/gridded_glm/libexec/_make_glm_grids.py
@@ -1,9 +1,8 @@
 #!/usr/bin/env python3
-# Based on https://github.com/deeplycloudy/glmtools/blob/master/examples/grid/make_GLM_grids.py
-
-parse_desc = """Grid GLM flash data.
-
-The start and end times can be specified
+# Adapted from the example script provided with glmtools:
+# https://github.com/deeplycloudy/glmtools/blob/master/examples/grid/make_GLM_grids.py
+import argparse
+parse_desc = """Grid GLM flash data. The start and end times can be specified
 independently, or if not provided they will be inferred from the filenames.
 
 Grid spacing is regular in latitude and longitude with the grid box
@@ -14,84 +13,129 @@ standard GOES imagery naming convention. This behavior can be fully controlled
 by adjusting the -o argument.
 """
 
-import numpy as np
-from datetime import datetime
-import os
-from functools import partial
-from lmatools.grid.make_grids import write_cf_netcdf_latlon, write_cf_netcdf_noproj, write_cf_netcdf_fixedgrid
-from lmatools.grid.make_grids import dlonlat_at_grid_center, grid_h5flashfiles
-from glmtools.grid.make_grids import grid_GLM_flashes
-from glmtools.io.glm import parse_glm_filename
-from lmatools.grid.fixed import get_GOESR_grid, get_GOESR_coordsys
-
-import logging
-
-log = logging.getLogger(__name__)
+output_help = """Specify the output path and filename using a configurable path
+template. -o ./{dataset_name} (the default) will generate files in the current
+directory using the standard GOES imagery naming convention, including a .nc
+extension. Any intermediate directories will be created as needed. All allowed
+names in the template are listed in the docs for
+glmtools.io.imagery.write_goes_imagery. For example, this script can be used
+to process multiple days and write them to a standardized directory
+structure by specifying a path such as: -o
+{start_time:%%Y/%%b/%%d}/{dataset_name}"""
 
 def create_parser():
-    import argparse
     parser = argparse.ArgumentParser(description=parse_desc)
-    parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
-                        help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
-    parser.add_argument('-l', '--log', dest="log_fn", default=None,
-                        help="specify the log filename")
-    parser.add_argument('-o', '--output-dir', metavar='output directory',
-                        default=os.getcwd())
-    parser.add_argument('--ctr-lat', metavar='latitude',
-                        type=float, help='center latitude')
-    parser.add_argument('--ctr-lon', metavar='longitude',
-                        type=float, help='center longitude')
+    parser.add_argument(dest='filenames', metavar='filename', nargs='*')
+    parser.add_argument('-o', '--output_path',
+                        metavar='filename template including path',
+                        required=False, dest='outdir', action='store',
+                        default='./{dataset_name}', help=output_help)
+    parser.add_argument('--ctr_lat', metavar='latitude', required=False,
+                        dest='ctr_lat', action='store', type=float,
+                        help='center latitude')
+    parser.add_argument('--ctr_lon', metavar='longitude', required=False,
+                        dest='ctr_lon', action='store', type=float,
+                        help='center longitude')
     parser.add_argument('--start', metavar='yyyy-mm-ddThh:mm:ss',
+                        dest='start', action='store',
                         help='UTC start time, e.g., 2017-07-04T08:00:00')
     parser.add_argument('--end', metavar='yyyy-mm-ddThh:mm:ss',
+                        dest='end', action='store',
                         help='UTC end time, e.g., 2017-07-04T09:00:00')
     parser.add_argument('--dx', metavar='km',
-                        default=10.0, type=float,
+                        dest='dx', action='store', default=10.0, type=float,
                         help='approximate east-west grid spacing')
     parser.add_argument('--dy', metavar='km',
-                        default=10.0, type=float,
+                        dest='dy', action='store', default=10.0, type=float,
                         help='approximate north-south grid spacing')
     parser.add_argument('--dt', metavar='seconds',
-                        default=60.0, type=float,
+                        dest='dt', action='store', default=60.0, type=float,
                         help='frame duration')
     parser.add_argument('--width', metavar='distance in km',
-                        default=400.0,
+                        dest='width', action='store', default=400.0,
                         type=float, help='total width of the grid')
     parser.add_argument('--height', metavar='distance in km',
-                        default=400.0,
+                        dest='height', action='store', default=400.0,
                         type=float, help='total height of the grid')
     parser.add_argument('--nevents', metavar='minimum events per flash',
-                        type=int, dest='min_events', default=1,
+                        type=int, dest='min_events', action='store', default=1,
                         help='minimum number of events per flash')
     parser.add_argument('--ngroups', metavar='minimum groups per flash',
-                        type=int, dest='min_groups', default=1,
+                        type=int, dest='min_groups', action='store', default=1,
                         help='minimum number of groups per flash')
-    parser.add_argument('--subdivide-grid', metavar='sqrt(number of subgrids)',
+    parser.add_argument('--fixed_grid',
+                        action='store_true', dest='fixed_grid',
+                        help='grid to the geostationary fixed grid')
+    parser.add_argument('--subdivide_grid', metavar='sqrt(number of subgrids)',
+                        action='store', dest='subdivide_grid',
                         type=int, default=1,
-                        help="subdivide the grid this many times along "
-                             "each dimension")
-    parser.add_argument('--goes-position',
-                        help="One of [east|west|test|auto]. "
-                             "Requires '--goes-sector'.")
-    parser.add_argument('--goes-sector',
-                        help="One of [full|conus|meso]. "
-                             "Requires goes_position. If sector is "
-                             "meso, ctr_lon and ctr_lat are interpreted as "
-                             "the ctr_x and ctr_y of the fixed grid")
-    # parser.add_argument('--split-events',
-    #                     action='store_true',
-    #                     help='Split GLM event polygons when gridding')
+                        help=("subdivide the grid this many times along "
+                              "each dimension"))
+    parser.add_argument('--goes_position', default='none',
+                        action='store', dest='goes_position',
+                        help=("One of [east|west|test]. Requires goes_sector "
+                              "unless width and height define a custom grid."))
+    parser.add_argument('--goes_sector', default='none',
+                        action='store', dest='goes_sector',
+                        help=("One of [full|conus|meso]. "
+                              "Also requires goes_position. If sector is "
+                              "meso, ctr_lon and ctr_lat are interpreted as "
+                              "the ctr_x and ctr_y of the fixed grid. "
+                              "Omit if you are creating a fully custom grid "
+                              "with --width and --height arguments."))
+    parser.add_argument('--corner_points', metavar='filename.pickle',
+                        action='store', dest='corner_points',
+                        help=("name of file containing a pickled "
+                              "corner point lookup table"))
+    parser.add_argument('--split_events', dest='split_events',
+                        action='store_true',
+                        help='Split GLM event polygons when gridding')
     parser.add_argument('--ellipse', dest='ellipse_rev', default=-1,
-                        type=int,
+                        action='store', type=int,
                         help='Lightning ellipse revision. -1 (default)=infer'
                              ' from date in each GLM file, 0=value at launch,'
                              ' 1=late 2018 revision')
-    parser.add_argument('--float-output', dest='output_scale_and_offset',
+    parser.add_argument('--float_output', dest='output_scale_and_offset',
+                        default=True,
                         action='store_false',
                         help='write all output variables as floating point')
-    parser.add_argument(dest='filenames', metavar='filename', nargs='+')
+    parser.add_argument('--lma', dest='is_lma',
+                        action='store_true',
+                        help='grid LMA h5 files instead of GLM data')
+    # parser.add_argument('-v', dest='verbose', action='store_true',
+    #                     help='verbose mode')
     return parser
 
+##### END PARSING #####
+
+import numpy as np
+import subprocess, glob
+from datetime import datetime, timedelta
+import os
+from functools import partial
+
+import logging
+class MyFormatter(logging.Formatter):
+    """ Custom class to allow logging of microseconds"""
+    converter=datetime.fromtimestamp
+    def formatTime(self, record, datefmt=None):
+        ct = self.converter(record.created)
+        if datefmt:
+            s = ct.strftime(datefmt)
+        else:
+            t = ct.strftime("%Y-%m-%d %H:%M:%S")
+            s = "%s,%03d" % (t, record.msecs)
+        return s
+logoutfile = logging.FileHandler("make_GLM_grid.log")
+formatter = MyFormatter(fmt='%(levelname)s %(asctime)s %(message)s',
+                        datefmt='%Y-%m-%dT%H:%M:%S.%f')
+logoutfile.setFormatter(formatter)
+logging.basicConfig(handlers=[logoutfile],
+                    level=logging.DEBUG)
+
+# Separate from the log setup above: actually log something specific to this module.
+log = logging.getLogger(__name__)
+log.info("Starting GLM Gridding")
 
 def nearest_resolution(args):
     """ Uses args.dx to find the closest resolution specified by the
@@ -104,47 +148,14 @@ def nearest_resolution(args):
     resln = '{0:4.1f}km'.format(closest_resln).replace(' ', '')
     return resln
 
-
-# if provided "auto" position, we determine the sensor from the filename
-def get_goes_position(filenames):
-    if all("_G16_" in f for f in filenames):
-        return "east"
-    if all("_G17_" in f for f in filenames):
-        return "west"
-
-    # we require that all files are from the same sensor and raise an exception if not
-    raise ValueError("position 'auto' but could not determine position - did you provide a mix of satellites?")
-
-
-def get_start_end(filenames, start_time=None, end_time=None):
-    """Compute start and end time of data based on filenames."""
-    base_filenames = [os.path.basename(p) for p in filenames]
-    try:
-        filename_infos = [parse_glm_filename(f) for f in base_filenames]
-        # opsenv, algorithm, platform, start, end, created = parse_glm_filename(f)
-        filename_starts = [info[3] for info in filename_infos]
-        filename_ends = [info[4] for info in filename_infos]
-    except ValueError:
-        filename_starts = None
-        filename_ends = None
-
-    if args.start is not None:
-        start_time = datetime.strptime(args.start, '%Y-%m-%dT%H:%M:%S')
-    elif filename_starts is not None:
-        start_time = min(filename_starts)
-
-    if args.end is not None:
-        end_time = datetime.strptime(args.end, '%Y-%m-%dT%H:%M:%S')
-    elif filename_ends is not None:
-        end_time = max(filename_ends)
-
-    if start_time is None or end_time is None:
-        raise ValueError("Could not determine start/end time")
-
-    return start_time, end_time
-
-
 def grid_setup(args):
+    from lmatools.grid.make_grids import write_cf_netcdf_latlon, write_cf_netcdf_noproj, write_cf_netcdf_fixedgrid
+    from lmatools.grid.make_grids import dlonlat_at_grid_center, grid_h5flashfiles
+    from glmtools.grid.make_grids import grid_GLM_flashes
+    from glmtools.io.glm import parse_glm_filename
+    from lmatools.io.LMA_h5_file import parse_lma_h5_filename
+    from lmatools.grid.fixed import get_GOESR_grid, get_GOESR_coordsys
+
     # When passed None for the minimum event or group counts, the gridder will skip
     # the check, saving a bit of time.
     min_events = int(args.min_events)
@@ -154,120 +165,168 @@ def grid_setup(args):
     if min_groups <= 1:
         min_groups = None
 
+    if args.is_lma:
+        filename_parser = parse_lma_h5_filename
+        start_idx = 0
+        end_idx = 1
+    else:
+        filename_parser = parse_glm_filename
+        start_idx = 3
+        end_idx = 4
+
+    glm_filenames = args.filenames
+    base_filenames = [os.path.basename(p) for p in glm_filenames]
     try:
-        start_time, end_time = get_start_end(args.filenames, args.start, args.end)
+        filename_infos = [filename_parser(f) for f in base_filenames]
+        # opsenv, algorithm, platform, start, end, created = parse_glm_filename(f)
+        filename_starts = [info[start_idx] for info in filename_infos]
+        filename_ends = [info[end_idx] for info in filename_infos]
     except ValueError:
-        log.error("Non-standard filenames provided, use --start and --end to specify data times.")
-        raise
-
-    date = datetime(start_time.year, start_time.month, start_time.day)
-    os.makedirs(args.output_dir, exist_ok=True)
-    output = os.path.join(args.output_dir, "{dataset_name}") # GLMTools expects a template in addition to the path
-    proj_name = 'geos'
+        log.error("One or more GLM files has a non-standard filename.")
+        log.error("Assuming that --start and --end have been passed directly.")
 
-    if args.goes_position == "auto":
-        # auto-create the goes-position from the input filename
-        args.goes_position = get_goes_position(args.filenames)
+    from glmtools.io.glm import parse_glm_filename
+    if args.start is not None:
+        start_time = datetime.strptime(args.start[:19], '%Y-%m-%dT%H:%M:%S')
+    else:
+        start_time = min(filename_starts)
+    if args.end is not None:
+        end_time = datetime.strptime(args.end[:19], '%Y-%m-%dT%H:%M:%S')
+    else:
+        # We previously used max(filename_ends), but on 27 Oct 2020 the
+        # filename end times started to report the time of the last event in
+        # the file, causing a slight leakage (usually less than a second)
+        # into the next minute. This caused two minutes of grids to be
+        # produced for every three twenty-second files passed to this script.
+        # Instead, we now assume every LCFA file is 20 s long, beginning at
+        # the start time. No doubt in the future we will see filenames that
+        # no longer start on an even minute boundary.
+        end_time = max(filename_starts) + timedelta(seconds=20)
 
-    if args.goes_position is not None and args.goes_sector is not None:
-        resln = nearest_resolution(args)
-        view = get_GOESR_grid(position=args.goes_position,
-                              view=args.goes_sector,
-                              resolution=resln)
-        nadir_lon = view['nadir_lon']
-        dx = dy = view['resolution']
-        nx, ny = view['pixelsEW'], view['pixelsNS']
-        geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon)
+    date = datetime(start_time.year, start_time.month, start_time.day)
 
-        if 'centerEW' in view:
-            x_ctr, y_ctr = view['centerEW'], view['centerNS']
-        elif args.goes_sector == 'meso':
-            # use ctr_lon, ctr_lat to get the center of the mesoscale FOV
+    outpath = args.outdir
+
+    if args.fixed_grid:
+        proj_name = 'geos'
+
+        if (args.goes_position != 'none') and (args.goes_sector != 'none'):
+            resln = nearest_resolution(args)
+            view = get_GOESR_grid(position=args.goes_position,
+                                  view=args.goes_sector,
+                                  resolution=resln)
+            nadir_lon = view['nadir_lon']
+            dx = dy = view['resolution']
+            nx, ny = view['pixelsEW'], view['pixelsNS']
+            geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon)
+
+            if 'centerEW' in view:
+                x_ctr, y_ctr = view['centerEW'], view['centerNS']
+            elif args.goes_sector == 'meso':
+                # use ctr_lon, ctr_lat to get the center of the mesoscale FOV
+                x_ctr, y_ctr, z_ctr = geofixcs.fromECEF(
+                    *grs80lla.toECEF(args.ctr_lon, args.ctr_lat, 0.0))
+        elif (args.goes_position != 'none') and (args.goes_sector == 'none'):
+            # Requires goes_position, a center, and a width. Fully flexible
+            # in resolution, i.e., doesn't slave it to one of the GOES-R specs
+            view = get_GOESR_grid(position=args.goes_position,
+                                  view='full',
+                                  resolution='1.0km')
+            nadir_lon = view['nadir_lon']
+            dx1km = dy1km = view['resolution']
+            geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon)
             x_ctr, y_ctr, z_ctr = geofixcs.fromECEF(
-                *grs80lla.toECEF(args.ctr_lon, args.ctr_lat, 0.0))
-    elif args.goes_position is not None and args.goes_sector is None:
-        # Requires goes_position, a center, and a width. Fully flexible
-        # in resolution, i.e., doesn't slave it to one of the GOES-R specs
-        view = get_GOESR_grid(position=args.goes_position,
-                              view='full',
-                              resolution='1.0km')
-        nadir_lon = view['nadir_lon']
-        dx1km = dy1km = view['resolution']
+              *grs80lla.toECEF(args.ctr_lon, args.ctr_lat, 0.0))
+
+            # Convert the specified resolution in km given by args.dx to
+            # a delta in fixed grid coordinates using the 1 km delta from the
+            # GOES-R PUG.
+            dx, dy = args.dx * dx1km, args.dy * dy1km
+            nx, ny = int(args.width/args.dx), int(args.height/args.dy)
+        else:
+            raise ValueError("Gridding on the fixed grid requires "
+                "goes_position and dx. For goes_sector='meso', also specify "
+                "ctr_lon and ctr_lat. Without goes_sector, also include width "
+                "and height.")
+        # Need to use +1 here to convert to xedge, yedge expected by gridder
+        # instead of the pixel centroids that will result in the final image
+        nx += 1
+        ny += 1
+        x_bnd = (np.arange(nx, dtype='float') - (nx)/2.0)*dx + x_ctr + 0.5*dx
+        y_bnd = (np.arange(ny, dtype='float') - (ny)/2.0)*dy + y_ctr + 0.5*dy
+        log.debug(("initial x,y_ctr", x_ctr, y_ctr))
+        log.debug(("initial x,y_bnd", x_bnd.shape, y_bnd.shape))
+        x_bnd = np.asarray([x_bnd.min(), x_bnd.max()])
+        y_bnd = np.asarray([y_bnd.min(), y_bnd.max()])
+
         geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon)
-        x_ctr, y_ctr, z_ctr = geofixcs.fromECEF(
-            *grs80lla.toECEF(args.ctr_lon, args.ctr_lat, 0.0))
+        ctr_lon, ctr_lat, ctr_alt = grs80lla.fromECEF(
+            *geofixcs.toECEF(x_ctr, y_ctr, 0.0))
+        fixed_grid = geofixcs
+        log.debug((x_bnd, y_bnd, dx, dy, nx, ny))
 
-        # Convert the specified resolution in km given by args.dx to
-        # a delta in fixed grid coordinates using the 1 km delta from the
-        # GOES-R PUG.
-        dx, dy = args.dx * dx1km, args.dy * dy1km
-        nx, ny = int(args.width / args.dx), int(args.height / args.dy)
+        output_writer = partial(write_cf_netcdf_fixedgrid, nadir_lon=nadir_lon)
     else:
-        raise ValueError("Gridding on the fixed grid requires "
-                         "goes_position and dx. For goes_sector='meso', also specify "
-                         "ctr_lon and ctr_lat. Without goes_sector, also include width "
-                         "and height.")
-    # Need to use +1 here to convert to xedge, yedge expected by gridder
-    # instead of the pixel centroids that will result in the final image
-    nx += 1
-    ny += 1
-    x_bnd = (np.arange(nx, dtype='float') - (nx) / 2.0) * dx + x_ctr + 0.5 * dx
-    y_bnd = (np.arange(ny, dtype='float') - (ny) / 2.0) * dy + y_ctr + 0.5 * dy
-    log.debug(("initial x,y_ctr", x_ctr, y_ctr))
-    log.debug(("initial x,y_bnd", x_bnd.shape, y_bnd.shape))
-    x_bnd = np.asarray([x_bnd.min(), x_bnd.max()])
-    y_bnd = np.asarray([y_bnd.min(), y_bnd.max()])
-
-    geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon)
-    ctr_lon, ctr_lat, ctr_alt = grs80lla.fromECEF(
-        *geofixcs.toECEF(x_ctr, y_ctr, 0.0))
-    fixed_grid = geofixcs
-    log.debug((x_bnd, y_bnd, dx, dy, nx, ny))
-
-    output_writer = partial(write_cf_netcdf_fixedgrid, nadir_lon=nadir_lon)
-
-    gridder = grid_GLM_flashes
-    output_filename_prefix = 'GLM'
-    grid_kwargs = dict(proj_name=proj_name,
-                       base_date=date, do_3d=False,
-                       dx=dx, dy=dy, frame_interval=float(args.dt),
-                       x_bnd=x_bnd, y_bnd=y_bnd,
-                       ctr_lat=ctr_lat, ctr_lon=ctr_lon, outpath=output,
-                       min_points_per_flash=min_events,
-                       output_writer=output_writer, subdivide=args.subdivide_grid,
-                       output_filename_prefix=output_filename_prefix,
-                       output_kwargs={'scale_and_offset': args.output_scale_and_offset},
-                       spatial_scale_factor=1.0)
-
-    #if args.fixed_grid:
-    #    grid_kwargs['fixed_grid'] = True
-    #    grid_kwargs['nadir_lon'] = nadir_lon
-    # if args.split_events:
-    grid_kwargs['clip_events'] = True
+        # Default: a regular latitude/longitude grid centered on ctr_lat, ctr_lon
+        proj_name = 'latlong'
+        output_writer = write_cf_netcdf_latlon
+        ctr_lat = float(args.ctr_lat)
+        ctr_lon = float(args.ctr_lon)
+        dx_km = float(args.dx)*1.0e3
+        dy_km = float(args.dy)*1.0e3
+        width, height = 1000.0*float(args.width), 1000.0*float(args.height)
+        x_bnd_km = (-width/2.0, width/2.0)
+        y_bnd_km = (-height/2.0, height/2.0)
+        dx, dy, x_bnd, y_bnd = dlonlat_at_grid_center(ctr_lat, ctr_lon,
+                                    dx=dx_km, dy=dy_km,
+                                    x_bnd=x_bnd_km, y_bnd=y_bnd_km)
+
+    # tuples of the corners
+    corners = np.vstack([(x_bnd[0], y_bnd[0]), (x_bnd[0], y_bnd[1]),
+                         (x_bnd[1], y_bnd[1]), (x_bnd[1], y_bnd[0])])
+    # print(x_bnd, y_bnd)
+
+    if args.is_lma:
+        gridder = grid_h5flashfiles
+        output_filename_prefix='LMA'
+    else:
+        gridder = grid_GLM_flashes
+        output_filename_prefix='GLM'
+
+    grid_kwargs = dict(proj_name=proj_name,
+            base_date=date, do_3d=False,
+            dx=dx, dy=dy, frame_interval=float(args.dt),
+            x_bnd=x_bnd, y_bnd=y_bnd,
+            ctr_lat=ctr_lat, ctr_lon=ctr_lon, outpath=outpath,
+            min_points_per_flash=min_events,
+            output_writer=output_writer, subdivide=args.subdivide_grid,
+            output_filename_prefix=output_filename_prefix,
+            output_kwargs={'scale_and_offset': args.output_scale_and_offset},
+            spatial_scale_factor=1.0)
+
+    if args.fixed_grid:
+        grid_kwargs['fixed_grid'] = True
+        grid_kwargs['nadir_lon'] = nadir_lon
+    if args.split_events:
+        grid_kwargs['clip_events'] = True
     if min_groups is not None:
         grid_kwargs['min_groups_per_flash'] = min_groups
-    grid_kwargs['energy_grids'] = ('total_energy',)
-    if (proj_name == 'pixel_grid') or (proj_name == 'geos'):
+    if args.is_lma:
+        grid_kwargs['energy_grids'] = True
+    else:
+        grid_kwargs['energy_grids'] = ('total_energy',)
+    if (proj_name == 'pixel_grid') or (proj_name == 'geos'):
         grid_kwargs['pixel_coords'] = fixed_grid
     grid_kwargs['ellipse_rev'] = args.ellipse_rev
-    return gridder, args.filenames, start_time, end_time, grid_kwargs
-
+    # if args.corner_points:
+        # grid_kwargs['corner_pickle'] = args.corner_points
+    return gridder, glm_filenames, start_time, end_time, grid_kwargs
 
 if __name__ == '__main__':
-    import sys
     parser = create_parser()
     args = parser.parse_args()
 
-    # Configure logging
-    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
-    logging.basicConfig(level=levels[min(3, args.verbosity)], filename=args.log_fn)
-    if levels[min(3, args.verbosity)] > logging.DEBUG:
-        import warnings
-        warnings.filterwarnings("ignore")
-    log.info("Starting GLM Gridding")
-    log.debug("Starting script with: %s", sys.argv)
-
     from multiprocessing import freeze_support
     freeze_support()
     gridder, glm_filenames, start_time, end_time, grid_kwargs = grid_setup(args)
-    gridder(glm_filenames, start_time, end_time, **grid_kwargs)
+    gridder(glm_filenames, start_time, end_time, **grid_kwargs)
\ No newline at end of file
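
A minimal sketch (not part of the patch) of how the -o path template described
in output_help expands. It assumes only the Python standard library and that
write_goes_imagery performs str.format-style substitution, as the help text
implies; the dataset_name value below is a placeholder, not a real GLM product
filename.

    from datetime import datetime

    # Expand the documented template fields with str.format
    template = '{start_time:%Y/%b/%d}/{dataset_name}'
    print(template.format(start_time=datetime(2017, 7, 4, 8, 0, 0),
                          dataset_name='OR_GLM-L2-GLMC-M3_G16_example.nc'))
    # -> 2017/Jul/04/OR_GLM-L2-GLMC-M3_G16_example.nc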