diff --git a/buildbucket/buildbucket_environment.yaml b/buildbucket/buildbucket_environment.yaml index d2231fa7952630a315ba9ed523350ef306b9d5a7..98e0204eff24603a30f310372655babb72f39b3d 100644 --- a/buildbucket/buildbucket_environment.yaml +++ b/buildbucket/buildbucket_environment.yaml @@ -27,4 +27,4 @@ dependencies: - git+https://github.com/deeplycloudy/lmatools.git@minvaluegrids - git+https://github.com/deeplycloudy/stormdrain.git - git+https://github.com/deeplycloudy/glmtools.git@master - - git+https://github.com/pytroll/satpy.git@master \ No newline at end of file + - git+https://github.com/pytroll/satpy.git@7ae7b274a01e5e170bd2cf0f18ccf53b0bd33ada \ No newline at end of file diff --git a/gridded_glm/bin/cspp-geo-gglm-minute-gridder.sh b/gridded_glm/bin/cspp-geo-gglm.sh similarity index 100% rename from gridded_glm/bin/cspp-geo-gglm-minute-gridder.sh rename to gridded_glm/bin/cspp-geo-gglm.sh diff --git a/gridded_glm/libexec/_glm_trio_picker.py b/gridded_glm/libexec/_glm_trio_picker.py index 01fcde73cc21b5479755c5d9adeb20c83911d328..376132a6d2abf8409fe5eee12f79a7f97f5916ab 100644 --- a/gridded_glm/libexec/_glm_trio_picker.py +++ b/gridded_glm/libexec/_glm_trio_picker.py @@ -39,13 +39,12 @@ if __name__ == '__main__': # and run gridded glm a'la: # minute_gridder.sh \ - # --goes-position auto --goes-sector full \ + # --goes-sector full \ # --create-tiles \ # ./2020-11-05/CLASS/OR_GLM-L2-LCFA_G16_s20203101529000_e20203101529205_c20203101529215.nc \ # ./2020-11-05/CLASS/OR_GLM-L2-LCFA_G16_s20203101529200_e20203101529405_c20203101529430.nc \ # ./2020-11-05/CLASS/OR_GLM-L2-LCFA_G16_s20203101529400_e20203101530004_c20203101530021.nc - subprocess.run(["cspp-geo-gglm-minute-gridder.sh", - "--goes-position", "auto", + subprocess.run(["cspp-geo-gglm.sh", "--goes-sector", "full", "--create-tiles", fileglob[0], fileglob[1], fileglob[2]] diff --git a/gridded_glm/libexec/_make_glm_grids.py b/gridded_glm/libexec/_make_glm_grids.py index 7103cc7ad31e730ebfe72ba8e507eba9339ab437..1dac45be5d16ddc1990cfae05cd6250a3a27711f 100644 --- a/gridded_glm/libexec/_make_glm_grids.py +++ b/gridded_glm/libexec/_make_glm_grids.py @@ -1,9 +1,8 @@ #!/usr/bin/env python3 -# Based on https://github.com/deeplycloudy/glmtools/blob/master/examples/grid/make_GLM_grids.py - -parse_desc = """Grid GLM flash data. - -The start and end times can be specified +# This example was provided by GLMTools +# https://github.com/deeplycloudy/glmtools/blob/master/examples/grid/make_GLM_grids.py +import argparse +parse_desc = """Grid GLM flash data. The start and end times can be specified independently, or if not provided they will be inferred from the filenames. Grid spacing is regular in latitude and longitude with the grid box @@ -14,84 +13,129 @@ standard GOES imagery naming convention. This behavior can be fully controlled by adjusting the -o argument. """ -import numpy as np -from datetime import datetime -import os -from functools import partial -from lmatools.grid.make_grids import write_cf_netcdf_latlon, write_cf_netcdf_noproj, write_cf_netcdf_fixedgrid -from lmatools.grid.make_grids import dlonlat_at_grid_center, grid_h5flashfiles -from glmtools.grid.make_grids import grid_GLM_flashes -from glmtools.io.glm import parse_glm_filename -from lmatools.grid.fixed import get_GOESR_grid, get_GOESR_coordsys - -import logging - -log = logging.getLogger(__name__) +output_help = """Specify the output path and filename using a configurable path +template. 
-o ./{dataset_name} (the default) will generate files in the current +directory using the standard GOES imagery naming convention, including a .nc +extension. Any intermediate directories will be created as needed. All allowed +names in the template are listed in the docs for +glmtools.io.imagery.write_goes_imagery. For example: this script can be used to +process multiple days and that are written to a standardized directory +structure by specifying a path like so: -o +{start_time:%%Y/%%b/%%d}/{dataset_name}""" def create_parser(): - import argparse parser = argparse.ArgumentParser(description=parse_desc) - parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0, - help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)') - parser.add_argument('-l', '--log', dest="log_fn", default=None, - help="specify the log filename") - parser.add_argument('-o', '--output-dir', metavar='output directory', - default=os.getcwd()) - parser.add_argument('--ctr-lat', metavar='latitude', - type=float, help='center latitude') - parser.add_argument('--ctr-lon', metavar='longitude', - type=float, help='center longitude') + parser.add_argument(dest='filenames',metavar='filename', nargs='*') + parser.add_argument('-o', '--output_path', + metavar='filename template including path', + required=False, dest='outdir', action='store', + default='./{dataset_name}', help=output_help) + parser.add_argument('--ctr_lat', metavar='latitude', required=False, + dest='ctr_lat', action='store', type=float, + help='center latitude') + parser.add_argument('--ctr_lon', metavar='longitude', required=False, + dest='ctr_lon', action='store', type=float, + help='center longitude') parser.add_argument('--start', metavar='yyyy-mm-ddThh:mm:ss', + dest='start', action='store', help='UTC start time, e.g., 2017-07-04T08:00:00') parser.add_argument('--end', metavar='yyyy-mm-ddThh:mm:ss', + dest='end', action='store', help='UTC end time, e.g., 2017-07-04T09:00:00') parser.add_argument('--dx', metavar='km', - default=10.0, type=float, + dest='dx', action='store', default=10.0, type=float, help='approximate east-west grid spacing') parser.add_argument('--dy', metavar='km', - default=10.0, type=float, + dest='dy', action='store', default=10.0, type=float, help='approximate north-south grid spacing') parser.add_argument('--dt', metavar='seconds', - default=60.0, type=float, + dest='dt', action='store', default=60.0, type=float, help='frame duration') parser.add_argument('--width', metavar='distance in km', - default=400.0, + dest='width', action='store', default=400.0, type=float, help='total width of the grid') parser.add_argument('--height', metavar='distance in km', - default=400.0, + dest='height', action='store', default=400.0, type=float, help='total height of the grid') parser.add_argument('--nevents', metavar='minimum events per flash', - type=int, dest='min_events', default=1, + type=int, dest='min_events', action='store', default=1, help='minimum number of events per flash') parser.add_argument('--ngroups', metavar='minimum groups per flash', - type=int, dest='min_groups', default=1, + type=int, dest='min_groups', action='store', default=1, help='minimum number of groups per flash') - parser.add_argument('--subdivide-grid', metavar='sqrt(number of subgrids)', + parser.add_argument('--fixed_grid', + action='store_true', dest='fixed_grid', + help='grid to the geostationary fixed grid') + parser.add_argument('--subdivide_grid', metavar='sqrt(number of subgrids)', + 
action='store', dest='subdivide_grid', type=int, default=1, - help="subdivide the grid this many times along " - "each dimension") - parser.add_argument('--goes-position', - help="One of [east|west|test|auto]. " - "Requires '--goes-sector'.") - parser.add_argument('--goes-sector', - help="One of [full|conus|meso]. " - "Requires goes_position. If sector is " - "meso, ctr_lon and ctr_lat are interpreted as " - "the ctr_x and ctr_y of the fixed grid") - # parser.add_argument('--split-events', - # action='store_true', - # help='Split GLM event polygons when gridding') + help=("subdivide the grid this many times along " + "each dimension")) + parser.add_argument('--goes_position', default='none', + action='store', dest='goes_position', + help=("One of [east|west|test]. " + "Also requires goes_sector.")) + parser.add_argument('--goes_sector', default='none', + action='store', dest='goes_sector', + help=("One of [full|conus|meso]. " + "Also requires goes_position. If sector is " + "meso, ctr_lon and ctr_lat are interpreted as " + "the ctr_x and ctr_y of the fixed grid. " + "Omit if you are creating a fully custom grid " + "with --width and --height arguments.")) + parser.add_argument('--corner_points', metavar='filename.pickle', + action='store', dest='corner_points', + help=("name of file containing a pickled " + "corner point lookup table")) + parser.add_argument('--split_events', dest='split_events', + action='store_true', + help='Split GLM event polygons when gridding') parser.add_argument('--ellipse', dest='ellipse_rev', default=-1, - type=int, + action='store', type=int, help='Lightning ellipse revision. -1 (default)=infer' ' from date in each GLM file, 0=value at launch,' ' 1=late 2018 revision') - parser.add_argument('--float-output', dest='output_scale_and_offset', + parser.add_argument('--float_output', dest='output_scale_and_offset', + default=True, action='store_false', help='write all output variables as floating point') - parser.add_argument(dest='filenames', metavar='filename', nargs='+') + parser.add_argument('--lma', dest='is_lma', + action='store_true', + help='grid LMA h5 files instead of GLM data') + # parser.add_argument('-v', dest='verbose', action='store_true', + # help='verbose mode') return parser +##### END PARSING ##### + +import numpy as np +import subprocess, glob +from datetime import datetime, timedelta +import os +from functools import partial + +import logging +class MyFormatter(logging.Formatter): + """ Custom class to allow logging of microseconds""" + converter=datetime.fromtimestamp + def formatTime(self, record, datefmt=None): + ct = self.converter(record.created) + if datefmt: + s = ct.strftime(datefmt) + else: + t = ct.strftime("%Y-%m-%d %H:%M:%S") + s = "%s,%03d" % (t, record.msecs) + return s +logoutfile = logging.FileHandler("make_GLM_grid.log") +formatter = MyFormatter(fmt='%(levelname)s %(asctime)s %(message)s', + datefmt='%Y-%m-%dT%H:%M:%S.%f') +logoutfile.setFormatter(formatter) +logging.basicConfig(handlers = [logoutfile], + level=logging.DEBUG) + +# Separate from log setup - actually log something specific to this module. 
+log = logging.getLogger(__name__) +log.info("Starting GLM Gridding") def nearest_resolution(args): """ Uses args.dx to find the closest resolution specified by the @@ -104,47 +148,14 @@ def nearest_resolution(args): resln = '{0:4.1f}km'.format(closest_resln).replace(' ', '') return resln - -# if provided "auto" position, we determine the sensor from the filename -def get_goes_position(filenames): - if all("_G16_" in f for f in filenames): - return "east" - if all("_G17_" in f for f in filenames): - return "west" - - # we require that all files are from the same sensor and raise an exception if not - raise ValueError("position 'auto' but could not determine position - did you provide a mix of satellites?") - - -def get_start_end(filenames, start_time=None, end_time=None): - """Compute start and end time of data based on filenames.""" - base_filenames = [os.path.basename(p) for p in filenames] - try: - filename_infos = [parse_glm_filename(f) for f in base_filenames] - # opsenv, algorithm, platform, start, end, created = parse_glm_filename(f) - filename_starts = [info[3] for info in filename_infos] - filename_ends = [info[4] for info in filename_infos] - except ValueError: - filename_starts = None - filename_ends = None - - if args.start is not None: - start_time = datetime.strptime(args.start, '%Y-%m-%dT%H:%M:%S') - elif filename_starts is not None: - start_time = min(filename_starts) - - if args.end is not None: - end_time = datetime.strptime(args.end, '%Y-%m-%dT%H:%M:%S') - elif filename_ends is not None: - end_time = max(filename_ends) - - if start_time is None or end_time is None: - raise ValueError("Could not determine start/end time") - - return start_time, end_time - - def grid_setup(args): + from lmatools.grid.make_grids import write_cf_netcdf_latlon, write_cf_netcdf_noproj, write_cf_netcdf_fixedgrid + from lmatools.grid.make_grids import dlonlat_at_grid_center, grid_h5flashfiles + from glmtools.grid.make_grids import grid_GLM_flashes + from glmtools.io.glm import parse_glm_filename + from lmatools.io.LMA_h5_file import parse_lma_h5_filename + from lmatools.grid.fixed import get_GOESR_grid, get_GOESR_coordsys + # When passed None for the minimum event or group counts, the gridder will skip # the check, saving a bit of time. 
min_events = int(args.min_events) @@ -154,120 +165,168 @@ def grid_setup(args): if min_groups <= 1: min_groups = None + if args.is_lma: + filename_parser = parse_lma_h5_filename + start_idx = 0 + end_idx = 1 + else: + filename_parser = parse_glm_filename + start_idx = 3 + end_idx = 4 + + glm_filenames = args.filenames + base_filenames = [os.path.basename(p) for p in glm_filenames] try: - start_time, end_time = get_start_end(args.filenames, args.start, args.end) + filename_infos = [filename_parser(f) for f in base_filenames] + # opsenv, algorithm, platform, start, end, created = parse_glm_filename(f) + filename_starts = [info[start_idx] for info in filename_infos] + filename_ends = [info[end_idx] for info in filename_infos] except ValueError: - log.error("Non-standard filenames provided, use --start and --end to specify data times.") - raise - - date = datetime(start_time.year, start_time.month, start_time.day) - os.makedirs(args.output_dir, exist_ok=True) - output = os.path.join(args.output_dir, "{dataset_name}") # GLMTools expects a template in addition to the path - proj_name = 'geos' + log.error("One or more GLM files has a non-standard filename.") + log.error("Assuming that --start and --end have been passed directly.") - if args.goes_position == "auto": - # auto-create the goes-position from the input filename - args.goes_position = get_goes_position(args.filenames) + from glmtools.io.glm import parse_glm_filename + if args.start is not None: + start_time = datetime.strptime(args.start[:19], '%Y-%m-%dT%H:%M:%S') + else: + start_time = min(filename_starts) + if args.end is not None: + end_time = datetime.strptime(args.end[:19], '%Y-%m-%dT%H:%M:%S') + else: + # Used to use max(filename_ends), but on 27 Oct 2020, the filename + # ends started to report the time of the last event in the file, + # causing a slight leakage (usually less than a second) into the + # next minute. This caused two minutes of grids to be produced for every + # three twenty second files passed to this script. + # Instead, we now assume every LCFA file is 20 s long, beginning with + # the start time. No doubt in the future we will see filenames that no + # longer start on an even minute boundary. 
+ end_time = max(filename_starts) + timedelta(0, 20) - if args.goes_position is not None and args.goes_sector is not None: - resln = nearest_resolution(args) - view = get_GOESR_grid(position=args.goes_position, - view=args.goes_sector, - resolution=resln) - nadir_lon = view['nadir_lon'] - dx = dy = view['resolution'] - nx, ny = view['pixelsEW'], view['pixelsNS'] - geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon) + date = datetime(start_time.year, start_time.month, start_time.day) - if 'centerEW' in view: - x_ctr, y_ctr = view['centerEW'], view['centerNS'] - elif args.goes_sector == 'meso': - # use ctr_lon, ctr_lat to get the center of the mesoscale FOV + outpath = args.outdir + + if args.fixed_grid: + proj_name = 'geos' + + if (args.goes_position != 'none') & (args.goes_sector != 'none'): + resln = nearest_resolution(args) + view = get_GOESR_grid(position=args.goes_position, + view=args.goes_sector, + resolution=resln) + nadir_lon = view['nadir_lon'] + dx = dy = view['resolution'] + nx, ny = view['pixelsEW'], view['pixelsNS'] + geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon) + + if 'centerEW' in view: + x_ctr, y_ctr = view['centerEW'], view['centerNS'] + elif args.goes_sector == 'meso': + # use ctr_lon, ctr_lat to get the center of the mesoscale FOV + x_ctr, y_ctr, z_ctr = geofixcs.fromECEF( + *grs80lla.toECEF(args.ctr_lon, args.ctr_lat, 0.0)) + elif (args.goes_position != 'none') & (args.goes_sector == 'none'): + # Requires goes_position, a center, and a width. Fully flexible + # in resolution, i.e., doesn't slave it to one of the GOES-R specs + view = get_GOESR_grid(position=args.goes_position, + view='full', + resolution='1.0km') + nadir_lon = view['nadir_lon'] + dx1km = dy1km = view['resolution'] + geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon) x_ctr, y_ctr, z_ctr = geofixcs.fromECEF( - *grs80lla.toECEF(args.ctr_lon, args.ctr_lat, 0.0)) - elif args.goes_position is not None and args.goes_sector is None: - # Requires goes_position, a center, and a width. Fully flexible - # in resolution, i.e., doesn't slave it to one of the GOES-R specs - view = get_GOESR_grid(position=args.goes_position, - view='full', - resolution='1.0km') - nadir_lon = view['nadir_lon'] - dx1km = dy1km = view['resolution'] + *grs80lla.toECEF(args.ctr_lon, args.ctr_lat, 0.0)) + + # Convert the specified resolution in km given by args.dx to + # a delta in fixed grid coordinates using the 1 km delta from the + # GOES-R PUG. + dx, dy = args.dx * dx1km, args.dy * dy1km + nx, ny = int(args.width/args.dx), int(args.height/args.dy) + else: + raise ValueError("Gridding on the fixed grid requires " + "goes_position and dx. For goes_sector='meso', also specify " + "ctr_lon and ctr_lat. 
Without goes_sector, also include width " + "and height.") + # Need to use +1 here to convert to xedge, yedge expected by gridder + # instead of the pixel centroids that will result in the final image + nx += 1 + ny += 1 + x_bnd = (np.arange(nx, dtype='float') - (nx)/2.0)*dx + x_ctr + 0.5*dx + y_bnd = (np.arange(ny, dtype='float') - (ny)/2.0)*dy + y_ctr + 0.5*dy + log.debug(("initial x,y_ctr", x_ctr, y_ctr)) + log.debug(("initial x,y_bnd", x_bnd.shape, y_bnd.shape)) + x_bnd = np.asarray([x_bnd.min(), x_bnd.max()]) + y_bnd = np.asarray([y_bnd.min(), y_bnd.max()]) + geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon) - x_ctr, y_ctr, z_ctr = geofixcs.fromECEF( - *grs80lla.toECEF(args.ctr_lon, args.ctr_lat, 0.0)) + ctr_lon, ctr_lat, ctr_alt = grs80lla.fromECEF( + *geofixcs.toECEF(x_ctr, y_ctr, 0.0)) + fixed_grid = geofixcs + log.debug((x_bnd, y_bnd, dx, dy, nx, ny)) - # Convert the specified resolution in km given by args.dx to - # a delta in fixed grid coordinates using the 1 km delta from the - # GOES-R PUG. - dx, dy = args.dx * dx1km, args.dy * dy1km - nx, ny = int(args.width / args.dx), int(args.height / args.dy) + output_writer = partial(write_cf_netcdf_fixedgrid, nadir_lon=nadir_lon) else: - raise ValueError("Gridding on the fixed grid requires " - "goes_position and dx. For goes_sector='meso', also specify " - "ctr_lon and ctr_lat. Without goes_sector, also include width " - "and height.") - # Need to use +1 here to convert to xedge, yedge expected by gridder - # instead of the pixel centroids that will result in the final image - nx += 1 - ny += 1 - x_bnd = (np.arange(nx, dtype='float') - (nx) / 2.0) * dx + x_ctr + 0.5 * dx - y_bnd = (np.arange(ny, dtype='float') - (ny) / 2.0) * dy + y_ctr + 0.5 * dy - log.debug(("initial x,y_ctr", x_ctr, y_ctr)) - log.debug(("initial x,y_bnd", x_bnd.shape, y_bnd.shape)) - x_bnd = np.asarray([x_bnd.min(), x_bnd.max()]) - y_bnd = np.asarray([y_bnd.min(), y_bnd.max()]) - - geofixcs, grs80lla = get_GOESR_coordsys(sat_lon_nadir=nadir_lon) - ctr_lon, ctr_lat, ctr_alt = grs80lla.fromECEF( - *geofixcs.toECEF(x_ctr, y_ctr, 0.0)) - fixed_grid = geofixcs - log.debug((x_bnd, y_bnd, dx, dy, nx, ny)) - - output_writer = partial(write_cf_netcdf_fixedgrid, nadir_lon=nadir_lon) - - gridder = grid_GLM_flashes - output_filename_prefix = 'GLM' - grid_kwargs = dict(proj_name=proj_name, - base_date=date, do_3d=False, - dx=dx, dy=dy, frame_interval=float(args.dt), - x_bnd=x_bnd, y_bnd=y_bnd, - ctr_lat=ctr_lat, ctr_lon=ctr_lon, outpath=output, - min_points_per_flash=min_events, - output_writer=output_writer, subdivide=args.subdivide_grid, - output_filename_prefix=output_filename_prefix, - output_kwargs={'scale_and_offset': args.output_scale_and_offset}, - spatial_scale_factor=1.0) - - #if args.fixed_grid: - # grid_kwargs['fixed_grid'] = True - # grid_kwargs['nadir_lon'] = nadir_lon - # if args.split_events: - grid_kwargs['clip_events'] = True + # Default + proj_name='latlong' + output_writer = write_cf_netcdf_latlon + ctr_lat = float(args.ctr_lat) + ctr_lon = float(args.ctr_lon) + dx_km=float(args.dx)*1.0e3 + dy_km=float(args.dy)*1.0e3 + width, height = 1000.0*float(args.width), 1000.0*float(args.height) + x_bnd_km = (-width/2.0, width/2.0) + y_bnd_km = (-height/2.0, height/2.0) + dx, dy, x_bnd, y_bnd = dlonlat_at_grid_center(ctr_lat, ctr_lon, + dx=dx_km, dy=dy_km, + x_bnd = x_bnd_km, y_bnd = y_bnd_km ) + + # tuples of the corners + corners = np.vstack([(x_bnd[0], y_bnd[0]), (x_bnd[0], y_bnd[1]), + (x_bnd[1], y_bnd[1]), (x_bnd[1], y_bnd[0])]) + # print(x_bnd, 
y_bnd) + + if args.is_lma: + gridder = grid_h5flashfiles + output_filename_prefix='LMA' + else: + gridder = grid_GLM_flashes + output_filename_prefix='GLM' + + grid_kwargs=dict(proj_name=proj_name, + base_date = date, do_3d=False, + dx=dx, dy=dy, frame_interval=float(args.dt), + x_bnd=x_bnd, y_bnd=y_bnd, + ctr_lat=ctr_lat, ctr_lon=ctr_lon, outpath = outpath, + min_points_per_flash = min_events, + output_writer = output_writer, subdivide=args.subdivide_grid, + output_filename_prefix=output_filename_prefix, + output_kwargs={'scale_and_offset':args.output_scale_and_offset}, + spatial_scale_factor=1.0) + + if args.fixed_grid: + grid_kwargs['fixed_grid'] = True + grid_kwargs['nadir_lon'] = nadir_lon + if args.split_events: + grid_kwargs['clip_events'] = True if min_groups is not None: grid_kwargs['min_groups_per_flash'] = min_groups - grid_kwargs['energy_grids'] = ('total_energy',) - if (proj_name == 'pixel_grid') or (proj_name == 'geos'): + if args.is_lma: + grid_kwargs['energy_grids'] = True + else: + grid_kwargs['energy_grids'] = ('total_energy',) + if (proj_name=='pixel_grid') or (proj_name=='geos'): grid_kwargs['pixel_coords'] = fixed_grid grid_kwargs['ellipse_rev'] = args.ellipse_rev - return gridder, args.filenames, start_time, end_time, grid_kwargs - + # if args.corner_points: + # grid_kwargs['corner_pickle'] = args.corner_points + return gridder, glm_filenames, start_time, end_time, grid_kwargs if __name__ == '__main__': - import sys parser = create_parser() args = parser.parse_args() - # Configure logging - levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] - logging.basicConfig(level=levels[min(3, args.verbosity)], filename=args.log_fn) - if levels[min(3, args.verbosity)] > logging.DEBUG: - import warnings - warnings.filterwarnings("ignore") - log.info("Starting GLM Gridding") - log.debug("Starting script with: %s", sys.argv) - from multiprocessing import freeze_support freeze_support() gridder, glm_filenames, start_time, end_time, grid_kwargs = grid_setup(args) - gridder(glm_filenames, start_time, end_time, **grid_kwargs) + gridder(glm_filenames, start_time, end_time, **grid_kwargs) \ No newline at end of file diff --git a/gridded_glm/libexec/_minute_gridder.py b/gridded_glm/libexec/_minute_gridder.py index 4b4709055d40849f64fd8912291a63c1358f2aab..cac20871b10ace798ba87c7c1e11ee9e48eaa33c 100644 --- a/gridded_glm/libexec/_minute_gridder.py +++ b/gridded_glm/libexec/_minute_gridder.py @@ -1,7 +1,16 @@ #!/usr/bin/env python3 # Based on https://github.com/deeplycloudy/glmtools/blob/master/examples/grid/make_GLM_grids.py -parse_desc = """Grid the past X minutes of GLM flash data, given a single input file. +parse_desc = """Create one minute NetCDF4 grids (and, optionally, AWIPS-compatible tiles) from GLM flash data. 
+ +Example usage:\n + cspp-geo-gglm.sh \\ + --goes-sector conus \\ + --create-tiles \\ + -vv \\ + OR_GLM-L2-LCFA_G17_s20182750032000_e20182750032200_c20182750032225.nc \\ + OR_GLM-L2-LCFA_G17_s20182750032200_e20182750032400_c20182750032426.nc \\ + OR_GLM-L2-LCFA_G17_s20182750032400_e20182750033000_c20182750033025.nc """ import numpy as np @@ -26,24 +35,21 @@ log = logging.getLogger(__name__) def create_parser(): import argparse - parser = argparse.ArgumentParser(description=parse_desc) + parser = argparse.ArgumentParser(description=parse_desc, formatter_class=argparse.RawTextHelpFormatter) # RawTextHelpFormatter preserves our newlines in the example usage message parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0, - help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)') + help="each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG\n" + "(default: ERROR)") parser.add_argument('-l', '--log', dest="log_fn", default=None, - help="specify the log filename") - # from Requirements: "Output is Gridded GLM in the native glmtools NetCDF4 format, with a user option to produce AWIPS-compatible NetCDF tiles as described below" - parser.add_argument('-o', '--output-dir', metavar='output directory', - default=os.getcwd()) - parser.add_argument('--goes-sector', default="full", - help="One of [full|conus|meso]. " - "Requires goes_position. If sector is " - "meso, ctr_lon and ctr_lat are interpreted as " - "the ctr_x and ctr_y of the fixed grid") - parser.add_argument('--goes-position', default="auto", - help="One of [east|west|test|auto]. " - "Requires '--goes-sector'.") + help="specify a log filename.\n" + "(default: print to screen).") + parser.add_argument('-o', '--output-dir', metavar='OUTPUT_DIR', + default=os.getcwd(), help="output directory (default: use current directory)") + parser.add_argument('--goes-sector', default="full", choices=['full', 'conus', 'meso'], + help="If sector is meso, ctr_lon and ctr_lat \n" + "are interpreted as the ctr_x and ctr_y of the fixed grid.\n" + "(default: full)") parser.add_argument("-t", "--create-tiles", default=False, action='store_true', - help="create AWIPS-compatible tiles") # FIXME: improve this help text + help="create AWIPS-compatible tiles (default: off)") parser.add_argument('--ctr-lat', metavar='latitude', type=float, help='center latitude (required for meso)') parser.add_argument('--ctr-lon', metavar='longitude', @@ -52,38 +58,6 @@ def create_parser(): parser.add_argument(dest='filenames', metavar='filename', nargs='+') return parser -""" -old arguments for reference - -FIXME: remove this whole comment once everything is working - - parser.add_argument('--dx', metavar='km', - default=10.0, type=float, - help='approximate east-west grid spacing') - parser.add_argument('--dy', metavar='km', - default=10.0, type=float, - help='approximate north-south grid spacing') - parser.add_argument('--dt', metavar='seconds', - default=60.0, type=float, - help='frame duration') - parser.add_argument('--width', metavar='distance in km', - default=400.0, - type=float, help='total width of the grid') - parser.add_argument('--height', metavar='distance in km', - default=400.0, - type=float, help='total height of the grid') - parser.add_argument('--nevents', metavar='minimum events per flash', - type=int, dest='min_events', default=1, - help='minimum number of events per flash') - parser.add_argument('--ngroups', metavar='minimum groups per flash', - type=int, 
dest='min_groups', default=1, - help='minimum number of groups per flash') - parser.add_argument('--subdivide-grid', metavar='sqrt(number of subgrids)', - type=int, default=1, - help="subdivide the grid this many times along " - "each dimension") -""" - def get_resolution(args): closest_resln = 2.0 # hardcoding resolution to 2.0 for now. see nearest_resolution in make_glm_grids for how we could expose this if we change our minds. @@ -99,7 +73,7 @@ def get_goes_position(filenames): return "west" # we require that all files are from the same sensor and raise an exception if not - raise ValueError("position 'auto' but could not determine position - did you provide a mix of satellites?") + raise ValueError("could not determine GOES position - did you provide a mix of satellites?") def get_start_end(filenames, start_time=None, end_time=None): @@ -145,10 +119,7 @@ def grid_setup(args, work_dir=os.getcwd()): outputpath = os.path.join(work_dir, "{dataset_name}") # GLMTools expects a template in addition to the path - if args.goes_position == "auto": - goes_position = get_goes_position(args.filenames) - else: - goes_position = args.goes_position + goes_position = get_goes_position(args.filenames) resln = get_resolution(args) view = get_GOESR_grid(position=goes_position, @@ -222,7 +193,8 @@ if __name__ == '__main__': # Configure logging levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] - logging.basicConfig(level=levels[min(3, args.verbosity)], filename=args.log_fn) + clamp = lambda n, minn, maxn: max(min(maxn, n), minn) # used below to keep us from going off the end of the logging levels + logging.basicConfig(level=levels[clamp(args.verbosity, 0, len(levels)-1)], filename=args.log_fn) if levels[min(3, args.verbosity)] > logging.DEBUG: import warnings warnings.filterwarnings("ignore") @@ -269,8 +241,8 @@ if __name__ == '__main__': source_name="", # You could probably make source_name an empty string. I think it is required by the writer for legacy reasons but isn't actually used for the glm output base_dir=tempdir_path, # base_dir is the output directory. I think blank is the same as current directory. tile_size=(506, 904), # tile_size is set to the size of the GLMF sample tiles we were given and should match the full disk ABI tiles which is what they wanted - check_categories=False) # check_categories is there because of that issue I mentioned where DQF is all valid all the time so there is no way to detect empty tiles unless we ignore the "category" products - + check_categories=False, # check_categories is there because of that issue I mentioned where DQF is all valid all the time so there is no way to detect empty tiles unless we ignore the "category" products + compress=True) # pick up output files from the tempdir # output looks like: OR_GLM-L2-GLMC-M3_G17_T03_20200925160040.nc
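
The -o template described in output_help is expanded with ordinary Python string formatting; the doubled %% in the help text is only argparse escaping for a literal %. A minimal sketch of the expansion, using placeholder values (the real keys are supplied by glmtools' write_goes_imagery, which is not imported here):

# Sketch only: shows how the -o path template expands. The key names
# (start_time, dataset_name) appear in the patch; the values are placeholders.
from datetime import datetime

template = "{start_time:%Y/%b/%d}/{dataset_name}"
path = template.format(start_time=datetime(2020, 11, 5, 15, 29),
                       dataset_name="example_gridded_glm.nc")
print(path)  # -> 2020/Nov/05/example_gridded_glm.nc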
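
The rewritten end-time logic in _make_glm_grids.py no longer trusts the _e field of the filenames (which, per the comment in the patch, began reporting the time of the last event in late October 2020) and instead assumes every LCFA file covers 20 seconds from its start time. A standalone sketch of that behaviour, with a hypothetical lcfa_start() helper standing in for glmtools' parse_glm_filename:

# Sketch, not part of the patch: derive start/end times from GLM LCFA filenames
# assuming each file spans exactly 20 seconds, as the new code does.
import re
from datetime import datetime, timedelta

def lcfa_start(filename):
    """Hypothetical helper: parse the sYYYYJJJHHMMSS field of a GLM filename."""
    match = re.search(r"_s(\d{13})\d_", filename)
    if match is None:
        raise ValueError("non-standard GLM filename: " + filename)
    return datetime.strptime(match.group(1), "%Y%j%H%M%S")

files = [
    "OR_GLM-L2-LCFA_G16_s20203101529000_e20203101529205_c20203101529215.nc",
    "OR_GLM-L2-LCFA_G16_s20203101529200_e20203101529405_c20203101529430.nc",
    "OR_GLM-L2-LCFA_G16_s20203101529400_e20203101530004_c20203101530021.nc",
]
starts = [lcfa_start(f) for f in files]
start_time = min(starts)
end_time = max(starts) + timedelta(seconds=20)  # 20 s per file, ignore the _e field
print(start_time, end_time)  # 2020-11-05 15:29:00 -> 2020-11-05 15:30:00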
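
In the fixed-grid branch, nx and ny are first computed as pixel counts and then incremented by one because the gridder expects cell edges (x_bnd, y_bnd) rather than pixel centres. A small numeric illustration with arbitrary spacing and centre (not GOES-R values):

# Sketch: the +1 pixel-count to edge-count conversion used before building x_bnd.
import numpy as np

dx = 2.0     # arbitrary spacing for illustration
x_ctr = 0.0  # grid centre
nx = 4       # number of pixels in the final image

nx += 1      # one more edge than pixels
x_bnd = (np.arange(nx, dtype='float') - nx / 2.0) * dx + x_ctr + 0.5 * dx
print(x_bnd)                                    # [-4. -2.  0.  2.  4.] -> five edges bounding four 2-unit pixels
x_bnd = np.asarray([x_bnd.min(), x_bnd.max()])  # the gridder only needs the outer bounds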
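
_minute_gridder.py now clamps the -v count before using it to index the list of logging levels, so any number of -v flags maps onto a valid level. A quick sketch of that mapping:

# Sketch: repeated -v flags mapped onto logging levels via the clamp helper.
import logging

levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
clamp = lambda n, minn, maxn: max(min(maxn, n), minn)

for verbosity in (0, 1, 2, 3, 7):
    level = levels[clamp(verbosity, 0, len(levels) - 1)]
    print(verbosity, logging.getLevelName(level))
# 0 ERROR, 1 WARNING, 2 INFO, 3 DEBUG, 7 DEBUG (clamped rather than an IndexError)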