diff --git a/README.md b/README.md
index 493a921092311a017a5cabe19cbc27bc620efc41..b1d8241325374433a3b1a85b421657f30e7bd02b 100644
--- a/README.md
+++ b/README.md
@@ -110,13 +110,14 @@ Note: Within the same file, values farther down take priority.
 
 ### Referencing other options and custom options
 
-Users may create their own options that don't conflict with predefined available options (see "Available Options" section). Within string options, users may also reference any other set string option by wrapping its key in brackets. Referencing undefined options or recursive referencing will result in errors. See an example yaml config (from ci-paths-override-sample.yaml) below:
+Users may create their own options that don't conflict with predefined available options (see "Available Options" section). Within string options, users may also reference any other set string option by wrapping its key in brackets. Referencing undefined options or recursive referencing will result in errors. See an example yaml config (from `ci-paths-override-sample.yaml`) below:
 
 ```
 inroot: '/data/PLTG/LightningCast_testdata/' # custom option
 outdir: '{inroot}/test-output' # Predefined option referencing custom option
 logfile: '{outdir}/logs/lightning.log' # Predefined option referencing other predefined option
 make_json: true
+awips_nc: 3
 datadir_patt: '{inroot}' # Predefined option referencing custom option
 ```
 
@@ -160,7 +161,8 @@ For some user options, users may use [format codes](https://docs.python.org/3/li
 | colors                	| ['#0000FF', '#00FFFF',<br> '#00FF00', '#CC00FF'] 	| array[string] 	| List of colors for prob. contours. Up to 8. Can be hex or standard. Default = blue cyan green magenta. <br>Ignored if contour_cmap is set.                                                                                                                                                                                                      	|                      	|
 | lightning_meteograms  	| false                                            	| boolean       	| Create meteograms of LightningCast probs and GLM and ENI flashes for static locations. Upload the updated meteogram data <br>to a web database.                                                                                                                                                                                                 	|                      	|
 | county_map            	| false                                            	| boolean       	| Use county map in image creation.                                                                                                                                                                                                                                                                                                               	|                      	|
-| savegrid              	| 0                                                	| int           	| Use county map Write a netcdf of the LC probs to outdir. 1 = w/o plax correction; 2 = w/ plax correction; 3 = save both<br>plax-corrected and uncorrected grids. Default = 0 (i.e., don't write any netcdfs).                                                                                                                                   	|                      	|
+| netcdf              	| 0                                                	| int           	| Write a netcdf of the LC probs in native geostationary format. 1 = w/o plax correction; 2 = w/ plax correction; 3 = save both plax-corrected and uncorrected grids. Default = 0 (i.e., don't write netcdf)                                                                                                                                 	|                      	|
+| awips_nc              	| 0                                                	| int           	| Write a netcdf of the LC probs in AWIPS-compatible format. 1 = w/o plax correction; 2 = w/ plax correction; 3 = save both plax-corrected and uncorrected grids. Default = 0 (i.e., don't write netcdf)                                                                                                                                 	|                      	|
 | fire_event_ts         	| false                                            	| boolean       	| Post-processing that grabs probs and GLM flashes and associates them with current active fire events                                                                                                                                                                                                                                            	|                      	|
 | pickle_preds_labs     	| false                                            	| boolean       	| Save the predictions and labels in a pickle file for later evaluation.                                                                                                                                                                                                                                                                          	|                      	|
 | timeseries            	| []                                               	| array[float]  	| A lat/lon pair to create a time series with. E.g., [43.82, -76.40]. The optional 3rd arg is a flag to make an image at every<br>time. E.g., [43.82, -76.40, 1]                                                                                                                                                                            	|                      	|
diff --git a/TF.yml b/TF.yml
deleted file mode 100644
index cd39d5f5cebc942fe69a33139dfc7ac18acaa66f..0000000000000000000000000000000000000000
--- a/TF.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-name: TF
-channels:
-- conda-forge
-- defaults
-dependencies:
-- tensorflow>=2.10.0
-- zarr
-- lightgbm
-- pyresample
-- cython
-- pygrib
-- h5py
-- hdf5
-- netcdf4
-- numpy
-- pytables
-- pandas
-- xarray
-- scikit-learn
-- scikit-image
-- imageio
-- statsmodels
-- scipy
-- ipython
-- matplotlib
-- pyproj
-- shapely
-- shap
-- descartes
-- geojson
-#- python=3.9
-- six
-- pip
-- pyclipper
-- cartopy #=0.20.0
-- rasterio
-- pip:
-  - pika #for amqpfind
-  - geojson-rewind
-  - git+https://github.com/deeplycloudy/lmatools.git
-  - git+https://github.com/deeplycloudy/stormdrain.git
-  - git+https://gitlab.ssec.wisc.edu/rayg/yaml_cascade.git@v0.9.0
-  - tensorflow-addons
-  - mysql.connector
-  - satpy
-  - seaborn
-  - pyorbital
-  - metpy
diff --git a/build/requirements.txt b/build/requirements.txt
index 08fdd1ac7b7112989f1fbc73e0cc2b1e52cd82f4..30b06654f8839ed95ca86200a581b7b8b4d460cb 100644
--- a/build/requirements.txt
+++ b/build/requirements.txt
@@ -36,5 +36,6 @@ statsmodels
 tables
 tensorflow==2.15
 tensorflow-addons
+timezonefinder[pytz]
 xarray
-zarr
\ No newline at end of file
+zarr
diff --git a/lightningcast/__version__.py b/lightningcast/__version__.py
index 13bf7a80a8e2fc39a728d6f7316443503112ecb0..f848d3baeaa4bbafc6dba916e7faa7c259cc9b91 100644
--- a/lightningcast/__version__.py
+++ b/lightningcast/__version__.py
@@ -1,3 +1,3 @@
 # __version__.py
 
-__version__ = "1.2.1"
+__version__ = "1.3.2"
diff --git a/lightningcast/aggregate_spatial.py b/lightningcast/aggregate_spatial.py
index 1fba50778e2b607529705d2ad65c698d07f72bdb..0cbe1081960ef9e90a78abea1531ee198b199aca 100644
--- a/lightningcast/aggregate_spatial.py
+++ b/lightningcast/aggregate_spatial.py
@@ -1,18 +1,22 @@
 import numpy as np
 import glob
-from lightningcast import utils
 import netCDF4
 import os, sys
 from datetime import datetime, timedelta
+import inspect
+
+current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
+sys.path.insert(0, os.path.dirname(current_dir))
+
 from lightningcast import utils
 
-bird = "goes_east"
+bird = "goes_west"
 
-globstr = f"/ships19/grain/jcintineo/GLM/{bird}/FD/2020/20200706*/validation/spatial_counts.nc"
+globstr = f"/ships22/grain/probsevere/LC/T2O_validation/{bird}_conus/all/month??/spatial_counts*.nc"
 # globstr = f'/ships19/grain/jcintineo/GLM/{bird}/FD/2020*-*/spatial_counts.nc'
 files = np.sort(glob.glob(globstr))
-outdir = f"/ships19/grain/jcintineo/GLM/{bird}/FD/2020/20200706/validation/"  # 202001-12/' #os.environ['PWD']
-utils.mkdir_p(outdir)
+outdir = f"/ships22/grain/probsevere/LC/T2O_validation/{bird}_conus/all/" 
+os.makedirs(outdir, exist_ok=True)
 
 all_samples = 0
 for ii, f in enumerate(files):
diff --git a/lightningcast/check_products.py b/lightningcast/check_products.py
new file mode 100644
index 0000000000000000000000000000000000000000..e20d01af2540f19215962630a9e883054cc5b7e8
--- /dev/null
+++ b/lightningcast/check_products.py
@@ -0,0 +1,106 @@
+import glob
+import os
+import argparse
+import subprocess
+import time
+from datetime import datetime
+pltg_data = os.environ['PLTG_DATA']
+
+parser = argparse.ArgumentParser(
+    description="Checks the status of real-time products (e.g., netCDF, json) and "
+    + "sends an email if the latency is too long."
+)
+parser.add_argument(
+    "--goes_east",
+    help="Check goes_east sectors (RadM1, RadM2, RadC, RadF).",
+    action="store_true",
+)
+parser.add_argument(
+    "--goes_west",
+    help="Check goes_west sectors (RadM1, RadM2, RadC, RadF).",
+    action="store_true",
+)
+parser.add_argument(
+    "--himawari",
+    help="Check himawari sectors (FLDK).",
+    action="store_true",
+)
+parser.add_argument(
+    "--email",
+    help="Send problem messages to this email address. Default=john.cintineo@noaa.gov.",
+    type=str,
+    default='john.cintineo@noaa.gov'
+)
+
+args = parser.parse_args()
+
+subject = 'LightningCast processing problem'
+
+satellites = []
+if args.goes_east: satellites.append('goes_east')
+if args.goes_west: satellites.append('goes_west')
+if args.himawari: satellites.append('ahi_japan')
+
+while True:
+
+    latest_date = sorted(glob.glob(f'{pltg_data}/products/20*'))[-1]
+    body = ""
+    nowsecs = int(time.time())
+    time_thresh_FD = 40 # minutes
+    old_time_FD = nowsecs - 60 * time_thresh_FD
+    time_thresh_RadC = 15 # minutes
+    old_time_RadC = nowsecs - 60 * time_thresh_RadC
+    time_thresh_RadM = 5 # minutes
+    old_time_RadM = nowsecs - 60 * time_thresh_RadM
+
+    for sat in satellites:
+
+        sectors = sorted(glob.glob(f'{latest_date}/{sat}/*'))
+
+        for sector in sectors:
+            products = glob.glob(f'{sector}/*')
+
+            if 'RadM1' in sector or 'RadM2' in sector:
+                old_time = old_time_RadM
+                time_thresh = time_thresh_RadM
+            elif 'RadC' in sector:
+                old_time = old_time_RadC
+                time_thresh = time_thresh_RadC
+            else:
+                old_time = old_time_FD
+                time_thresh = time_thresh_FD
+
+            for product in products:
+                if 'json' in product:
+                    json_prods = glob.glob(f'{product}/*')
+                    for json_prod in json_prods:
+                        latest_file = sorted(glob.glob(f'{json_prod}/*'))[-1]
+                else:
+                    latest_file = sorted(glob.glob(f'{product}/*'))[-1]
+
+                #mtime_dt = datetime.fromtimestamp(os.path.getmtime(latest_file)) 
+                mtime = os.path.getmtime(latest_file)
+      
+                if mtime < old_time:
+                    body += f"{latest_file} is more than {time_thresh} minutes old.\n"
+
+        body += '\n' 
+
+
+    if len(body) > 20:
+       
+        of = open(f'{pltg_data}/err_mail.txt','w')
+        of.write(body)
+        of.close()
+        
+        #Below is the same as:
+        # printf $BODY | /bin/email -s $SUBJECT $ADDRESS      
+ 
+        cmd1 = subprocess.run(['printf', f'{body}'], capture_output=True).stdout
+        decoded_cmd1 = cmd1.decode("utf-8")
+        cmd2 = subprocess.run(['/bin/mail','-s',f'{subject}',f'{args.email}'], input=decoded_cmd1, text=True)
+      
+        print('sent email')
+
+    time.sleep(5*60)
+
diff --git a/lightningcast/combine_all_preds_labs.py b/lightningcast/combine_all_preds_labs.py
new file mode 100644
index 0000000000000000000000000000000000000000..08df6d633516c5c3be74de959856a0eb4d6b748d
--- /dev/null
+++ b/lightningcast/combine_all_preds_labs.py
@@ -0,0 +1,34 @@
+import pickle
+import numpy as np
+import glob
+import os
+
+rootdir = '/ships22/grain/probsevere/LC/T2O_validation/goes_west_conus/'
+pickles = glob.glob(f'{rootdir}/???2023/all_preds_labs.pkl')
+
+outdict = {}
+
+for ii,pick in enumerate(pickles):
+    print(pick)
+    dd = pickle.load(open(pick,'rb'))
+
+    if ii == 0:
+
+        outdict['stride'] = dd['stride']
+        outdict['preds'] = np.copy(dd['preds'])
+        outdict['labels'] = np.copy(dd['labels'])
+        outdict['files'] = dd['files'].copy()
+        outdict['datetimes'] = dd['datetimes'].copy()
+
+    else:
+
+        outdict['preds'] = np.vstack((dd['preds'], outdict['preds']))
+        outdict['labels'] = np.vstack((dd['labels'], outdict['labels']))
+        outdict['files'] += dd['files']
+        outdict['datetimes'] += dd['datetimes']
+
+    print(outdict['preds'].shape)
+
+os.makedirs(f'{rootdir}/all/', exist_ok=True)
+
+pickle.dump(outdict, open(f'{rootdir}/all/all_preds_labs.pkl','wb'))
diff --git a/lightningcast/config/default/default_config.yaml b/lightningcast/config/default/default_config.yaml
index 781cbb8beac962c2570ce1dfb43a6b5a6ba22aaf..b7798645946b2cfe31afb1917cef473503de9ba2 100644
--- a/lightningcast/config/default/default_config.yaml
+++ b/lightningcast/config/default/default_config.yaml
@@ -4,7 +4,7 @@ re_upload: null
 extra_script: null
 ftp: null
 timeseries: []
-savegrid: 0
+awips_nc: 0
 fire_event_ts: false
 lightning_meteograms: false
 geotiff: false
diff --git a/lightningcast/config/options/ci-paths-override-sample.yaml b/lightningcast/config/options/ci-paths-override-sample.yaml
index 2a9fc5a9cc9e028575cf0925ca00ed1dd326a409..19cba86a5171cf6d55db3f1f63c6ef92f6a77055 100644
--- a/lightningcast/config/options/ci-paths-override-sample.yaml
+++ b/lightningcast/config/options/ci-paths-override-sample.yaml
@@ -1,5 +1,6 @@
 inroot: '/data/PLTG/LightningCast_testdata/'
 outdir: '/test-output'
 logfile: '{outdir}/logs/lightning.log'
-make_json: false
+make_json: true
+awips_nc: 3
 datadir_patt: '{inroot}'
diff --git a/lightningcast/config/options/sectors/ge-radc.yaml b/lightningcast/config/options/sectors/ge-radc.yaml
index f435f6f61384288ff709453a2b8a0dcc35a13511..3bc91e90aedf817bf42488ce6ca8c0ae2cdda9e1 100644
--- a/lightningcast/config/options/sectors/ge-radc.yaml
+++ b/lightningcast/config/options/sectors/ge-radc.yaml
@@ -1,4 +1,2 @@
-savegrid: 3
 grplacefile: True
-make_json: true
 plax_hgt: 9000.0
diff --git a/lightningcast/config/options/sectors/ge-radf.yaml b/lightningcast/config/options/sectors/ge-radf.yaml
index 08cc9359cf91ab7415af90ac3c54cbebf4165ca5..b5fc9ae7003dfafdc1f84ad23dc3db28612e9d8e 100644
--- a/lightningcast/config/options/sectors/ge-radf.yaml
+++ b/lightningcast/config/options/sectors/ge-radf.yaml
@@ -1,4 +1,2 @@
 ind_bbox: [700, 3900, 500, 3000]
-savegrid: 3
 plax_hgt: 9000.0
-make_json: True
diff --git a/lightningcast/config/options/sectors/ge-radm1.yaml b/lightningcast/config/options/sectors/ge-radm1.yaml
index f435f6f61384288ff709453a2b8a0dcc35a13511..3bc91e90aedf817bf42488ce6ca8c0ae2cdda9e1 100644
--- a/lightningcast/config/options/sectors/ge-radm1.yaml
+++ b/lightningcast/config/options/sectors/ge-radm1.yaml
@@ -1,4 +1,2 @@
-savegrid: 3
 grplacefile: True
-make_json: true
 plax_hgt: 9000.0
diff --git a/lightningcast/config/options/sectors/ge-radm2.yaml b/lightningcast/config/options/sectors/ge-radm2.yaml
index f435f6f61384288ff709453a2b8a0dcc35a13511..3bc91e90aedf817bf42488ce6ca8c0ae2cdda9e1 100644
--- a/lightningcast/config/options/sectors/ge-radm2.yaml
+++ b/lightningcast/config/options/sectors/ge-radm2.yaml
@@ -1,4 +1,2 @@
-savegrid: 3
 grplacefile: True
-make_json: true
 plax_hgt: 9000.0
diff --git a/lightningcast/config/options/sectors/gw-radc.yaml b/lightningcast/config/options/sectors/gw-radc.yaml
index f435f6f61384288ff709453a2b8a0dcc35a13511..3bc91e90aedf817bf42488ce6ca8c0ae2cdda9e1 100644
--- a/lightningcast/config/options/sectors/gw-radc.yaml
+++ b/lightningcast/config/options/sectors/gw-radc.yaml
@@ -1,4 +1,2 @@
-savegrid: 3
 grplacefile: True
-make_json: true
 plax_hgt: 9000.0
diff --git a/lightningcast/config/options/sectors/gw-radf-akcan.yaml b/lightningcast/config/options/sectors/gw-radf-akcan.yaml
index 58f00ff9434aa12ab09592b726fedb112771613f..65aee077607b356442c9055c62ad15313f91a9f0 100644
--- a/lightningcast/config/options/sectors/gw-radf-akcan.yaml
+++ b/lightningcast/config/options/sectors/gw-radf-akcan.yaml
@@ -1,5 +1,3 @@
 sector_suffix: 'AKCAN'
 ind_bbox: [2020, 3500, 50, 500]
-savegrid: 3
 plax_hgt: 9000.0
-make_json: True
diff --git a/lightningcast/config/options/sectors/gw-radf-ussamoa.yaml b/lightningcast/config/options/sectors/gw-radf-ussamoa.yaml
index ba9e1a69137149a13b10441c6dd187355bb789dc..b041ec8ee294fb75ef1dde676cebc03bd5110c62 100644
--- a/lightningcast/config/options/sectors/gw-radf-ussamoa.yaml
+++ b/lightningcast/config/options/sectors/gw-radf-ussamoa.yaml
@@ -1,5 +1,3 @@
 sector_suffix: 'USSAMOA'
 ll_bbox: [172, -155, -23, -4]
-savegrid: 3
 plax_hgt: 9000.0
-make_json: True
diff --git a/lightningcast/config/options/sectors/gw-radm1.yaml b/lightningcast/config/options/sectors/gw-radm1.yaml
index f435f6f61384288ff709453a2b8a0dcc35a13511..3bc91e90aedf817bf42488ce6ca8c0ae2cdda9e1 100644
--- a/lightningcast/config/options/sectors/gw-radm1.yaml
+++ b/lightningcast/config/options/sectors/gw-radm1.yaml
@@ -1,4 +1,2 @@
-savegrid: 3
 grplacefile: True
-make_json: true
 plax_hgt: 9000.0
diff --git a/lightningcast/config/options/sectors/gw-radm2.yaml b/lightningcast/config/options/sectors/gw-radm2.yaml
index f435f6f61384288ff709453a2b8a0dcc35a13511..3bc91e90aedf817bf42488ce6ca8c0ae2cdda9e1 100644
--- a/lightningcast/config/options/sectors/gw-radm2.yaml
+++ b/lightningcast/config/options/sectors/gw-radm2.yaml
@@ -1,4 +1,2 @@
-savegrid: 3
 grplacefile: True
-make_json: true
 plax_hgt: 9000.0
diff --git a/lightningcast/config/options/sectors/himawari-fldk.yaml b/lightningcast/config/options/sectors/himawari-fldk.yaml
index ca83127d630d418c2c670af29f6a8a146e83629e..e00cc69acbf59ba2386f0bdf9441c0b8e74f3b45 100644
--- a/lightningcast/config/options/sectors/himawari-fldk.yaml
+++ b/lightningcast/config/options/sectors/himawari-fldk.yaml
@@ -1,4 +1,3 @@
 sector_suffix: 'GUAM'
 ll_bbox: [129, -172, -1, 27]
-savegrid: 1
-make_json: true
+awips_nc: 1
diff --git a/lightningcast/drive_accumulation.bash b/lightningcast/drive_accumulation.bash
index 577bf43b31aefa3c0c7940811d219309f9f07997..4cc06077cee4dc7af0d942a617cb38659c5873a4 100644
--- a/lightningcast/drive_accumulation.bash
+++ b/lightningcast/drive_accumulation.bash
@@ -1,12 +1,20 @@
 #!/bin/bash
 
-inpatt=/ships19/grain/probsevere/lightning/%Y/%Y%m%d/GLM/goes_east/agg/%Y%m%d-%H%M*.netcdf
-outpatt=/ships19/grain/saved_probsevere_data/lightning/%Y/%Y%m%d/GLM/goes_east/FED_accum_60min_2km/%Y%m%d-%H%M*.netcdf
-
-nohup python accumulation.py $inpatt 20230101-0000 20230201-0000 -o $outpatt > nohup_01-03.out &
-
-nohup python accumulation.py /apollo/grain/saved_probsevere_data/lightning/%Y%m%d/GLM/goes_east/agg/ 20190331-2300 2019430-2359 -r -o /apollo/grain/saved_probsevere_data/lightning/%Y%m%d/GLM/goes_east/FED_accum_60min_2km/ > nohup_04.out &
-
+# also consider --inabidir, --sector, and --shape
 
+inpatt=/ships19/grain/probsevere/lightning/%Y/%Y%m%d/GLM/goes_east/agg/%Y%m%d-%H%M*.netcdf
+outpatt=/ships19/grain/probsevere/lightning/%Y/%Y%m%d/GLM/goes_east/FED_accum_60min_2km/
 
+nohup python accumulation.py $inpatt 20230101-0000 20230131-2359 -o $outpatt > nohup_01.out &
+nohup python accumulation.py $inpatt 20230201-0000 20230228-2359 -o $outpatt > nohup_02.out &
+nohup python accumulation.py $inpatt 20230301-0000 20230331-2359 -o $outpatt > nohup_03.out &
+nohup python accumulation.py $inpatt 20230401-0000 20230430-2359 -o $outpatt > nohup_04.out &
+nohup python accumulation.py $inpatt 20230501-0000 20230531-2359 -o $outpatt > nohup_05.out &
+nohup python accumulation.py $inpatt 20230601-0000 20230630-2359 -o $outpatt > nohup_06.out &
+nohup python accumulation.py $inpatt 20230701-0000 20230731-2359 -o $outpatt > nohup_07.out &
+nohup python accumulation.py $inpatt 20230801-0000 20230831-2359 -o $outpatt > nohup_08.out &
+nohup python accumulation.py $inpatt 20230901-0000 20230930-2359 -o $outpatt > nohup_09.out &
+nohup python accumulation.py $inpatt 20231001-0000 20231031-2359 -o $outpatt > nohup_10.out &
+nohup python accumulation.py $inpatt 20231101-0000 20231130-2359 -o $outpatt > nohup_11.out &
+nohup python accumulation.py $inpatt 20231201-0000 20231231-2359 -o $outpatt > nohup_12.out &
 
diff --git a/lightningcast/fire_event_timeseries.py b/lightningcast/fire_event_timeseries.py
index f45cb9348beb469ecb2f42a24b622bf8ac4ae4d6..dc31cbb053bd4b31849e3c03662771588c9b5324 100644
--- a/lightningcast/fire_event_timeseries.py
+++ b/lightningcast/fire_event_timeseries.py
@@ -183,7 +183,10 @@ def make_fire_points(
                         incident_name = clean_string(
                             i["properties"]["IncidentName"]
                         ).upper()
-                        incident_county = clean_string(i["properties"]["POOCounty"])
+                        if i["properties"]["POOCounty"]:
+                            incident_county = clean_string(i["properties"]["POOCounty"]).upper()
+                        else:
+                            incident_county = 'UNKNOWN'
 
                         mpoints.append(
                             {
diff --git a/lightningcast/generate_tfrecs.py b/lightningcast/generate_tfrecs.py
index b5960fb8b2d2eacb673119a54880ebb4112c3def..5f6989f6adb314dcc90335046271af5739f29ca2 100755
--- a/lightningcast/generate_tfrecs.py
+++ b/lightningcast/generate_tfrecs.py
@@ -184,7 +184,6 @@ glmpatt1 = f"{glmdir}/FED_accum_60min_2km/%Y%m%d-%H%M00.netcdf"
 glmvar1 = "FED_accum_60min_2km"
 # glmpatt2 = f'{glmdir}/FED_accum_15min_2km/%Y%m%d-%H%M00.netcdf'; glmvar2 = "FED_accum_15min_2km"
 aggpatt = f"{glmdir}/agg/%Y%m%d-%H%M00.netcdf"
-sector = "RadC"
 ltgthresh = 3  # should be in byte-scaled space
 
 process_ref10 = False
diff --git a/lightningcast/glm/drive_make_GLM_grids.py b/lightningcast/glm/drive_make_GLM_grids.py
index 42efe18b4aefe32db47fb1405ea894106035c727..20aee0ace7d3f8a596c390f5384057a6bb76974a 100755
--- a/lightningcast/glm/drive_make_GLM_grids.py
+++ b/lightningcast/glm/drive_make_GLM_grids.py
@@ -196,7 +196,7 @@ def drive_make_GLM_grids(
                     logging.info(
                         f"{ps_pythonpath}/bin/python {pltg}/lightningcast/glm/glmtools/examples/grid/make_GLM_grids.py \
                         -o {raw_outdir}/{{start_time:%Y/%b/%d}}/{{dataset_name}} --fixed_grid --split_events \
-                        --goes_position {slot[-4:]} --dx={dx} --dy={dy} --ctr_lat -13 --ctr_lon -167.5 --width 1250 --height 900 {files_to_process}"
+                        --goes_position {slot[-4:]} --dx={dx} --dy={dy} --ctr_lat -13 --ctr_lon -167.5 --width 2500 --height 1800 {files_to_process}"
                     )
                 else:
                     logging.info(
@@ -210,7 +210,7 @@ def drive_make_GLM_grids(
                         os.system(
                             f"{ps_pythonpath}/bin/python {pltg}/lightningcast/glm/glmtools/examples/grid/make_GLM_grids.py \
                         -o {raw_outdir}/{{start_time:%Y/%b/%d}}/{{dataset_name}} --fixed_grid --split_events \
-                        --goes_position {slot[-4:]} --dx={dx} --dy={dy} --ctr_lat -13 --ctr_lon -167.5 --width 1250 --height 900 {files_to_process}"
+                        --goes_position {slot[-4:]} --dx={dx} --dy={dy} --ctr_lat -13 --ctr_lon -167.5 --width 2500 --height 1800 {files_to_process}"
                         )
                     else:
                         os.system(
diff --git a/lightningcast/glm/drive_make_GLM_grids_offline.py b/lightningcast/glm/drive_make_GLM_grids_offline.py
index 1c37bd5bf5cc1727bdd069303c3a8c8b2ddceeaa..3893eb9aab47d035a511183dc1e7a4d2b9055627 100755
--- a/lightningcast/glm/drive_make_GLM_grids_offline.py
+++ b/lightningcast/glm/drive_make_GLM_grids_offline.py
@@ -5,24 +5,28 @@ import shutil
 import glob
 
 
-outdir = "/ships19/grain/jcintineo/GLM/fix/ge/"
+outdir = "/ships19/grain/jcintineo/GLM/goes_west/ussamoa/"
 
-startdt = dt = datetime(2023, 10, 19, 15, 7)
-enddt = datetime(2023, 10, 19, 15, 18)  # startdt + timedelta(days=1)
+startdt = dt = datetime(2024, 4, 18, 12, 55)
+enddt = datetime(2024, 6, 1, 0, 0)  # startdt + timedelta(days=1)
 
 while startdt <= dt < enddt:
-    print(dt)
     pattern = dt.strftime(
         "/arcdata/goes/grb/goes18/%Y/%Y_%m_%d_%j/glm/L2/LCFA/*s%Y%j%H%M*"
     )
     remotefiles = glob.glob(pattern)
+
     if len(remotefiles) == 3:  # should be 3 files per minute
+        print(dt)
+        #cmd = (
+        #    "python glmtools/examples/grid/make_GLM_grids.py -o "
+        #    + outdir
+        #    + "/{start_time:%Y/%b/%d}/{dataset_name} --fixed_grid --split_events --goes_position west --goes_sector conus --dx 2.0 --dy 2.0"
+        #)
 
-        cmd = (
-            "python glmtools/examples/grid/make_GLM_grids.py -o "
-            + outdir
-            + "/{start_time:%Y/%b/%d}/{dataset_name} --fixed_grid --split_events --goes_position west --goes_sector conus --dx 2.0 --dy 2.0"
-        )
+        cmd ="python glmtools/examples/grid/make_GLM_grids.py " +\
+            f"-o {outdir}/{{start_time:%Y/%b/%d}}/{{dataset_name}} --fixed_grid --split_events --goes_position west --dx=2.0 --dy=2.0 " +\
+            "--ctr_lat -13 --ctr_lon -167.5 --width 2500 --height 1800"
 
         for rf in remotefiles:
             shutil.copy(rf, f"{os.environ['PWD']}/")
@@ -32,4 +36,4 @@ while startdt <= dt < enddt:
         for rf in remotefiles:
             os.remove(os.path.basename(rf))
 
-        dt += timedelta(minutes=1)
+    dt += timedelta(minutes=1)
diff --git a/lightningcast/ltg_utils.py b/lightningcast/ltg_utils.py
index 46b6db61ae94fcb91d37aaab96797d8f132acf8b..faa40e8806cdd85ab951e8b5dd23257b5844bd04 100755
--- a/lightningcast/ltg_utils.py
+++ b/lightningcast/ltg_utils.py
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 import pandas as pd
 import scipy.ndimage
 import glob
@@ -19,6 +19,8 @@ from subprocess import PIPE, call, Popen
 
 # from visualize_validation import verification
 import pytz
+from timezonefinder import TimezoneFinder
+TZF = TimezoneFinder()
 import mysql.connector
 import urllib
 
@@ -74,7 +76,7 @@ def delete_db_tables(dbname=DBNAME):
         sql = f"DROP TABLE `{dbname}`.`{table}`"
         execute_sql(sql, return_query=False)
 
-        # sql = ("DELETE FROM `"+table+"` WHERE DATE_ADD(`time`,INTERVAL "+purge_thresh_days +" DAY) < UTC_TIMESTAMP() ")
+        # sql = ("DELETE FROM `"+table+"` WHERE DATE_ADD(`utctime`,INTERVAL "+purge_thresh_days +" DAY) < UTC_TIMESTAMP() ")
 
         # status=delete_records(sql)
 
@@ -102,7 +104,7 @@ def purge_records(purge_thresh_days, dbname=DBNAME):
     # Remove old records from DB
     for t in result:
         table = t[f"Tables_in_{dbname}"]
-        sql = f"DELETE FROM `{table}` WHERE DATE_ADD(`time`,INTERVAL {purge_thresh_days} DAY) < UTC_TIMESTAMP() "
+        sql = f"DELETE FROM `{table}` WHERE DATE_ADD(`utctime`,INTERVAL {purge_thresh_days} DAY) < UTC_TIMESTAMP() "
         execute_sql(sql, return_query=False)
 
 
@@ -485,7 +487,7 @@ def get_locations(locations_file):
 # ----------------------------------------------------------------------------------------------------------------
 def get_ondemand_locations(remote_csv=os.environ["ONDEMAND_CSV"]):
     logging.info("Process started")
-    now = datetime.utcnow()
+    now = datetime.now(timezone.utc)
 
     ondemand_locations = []
 
@@ -510,10 +512,10 @@ def get_ondemand_locations(remote_csv=os.environ["ONDEMAND_CSV"]):
         lats = list(candidate_sites["Latitude of event (degrees north)"])  # type=float
         lons = list(candidate_sites["Longitude of event (degrees west)"])  # type=float
         begin_event_ts = list(
-            candidate_sites["Beginning date & time of request (UTC)"]
+            candidate_sites["Beginning date & time of request"]
         )  # type=str
         end_event_ts = list(
-            candidate_sites["End date & time of request (UTC)"]
+            candidate_sites["End date & time of request"]
         )  # type=str
     except KeyError:
         logging.error("Bad keys. Returning empty list of ondemand_locations.")
@@ -538,13 +540,26 @@ def get_ondemand_locations(remote_csv=os.environ["ONDEMAND_CSV"]):
             if not (bool(re.match(r"^[A-Za-z0-9_\- ]+$", event_name[ii]))):
                 continue
 
+            # Input begin and end dts are in LOCAL time. Use lat and lon info
+            # to find local timezone and then convert to UTC.
+            tz = pytz.timezone(TZF.timezone_at(lng=lons[ii]*-1, lat=lats[ii]))
+
             begin_dt = datetime.strptime(
                 begin_event_ts[ii], "%m/%d/%Y %H:%M:%S"
-            )  # assumed to be UTC
+            )  # assumed to be LOCAL time
+
+            # Localizing the timezone and then converting to UTC
+            begin_dt = tz.localize(begin_dt).astimezone(timezone.utc)
+#            print(tz)
+#            print(begin_dt)
             end_dt = datetime.strptime(
                 end_event_ts[ii], "%m/%d/%Y %H:%M:%S"
-            )  # assumed to be UTC
+            )  # assumed to be LOCAL time
 
+            # Localizing the timezone and then converting to UTC
+            end_dt = tz.localize(end_dt).astimezone(timezone.utc)
+#            print(end_dt)
+#            print('')
             # Check that end_dt is after begin_dt
             # Also check that duration is within bounds.
             if end_dt <= begin_dt or (end_dt - begin_dt).seconds / 86400 > float(
@@ -1016,7 +1031,7 @@ def get_fed(
                     logging.warning(f"File size is likely 0. Removing {tmpfile}.")
                     try:
                         os.remove(outfilename)
-                    except:
+                    except OSError:
                         pass
 
         nc = netCDF4.Dataset(tmpfile, "r")
diff --git a/lightningcast/pltg_gr_placefile.py b/lightningcast/pltg_gr_placefile.py
index fecfd77ef60671cef90f374f08eb611982d98cb3..ae9635bf439d6419a2cf4f23df78bd8e8f2c9079 100755
--- a/lightningcast/pltg_gr_placefile.py
+++ b/lightningcast/pltg_gr_placefile.py
@@ -12,6 +12,7 @@ import inspect
 current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
 sys.path.insert(0, os.path.dirname(current_dir))
 from lightningcast.__version__ import __version__
+version = __version__.replace('.','r')
 
 pltg_data = os.environ["PLTG_DATA"]
 ##########################################################################################################################################
@@ -174,7 +175,7 @@ def timeRangeGRFile(
             sat_abbr = data["platform"][0].lower() + data["platform"].split("-")[1]
             assert sat_abbr[0] == "g"
             outfile = (
-                f"LtgCast-pc-{NCCF_sector}_v{__version__.replace('.','-')}_{sat_abbr}"
+                f"LtgCast-pc-{NCCF_sector}_v{version}_{sat_abbr}"
             )
             outfile += f"_s{start_datetime.strftime('%Y%m%d%H%M%S%f')[0:15]}"
             outfile += f"_e{end_datetime.strftime('%Y%m%d%H%M%S%f')[0:15]}"
diff --git a/lightningcast/predict_from_yaml.py b/lightningcast/predict_from_yaml.py
index 15986189889555ad9e7422d00d352ef0dfb5999c..2127076bd94d642fd8a4389710f1dc19cd03b95b 100644
--- a/lightningcast/predict_from_yaml.py
+++ b/lightningcast/predict_from_yaml.py
@@ -138,7 +138,7 @@ if __name__ == "__main__":
         county_map=config["county_map"],
         lightning_meteograms=config["lightning_meteograms"],
         geotiff=config["geotiff"],
-        savegrid=config["savegrid"],
+        awips_nc=config["awips_nc"],
         fire_event_ts=config["fire_event_ts"],
         make_json=config["make_json"],
         pickle_preds_labs=config["pickle_preds_labs"],
diff --git a/lightningcast/predict_ltg.py b/lightningcast/predict_ltg.py
index 01ec460aeeea4ae1379399555e1a259f5327ee4a..91288b4656bb51ca2ff258720430da2ce9213253 100755
--- a/lightningcast/predict_ltg.py
+++ b/lightningcast/predict_ltg.py
@@ -85,6 +85,7 @@ import ftplib
 
 from lightningcast.pltg_gr_placefile import timeRangeGRFile
 from lightningcast.__version__ import __version__
+version = __version__.replace('.','r')
 
 #################################################################################################################################
 def get_rgb_colors(colors):
@@ -293,7 +294,7 @@ def write_json(
             "summary"
         ] = "Short-term probabilistic lightning predictions using remotely sensed data and machine-learning models"
         outdict["product"] = "Probability of lightning in next 60 minutes"
-        outdict["history"] = f"LightningCast v{__version__}"
+        outdict["history"] = f"LightningCast v{version}"
         outdict[
             "processing_level"
         ] = "National Oceanic and Atmospheric Administration (NOAA) Level 2"
@@ -323,7 +324,7 @@ def write_json(
         platform_abbr = (
             meta["platform_name"][0].lower() + meta["platform_name"].split("-")[1]
         )
-        outjson = f"{new_prefix}-{sector_outname.replace('_','-')}_v{__version__.replace('.','-')}_{platform_abbr}"
+        outjson = f"{new_prefix}-{sector_outname.replace('_','-')}_v{version}_{platform_abbr}"
         # Note: %f is microseconds. We take the first digit to get tenths of a second, which NESDIS requires.
         outjson += f"_s{meta['start_time'].strftime('%Y%m%d%H%M%S%f')[0:15]}"
         outjson += f"_e{meta['end_time'].strftime('%Y%m%d%H%M%S%f')[0:15]}"
@@ -603,7 +604,7 @@ def start_end_inds(xmin, xmax, ymin, ymax, irx, iry):
 
 
 #################################################################################################################################
-def save_off_grid(
+def save_netcdf(
     scn_ch,
     preds,
     dt,
@@ -613,6 +614,7 @@ def save_off_grid(
     stride=4,
     savecode=1,
     scale_data=False,
+    awips=False,
 ):
 
     atts = collections.OrderedDict()  # global attributes
@@ -631,7 +633,7 @@ def save_off_grid(
     atts[
         "summary"
     ] = "Short-term probabilistic lightning predictions using remotely sensed data and machine-learning models"
-    atts["history"] = f"LightningCast v{__version__}"
+    atts["history"] = f"LightningCast v{version}"
     atts[
         "processing_level"
     ] = "National Oceanic and Atmospheric Administration (NOAA) Level 2"
@@ -664,7 +666,7 @@ def save_off_grid(
         platform_abbr = (
             meta["platform_name"][0].lower() + meta["platform_name"].split("-")[1]
         )
-        outfile = f"LtgCast-{sector_outname.replace('_','-')}_v{__version__.replace('.','-')}_{platform_abbr}"
+        outfile = f"LtgCast-{sector_outname.replace('_','-')}_v{version}_{platform_abbr}"
         # Note: %f is microseconds. We take the first digit to get tenths of a second, which NESDIS requires.
         outfile += f"_s{meta['start_time'].strftime('%Y%m%d%H%M%S%f')[0:15]}"
         outfile += f"_e{meta['end_time'].strftime('%Y%m%d%H%M%S%f')[0:15]}"
@@ -707,7 +709,8 @@ def save_off_grid(
     lats = lats[0:ny, 0:nx]
     lons = lons[::stride, ::stride]
     lats = lats[::stride, ::stride]
-
+   
+    orig_preds = np.copy(preds)
     preds = preds[::stride, ::stride]
     # fix bad lats and lons
     lons = np.ma.masked_where(((lons < -180) | (lons > 180)), lons)
@@ -723,23 +726,24 @@ def save_off_grid(
     }  # {'proj':'lcc', 'lon_0':lon_0, 'lat_1':33, 'lat_2':45, 'units':'m'}
     target_proj = Proj(projection_dict)
 
-    SWlat = idict['awips_grid']['SWlat']
-    NElat = idict['awips_grid']['NElat']
-    SWlon = idict['awips_grid']['SWlon']
-    NElon = idict['awips_grid']['NElon']
-    dy = idict['awips_grid']['dy']
-    dx = idict['awips_grid']['dx']
-
-    # Geographic Metadata
-    atts["cdm_data_type"] = "Grid"
-    atts["geospatial_lat_min"] = float(SWlat)
-    atts["geospatial_lat_max"] = float(NElat)
-    atts["geospatial_lon_min"] = float(SWlon)
-    atts["geospatial_lon_max"] = float(NElon)
-    atts["geospatial_lat_units"] = "degrees_north"
-    atts["geospatial_lon_units"] = "degrees_east"
-    atts["geospatial_lat_resolution"] = dy
-    atts["geospatial_lon_resolution"] = dx
+    if awips:
+        SWlat = idict['awips_grid']['SWlat']
+        NElat = idict['awips_grid']['NElat']
+        SWlon = idict['awips_grid']['SWlon']
+        NElon = idict['awips_grid']['NElon']
+        dy = idict['awips_grid']['dy']
+        dx = idict['awips_grid']['dx']
+
+        # Geographic Metadata
+        atts["cdm_data_type"] = "Grid"
+        atts["geospatial_lat_min"] = float(SWlat)
+        atts["geospatial_lat_max"] = float(NElat)
+        atts["geospatial_lon_min"] = float(SWlon)
+        atts["geospatial_lon_max"] = float(NElon)
+        atts["geospatial_lat_units"] = "degrees_north"
+        atts["geospatial_lon_units"] = "degrees_east"
+        atts["geospatial_lat_resolution"] = dy
+        atts["geospatial_lon_resolution"] = dx
 
     # Product development team and publisher metadata
     atts[
@@ -753,73 +757,138 @@ def save_off_grid(
     atts["publisher_email"] = "espcoperations@noaa.gov"
     atts["publisher_url"] = "http://www.ospo.noaa.gov"
 
-    # Check that our dimensions are whole numbers.
-    # We do this to ensure that these hard-coded grids match up with the static parallax-correction grids.
-    width = (NElon - SWlon) / dx
-    height = (NElat - SWlat) / dy
-    assert(width % 1 == 0)
-    assert(height % 1 == 0)
-    width = int(width)
-    height = int(height)
+    
+    dims = collections.OrderedDict()
+    dataset = {}
 
-    # Remap data to this equal-lat equal-lon projection
+    if awips:
+        # Check that our dimensions are whole numbers.
+        # We do this to ensure that these hard-coded grids match up with the static parallax-correction grids.
+        width = (NElon - SWlon) / dx
+        height = (NElat - SWlat) / dy
+        assert(width % 1 == 0)
+        assert(height % 1 == 0)
+        width = int(width)
+        height = int(height)
+    
+        # Remap data to this equal-lat equal-lon projection
+    
+        if width < 0: # indicates that NElon is negative. I.e., the domain crosses the dateline
+            width = int((180 - SWlon + 180 + NElon) / dx)
 
-    if width < 0: # indicates that NElon is negative. I.e., the domains crosses the datetline
-        width = int((180 - SWlon + 180 + NElon) / dx)
-        
-    x, y = target_proj(np.array([SWlon, NElon]), np.array([SWlat, NElat]))
-    x_min = x.min()
-    y_min = y.min()
-    x_max = x.max()
-    y_max = y.max()
-    area_extent = (x_min, y_min, x_max, y_max)
-   
-    # The standard lon and lats work fine for GOES-East/West CONUS grid.
-    targetArea = pr.geometry.AreaDefinition(
-        "targetProj",
-        "LC_targetProj",
-        "targetProj",
-        projection_dict,
-        width=width,
-        height=height,
-        area_extent=area_extent,
-    )
-    new_lons, new_lats = targetArea.get_lonlats()
+        dims["y"], dims["x"] = height, width        
+    
+        x, y = target_proj(np.array([SWlon, NElon]), np.array([SWlat, NElat]))
+        x_min = x.min()
+        y_min = y.min()
+        x_max = x.max()
+        y_max = y.max()
+        area_extent = (x_min, y_min, x_max, y_max)
    
-    new_preds = pr.kd_tree.resample_nearest(
-        geosGrid, preds, targetArea, radius_of_influence=16000, fill_value=-1
-    )
-    assert new_preds.shape[0:2] == new_lons.shape == new_lats.shape
+        targetArea = pr.geometry.AreaDefinition(
+            "targetProj",
+            "LC_targetProj",
+            "targetProj",
+            projection_dict,
+            width=width,
+            height=height,
+            area_extent=area_extent,
+        )
+        new_lons, new_lats = targetArea.get_lonlats()
+  
+        new_preds = pr.kd_tree.resample_nearest(
+            geosGrid, preds, targetArea, radius_of_influence=16000, fill_value=-1
+        )
+        assert new_preds.shape[0:2] == new_lons.shape == new_lats.shape
+    else:
+        new_preds = np.copy(preds)
+        dims["y"], dims["x"], _ = new_preds.shape
+        # additional projection data
+        if scn_ch.attrs["area"].proj_id.lower().startswith('abi'):
+            long_name = "GOES-R ABI fixed grid projection"
+        elif scn_ch.attrs["area"].proj_id.lower().startswith('geosh'):
+            long_name = "Himawari geostationary projection" 
+        else:
+            logging.error(f"Projection: {scn_ch.attrs['area'].proj_id.lower()} is not supported.")
+            return -1, False
+
+        # Create x and y, like they are in ABI/AHI files.
+        # force the dims to be the same (e.g., [0:nx])
+        projx = scn_ch.x.compute().data[0:nx] / scn_ch.attrs["area"].proj_dict['h']
+        projy = scn_ch.y.compute().data[0:ny] / scn_ch.attrs["area"].proj_dict['h']
+        projx = projx[::stride]
+        projy = projy[::stride]
+        x_scale_factor = 5.6e-05
+        x_add_offset = -0.101332
+        xpacked = ((projx - x_add_offset) / x_scale_factor).astype(np.int16)
+        y_scale_factor = -5.6e-05
+        y_add_offset = 0.128212
+        ypacked = ((projy - y_add_offset) / y_scale_factor).astype(np.int16)
+        
+        dataset["y"] = {'data': ypacked,
+                        'dims': ("y"),
+                        'atts': {'scale_factor':y_scale_factor,
+                                 'add_offset':y_add_offset,
+                                 'units':'rad',
+                                 'axis':'Y',
+                                 'standard_name':'projection_y_coordinate',
+                        }
+        }
+        dataset["x"] = {'data': xpacked,
+                        'dims': ("x"),
+                        'atts': {'scale_factor':x_scale_factor,
+                                 'add_offset':x_add_offset,
+                                 'units':'rad',
+                                 'axis':'X',
+                                 'standard_name':'projection_x_coordinate',
+                        }
+        }
+ 
+        # Add imager_projection info
+
+        dataset["imager_projection"] = {'data':np.ubyte(255),
+                                        'dims':(),
+                                        'atts': {'long_name':long_name,
+                                                 'projection':'geostationary',
+                                                 'perspective_point_height':scn_ch.attrs["area"].proj_dict['h'],
+                                                 'longitude_of_projection_origin':lon_0,
+                                                 'latitude_of_projection_origin':0.
+                                        }
+        }
+
  
     # For parallax-correction fields    
     if savecode > 1:
-        plons, plats = idict['plax_lons'], idict['plax_lats']
-        plons = plons[0:ny, 0:nx]  # force the dims to be the same.
-        plats = plats[0:ny, 0:nx]
-        plons = plons[::stride, ::stride]
-        plats = plats[::stride, ::stride]
-
-        # fix bad lats and lons
-        plons = np.ma.masked_where(((plons < -180) | (plons > 180)), plons)
-        plats = np.ma.masked_where(((plats < -90) | (plats > 90)), plats)
-
-        plax_geosGrid = pr.geometry.GridDefinition(lons=plons, lats=plats)
-
-        plax_preds = pr.kd_tree.resample_nearest(
-            plax_geosGrid, preds, targetArea, radius_of_influence=16000, fill_value=-1
-        )
-        assert plax_preds.shape[0:2] == new_lons.shape == new_lats.shape
-
-    # t1=time.time()
-    # new_preds = pr.kd_tree.resample_gauss(geosGrid,tmp_preds,targetArea,radius_of_influence=16000,sigmas=20000,fill_value=-1)
-    # print(time.time()-t1)
-    logging.info("Remapped probability data.")
+        if awips:
+            plons, plats = idict['plax_lons'], idict['plax_lats']
+            plons = plons[0:ny, 0:nx]  # force the dims to be the same.
+            plats = plats[0:ny, 0:nx]
+            plons = plons[::stride, ::stride]
+            plats = plats[::stride, ::stride]
+    
+            # fix bad lats and lons
+            plons = np.ma.masked_where(((plons < -180) | (plons > 180)), plons)
+            plats = np.ma.masked_where(((plats < -90) | (plats > 90)), plats)
+    
+            plax_geosGrid = pr.geometry.GridDefinition(lons=plons, lats=plats)
+    
+            plax_preds = pr.kd_tree.resample_nearest(
+                plax_geosGrid, preds, targetArea, radius_of_influence=16000, fill_value=-1
+            )
+            assert plax_preds.shape[0:2] == new_lons.shape == new_lats.shape
 
+            # t1=time.time()
+            # new_preds = pr.kd_tree.resample_gauss(geosGrid,tmp_preds,targetArea,radius_of_influence=16000,sigmas=20000,fill_value=-1)
+            # print(time.time()-t1)
+            logging.info("Remapped probability data.")
+  
+        else:
+            yind, xind = idict["PLAX_inds_nonstatic"]
+            plax_preds = np.zeros(orig_preds.shape, dtype=np.float32)
+            for nn in range(nout_feature_maps):
+                plax_preds[yind, xind, nn] = orig_preds[..., nn].ravel()
+            plax_preds = plax_preds[::stride, ::stride]
 
-    dims = collections.OrderedDict()
-    dims["x"] = width
-    dims["y"] = height
-    dataset = {}
 
     # Create the DQF mask
     if "05km" in idict["masks"]:
@@ -836,20 +905,24 @@ def save_off_grid(
     mask[((half_km_mask == 1) & (one_km_mask == 1) & (two_km_mask == 1))] = 3
     # We "stride" the image to reduce resolution and noise
     mask = mask[::stride, ::stride]
-    # Remap mask to AWIPS targetArea
-    remapped_mask = pr.kd_tree.resample_nearest(
-        geosGrid, mask, targetArea, radius_of_influence=16000, fill_value=-1
-    ).astype(np.int8)
-
-    total_number_attempted_retrievals = np.count_nonzero(remapped_mask >= 0)
-    total_number_unattempted_retrievals = np.count_nonzero(remapped_mask == -1)
-    total_number_good_retrievals = np.count_nonzero(remapped_mask == 0)
+
+    if awips:
+        # Remap mask to AWIPS targetArea
+        new_mask = pr.kd_tree.resample_nearest(
+            geosGrid, mask, targetArea, radius_of_influence=16000, fill_value=-1
+        ).astype(np.int8)
+    else:
+        new_mask = mask.astype(np.int8)
+
+    total_number_attempted_retrievals = np.count_nonzero(new_mask >= 0)
+    total_number_unattempted_retrievals = np.count_nonzero(new_mask == -1)
+    total_number_good_retrievals = np.count_nonzero(new_mask == 0)
     total_number_sub_optimal_retrievals = np.count_nonzero(
-        (remapped_mask == 1) | (remapped_mask == 2)
+        (new_mask == 1) | (new_mask == 2)
     )
-    total_number_unusable_retrievals = np.count_nonzero((remapped_mask == 3))
+    total_number_unusable_retrievals = np.count_nonzero((new_mask == 3))
     dataset["DQF"] = {
-        "data": remapped_mask,
+        "data": new_mask,
         "dims": ("y", "x"),
         "atts": {
             "long_name": "LightningCast retrieval data quality flags",
@@ -866,7 +939,7 @@ def save_off_grid(
     }
 
     #----------------------------------------------------------
-    # quality information
+    # additional required quality information
     #----------------------------------------------------------
     percentage_optimal_retrievals = 100.0*total_number_good_retrievals/float(total_number_attempted_retrievals)
     percentage_sub_optimal_retrievals = 100.0*total_number_sub_optimal_retrievals/float(total_number_attempted_retrievals)
@@ -975,10 +1048,10 @@ def save_off_grid(
         #  dataset['lon']['atts']['scale_factor'] = lon_scale_factor
         #  dataset['lon']['atts']['add_offset'] = lon_add_offset
 
-    netcdf = dt.strftime(f"{outdir}/%Y%m%d/{orbslot}/{sector_outname}/netcdf/{outfile}")
-    status = utils.write_netcdf(netcdf, dataset, dims, atts=atts, wait=True, gzip=False)
+    outnetcdf = dt.strftime(f"{outdir}/%Y%m%d/{orbslot}/{sector_outname}/netcdf/{outfile}")
+    status = utils.write_netcdf(outnetcdf, dataset, dims, atts=atts, wait=True, gzip=False)
 
-    return netcdf, status
+    return outnetcdf, status
 
 
 #################################################################################################################################
@@ -1812,7 +1885,7 @@ def run_overhead(
         else:
             linewidths = [1] * len(values)
     else:
-        linewidths = [1] * len(values)
+        linewidths = [2] * len(values)
     idict["linewidths"] = linewidths
 
     # Constants for MRMS Ref_-10C for images
@@ -1950,7 +2023,7 @@ def run_overhead(
 
     # Lightning plotting config
     idict["plotGLM"] = (
-        True if (make_img and sector in ["RadC", "RadM1", "RadM2", "RadF"]) else False
+        True if (glmpatt is not None and make_img and sector in ["RadC", "RadM1", "RadM2", "RadF"]) else False
     )
     if plot_engln is True:
         idict["plotGLM"] = False
@@ -2031,10 +2104,11 @@ def run_overhead(
             endX = 500
         elif sector == "RadF":
 
-            logging.critical(
-                "You must supply --ll_bbox or --ind_bbox for --sector=RadF"
+            logging.warning(
+                "Running RadF sector with no cut out. If it fails you may not have enough ram or you can try turning off onednn (see docs)."
             )
-            sys.exit(1)
+            #sys.exit(1)
+
             # For 2-km pixels. Dims should be divisible by 4.
             # ind_bbox = [700,4600,338,4000] #(yind_min is south of y_ind_max)
             # ind_bbox = [200,4800,400,4000]
@@ -2046,7 +2120,23 @@ def run_overhead(
             # else:
             #  ind_bbox = [700,3900,500,3000] #covers OPC and TAFB offshore regions
             # ind_bbox = [2700, 4800, 2300, 4500] #Brazil
-            crop = True
+
+            #crop = True
+            startY = 0
+            endY = 5424
+            startX = 0
+            endX = 5424
+        elif sector == "FLDK":
+
+            logging.warning(
+                "Running Himawark Fulldisk sector with no cut out. If it fails you may not have enough ram or you can try turning off onednn (see docs)."
+            )
+
+            startY = 0
+            endY = 5500
+            startX = 0
+            endX = 5500
+
         elif sector == "JP":
             # chopping off missing data; dims also need to be div. by 4
             crop = True
@@ -2084,67 +2174,62 @@ def run_overhead(
 
 
 #################################################################################################################################
-def get_glm_area_def(
-    idict, glmpatt, make_img, timeseries, pickle_preds_labs, ltg_stats
-):
+def get_glm_area_def(idict, glmpatt):
 
     """Make GE and GW GLM area definitions, to remap to ABI area
-    This is only needed if we're using one or more of the funcionalities in the if statement.
+    This is only needed if we're using `make_img` (and `glmpatt` is not None), `timeseries`, `pickle_preds_labs`, or `ltg_stats`.
     The other time we would need glm_area_def is if GLM fields are used as predictors, which they are currently not."""
 
-    if idict["imgr"] == "ABI" and (
-        make_img > 0 or len(timeseries) > 0 or pickle_preds_labs or ltg_stats
-    ):
-        if glmpatt.startswith("https"):  # indicates we're using FD GLM data
-            try:
-                assert ("_g16_" in glmpatt and idict["satname"] == "goes16") or (
-                    "_g17_" in glmpatt and idict["satname"] == "goes17"
-                )
-            except AssertionError:
-                logging.critical(
-                    f"Unable to get glm_area_def with this glmpatt: {glmpatt}"
-                )
-                logging.critical(
-                    f"Check that satname ({idict['satname']}) and satellite name in glmpatt are compatible."
-                )
-                sys.exit(1)
-            else:
+    if glmpatt.startswith("https"):  # indicates we're using FD GLM data
+        try:
+            assert ("_g16_" in glmpatt and idict["satname"] == "goes16") or (
+                "_g17_" in glmpatt and idict["satname"] == "goes17"
+            )
+        except AssertionError:
+            logging.critical(
+                f"Unable to get glm_area_def with this glmpatt: {glmpatt}"
+            )
+            logging.critical(
+                f"Check that satname ({idict['satname']}) and satellite name in glmpatt are compatible."
+            )
+            sys.exit(1)
+        else:
+            glm_area_def, _, _ = utils.get_area_definition(
+                f"{pltg}/lightningcast/static/{idict['slot']}_FD.nc",
+                return_latlons=False,
+            )
+    else:  # Assumes CONUS/PACUS fixed-grid-format GLM data
+        try:
+            assert (
+                "goes_east" in glmpatt.lower() and idict["slot"] == "GOES_East"
+            ) or ("goes_west" in glmpatt.lower() and idict["slot"] == "GOES_West")
+        except AssertionError:
+            logging.critical(
+                f"Unable to get glm_area_def with this glmpatt: {glmpatt}"
+            )
+            logging.critical(
+                f"Check that slot ({idict['slot']}) and satellite slot in glmpatt are compatible."
+            )
+            sys.exit(1)
+        else:
+            if (
+                idict["slot"] == "GOES_East"
+            ):  # GOES_East GLM and ABI are exactly the same!
                 glm_area_def, _, _ = utils.get_area_definition(
-                    f"{pltg}/lightningcast/static/{idict['slot']}_FD.nc",
+                    f"{pltg}/lightningcast/static/GOES_East.nc",
                     return_latlons=False,
                 )
-        else:  # Assumes CONUS/PACUS fixed-grid-format GLM data
-            try:
-                assert (
-                    "goes_east" in glmpatt.lower() and idict["slot"] == "GOES_East"
-                ) or ("goes_west" in glmpatt.lower() and idict["slot"] == "GOES_West")
-            except AssertionError:
-                logging.critical(
-                    f"Unable to get glm_area_def with this glmpatt: {glmpatt}"
-                )
-                logging.critical(
-                    f"Check that slot ({idict['slot']}) and satellite slot in glmpatt are compatible."
+            elif (
+                idict["slot"] == "GOES_West"
+            ):  # GOES_West GLM and ABI are slightly different, for some reason.
+                glm_area_def, _, _ = utils.get_area_definition(
+                    f"{pltg}/lightningcast/static/GOES_West_GLM.nc",
+                    return_latlons=False,
                 )
-                sys.exit(1)
-            else:
-                if (
-                    idict["slot"] == "GOES_East"
-                ):  # GOES_East GLM and ABI are exactly the same!
-                    glm_area_def, _, _ = utils.get_area_definition(
-                        f"{pltg}/lightningcast/static/GOES_East.nc",
-                        return_latlons=False,
-                    )
-                elif (
-                    idict["slot"] == "GOES_West"
-                ):  # GOES_West GLM and ABI are slightly different, for some reason.
-                    glm_area_def, _, _ = utils.get_area_definition(
-                        f"{pltg}/lightningcast/static/GOES_West_GLM.nc",
-                        return_latlons=False,
-                    )
-        try:
-            idict["glm_area_def"] = glm_area_def
-        except (UnboundLocalError, NameError) as err:
-            logging.warning("No glm_area_def available.")
+    try:
+        idict["glm_area_def"] = glm_area_def
+    except (UnboundLocalError, NameError) as err:
+        logging.warning("No glm_area_def available.")
 
 
 #################################################################################################################################
@@ -2300,7 +2385,8 @@ def write_image(scn, dt, new_preds, projx, projy, idict, meta):
                 logging.warning("Couldn't find FED for " + dt.strftime("%Y%m%d-%H%M%S"))
                 return  # move on to next line / datetime
 
-        vmin=0.1; vmax=20; cbticks=[5,10,15,20]; cmap = plt.get_cmap('jet'); norm=None
+        vmin=0.1; cbticks=[16,32,48,64]; cmap = plt.get_cmap('jet'); norm=None
+        vmax = np.max(cbticks)
         #vmin = 0.1
         #vmax = 64
         #cbticks = [16, 32, 48, 64]
@@ -2427,8 +2513,12 @@ def write_image(scn, dt, new_preds, projx, projy, idict, meta):
         #  for jj in range(buff,nx-buff,stride):
         #    irdata[-buff+ii:ii+buff+1,-buff+jj:jj+buff+1] = 310
 
-        # irdata = scn[idict['irCH']].compute().data
-        # ax.imshow(irdata, transform=crs, extent=crs.bounds, vmin=180, vmax=310, cmap=plt.get_cmap('Greys')) #irw_ctc()
+        # Just IR
+
+   #     irdata = scn[idict['irCH']].compute().data
+   #     ax.imshow(irdata, transform=crs, extent=crs.bounds, vmin=180, vmax=310, cmap=plt.get_cmap('Greys')) #irw_ctc()
+
+        # IRCloudPhaseFC
         composite = "IRCloudPhaseFC"
         alpha = 0.8
         scn.load([composite])
@@ -2528,14 +2618,12 @@ def write_image(scn, dt, new_preds, projx, projy, idict, meta):
     # Add latitude and longitude gridlines
     gl = ax.gridlines(
         crs=ccrs.PlateCarree(),
-        draw_labels=True,
+        draw_labels=['top','left'],
         linewidth=0.5,
         color="gray",
         linestyle="--",
     )
     # Customize the gridline labels
-    gl.xlabels_bottom = False
-    gl.ylabels_right = False
     gl.xlabel_style = {"size": 8}
     gl.ylabel_style = {"size": 8}
 
@@ -2761,7 +2849,7 @@ def predict_ltg(
     model_config_file,
     sector=None,
     outdir=os.environ["PWD"] + "/OUTPUT/",
-    glmpatt="/ships19/grain/probsevere/lightning/%Y/%Y%m%d/GLM/goes_east/agg/%Y%m%d-%H%M*.netcdf",
+    glmpatt=None,
     glmvar="flash_extent_density",
     eni_path="/ships19/grain/lightning/",
     mrms_patt=None,
@@ -2779,7 +2867,8 @@ def predict_ltg(
     sector_suffix=None,
     lightning_meteograms=False,
     geotiff=False,
-    savegrid=0,
+    awips_nc=0,
+    netcdf=0,
     fire_event_ts=False,
     infinite=False,
     re_upload=None,
@@ -2830,11 +2919,11 @@ def predict_ltg(
         ind_bbox = idict["ind_bbox"]
 
     # If we're writing parallax-corrected netcdfs, ensure that plax_hgt > 0
-    if savegrid >= 2:
+    if awips_nc >= 2:
         try:
             assert(plax_hgt > 0)
         except AssertionError:
-            logging.critical("Arg plax_hgt must be > 0 if savegrid is >= 2")
+            logging.critical("Arg plax_hgt must be > 0 if awips_nc is >= 2")
             logging.critical(f"plax = {plax_hgt}")
             sys.exit(1)
 
@@ -2868,7 +2957,7 @@ def predict_ltg(
     # Get the file times or listing from listened file
     if infinite:
         with open(filelist) as fp:  # with automatically closes file when done with it.
-            lines = [l.rstrip("\n") for l in fp.readlines()]
+            lines = [line.rstrip("\n") for line in fp.readlines()]
     else:  # since duplicate lines are removed in overhead we will get our processed filelist from there
         lines = files
 
@@ -3004,14 +3093,15 @@ def predict_ltg(
 
         # Get the grid remap data for the AWIPS remapped grid. It is statically saved in the remap_info.
         # Also get the parallax-correction info, if appropriate. idict gets returned with added fields.
-        if line_cter == 0 and savegrid > 0:
+        if line_cter == 0 and awips_nc > 0:
             remap_info(idict, meta) 
 
         # Get GLM area definition. glm_area_def gets added to idict
-        if line_cter == 0:
-            get_glm_area_def(
-                idict, glmpatt, make_img, timeseries, pickle_preds_labs, ltg_stats
-            )
+        if line_cter == 0 and idict["imgr"] == "ABI" and ((
+                make_img > 0 and glmpatt is not None) or len(timeseries) > 0 or pickle_preds_labs or ltg_stats
+        ):
+            get_glm_area_def(idict, glmpatt)
+            
 
         # get proj coords (in case no crop)
         projx = orig_scn[irCH].x.compute().data
@@ -3049,6 +3139,7 @@ def predict_ltg(
             scn = orig_scn.crop(xy_bbox=xy_bbox)
             projx = scn[irCH].x.compute().data  # get cropped proj coords
             projy = scn[irCH].y.compute().data
+            #scn.save_datasets(writer='simple_image', datasets=['B13'], filename='ldp-cropped-valid.png') #use to visualize cropped scn
             meta = scn[irCH].attrs  # reset with cropped info
         else:
             scn = orig_scn
@@ -3067,6 +3158,11 @@ def predict_ltg(
         projx = projx[0 : endX - startX]
         projy = projy[0 : endY - startY]
         irlons, irlats = meta["area"].get_lonlats()
+        
+        if ll_bbox and (True in np.isinf(irlons) or True in np.isinf(irlats)):
+            logging.critical("Invalid bounding box! Most likely your ll_bbox parameter is to blame! ")
+            sys.exit(1)
+        
         irlons = irlons[0 : endY - startY, 0 : endX - startX]
         irlats = irlats[0 : endY - startY, 0 : endX - startX]
 
@@ -3327,7 +3423,7 @@ def predict_ltg(
         try:
             if is_tensorflow:
                 new_preds = conv_model.predict(all_preds, verbose=1)
-                new_preds = np.squeeze(new_preds)
+                new_preds = np.squeeze(new_preds) #[...,0])
             else:
                 with torch.set_grad_enabled(False):
                     for i in range(len(all_preds)):  # move each input to the device
@@ -3433,8 +3529,8 @@ def predict_ltg(
                     re_upload=re_upload,
                 )
 
-            if savegrid:
-                outnetcdf, save_status = save_off_grid(
+            if awips_nc:
+                outnetcdf, save_status = save_netcdf(
                     scn[irCH],
                     new_preds,
                     dt,
@@ -3442,13 +3538,26 @@ def predict_ltg(
                     outdir,
                     idict,
                     stride=stride,
-                    savecode=savegrid,
+                    savecode=awips_nc,
+                    awips=True,
                 )
                 if save_status and extra_script:
                     status = subprocess.Popen(
                         ["bash", extra_script, outnetcdf]
                     )  # to send via LDM
 
+            if netcdf:
+                geo_nc, save_status = save_netcdf(
+                    scn[irCH],
+                    new_preds,
+                    dt,
+                    meta,
+                    outdir,
+                    idict,
+                    stride=stride,
+                    savecode=netcdf,
+                )
+
             if lightning_meteograms:
                 if infinite:
                     eni_patt = (
@@ -3536,7 +3645,7 @@ def predict_ltg(
                 except mysql.connector.Error as err:
                     logging.error(str(err))
 
-            if fire_event_ts and savegrid:  # need outnetcdf
+            if fire_event_ts and awips_nc:  # need awips outnetcdf
                 sat_and_sector = (
                     os.path.basename(outnetcdf).split("Cast_")[1].split("_2")[0]
                 )
@@ -3689,9 +3798,9 @@ if __name__ == "__main__":
     parser.add_argument(
         "-gp",
         "--glmpatt",
-        help='GLM file pattern. Default: /ships19/grain/probsevere/lightning/%%Y/%%Y%%m%%d/GLM/goes_east/agg/%%Y%%m%%d-%%H%%M*.netcdf. For segments of FD scenes, outside of CONUS or PACUS, use (e.g.), "https://lightningdev.umd.edu/feng_data_sharing/113_g16_glmgrid_arch/%%Y/%%Y%%j/OR_GLM-L3-GLMF-M3_G16_e%%Y%%m%%d%%H%%M00.nc"',
+        help='GLM file pattern. E.g.,: /ships19/grain/probsevere/lightning/%%Y/%%Y%%m%%d/GLM/goes_east/agg/%%Y%%m%%d-%%H%%M*.netcdf. For segments of FD scenes, outside of CONUS or PACUS, use (e.g.), "https://lightningdev.umd.edu/feng_data_sharing/113_g16_glmgrid_arch/%%Y/%%Y%%j/OR_GLM-L3-GLMF-M3_G16_e%%Y%%m%%d%%H%%M00.nc". Default=None.',
         type=str,
-        default="/ships19/grain/probsevere/lightning/%Y/%Y%m%d/GLM/goes_east/agg/%Y%m%d-%H%M*.netcdf",
+        default=None,
     )
     # FD goes_west example: /ships19/grain/jcintineo/GLM/goes_west/FD/%Y/%Y%m%d/5min/*e%Y%m%d%H%M*.nc
     parser.add_argument(
@@ -3770,9 +3879,16 @@ if __name__ == "__main__":
         nargs="+",
     )
     parser.add_argument(
-        "--savegrid",
-        help="Write a netcdf of the LC probs to outdir. 1 = w/o plax correction; 2 = w/ plax correction; \
-                      3 = save both plax-corrected and uncorrected grids. Default = 0 (i.e., don't write any netcdfs).",
+        "--awips_nc",
+        help="Write a netcdf of the LC probs in AWIPS-compatible format. 1 = w/o plax correction; 2 = w/ plax correction; \
+                      3 = save both plax-corrected and uncorrected grids. Default = 0 (i.e., don't write netcdf).",
+        default=0,
+        type=int,
+    )
+    parser.add_argument(
+        "--netcdf",
+        help="Write a netcdf of the LC probs in native geostationary format. 1 = w/o plax correction; 2 = w/ plax correction; \
+                      3 = save both plax-corrected and uncorrected grids. Default = 0 (i.e., don't write netcdf).",
         default=0,
         type=int,
     )
@@ -3936,7 +4052,8 @@ if __name__ == "__main__":
         plot_points=args.plot_points,
         lightning_meteograms=args.lightning_meteograms,
         geotiff=args.geotiff,
-        savegrid=args.savegrid,
+        awips_nc=args.awips_nc,
+        netcdf=args.netcdf,
         fire_event_ts=args.fire_event_ts,
         make_json=args.make_json,
         pickle_preds_labs=args.pickle_preds_labs,
diff --git a/lightningcast/purge_static_records.py b/lightningcast/purge_static_records.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b0d6546c94c5601c636d67cb7aa1bfda6a16ead
--- /dev/null
+++ b/lightningcast/purge_static_records.py
@@ -0,0 +1,10 @@
+"""Purge year-old records from the static lightning databases.
+
+Intended to be run periodically (e.g., via cron). Connection credentials
+(USERNAME, PW, HOST) are hard-coded at the top of ltg_utils.py.
+"""
+from ltg_utils import purge_records
+
+# Databases whose stale records should be purged.
+for db in ["lightning_meteograms", "lightning_dss"]:
+    print(db)
+    purge_records(purge_thresh_days=365, dbname=db)
diff --git a/lightningcast/realtime/w2algrun/w2alg.conf b/lightningcast/realtime/w2algrun/w2alg.conf
index 34e0155d21a4bced05d3a238df6e6ee21ab4a6b3..c2b96f0701eec2fe39bf0c2e445f5df3dd6e5cf8 100644
--- a/lightningcast/realtime/w2algrun/w2alg.conf
+++ b/lightningcast/realtime/w2algrun/w2alg.conf
@@ -53,6 +53,11 @@ group MAIN
     set PROCESS_PARAMS "$M1/record_dss_events.py"
     set NUM_LOGS "1"
 
+  manager PROCESS-CHECK_PRODS
+    set PROCESS_EXE "python"
+    set PROCESS_PARAMS "$M1/check_products.py --goes_east --goes_west --himawari"
+    set NUM_LOGS "1"
+
 group GE
 
 #GOES-East ABI
@@ -64,23 +69,23 @@ group GE
 #-b -115 -64 23.5 50
   manager PROCESS-CONUS
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GE-AMQ-A1.log $M1/$M3/$M4 -o $M2/products/ --sector RadC -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --savegrid 3 --lightning_meteograms"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GE-AMQ-A1.log $M1/$M3/$M4 -o $M2/products/ --sector RadC -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --awips_nc 3 --lightning_meteograms"
      set NUM_LOGS "2"
 
   manager PROCESS-MESO1
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GE-AMQ-A1.log $M1/$M3/$M4 -o $M2/products/ --sector RadM1 -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --savegrid 3 --lightning_meteograms"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GE-AMQ-A1.log $M1/$M3/$M4 -o $M2/products/ --sector RadM1 -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --awips_nc 3 --lightning_meteograms"
      set NUM_LOGS "2"
 
   manager PROCESS-MESO2
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GE-AMQ-A1.log $M1/$M3/$M4 -o $M2/products/ --sector RadM2 -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --savegrid 3 --lightning_meteograms"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GE-AMQ-A1.log $M1/$M3/$M4 -o $M2/products/ --sector RadM2 -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --awips_nc 3 --lightning_meteograms"
      set NUM_LOGS "2"
 
 #OPC/TAFB ---only doing South/Central American locations for --lightning_meteograms
   manager PROCESS-FD
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GE-AMQ-A1.log $M1/$M3/$M4 -o $M2/products/ --sector RadF -i -ru $M1/realtime/re_upload -mj -ph 9000 -x $M1/realtime/ldm_send.sh --savegrid 3 -ibb 700 3900 500 3000 --lightning_meteograms"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GE-AMQ-A1.log $M1/$M3/$M4 -o $M2/products/ --sector RadF -i -ru $M1/realtime/re_upload -mj -ph 9000 -x $M1/realtime/ldm_send.sh --awips_nc 3 -ibb 700 3900 500 3000 --lightning_meteograms"
      set NUM_LOGS "2"
   
   manager PROCESS-GLM
@@ -110,27 +115,27 @@ group GW
 #-128 -93.5 30.5 51.5
   manager PROCESS-CONUS
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadC -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --savegrid 3 --lightning_meteograms"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadC -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --awips_nc 3 --lightning_meteograms"
      set NUM_LOGS "2"
 
   manager PROCESS-MESO1
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadM1 -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --savegrid 3 --lightning_meteograms"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadM1 -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --awips_nc 3 --lightning_meteograms"
      set NUM_LOGS "2"
 
   manager PROCESS-MESO2
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadM2 -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --savegrid 3 --lightning_meteograms"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadM2 -i -ru $M1/realtime/re_upload -mj -gr -ph 9000 -x $M1/realtime/ldm_send.sh --awips_nc 3 --lightning_meteograms"
      set NUM_LOGS "2"
 
   manager PROCESS-USSAMOA
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadF -i -ss USSAMOA -llbb 172 -155 -23 -4 -mj -ph 9000 -ru $M1/realtime/re_upload -x $M1/realtime/ldm_send.sh --savegrid 3"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadF -i -ss USSAMOA -llbb 172 -155 -23 -4 -mj -ph 9000 -ru $M1/realtime/re_upload -x $M1/realtime/ldm_send.sh --awips_nc 3"
      set NUM_LOGS "2"
 
   manager PROCESS-AKCAN
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadF -i -ss AKCAN -ibb 2020 3500 50 500 -mj -ph 9000 --savegrid 3 -ru $M1/realtime/re_upload --lightning_meteograms"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/GW-AMQ-A1.log $M1/$M5/$M6 -o $M2/products/ --sector RadF -i -ss AKCAN -ibb 2020 3500 50 500 -mj -ph 9000 --awips_nc 3 -ru $M1/realtime/re_upload --lightning_meteograms"
      set NUM_LOGS "2"
 
   manager PROCESS-GLM
@@ -163,7 +168,7 @@ group AHI
 
   manager PROCESS-GUAM
      set PROCESS_EXE "python"
-     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/AHI-AMQ-A1.log $M1/$M3/$M4 -o $M2/products --sector FLDK -i -ss GUAM -llbb 129 -172 -1 27 -mj -ru $M1/realtime/re_upload -x $M1/realtime/ldm_send.sh --savegrid 1"
+     set PROCESS_PARAMS "$M1/predict_ltg.py $M2/logs/AHI-AMQ-A1.log $M1/$M3/$M4 -o $M2/products --sector FLDK -i -ss GUAM -llbb 129 -172 -1 27 -mj -ru $M1/realtime/re_upload -x $M1/realtime/ldm_send.sh --awips_nc 1"
      set NUM_LOGS "2"
      #old sector 140 150 11 18
 
diff --git a/lightningcast/record_dss_events.py b/lightningcast/record_dss_events.py
index a2352a25983d0110455b0f665f74d877487fbc85..2172dcc2490cd46811ba5171a81c1255728bc4ba 100644
--- a/lightningcast/record_dss_events.py
+++ b/lightningcast/record_dss_events.py
@@ -45,9 +45,9 @@ while True:
         lats = list(dss_events["Latitude of event (degrees north)"])  # type=float
         lons = list(dss_events["Longitude of event (degrees west)"])  # type=float
         begin_event_ts = list(
-            dss_events["Beginning date & time of request (UTC)"]
+            dss_events["Beginning date & time of request"]
         )  # type=str
-        end_event_ts = list(dss_events["End date & time of request (UTC)"])  # type=str
+        end_event_ts = list(dss_events["End date & time of request"])  # type=str
     except urllib.error.HTTPError as err:
         logging.error(str(err))
     except KeyError as err:
diff --git a/lightningcast/spatial_csi.py b/lightningcast/spatial_csi.py
index 5b302be025798a13c7cecc5836bf02d201190548..08d9f10657e5d0327d8abe532b085cf90a51b296 100644
--- a/lightningcast/spatial_csi.py
+++ b/lightningcast/spatial_csi.py
@@ -71,7 +71,7 @@ def main(
     problevel,
     outdir=None,
     georeference_file=f"{pltg}/lightningcast/static/GOES_East.nc",
-    stride=1,
+    stride=4,
     ticks=np.array([]),
 ):
     datafile = spatial_counts_file  #'tf/c02051315/model-11/spatial_counts.nc'
@@ -234,9 +234,8 @@ if __name__ == "__main__":
     georeference_file = sys.argv[3]
     if len(sys.argv) == 5:
         outdir = sys.argv[4]
-        from lightningcast import utils
 
-        utils.mkdir_p(outdir)
+        os.makedirs(outdir, exist_ok=True)
     else:
         outdir = os.path.dirname(spatial_counts_file)
     main(
diff --git a/lightningcast/static/timezones.json b/lightningcast/static/timezones.json
new file mode 100644
index 0000000000000000000000000000000000000000..c5851282fcbacd44ca64d3b39f8a846198a2cff7
--- /dev/null
+++ b/lightningcast/static/timezones.json
@@ -0,0 +1,3031 @@
+{
+  "Africa/Abidjan": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Abidjan-tz"
+    }
+  ],
+  "Africa/Accra": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Accra-tz"
+    }
+  ],
+  "Africa/Addis_Ababa": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Addis_Ababa-tz"
+    }
+  ],
+  "Africa/Algiers": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Algiers-tz"
+    }
+  ],
+  "Africa/Asmara": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Asmara-tz"
+    }
+  ],
+  "Africa/Bamako": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Bamako-tz"
+    }
+  ],
+  "Africa/Bangui": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Bangui-tz"
+    }
+  ],
+  "Africa/Banjul": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Banjul-tz"
+    }
+  ],
+  "Africa/Bissau": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Bissau-tz"
+    }
+  ],
+  "Africa/Blantyre": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Blantyre-tz"
+    }
+  ],
+  "Africa/Brazzaville": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Brazzaville-tz"
+    }
+  ],
+  "Africa/Bujumbura": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Bujumbura-tz"
+    }
+  ],
+  "Africa/Cairo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Cairo-tz"
+    }
+  ],
+  "Africa/Casablanca": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Casablanca-tz"
+    }
+  ],
+  "Africa/Ceuta": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Ceuta-tz"
+    }
+  ],
+  "Africa/Conakry": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Conakry-tz"
+    }
+  ],
+  "Africa/Dakar": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Dakar-tz"
+    }
+  ],
+  "Africa/Dar_es_Salaam": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Dar_es_Salaam-tz"
+    }
+  ],
+  "Africa/Djibouti": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Djibouti-tz"
+    }
+  ],
+  "Africa/Douala": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Douala-tz"
+    }
+  ],
+  "Africa/El_Aaiun": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-El_Aaiun-tz"
+    }
+  ],
+  "Africa/Freetown": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Freetown-tz"
+    }
+  ],
+  "Africa/Gaborone": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Gaborone-tz"
+    }
+  ],
+  "Africa/Harare": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Harare-tz"
+    }
+  ],
+  "Africa/Johannesburg": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Johannesburg-tz"
+    }, {
+      "op": "union",
+      "source": "manual-polygon",
+      "data": [[[-5.5,-86],[0,-86],[0,-68],[-5.5,-69],[-5.5,-86]]],
+      "description": "Add Antarctic Station of SANAE IV."
+    }
+  ],
+  "Africa/Juba": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Juba-tz"
+    }
+  ],
+  "Africa/Kampala": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Kampala-tz"
+    }
+  ],
+  "Africa/Khartoum": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Khartoum-tz"
+    }
+  ],
+  "Africa/Kigali": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Kigali-tz"
+    }
+  ],
+  "Africa/Kinshasa": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Kinshasa-tz"
+    }
+  ],
+  "Africa/Lagos": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Lagos-tz"
+    }
+  ],
+  "Africa/Libreville": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Libreville-tz"
+    }
+  ],
+  "Africa/Lome": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Lome-tz"
+    }
+  ],
+  "Africa/Luanda": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Luanda-tz"
+    }
+  ],
+  "Africa/Lubumbashi": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Lubumbashi-tz"
+    }
+  ],
+  "Africa/Lusaka": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Lusaka-tz"
+    }
+  ],
+  "Africa/Malabo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Malabo-tz"
+    }
+  ],
+  "Africa/Maputo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Maputo-tz"
+    }
+  ],
+  "Africa/Maseru": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Maseru-tz"
+    }
+  ],
+  "Africa/Mbabane": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Mbabane-tz"
+    }
+  ],
+  "Africa/Mogadishu": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Mogadishu-tz"
+    }
+  ],
+  "Africa/Monrovia": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Monrovia-tz"
+    }
+  ],
+  "Africa/Nairobi": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Nairobi-tz"
+    }
+  ],
+  "Africa/Ndjamena": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Ndjamena-tz"
+    }
+  ],
+  "Africa/Niamey": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Niamey-tz"
+    }
+  ],
+  "Africa/Nouakchott": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Nouakchott-tz"
+    }
+  ],
+  "Africa/Ouagadougou": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Ouagadougou-tz"
+    }
+  ],
+  "Africa/Porto-Novo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Porto_Novo-tz"
+    }
+  ],
+  "Africa/Sao_Tome": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Sao_Tome-tz"
+    }
+  ],
+  "Africa/Tripoli": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Tripoli-tz"
+    }
+  ],
+  "Africa/Tunis": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Tunis-tz"
+    }
+  ],
+  "Africa/Windhoek": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Africa-Windhoek-tz"
+    }
+  ],
+  "America/Adak": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Adak-tz"
+    }
+  ],
+  "America/Anchorage": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Anchorage-tz"
+    }
+  ],
+  "America/Anguilla": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Anguilla-tz"
+    }
+  ],
+  "America/Antigua": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Antigua-tz"
+    }
+  ],
+  "America/Aruba": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Aruba-tz"
+    }
+  ],
+  "America/Araguaina": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Araguaina-tz"
+    }
+  ],
+  "America/Argentina/Buenos_Aires": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-Buenos_Aires-tz"
+    }
+  ],
+  "America/Argentina/Catamarca": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-Catamarca-tz"
+    }
+  ],
+  "America/Argentina/Cordoba": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-Cordoba-tz"
+    }
+  ],
+  "America/Argentina/Jujuy": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-Jujuy-tz"
+    }
+  ],
+  "America/Argentina/La_Rioja": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-La_Rioja-tz"
+    }
+  ],
+  "America/Argentina/Mendoza": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-Mendoza-tz"
+    }
+  ],
+  "America/Argentina/Rio_Gallegos": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-Rio_Gallegos-tz"
+    }
+  ],
+  "America/Argentina/Salta": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-Salta-tz"
+    }
+  ],
+  "America/Argentina/San_Juan": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-San_Juan-tz"
+    }
+  ],
+  "America/Argentina/San_Luis": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-San_Luis-tz"
+    }
+  ],
+  "America/Argentina/Tucuman": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-Tucuman-tz"
+    }
+  ],
+  "America/Argentina/Ushuaia": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Argentina-Ushuaia-tz"
+    }, {
+      "op": "union",
+      "source": "manual-polygon",
+      "data": [[[-58,-86],[-45,-86],[-35,-86],[-35,-75],[-58,-74],[-54,-63],[-57,-61.6],[-59,-61.8],[-58.8,-62.25],[-57.3,-63.4],[-64,-64],[-63.8,-64.8],[-65.5,-65.25],[-67.5,-66],[-67.5,-86],[-60,-86],[-58,-86]]],
+      "description": "Add Antarctic areas near Stations: Belgrano II, San Martín, Vernadsky, Esperanza and Arctowski."
+    }, {
+      "op": "union",
+      "source": "manual-polygon",
+      "data": [[[-45,-60],[-47.5,-60.5],[-46,-61.5],[-43.5,-61],[-43.5,-60],[-45,-60]]],
+      "description": "Add Antarctic area near Orcadas Station."
+    }
+  ],
+  "America/Asuncion": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Asuncion-tz"
+    }
+  ],
+  "America/Atikokan": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Atikokan-tz"
+    }
+  ],
+  "America/Bahia": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Bahia-tz"
+    }
+  ],
+  "America/Bahia_Banderas": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Bahia_Banderas-tz"
+    }
+  ],
+  "America/Barbados": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Barbados-tz"
+    }
+  ],
+  "America/Belem": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Belem-tz"
+    }
+  ],
+  "America/Belize": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Belize-tz"
+    }
+  ],
+  "America/Blanc-Sablon": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Blanc_Sablon-tz"
+    }
+  ],
+  "America/Boa_Vista": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Boa_Vista-tz"
+    }
+  ],
+  "America/Bogota": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Bogota-tz"
+    }
+  ],
+  "America/Boise": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Boise-tz"
+    }
+  ],
+  "America/Cambridge_Bay": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Cambridge_Bay-tz"
+    }
+  ],
+  "America/Campo_Grande": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Campo_Grande-tz"
+    }
+  ],
+  "America/Cancun": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Cancun-tz"
+    }
+  ],
+  "America/Caracas": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Caracas-tz"
+    }
+  ],
+  "America/Cayenne": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Cayenne-tz"
+    }
+  ],
+  "America/Cayman": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Cayman-tz"
+    }
+  ],
+  "America/Chicago": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Chicago-tz"
+    }, {
+      "op": "union",
+      "source": "manual-polygon",
+      "data": [[[-100.475,44.533],[-100.855,44.162],[-99,44],[-100.475,44.533]]],
+      "description": "Include small region east of Fort Pierre"
+    }, {
+      "op": "difference",
+      "source": "manual-polygon",
+      "data": [[[-102,46],[-101.326,46.146],[-101.324,45.938],[-102,46]]],
+      "description": "Draw the line slightly east of ND 31"
+    }, {
+      "op": "difference",
+      "source": "manual-polygon",
+      "data": [[[-85.2,32.97],[-85.237,32.954],[-85.282,32.847],[-85.235,32.765],[-85.198,32.656],[-85.182,32.571],[-85.214,32.514],[-85.119,32.444],[-85.094,32.397],[-85.073,32.355],[-85.031,32.339],[-85.02,32.3429],[-85.011,32.358],[-84.86,32.35],[-85.2,32.97]]],
+      "description": "Omit a few towns in Alabama close to Georgia that observe eastern time"
+    }
+  ],
+  "America/Chihuahua": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Chihuahua-tz"
+    }
+  ],
+  "America/Ciudad_Juarez": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Ciudad_Juarez-tz"
+    }
+  ],
+  "America/Costa_Rica": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Costa_Rica-tz"
+    }
+  ],
+  "America/Creston": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Creston-tz"
+    }
+  ],
+  "America/Cuiaba": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Cuiaba-tz"
+    }
+  ],
+  "America/Curacao": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Curacao-tz"
+    }
+  ],
+  "America/Danmarkshavn": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Danmarkshavn-tz"
+    }
+  ],
+  "America/Dawson": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Dawson-tz"
+    }
+  ],
+  "America/Dawson_Creek": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Dawson_Creek-tz"
+    }
+  ],
+  "America/Denver": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Denver-tz"
+    }, {
+      "op": "union",
+      "source": "overpass",
+      "id": "Sioux County, ND"
+    }, {
+      "op": "difference",
+      "source": "manual-polygon",
+      "data": [[[-98,44],[-98,46.5],[-100.8,46.57],[-101.326,46.146],[-101.324,45.938],[-100.583,44.865],[-100.475,44.533],[-100.855,44.162],[-98,44]]],
+      "description": "anomalies in North and South Dakota"
+    }, {
+      "op": "union",
+      "source": "overpass",
+      "id": "Grant County, ND"
+    }, {
+      "op": "union",
+      "source": "overpass",
+      "id": "Corson County, SD"
+    }, {
+      "op": "union",
+      "source": "overpass",
+      "id": "Dewey County, SD"
+    }
+  ],
+  "America/Detroit": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Detroit-tz"
+    }
+  ],
+  "America/Dominica": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Dominica-tz"
+    }
+  ],
+  "America/Edmonton": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Edmonton-tz"
+    }
+  ],
+  "America/Eirunepe": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Eirunepe-tz"
+    }
+  ],
+  "America/El_Salvador": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-El_Salvador-tz"
+    }
+  ],
+  "America/Fort_Nelson": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Fort_Nelson-tz"
+    }
+  ],
+  "America/Fortaleza": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Fortaleza-tz"
+    }
+  ],
+  "America/Glace_Bay": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Glace_Bay-tz"
+    }
+  ],
+  "America/Goose_Bay": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Goose_Bay-tz"
+    }
+  ],
+  "America/Grand_Turk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Grand_Turk-tz"
+    }
+  ],
+  "America/Grenada": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Grenada-tz"
+    }
+  ],
+  "America/Guadeloupe": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Guadeloupe-tz"
+    }
+  ],
+  "America/Guatemala": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Guatemala-tz"
+    }
+  ],
+  "America/Guayaquil": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Guayaquil-tz"
+    }
+  ],
+  "America/Guyana": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Guyana-tz"
+    }
+  ],
+  "America/Halifax": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Halifax-tz"
+    }
+  ],
+  "America/Havana": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Havana-tz"
+    }
+  ],
+  "America/Hermosillo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Hermosillo-tz"
+    }
+  ],
+  "America/Indiana/Indianapolis": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Indiana-Indianapolis-tz"
+    }
+  ],
+  "America/Indiana/Knox": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Indiana-Knox-tz"
+    }
+  ],
+  "America/Indiana/Marengo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Indiana-Marengo-tz"
+    }
+  ],
+  "America/Indiana/Petersburg": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Indiana-Petersburg-tz"
+    }
+  ],
+  "America/Indiana/Tell_City": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Indiana-Tell_City-tz"
+    }
+  ],
+  "America/Indiana/Vevay": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Indiana-Vevay-tz"
+    }
+  ],
+  "America/Indiana/Vincennes": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Indiana-Vincennes-tz"
+    }
+  ],
+  "America/Indiana/Winamac": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Indiana-Winamac-tz"
+    }
+  ],
+  "America/Inuvik": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Inuvik-tz"
+    }
+  ],
+  "America/Iqaluit": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Iqaluit-tz"
+    }
+  ],
+  "America/Jamaica": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Jamaica-tz"
+    }
+  ],
+  "America/Juneau": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Juneau-tz"
+    }
+  ],
+  "America/Kentucky/Louisville": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Kentucky-Louisville-tz"
+    }
+  ],
+  "America/Kentucky/Monticello": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Kentucky-Monticello-tz"
+    }
+  ],
+  "America/Kralendijk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Kralendijk-tz"
+    }
+  ],
+  "America/La_Paz": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-La_Paz-tz"
+    }
+  ],
+  "America/Lima": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Lima-tz"
+    }
+  ],
+  "America/Los_Angeles": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Los_Angeles-tz"
+    }
+  ],
+  "America/Lower_Princes": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Lower_Princes-tz"
+    }
+  ],
+  "America/Maceio": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Maceio-tz"
+    }
+  ],
+  "America/Managua": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Managua-tz"
+    }
+  ],
+  "America/Manaus": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Manaus-tz"
+    }
+  ],
+  "America/Marigot": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Marigot-tz"
+    }
+  ],
+  "America/Martinique": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Martinique-tz"
+    }
+  ],
+  "America/Matamoros": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Matamoros-tz"
+    }
+  ],
+  "America/Mazatlan": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Mazatlan-tz"
+    }
+  ],
+  "America/Miquelon": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Miquelon-tz"
+    }
+  ],
+  "America/Menominee": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Menominee-tz"
+    }
+  ],
+  "America/Merida": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Merida-tz"
+    }
+  ],
+  "America/Metlakatla": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Metlakatla-tz"
+    }
+  ],
+  "America/Mexico_City": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Mexico_City-tz"
+    }
+  ],
+  "America/Moncton": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Moncton-tz"
+    }
+  ],
+  "America/Monterrey": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Monterrey-tz"
+    }
+  ],
+  "America/Montevideo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Montevideo-tz"
+    }
+  ],
+  "America/Montserrat": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Montserrat-tz"
+    }
+  ],
+  "America/Nassau": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Nassau-tz"
+    }
+  ],
+  "America/New_York": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-New_York-tz"
+    }, {
+      "op": "union",
+      "source": "manual-polygon",
+      "data": [[[-85.2,32.97],[-85.237,32.954],[-85.282,32.847],[-85.235,32.765],[-85.198,32.656],[-85.182,32.571],[-85.214,32.514],[-85.119,32.444],[-85.094,32.397],[-85.073,32.355],[-85.031,32.339],[-85.02,32.3429],[-85.011,32.358],[-84.86,32.35],[-85.2,32.97]]],
+      "description": "Add areas in Alabama near Phenix City and Lanett that observe eastern time.  The boundaries are a speculative guess."
+    }
+  ],
+  "America/Nome": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Nome-tz"
+    }
+  ],
+  "America/Noronha": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Noronha-tz"
+    }
+  ],
+  "America/North_Dakota/Beulah": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-North_Dakota-Beulah-tz"
+    }
+  ],
+  "America/North_Dakota/Center": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-North_Dakota-Center-tz"
+    }
+  ],
+  "America/North_Dakota/New_Salem": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-North_Dakota-New_Salem-tz"
+    }
+  ],
+  "America/Nuuk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Nuuk-tz"
+    }
+  ],
+  "America/Ojinaga": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Ojinaga-tz"
+    }
+  ],
+  "America/Panama": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Panama"
+    }
+  ],
+  "America/Paramaribo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Paramaribo-tz"
+    }
+  ],
+  "America/Phoenix": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Phoenix-tz"
+    }, {
+      "op": "difference",
+      "source": "overpass",
+      "id": "America-Creston-tz"
+    }
+  ],
+  "America/Port-au-Prince": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Port-au-Prince-tz"
+    }
+  ],
+  "America/Port_of_Spain": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Port_of_Spain-tz"
+    }
+  ],
+  "America/Porto_Velho": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Porto_Velho-tz"
+    }
+  ],
+  "America/Puerto_Rico": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Puerto_Rico-tz-old"
+    }
+  ],
+  "America/Punta_Arenas": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Punta_Arenas-tz"
+    }, {
+      "op": "union",
+      "source": "manual-polygon",
+      "data": [[[-64,-64],[-57.3,-63.4],[-58.8,-62.25],[-59,-61.8],[-63,-62.7],[-64,-64]]],
+      "description": "Add Antarctic Areas.  Includes stations General Bernardo O'Higgins, Great Wall, Bellingshausen and Artigas."
+    }
+  ],
+  "America/Rankin_Inlet": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Rankin_Inlet-tz"
+    }
+  ],
+  "America/Recife": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Recife-tz"
+    }
+  ],
+  "America/Regina": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Regina-tz"
+    }
+  ],
+  "America/Resolute": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Resolute-tz"
+    }
+  ],
+  "America/Rio_Branco": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Rio_Branco-tz"
+    }
+  ],
+  "America/Santarem": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Santarem-tz"
+    }
+  ],
+  "America/Santiago": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Santiago-tz"
+    }
+  ],
+  "America/Santo_Domingo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Santo_Domingo-tz"
+    }
+  ],
+  "America/Sao_Paulo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Sao_Paulo-tz"
+    }
+  ],
+  "America/Scoresbysund": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Scoresbysund-tz"
+    }
+  ],
+  "America/Sitka": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Sitka-tz"
+    }
+  ],
+  "America/St_Barthelemy": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-St_Barthelemy-tz"
+    }
+  ],
+  "America/St_Johns": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-St_Johns-tz"
+    }
+  ],
+  "America/St_Kitts": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-St_Kitts-tz"
+    }
+  ],
+  "America/St_Lucia": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-St_Lucia-tz"
+    }
+  ],
+  "America/St_Thomas": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-St_Thomas-tz"
+    }
+  ],
+  "America/St_Vincent": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-St_Vincent-tz"
+    }
+  ],
+  "America/Swift_Current": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Swift_Current-tz"
+    }
+  ],
+  "America/Tegucigalpa": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Tegucigalpa-tz"
+    }
+  ],
+  "America/Thule": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Thule-tz"
+    }
+  ],
+  "America/Tijuana": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Tijuana-tz"
+    }
+  ],
+  "America/Toronto": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Toronto-tz"
+    }
+  ],
+  "America/Tortola": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Tortola-tz"
+    }
+  ],
+  "America/Vancouver": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Vancouver-tz"
+    }
+  ],
+  "America/Whitehorse": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Whitehorse-tz"
+    }
+  ],
+  "America/Winnipeg": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Winnipeg-tz"
+    }
+  ],
+  "America/Yakutat": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "America-Yakutat-tz"
+    }
+  ],
+  "Antarctica/Casey": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[97,-70],[115,-70],[120,-70],[120,-65],[97,-64],[97,-70]]],
+      "description": "Best guess at extent of Casey timezone based off of map at https://en.wikipedia.org/wiki/Time_in_Antarctica, but subtracting area for Vostok and Mirny Stations."
+    }
+  ],
+  "Antarctica/Davis": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[77.5,-86],[86,-86],[86,-65],[77.5,-68],[77.5,-86]]],
+      "description": "Best guess at extent of Davis timezone."
+    }
+  ],
+  "Antarctica/DumontDUrville": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[120,-70],[135,-70],[150,-70],[160,-70],[160,-65],[150,-65],[135,-65],[120,-65],[120,-70]]],
+      "description": "Best guess at extent of DumontDUrville timezone based off of map at https://en.wikipedia.org/wiki/Time_in_Antarctica, but subtracting area for Concordia Station (Australia/Perth)."
+    }
+  ],
+  "Antarctica/Macquarie": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia"
+    }, {
+      "op": "intersect",
+      "source": "manual-polygon",
+      "data": [[[159,-53],[156,-56],[162,-56],[159,-53]]],
+      "description": "Intersect part of Australia to isolate Macquarie Island."
+    }
+  ],
+  "Antarctica/Mawson": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[55,-86],[60,-86],[75,-86],[77.5,-86],[77.5,-68],[55,-64],[55,-86]]],
+      "description": "Best guess at extent of Mawson timezone.  This timezone also includes the Zhongshan and Bharati stations because apparently they also use UTC+5 https://encyclopedia.timegenie.com/time_zones/antarctica/."
+    }
+  ],
+  "Antarctica/McMurdo": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[-180,-90],[-180,-75],[-150,-75],[-150,-86],[-135,-86],[-120,-86],[-105,-86],[-90,-86],[-80,-86],[-75,-86],[-67.5,-86],[-60,-86],[-58,-86],[-45,-86],[-35,-86],[-30,-86],[-17,-86],[-15,-86],[-5.5,-86],[0,-86],[15,-86],[25,-86],[30,-86],[45,-86],[55,-86],[60,-86],[75,-86],[77.5,-86],[86,-86],[90,-86],[105,-86],[115,-86],[120,-86],[135,-86],[150,-86],[160,-86],[160,-65],[180,-70],[180,-86],[180,-90],[-180,-90]]],
+      "description": "Best guess at McMurdo timezone according to map of timezone at https://en.wikipedia.org/wiki/Time_in_Antarctica.  Includes South Pole timezone since it is linked to McMurdo in the timezone DB."
+    }
+  ],
+  "Antarctica/Palmer": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[-64,-64],[-65,-64.5],[-65.5,-65.25],[-63.8,-64.8],[-64,-64]]],
+      "description": "Best guess at Palmer timezone."
+    }
+  ],
+  "Antarctica/Rothera": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[-80,-86],[-75,-86],[-67.5,-86],[-67.5,-66],[-80,-66],[-80,-86]]],
+      "description": "Best guess at Rothera timezone partially based off of British Antarctic Claims."
+    }, {
+      "op": "union",
+      "source": "manual-polygon",
+      "data": [[[-35,-86],[-30,-86],[-17,-86],[-17,-71],[-35,-75],[-35,-86]]],
+      "description": "Add area near Halley Station."
+    }
+  ],
+  "Antarctica/Syowa": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[25,-86],[30,-86],[45,-86],[55,-86],[55,-64],[25,-68],[25,-86]]],
+      "description": "Best guess at Syowa timezone according to map of timezone at https://en.wikipedia.org/wiki/Time_in_Antarctica.  Also, Mirny station uses the same timezone as Vostok according to https://encyclopedia.timegenie.com/time_zones/antarctica/.  Also includes Japanese Asuka Station."
+    }
+  ],
+  "Antarctica/Troll": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[0,-86],[15,-86],[25,-86],[25,-68],[0,-68],[0,-86]]],
+      "description": "Best guess at Troll timezone according to map of timezone at https://en.wikipedia.org/wiki/Time_in_Antarctica, but subtracting area to west starting with SANAE IV station.  Also includes Maitri and Novolazarevskaya Stations."
+    }
+  ],
+  "Antarctica/Vostok": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[86,-86],[90,-86],[105,-86],[115,-86],[115,-70],[97,-70],[97,-64],[86,-65],[86,-86]]],
+      "description": "Best guess at Vostok timezone according to map of timezone at https://en.wikipedia.org/wiki/Time_in_Antarctica.  Also, Mirny station uses the same timezone as Vostok according to https://encyclopedia.timegenie.com/time_zones/antarctica/."
+    }
+  ],
+  "Arctic/Longyearbyen": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Arctic-Longyearbyen-tz"
+    }
+  ],
+  "Asia/Aden": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Aden-tz"
+    }
+  ],
+  "Asia/Almaty": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Almaty-tz"
+    }
+  ],
+  "Asia/Amman": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Amman-tz"
+    }
+  ],
+  "Asia/Anadyr": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Anadyr-tz"
+    }
+  ],
+  "Asia/Aqtau": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Aqtau-tz"
+    }
+  ],
+  "Asia/Aqtobe": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Aqtobe-tz"
+    }
+  ],
+  "Asia/Ashgabat": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Ashgabat-tz"
+    }
+  ],
+  "Asia/Atyrau": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Atyrau-tz"
+    }
+  ],
+  "Asia/Baghdad": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Baghdad-tz"
+    }
+  ],
+  "Asia/Bahrain": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Bahrain-tz"
+    }
+  ],
+  "Asia/Baku": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Baku-tz"
+    }
+  ],
+  "Asia/Bangkok": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Bangkok-tz"
+    }
+  ],
+  "Asia/Barnaul": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Barnaul-tz"
+    }
+  ],
+  "Asia/Beirut": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Beirut-tz"
+    }
+  ],
+  "Asia/Bishkek": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Bishkek-tz"
+    }
+  ],
+  "Asia/Brunei": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Brunei-tz"
+    }
+  ],
+  "Asia/Chita": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Chita-tz"
+    }
+  ],
+  "Asia/Choibalsan": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Choibalsan-tz"
+    }
+  ],
+  "Asia/Colombo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Colombo-tz"
+    }
+  ],
+  "Asia/Damascus": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Damascus-tz"
+    }
+  ],
+  "Asia/Dhaka": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Dhaka-tz"
+    }
+  ],
+  "Asia/Dili": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Dili-tz"
+    }
+  ],
+  "Asia/Dubai": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Dubai-tz"
+    }
+  ],
+  "Asia/Dushanbe": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Dushanbe-tz"
+    }
+  ],
+  "Asia/Famagusta": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Famagusta-tz"
+    }
+  ],
+  "Asia/Gaza": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Gaza-tz"
+    }
+  ],
+  "Asia/Hebron": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Hebron-tz"
+    }
+  ],
+  "Asia/Ho_Chi_Minh": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Ho_Chi_Minh-tz"
+    }
+  ],
+  "Asia/Hong_Kong": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Hong_Kong-tz"
+    }
+  ],
+  "Asia/Hovd": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Hovd-tz"
+    }
+  ],
+  "Asia/Irkutsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Irkutsk-tz"
+    }
+  ],
+  "Asia/Jakarta": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Jakarta-tz"
+    }
+  ],
+  "Asia/Jayapura": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Jayapura-tz"
+    }
+  ],
+  "Asia/Jerusalem": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Jerusalem-tz"
+    }
+  ],
+  "Asia/Kabul": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Kabul-tz"
+    }
+  ],
+  "Asia/Kamchatka": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Kamchatka-tz"
+    }
+  ],
+  "Asia/Karachi": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Karachi-tz"
+    }
+  ],
+  "Asia/Kathmandu": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Kathmandu-tz"
+    }
+  ],
+  "Asia/Khandyga": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Khandyga-tz"
+    }
+  ],
+  "Asia/Kolkata": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Kolkata-tz"
+    }
+  ],
+  "Asia/Krasnoyarsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Krasnoyarsk-tz"
+    }
+  ],
+  "Asia/Kuala_Lumpur": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Kuala_Lumpur-tz"
+    }
+  ],
+  "Asia/Kuching": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Kuching-tz"
+    }
+  ],
+  "Asia/Kuwait": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Kuwait-tz"
+    }
+  ],
+  "Asia/Macau": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Macau-tz"
+    }
+  ],
+  "Asia/Magadan": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Magadan-tz"
+    }
+  ],
+  "Asia/Makassar": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Makassar-tz"
+    }
+  ],
+  "Asia/Manila": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Manila-tz"
+    }
+  ],
+  "Asia/Muscat": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Muscat-tz"
+    }
+  ],
+  "Asia/Nicosia": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Nicosia-tz"
+    }
+  ],
+  "Asia/Novokuznetsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Novokuznetsk-tz"
+    }
+  ],
+  "Asia/Novosibirsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Novosibirsk-tz"
+    }
+  ],
+  "Asia/Omsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Omsk-tz"
+    }
+  ],
+  "Asia/Oral": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Oral-tz"
+    }
+  ],
+  "Asia/Phnom_Penh": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Phnom_Penh-tz"
+    }
+  ],
+  "Asia/Pontianak": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Pontianak-tz"
+    }
+  ],
+  "Asia/Pyongyang": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Pyongyang-tz"
+    }
+  ],
+  "Asia/Qatar": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Qatar-tz"
+    }
+  ],
+  "Asia/Qostanay": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Qostanay-tz"
+    }
+  ],
+  "Asia/Qyzylorda": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Qyzylorda-tz"
+    }
+  ],
+  "Asia/Riyadh": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Riyadh-tz"
+    }
+  ],
+  "Asia/Sakhalin": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Sakhalin-tz"
+    }
+  ],
+  "Asia/Samarkand": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Samarkand-tz"
+    }
+  ],
+  "Asia/Seoul": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Seoul-tz"
+    }
+  ],
+  "Asia/Shanghai": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Shanghai-tz"
+    }
+  ],
+  "Asia/Singapore": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Singapore-tz"
+    }
+  ],
+  "Asia/Srednekolymsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Srednekolymsk-tz"
+    }
+  ],
+  "Asia/Taipei": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Taipei-tz"
+    }
+  ],
+  "Asia/Tashkent": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Tashkent-tz"
+    }
+  ],
+  "Asia/Tbilisi": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Tbilisi-tz"
+    }
+  ],
+  "Asia/Tehran": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Tehran-tz"
+    }
+  ],
+  "Asia/Thimphu": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Thimphu-tz"
+    }
+  ],
+  "Asia/Tokyo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Tokyo-tz"
+    }
+  ],
+  "Asia/Tomsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Tomsk-tz"
+    }
+  ],
+  "Asia/Ulaanbaatar": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Ulaanbaatar-tz"
+    }
+  ],
+  "Asia/Urumqi": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Urumqi-tz"
+    }
+  ],
+  "Asia/Ust-Nera": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Ust-Nera-tz"
+    }
+  ],
+  "Asia/Vientiane": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Vientiane-tz"
+    }
+  ],
+  "Asia/Vladivostok": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Vladivostok-tz"
+    }
+  ],
+  "Asia/Yakutsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Yakutsk-tz"
+    }
+  ],
+  "Asia/Yangon": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Yangon-tz"
+    }
+  ],
+  "Asia/Yekaterinburg": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Yekaterinburg-tz"
+    }
+  ],
+  "Asia/Yerevan": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Asia-Yerevan-tz"
+    }
+  ],
+  "Atlantic/Azores": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-Azores-tz"
+    }
+  ],
+  "Atlantic/Bermuda": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-Bermuda-tz"
+    }
+  ],
+  "Atlantic/Canary": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-Canary-tz"
+    }
+  ],
+  "Atlantic/Cape_Verde": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-Cape_Verde-tz"
+    }
+  ],
+  "Atlantic/Faroe": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-Faroe-tz"
+    }
+  ],
+  "Atlantic/Madeira": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-Madeira-tz"
+    }
+  ],
+  "Atlantic/Reykjavik": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-Reykjavik-tz"
+    }
+  ],
+  "Atlantic/South_Georgia": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-South_Georgia-tz"
+    }
+  ],
+  "Atlantic/St_Helena": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-St_Helena-tz"
+    }
+  ],
+  "Atlantic/Stanley": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Atlantic-Stanley-tz"
+    }
+  ],
+  "Australia/Adelaide": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Adelaide-tz"
+    }
+  ],
+  "Australia/Brisbane": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Brisbane-tz"
+    }
+  ],
+  "Australia/Broken_Hill": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Broken_Hill-tz"
+    }
+  ],
+  "Australia/Darwin": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Darwin-tz"
+    }
+  ],
+  "Australia/Eucla": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Eucla-tz"
+    }
+  ],
+  "Australia/Hobart": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Hobart-tz"
+    }
+  ],
+  "Australia/Lindeman": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Lindeman-tz"
+    }
+  ],
+  "Australia/Lord_Howe": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Lord_Howe-tz"
+    }
+  ],
+  "Australia/Melbourne": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Melbourne-tz"
+    }
+  ],
+  "Australia/Perth": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Perth-tz"
+    }, {
+      "op": "union",
+      "source": "manual-polygon",
+      "data": [[[115,-86],[120,-86],[135,-86],[150,-86],[160,-86],[160,-70],[150,-70],[135,-70],[120,-70],[115,-70],[115,-86]]],
+      "description": "According to chatter on the tz mailing list, the Antarctic Concordia Station uses Australia/Perth time.  https://mm.icann.org/pipermail/tz/2013-December/020520.html"
+    }
+  ],
+  "Australia/Sydney": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Australia-Sydney-tz"
+    }
+  ],
+  "Etc/UTC": [
+    {
+      "op": "init",
+      "source": "manual-polygon",
+      "data": [[[-17,-86],[-15,-86],[-5.5,-86],[-5.5,-69],[-17,-71],[-17,-86]]],
+      "description": "Add Antarctic Station Neumayer III Station.  See discussion at https://github.com/evansiroky/timezone-boundary-builder/issues/61"
+    }
+  ],
+  "Europe/Amsterdam": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Amsterdam-tz"
+    }
+  ],
+  "Europe/Andorra": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Andorra-tz"
+    }
+  ],
+  "Europe/Astrakhan": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Astrakhan-tz"
+    }
+  ],
+  "Europe/Athens": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Athens-tz"
+    }
+  ],
+  "Europe/Belgrade": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Belgrade-tz"
+    }
+  ],
+  "Europe/Berlin": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Berlin-tz"
+    }
+  ],
+  "Europe/Bratislava": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Bratislava-tz"
+    }
+  ],
+  "Europe/Brussels": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Brussels-tz"
+    }
+  ],
+  "Europe/Bucharest": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Bucharest-tz"
+    }
+  ],
+  "Europe/Budapest": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Budapest-tz"
+    }
+  ],
+  "Europe/Busingen": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Busingen-tz"
+    }
+  ],
+  "Europe/Chisinau": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Chisinau-tz"
+    }
+  ],
+  "Europe/Copenhagen": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Copenhagen-tz"
+    }
+  ],
+  "Europe/Dublin": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Dublin-tz"
+    }
+  ],
+  "Europe/Gibraltar": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Gibraltar-tz"
+    }
+  ],
+  "Europe/Guernsey": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Guernsey-tz"
+    }
+  ],
+  "Europe/Helsinki": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Helsinki-tz"
+    }
+  ],
+  "Europe/Isle_of_Man": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Isle_of_Man-tz"
+    }
+  ],
+  "Europe/Istanbul": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Istanbul-tz"
+    }
+  ],
+  "Europe/Jersey": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Jersey-tz"
+    }
+  ],
+  "Europe/Kaliningrad": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Kaliningrad-tz"
+    }
+  ],
+  "Europe/Kyiv": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Kyiv-tz"
+    }
+  ],
+  "Europe/Kirov": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Kirov-tz"
+    }
+  ],
+  "Europe/Lisbon": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Lisbon-tz"
+    }
+  ],
+  "Europe/Ljubljana": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Ljubljana-tz"
+    }
+  ],
+  "Europe/London": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-London-tz"
+    }
+  ],
+  "Europe/Luxembourg": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Luxembourg-tz"
+    }
+  ],
+  "Europe/Madrid": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Madrid-tz"
+    }
+  ],
+  "Europe/Malta": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Malta-tz"
+    }
+  ],
+  "Europe/Mariehamn": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Mariehamn-tz"
+    }
+  ],
+  "Europe/Minsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Minsk-tz"
+    }
+  ],
+  "Europe/Monaco": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Monaco-tz"
+    }
+  ],
+  "Europe/Moscow": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Moscow-tz"
+    }
+  ],
+  "Europe/Oslo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Oslo-tz"
+    }
+  ],
+  "Europe/Paris": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Paris-tz"
+    }
+  ],
+  "Europe/Podgorica": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Podgorica-tz"
+    }
+  ],
+  "Europe/Prague": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Prague-tz"
+    }
+  ],
+  "Europe/Riga": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Riga-tz"
+    }
+  ],
+  "Europe/Rome": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Rome-tz"
+    }
+  ],
+  "Europe/Samara": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Samara-tz"
+    }
+  ],
+  "Europe/San_Marino": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-San_Marino-tz"
+    }
+  ],
+  "Europe/Sarajevo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Sarajevo-tz"
+    }
+  ],
+  "Europe/Saratov": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Saratov-tz"
+    }
+  ],
+  "Europe/Simferopol": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Simferopol-tz"
+    }
+  ],
+  "Europe/Skopje": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Skopje-tz"
+    }
+  ],
+  "Europe/Sofia": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Sofia-tz"
+    }
+  ],
+  "Europe/Stockholm": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Stockholm-tz"
+    }
+  ],
+  "Europe/Tallinn": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Tallinn-tz"
+    }
+  ],
+  "Europe/Tirane": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Tirane-tz"
+    }
+  ],
+  "Europe/Ulyanovsk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Ulyanovsk-tz"
+    }
+  ],
+  "Europe/Vaduz": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Vaduz-tz"
+    }
+  ],
+  "Europe/Vatican": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Vatican-tz"
+    }
+  ],
+  "Europe/Vienna": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Vienna-tz"
+    }, {
+      "op": "union",
+      "source": "overpass",
+      "id": "Jungholz, AT",
+      "description": "Add back Jungholz as union to Austria. See https://github.com/evansiroky/timezone-boundary-builder/issues/93"
+    }
+  ],
+  "Europe/Vilnius": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Vilnius-tz"
+    }
+  ],
+  "Europe/Volgograd": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Volgograd-tz"
+    }
+  ],
+  "Europe/Warsaw": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Warsaw-tz"
+    }
+  ],
+  "Europe/Zagreb": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Zagreb-tz"
+    }
+  ],
+  "Europe/Zurich": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Europe-Zurich-tz"
+    }
+  ],
+  "Indian/Antananarivo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Antananarivo-tz"
+    }
+  ],
+  "Indian/Chagos": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Chagos-tz"
+    }
+  ],
+  "Indian/Christmas": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Christmas-tz"
+    }
+  ],
+  "Indian/Cocos": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Cocos-tz"
+    }
+  ],
+  "Indian/Comoro": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Comoro-tz"
+    }
+  ],
+  "Indian/Kerguelen": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Kerguelen-tz"
+    }
+  ],
+  "Indian/Mahe": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Mahe-tz"
+    }
+  ],
+  "Indian/Maldives": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Maldives-tz"
+    }
+  ],
+  "Indian/Mauritius": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Mauritius-tz"
+    }
+  ],
+  "Indian/Mayotte": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Mayotte-tz"
+    }
+  ],
+  "Indian/Reunion": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Indian-Reunion-tz"
+    }
+  ],
+  "Pacific/Apia": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Apia-tz"
+    }
+  ],
+  "Pacific/Auckland": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Auckland-tz"
+    }
+  ],
+  "Pacific/Bougainville": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Bougainville-tz"
+    }
+  ],
+  "Pacific/Chatham": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Chatham-tz"
+    }
+  ],
+  "Pacific/Chuuk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Chuuk-tz"
+    }
+  ],
+  "Pacific/Easter": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Easter-tz"
+    }
+  ],
+  "Pacific/Efate": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Efate-tz"
+    }
+  ],
+  "Pacific/Fakaofo": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Fakaofo-tz"
+    }
+  ],
+  "Pacific/Fiji": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Fiji-tz"
+    }
+  ],
+  "Pacific/Funafuti": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Funafuti-tz"
+    }
+  ],
+  "Pacific/Galapagos": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Galapagos-tz"
+    }
+  ],
+  "Pacific/Gambier": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Gambier-tz"
+    }
+  ],
+  "Pacific/Guadalcanal": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Guadalcanal-tz"
+    }
+  ],
+  "Pacific/Guam": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Guam-tz"
+    }
+  ],
+  "Pacific/Honolulu": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Honolulu-tz"
+    }
+  ],
+  "Pacific/Kanton": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Kanton-tz"
+    }
+  ],
+  "Pacific/Kiritimati": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Kiritimati-tz"
+    }
+  ],
+  "Pacific/Kosrae": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Kosrae-tz"
+    }
+  ],
+  "Pacific/Kwajalein": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Kwajalein-tz"
+    }
+  ],
+  "Pacific/Majuro": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Majuro-tz"
+    }
+  ],
+  "Pacific/Marquesas": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Marquesas-tz"
+    }
+  ],
+  "Pacific/Midway": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Midway-tz"
+    }
+  ],
+  "Pacific/Nauru": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Nauru-tz"
+    }
+  ],
+  "Pacific/Niue": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Niue-tz"
+    }
+  ],
+  "Pacific/Norfolk": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Norfolk-tz"
+    }
+  ],
+  "Pacific/Noumea": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Noumea-tz"
+    }
+  ],
+  "Pacific/Pago_Pago": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Pago_Pago-tz"
+    }
+  ],
+  "Pacific/Palau": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Palau-tz"
+    }
+  ],
+  "Pacific/Pitcairn": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Pitcairn-tz"
+    }
+  ],
+  "Pacific/Pohnpei": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Pohnpei-tz"
+    }
+  ],
+  "Pacific/Port_Moresby": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Port_Moresby-tz"
+    }
+  ],
+  "Pacific/Rarotonga": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Rarotonga-tz"
+    }
+  ],
+  "Pacific/Saipan": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Saipan-tz"
+    }
+  ],
+  "Pacific/Tahiti": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Tahiti-tz"
+    }
+  ],
+  "Pacific/Tarawa": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Tarawa-tz"
+    }
+  ],
+  "Pacific/Tongatapu": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Tongatapu-tz"
+    }
+  ],
+  "Pacific/Wake": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Wake-tz"
+    }
+  ],
+  "Pacific/Wallis": [
+    {
+      "op": "init",
+      "source": "overpass",
+      "id": "Pacific-Wallis-tz"
+    }
+  ]
+}
diff --git a/lightningcast/visualize_validation.py b/lightningcast/visualize_validation.py
index 7737143db8955ca75ff83b6c87e9513c9af20f0d..0f37f42e97322fa377facf92b567eb14b43e3763 100755
--- a/lightningcast/visualize_validation.py
+++ b/lightningcast/visualize_validation.py
@@ -1315,21 +1315,60 @@ def verification(conv_preds, labels, outdir, climo=-1):
 
 # ------------------------------------------------------------------------------------------------------------------
 def spatial_verification(
-    preds, targets, thresholds=np.arange(0.0, 0.91, 0.1), target_thresh=0.1
+    preds, targets, thresholds=np.arange(0.0, 0.91, 0.1), target_thresh=0.1, outdir='',
 ):
 
-    ny, nx = preds.shape
+    if len(preds.shape) == 2:
+        preds = np.expand_dims(preds, axis=0)
+    npatches, ny, nx = preds.shape
     hits, misses, FAs = [
         np.zeros((len(thresholds), ny, nx), dtype=np.int32) for _ in range(3)
     ]
 
-    for k, t in enumerate(thresholds):
-        hit_ind = np.logical_and(preds >= t, targets >= target_thresh)
-        hits[k][hit_ind] += 1
-        FA_ind = np.logical_and(preds >= t, targets < target_thresh)
-        FAs[k][FA_ind] += 1
-        miss_ind = np.logical_and(preds < t, targets >= target_thresh)
-        misses[k][miss_ind] += 1
+    for p in range(npatches):  # accumulate per-pixel contingency counts over patches
+        for k, t in enumerate(thresholds):
+            hit_ind = np.logical_and(preds[p] >= t, targets[p] >= target_thresh)
+            hits[k][hit_ind] += 1
+            FA_ind = np.logical_and(preds[p] >= t, targets[p] < target_thresh)
+            FAs[k][FA_ind] += 1
+            miss_ind = np.logical_and(preds[p] < t, targets[p] >= target_thresh)
+            misses[k][miss_ind] += 1
+
+    if len(outdir):  # optionally dump the raw counts to netCDF for later aggregation
+        datasets = {}
+        global_atts = {}
+        dims = {"t": len(thresholds), "y": ny, "x": nx}
+        datasets["hits"] = {
+            "data": hits,
+            "dims": ("t", "y", "x"),
+            "atts": {},
+        }
+        datasets["misses"] = {
+            "data": misses,
+            "dims": ("t", "y", "x"),
+            "atts": {},
+        }
+        datasets["FAs"] = {
+            "data": FAs,
+            "dims": ("t", "y", "x"),
+            "atts": {},
+        }
+        datasets["thresholds"] = {
+            "data": np.array(thresholds, dtype=float),
+            "dims": ("t",),  # one-element tuple; bare ("t") is just the string "t"
+            "atts": {},
+        }
+        global_atts["sample_shape"] = f"({npatches},{ny},{nx})"
+        global_atts["created"] = datetime.utcnow().strftime(
+            "%Y%m%d-%H%M UTC"
+        )
+        utils.write_netcdf(
+            f"{outdir}/spatial_counts_month.nc",
+            datasets,
+            dims,
+            atts=global_atts,
+            gzip=False,
+        )
 
     return hits, misses, FAs
 
@@ -1449,7 +1488,7 @@ if __name__ == "__main__":
             if (not (args.outdir))
             else args.outdir[0]
         )
-        utils.mkdir_p(outdir)
+        os.makedirs(outdir, exist_ok=True)
         all_preds = preds_labs_dict1["preds"]
         all_labels = preds_labs_dict1["labels"]
         datetimes = preds_labs_dict1["datetimes"]
@@ -1564,7 +1603,7 @@ if __name__ == "__main__":
             # Some bulk verification for "ALL"
             print("Calc verification scores for ALL...")
             tmpdir = f"{outdir}/ALL/"
-            utils.mkdir_p(tmpdir)
+            os.makedirs(tmpdir, exist_ok=True)
             scores_dict = verification(good_preds, good_labels, tmpdir)
             pickle.dump(scores_dict, open(f"{tmpdir}/scores_dict.pkl", "wb"))
 
@@ -1584,7 +1623,7 @@ if __name__ == "__main__":
                     npreds = len(good_preds)
                     print(f"Calc verification scores for month={MM}...")
                     tmpdir = f"{outdir}/month{MM}/"
-                    utils.mkdir_p(tmpdir)
+                    os.makedirs(tmpdir, exist_ok=True)
                     scores_dict = verification(good_preds, good_labels, tmpdir)
                     scores_dict["npreds"] = npreds
                     pickle.dump(scores_dict, open(f"{tmpdir}/scores_dict.pkl", "wb"))
@@ -1683,7 +1722,7 @@ if __name__ == "__main__":
                     npreds = len(good_preds)
                     print(f"Calc verification scores for local hour={HH}...")
                     tmpdir = f"{outdir}/hour{HH}/"
-                    utils.mkdir_p(tmpdir)
+                    os.makedirs(tmpdir, exist_ok=True)
                     scores_dict = verification(good_preds, good_labels, tmpdir)
                     scores_dict["npreds"] = npreds
                     pickle.dump(scores_dict, open(f"{tmpdir}/scores_dict.pkl", "wb"))
@@ -1761,25 +1800,18 @@ if __name__ == "__main__":
                                 )
                             ).hour
 
-            # Screen out very easy predictions to reduce RAM. These are predictions with P<0.01 and no lightning.
-            ind = (all_preds >= 0.01) | (all_labels >= 1)
-            all_preds = all_preds[ind]
-            all_labels = all_labels[ind]
-            if args.do_hourly_verification:
-                LSTs = LSTs[ind]
 
             # performance for all predictions
-            good_preds = all_preds.flatten()
-            good_labels = all_labels.flatten()
+    #        good_preds = all_preds.flatten()
+    #        good_labels = all_labels.flatten()
             # Some bulk verification for "ALL"
-            print("Calc verification scores for ALL...")
-            tmpdir = f"{outdir}/ALL/"
-            utils.mkdir_p(tmpdir)
-            scores_dict = verification(
-                good_preds, good_labels, tmpdir, climo=0.01
-            )  # climo hard-coded
-            pickle.dump(scores_dict, open(f"{tmpdir}/scores_dict.pkl", "wb"))
-            sys.exit()
+    #        print("Calc verification scores for ALL...")
+    #        tmpdir = f"{outdir}/ALL/"
+    #        os.makedirs(tmpdir, exist_ok=True)
+    #        scores_dict = verification(
+    #            good_preds, good_labels, tmpdir, climo=0.01
+    #        )  # climo hard-coded
+    #        pickle.dump(scores_dict, open(f"{tmpdir}/scores_dict.pkl", "wb"))
 
             # month of each sample
             months = np.array([dt.month for dt in datetimes])
@@ -1797,10 +1829,10 @@ if __name__ == "__main__":
                     npreds = len(good_preds)
                     print(f"Calc verification scores for month={MM}...")
                     tmpdir = f"{outdir}/month{MM}/"
-                    utils.mkdir_p(tmpdir)
-                    scores_dict = verification(good_preds, good_labels, tmpdir)
-                    scores_dict["npreds"] = npreds
-                    pickle.dump(scores_dict, open(f"{tmpdir}/scores_dict.pkl", "wb"))
+                    os.makedirs(tmpdir, exist_ok=True)
+               #     scores_dict = verification(good_preds, good_labels, tmpdir)
+               #     scores_dict["npreds"] = npreds
+               #     pickle.dump(scores_dict, open(f"{tmpdir}/scores_dict.pkl", "wb"))
 
                     if args.do_spatial_verification:
                         # spatial verification
@@ -1819,8 +1851,6 @@ if __name__ == "__main__":
                         ]
                         # get halfsizes for possibly strided patches
                         sy, sx = all_preds[0, :, :].shape
-                        hsy = sy // 2
-                        hsx = sx // 2
 
                         all_hits = np.zeros((len(thresholds), sy, sx), dtype=np.int32)
                         all_misses = np.copy(all_hits)
@@ -1873,21 +1903,22 @@ if __name__ == "__main__":
                             gzip=False,
                         )
 
-            # by hour
-            thehours = np.arange(24)
-            for hh in thehours:
-                HH = str(int(hh)).zfill(2)
-                good_ind = np.where(LSTs == hh)
-                if len(good_ind[0]) > 0:
-                    good_preds = all_preds[good_ind]
-                    good_labels = all_labels[good_ind]
-                    npreds = len(good_preds)
-                    print(f"Calc verification scores for local hour={HH}...")
-                    tmpdir = f"{outdir}/hour{HH}/"
-                    utils.mkdir_p(tmpdir)
-                    scores_dict = verification(good_preds, good_labels, tmpdir)
-                    scores_dict["npreds"] = npreds
-                    pickle.dump(scores_dict, open(f"{tmpdir}/scores_dict.pkl", "wb"))
+            if args.do_hourly_verification:
+                # by hour
+                thehours = np.arange(24)
+                for hh in thehours:
+                    HH = str(int(hh)).zfill(2)
+                    good_ind = np.where(LSTs == hh)
+                    if len(good_ind[0]) > 0:
+                        good_preds = all_preds[good_ind]
+                        good_labels = all_labels[good_ind]
+                        npreds = len(good_preds)
+                        print(f"Calc verification scores for local hour={HH}...")
+                        tmpdir = f"{outdir}/hour{HH}/"
+                        os.makedirs(tmpdir, exist_ok=True)
+                        scores_dict = verification(good_preds, good_labels, tmpdir)
+                        scores_dict["npreds"] = npreds
+                        pickle.dump(scores_dict, open(f"{tmpdir}/scores_dict.pkl", "wb"))
 
     # now make the figures
 
diff --git a/setup.sh b/setup.sh
index 5f2ee652b47f7e0e85024ac7f14ba93437202cb9..8ed07327ce21c200eae7c290c691a87a49d75f33 100644
--- a/setup.sh
+++ b/setup.sh
@@ -23,19 +23,24 @@ pip install -e .) || oops "failed to build python environment."
 #glm agg send to LDM (UW-CIMSS only)
 (cd ${PLTG}/lightningcast/
 gcc glm_ldmsend.c
-mv a.out glm_ldmsend.out) || oops "failed to compile src/glm_ldmsend.c"
+mv a.out glm_ldmsend.out) || oops "failed to compile lightningcast/glm_ldmsend.c"
 
 #cp ENI files to lustre (UW-CIMSS only)
 (cd ${PLTG}/lightningcast/
 gcc cp_eni.c
-mv a.out cp_eni.out) || oops "failed to compile src/cp_eni.c"
+mv a.out cp_eni.out) || oops "failed to compile lightningcast/cp_eni.c"
 
 
 # Add to crontab
 
 #Purge LightningCast on-demand DSS events if old
-#(crontab -l 2>/dev/null; echo "0 0,12 * * * (source ${PLTG}/config.sh && /home/jcintineo/mambaforge/envs/TF/bin/python ${PLTG}/src/purge_ondemand_events.py --database=lightning_dss --purge_thresh=0.5) >/dev/null 2>&1") | crontab -
+#(crontab -l 2>/dev/null; echo "0 0,12 * * * (source ${PLTG}/config.sh && /home/jcintineo/mambaforge/envs/TF/bin/python ${PLTG}/lightningcast/purge_ondemand_events.py --database=lightning_dss --purge_thresh=0.5) >/dev/null 2>&1") | crontab -
+
+#Purge old DB entries (>365 days) for static meteograms (stadiums and airports)
+#(crontab -l 2>/dev/null; echo "0 10 * * * (source ${PLTG}/config.sh && /home/jcintineo/mambaforge/envs/TF/bin/python ${PLTG}/lightningcast/purge_static_records.py ) >/dev/null 2>&1") | crontab -
+
 #Make NIFC current fires layer and upload to RealEarth
-#(crontab -l 2>/dev/null; echo "*/5 * * * * (source ${PLTG}/config.sh && /home/jcintineo/mambaforge/envs/TF/bin/python ${PLTG}/src/make_NIFC_layer.py /apollo/grain/common/NIFC/Current-Fire-Location.geojson -ru ${PLTG}/src/re_upload) >/dev/null 2>&1") | crontab -
+#(crontab -l 2>/dev/null; echo "*/5 * * * * (source ${PLTG}/config.sh && /home/jcintineo/mambaforge/envs/TF/bin/python ${PLTG}/lightningcast/make_NIFC_layer.py /ships19/grain/common/NIFC/Current-Fire-Location.geojson -ru ${PLTG}/lightningcast/realtime/re_upload) >/dev/null 2>&1") | crontab -
+
 #(crontab -l 2>/dev/null; echo "*/10 * * * * (source /data/PLTG/lightningcast/config.sh && /home/jcintineo/mambaforge/envs/TF/bin/python /data/PLTG/lightningcast/lightningcast/check_DQFs.py) >/dev/null 2>&1") | crontab -
 
diff --git a/test/verification/geojson-verification.py b/test/verification/geojson-verification.py
new file mode 100644
index 0000000000000000000000000000000000000000..b16d6c9ac2d7901db997fc027240502952f3dcd1
--- /dev/null
+++ b/test/verification/geojson-verification.py
@@ -0,0 +1,185 @@
+import argparse
+import copy
+import math
+import os
+import re
+import sys
+
+import geojson
+
+# Assumptions:
+# 1. Polygons can be treated as arrays of lineStrings. Source: https://www.mongodb.com/docs/manual/reference/geojson/
+#
+# 2. If files are equivalent then LineStrings will be identical between files even if they are in different locations
+# between files (such as being in a Polygon in one and not the other). By identical I mean they have identical properties and
+# coordinates within one epsilon (default 0.001). If this is false then verification will fail. If verification fails
+# but a manual review seems to show equivalent geojsons then probably what is happening is this assumption is false for
+# those files.
+#
+# Accounted for:
+# 1. LineStrings from some runs seem to be their own thing while in other runs the identical linestring will be part of
+# a polygon. Source: Looking through data. If this is false verification will still run fine.
+
+
+parser = argparse.ArgumentParser(description='Compare GeoJson Files. Takes two positional arguments: Truth and test. They are paths either directly to '
+                    ' json files to compare or two directories with the same subdirectory structure. Like '
+                    ' two lightningcast output dirs. If passing in paths to directories add the -m flag. '
+                    ' You can also set --epsilon (-e) value to add tolerances for slight variation in coordinates.')
+
+# Positional arguments
+parser.add_argument("truth_data", type=str, help="Path to the truth data file")
+parser.add_argument("test_data", type=str, help="Path to the test data file")
+
+# Optional non-positional argument
+parser.add_argument("-e", "--epsilon", type=float, default=0.001, help="Epsilon value (default: 0.001)")
+parser.add_argument("-i", "--ignore_endtime", default=False, action='store_true', help="Ignore endtime in json "
+                                                                                       "filenames")
+parser.add_argument("-m", "--multi", default=False, action='store_true', help="If passed we take in paths to"
+                                                                              " 2 lc output directories and "
+                                                                              "compare their contents.")
+
+args = parser.parse_args()
+
+truth_input = args.truth_data  # "/home/lpfantz/work/repos/lc-repos/testdata/LightningCast_testdata/output/20230704/goes_east/RadC/json/1fl-60min/LtgCast-RadC_v1-2-0_g16_s202307042001183_e202307042003569_c202405081312210.json"
+test_input = args.test_data  # "/home/lpfantz/work/repos/lc-repos/master/lightningcast/lightningcast/OUTPUT/20230704/goes_east/RadC/json/1fl-60min/LtgCast-RadC_v1-2-1_g16_s202307042001183_e202307042003569_c202405282109004.json"
+epsilon = args.epsilon
+multi = args.multi
+ignore_endtime = args.ignore_endtime
+
+dataset_match = ['platform', 'platform_type', 'instrument']
+dataset_match_len = ['features']
+feature_properties = ['OPACITY', 'BOPACITY', 'COLOR', 'BCOLOR', 'WIDTH', 'INFO']  # TODO Make sure properties also match
+
+
+def compare_files(truth_json, test_json, dataset_match, dataset_match_len, feature_properties, epsilon):  # raises on first mismatch; returns True when equivalent
+    # Load truth_json
+    with open(truth_json, 'r') as file:
+        truth_data = geojson.load(file)
+
+    # Load test_json
+    with open(test_json, 'r') as file:
+        test_data = geojson.load(file)
+
+    for item in dataset_match:  # top-level attributes that must match exactly
+        if truth_data[item] != test_data[item]:
+            raise Exception(f"{item}: truth: {truth_data[item]} does not match test: {test_data[item]}")
+
+    for item in dataset_match_len:  # top-level arrays that must match in length
+        if len(truth_data[item]) != len(test_data[item]):
+            raise Exception(
+                f"number of {item}: truth: {len(truth_data[item])} does not match test: {len(test_data[item])}")
+
+    def get_sorted_list_of_list_of_coords(features):  # flatten Polygons into (ring, properties) pairs alongside LineStrings
+        out_coords = []
+        features = copy.deepcopy(features)
+        for feat in features:
+            if feat['geometry']['type'] == 'LineString':
+                out_coords.append((feat['geometry']['coordinates'], feat['properties']))
+            else:
+                for sub_coord in feat['geometry']['coordinates']:  # Polygon: each ring compared independently (see assumption 1)
+                    out_coords.append((sub_coord, feat['properties']))
+        out_coords.sort()  # align truth/test positionally; NOTE(review): a tie on coords falls through to dict comparison -> TypeError; assumed rare
+        return out_coords
+
+    def compare_lists(truth_list_input, test_list_input, feature_properties):  # one (coords, properties) pair; False on any difference
+
+        truth_list = truth_list_input[0]
+        truth_properties = truth_list_input[1]
+
+        test_list = test_list_input[0]
+        test_properties = test_list_input[1]
+
+        for item in feature_properties:  # style/metadata properties must match exactly
+            if truth_properties[item] != test_properties[item]:
+                return False
+
+        if len(test_list) != len(truth_list):
+            return False
+
+        truth_list = copy.deepcopy(truth_list)
+        test_list = copy.deepcopy(test_list)
+
+        for i in range(len(truth_list)):
+            truth_coords = truth_list[i]
+            test_coords = test_list[i]
+            if len(truth_coords) != 2 or len(test_coords) != 2:  # each entry must be a (lon, lat) pair
+                raise Exception(f"Coords appear to be misformed: truth: {truth_coords}, test: {test_coords}")
+            for j in range(2):
+                if not math.isclose(truth_coords[j], test_coords[j], rel_tol=epsilon):  # relative, not absolute, tolerance
+                    return False
+        return True
+
+    truth_coord_list = get_sorted_list_of_list_of_coords(truth_data['features'])
+    test_coord_list = get_sorted_list_of_list_of_coords(test_data['features'])
+
+    if len(truth_coord_list) != len(test_coord_list):
+        raise Exception("Number of coord lists in truth not the same as in test!")
+
+    for i in range(len(truth_coord_list)):  # lists are sorted, so compare index-by-index
+        if not compare_lists(truth_coord_list[i], test_coord_list[i], feature_properties):
+            raise Exception("GeoJsons don't match!")
+
+    return True
+
+
+def get_files_in_path(path, extension='json'):  # sorted relative paths of all *.<extension> files under `path`
+    out_arr = []
+    start_dir = os.getcwd()
+    os.chdir(path)  # walk relative to `path` so paths are comparable across two trees
+    for root, dirs, files in os.walk('./'):
+        for file in files:
+            if file.endswith(extension):
+                out_arr.append(os.path.join(root, file))
+    out_arr.sort()  # deterministic order for pairwise comparison
+    os.chdir(start_dir)  # NOTE(review): an exception during the walk skips this restore; try/finally would be safer
+    return out_arr
+
+
+def replace_in_path(truth, test, *reg_patts):  # strip every match of each regex from both paths
+    for reg_patt in reg_patts:
+        truth = re.sub(reg_patt, '', truth)  # deletion, not replacement: volatile tokens are removed before comparison
+        test = re.sub(reg_patt, '', test)
+    return truth, test
+
+
+def compare_json_paths(truth, test, ignore_endtime=False):
+    reg_ex = [r'c\d{15}\.json', r'v\d+\-\d+\-\d+']  # creation-time + version tokens (pattern was duplicated; one pass suffices)
+    if ignore_endtime:
+        reg_ex.append(r'e\d{15}')  # optionally ignore the end-time token too
+    truth, test = replace_in_path(truth, test, *reg_ex)
+
+    if truth == test:
+        return True
+    else:
+        return False
+
+
+def get_multi_jsons(truth_path, test_path, ignore_endtime=False):  # pair up matching jsons from the two trees
+    truth_json_paths = get_files_in_path(truth_path)
+    test_json_paths = get_files_in_path(test_path)
+    out_arr = []
+    if len(truth_json_paths) != len(test_json_paths):  # BUG fix: was comparing test count to itself, so this guard never fired
+        raise Exception("Json count mismatch between test dir and truth dir!")
+
+    for i in range(len(truth_json_paths)):
+        if not compare_json_paths(truth_json_paths[i], test_json_paths[i], ignore_endtime=ignore_endtime):
+            raise Exception(
+                f"Json mismatch between test dir and truth dir! Expected to match but didn't: truth: {truth_json_paths[i]}, test: {test_json_paths[i]}")
+        out_arr.append((truth_path + '/' + truth_json_paths[i], test_path + '/' + test_json_paths[i]))
+
+    return out_arr
+
+
+try:  # compare a single pair of files, or (with -m) every json in two directory trees
+    if multi:
+        for path_set in get_multi_jsons(truth_input, test_input, ignore_endtime=ignore_endtime):
+            compare_files(path_set[0], path_set[1], dataset_match, dataset_match_len, feature_properties, epsilon)
+            print(f"GeoJsons Match! Truth: {path_set[0]}, test: {path_set[1]}")
+    else:
+        compare_files(truth_input, test_input, dataset_match, dataset_match_len, feature_properties, epsilon)
+        print("GeoJsons Match!")
+except Exception as e:  # any mismatch raised above lands here
+    print(f"GeoJson comparison failed! Truth: {truth_input}, test: {test_input}. Error: {e}")
+    sys.exit(1)  # non-zero exit so CI marks the comparison as failed
+
+print("All GeoJsons Match!!!")