# aeolus_amv.py
import datetime
import os
from datetime import timezone
import glob
import numpy as np
import xarray as xr
from netCDF4 import Dataset
from aeolus.geos_nav import GEOSNavigation
from util.util import haversine_np
from amv.intercompare import best_fit, bin_data_by, get_press_bin_ranges, spd_dir_from_uv, uv_from_spd_dir, direction_difference
import math
from metpy.units import units


amv_file_duration = 60  # minutes
half_width = 25  # search box centered on AEOLUS profile (FGF coordinates)
num_elems = 5424
num_lines = 5424


class MyGenericException(Exception):
    def __init__(self, message):
        super().__init__(message)
        self.message = message


class AMVFiles:

    def __init__(self, files_path, file_time_span, pattern, band='14'):
        self.flist = glob.glob(files_path + pattern)
        self.band = band
        self.ftimes = []
        for pname in self.flist:  # TODO: make better with regular expressions (someday)
            dto = self.get_datetime(pname)
            dto_start = dto
            dto_end = dto + datetime.timedelta(minutes=file_time_span)
            self.ftimes.append((dto_start.timestamp(), dto_end.timestamp()))

    def get_datetime(self, pathname):
        pass

    def get_navigation(self):
        pass

    def get_file_containing_time(self, timestamp):
        k = -1
        for i in range(len(self.ftimes)):
            if (timestamp >= self.ftimes[i][0]) and (timestamp < self.ftimes[i][1]):
                k = i
                break
        if k < 0:
            return None, None, None

        return self.flist[k], self.ftimes[k], k

    def get_parameters(self):
        pass

    def get_out_parameters(self):
        pass

    def get_meta_dict(self):
        pass


class Framework(AMVFiles):
    def __init__(self, files_path, file_time_span, band='14'):
        super().__init__(files_path, file_time_span, '*WINDS_AMV_EN-'+band+'*.nc', band)

    def get_navigation(self):
        return GEOSNavigation(sub_lon=-75.0)

    def get_datetime(self, pathname):
        fname = os.path.split(pathname)[1]
        toks = fname.split('_')
        dstr = toks[4]
        tstr = toks[5]
        dtstr = dstr + tstr
        dto = datetime.datetime.strptime(dtstr, '%Y%j%H%M').replace(tzinfo=timezone.utc)

        return dto


class OPS(AMVFiles):
    def __init__(self, files_path, file_time_span, band='14'):
        super().__init__(files_path, file_time_span, 'OR_ABI-L2-DMWF*'+'C'+band+'*.nc', band)

        self.elem_name = None
        self.line_name = None
        self.lon_name = 'lon'
        self.lat_name = 'lat'

        self.out_params = ['Lon', 'Lat', 'Element', 'Line', 'pressure', 'wind_speed', 'wind_direction']
        self.params = ['pressure', 'wind_speed', 'wind_direction']
        self.meta_dict = {'Lon': ('degrees east', 'f4'), 'Lat': ('degrees north', 'f4'), 'Element': (None, 'i4'), 'Line': (None, 'i4'),
                          'pressure': ('hPa', 'f4'), 'wind_speed': ('m s-1', 'f4'), 'wind_direction': ('degrees', 'f4')}

    def get_navigation(self):
        return GEOSNavigation(sub_lon=-75.2)

    def get_datetime(self, pathname):
        fname = os.path.split(pathname)[1]
        toks = fname.split('_')
        dtstr = toks[3]
        dtstr = dtstr[:-3]
        dto = datetime.datetime.strptime(dtstr, 's%Y%j%H%M').replace(tzinfo=timezone.utc)

        return dto

    def get_parameters(self):
        return self.params

    def get_out_parameters(self):
        return self.out_params

    def get_meta_dict(self):
        return self.meta_dict
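
# Example usage sketch (the directory below is hypothetical):
#
#   amv_files = OPS('/data/abi_dmw/', amv_file_duration, band='14')
#   # A file named OR_ABI-L2-DMWF-M6C14_G16_s20201191200158_e20201191209466_c20201191223041.nc
#   # parses via get_datetime to 2020, day-of-year 119 (28 April), 12:00 UTC.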


class CarrStereo(AMVFiles):
    def __init__(self, files_path, file_time_span, band='14'):
        super().__init__(files_path, file_time_span, 'tdw_qc_GOES*'+'ch_'+band+'.nc', band)

        self.elem_name = 'Element'
        self.line_name = 'Line'
        self.lon_name = 'Lon'
        self.lat_name = 'Lat'

        self.out_params = ['Lon', 'Lat', 'Element', 'Line', 'V_3D_u', 'V_3D_v', 'H_3D', 'pres', 'Fcst_Spd', 'Fcst_Dir', 'SatZen',
                           'InversionFlag', 'CloudPhase', 'CloudType']

        self.params = ['V_3D', 'H_3D', 'pres', 'Fcst_Spd', 'Fcst_Dir', 'SatZen',
                       'InversionFlag', 'CloudPhase', 'CloudType']

        self.meta_dict = {'H_3D': ('m', 'f4'), 'pres': ('hPa', 'f4'), 'Fcst_Spd': ('m s-1', 'f4'),
                          'Fcst_Dir': ('degree', 'f4'),
                          'SatZen': ('degree', 'f4'), 'InversionFlag': (None, 'u1'),
                          'CloudPhase': (None, 'u1'), 'CloudType': (None, 'u1'),
                          'V_3D_u': ('m s-1', 'f4'), 'V_3D_v': ('m s-1', 'f4'), 'Lon': ('degrees east', 'f4'),
                          'Lat': ('degrees north', 'f4'), 'Element': (None, 'i4'), 'Line': (None, 'i4')}

    def get_navigation(self):
        return GEOSNavigation(sub_lon=-137.0)

    def get_datetime(self, pathname):
        fname = os.path.split(pathname)[1]
        toks = fname.split('_')
        dtstr = toks[3]
        dto = datetime.datetime.strptime(dtstr, '%Y%j.%H%M.ch').replace(tzinfo=timezone.utc)

        return dto

    def get_parameters(self):
        return self.params

    def get_out_parameters(self):
        return self.out_params

    def get_meta_dict(self):
        return self.meta_dict
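
# Example usage sketch (directory and timestamp are hypothetical):
#
#   amv_files = CarrStereo('/data/carr_stereo/', amv_file_duration, band='14')
#   ts = datetime.datetime(2020, 4, 28, 12, 0, tzinfo=timezone.utc).timestamp()
#   fname, (t_start, t_end), f_idx = amv_files.get_file_containing_time(ts)
#   nav = amv_files.get_navigation()  # GEOSNavigation(sub_lon=-137.0)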


# raob_dict: (lat, lon) -> raob profile array
# raob_time: time (seconds since epoch) used to select the AMV file
# amv_files: AMVFiles instance describing the AMV product
# return dict: (lat, lon) -> 2D array of AMV parameters (see get_out_parameters)
def match_amvs_to_raobs(raob_dict, raob_time, amv_files):
    nav = amv_files.get_navigation()
    amv_params = amv_files.get_parameters()
    match_dict = {}

    fname, ftime, f_idx = amv_files.get_file_containing_time(raob_time)
    if fname is None:
        return match_dict

    ds = Dataset(fname)

    amv_lons = ds[amv_files.lon_name][:].data
    amv_lats = ds[amv_files.lat_name][:].data
    if amv_files.elem_name is None or amv_files.line_name is None:
        cc, ll = nav.earth_to_lc_s(amv_lons, amv_lats)
    else:
        cc = ds[amv_files.elem_name][:]
        ll = ds[amv_files.line_name][:]

    param_s = []
    param_s.append(amv_lons)
    param_s.append(amv_lats)
    param_s.append(cc)
    param_s.append(ll)

    vld = None
    for param in amv_params:
        data = ds[param][:]
        if vld is None:
            vld = np.invert(data.mask)
        else:
            vld = np.logical_and(vld, np.invert(data.mask))
        if param == 'V_3D':
            param_s.append(data.data[0])
            param_s.append(data.data[1])
        else:
            param_s.append(data.data)

    param_nd = np.vstack(param_s)
    param_nd = param_nd[:, vld]
    cc = param_nd[2, :]
    ll = param_nd[3, :]

    ds.close()

    keys = list(raob_dict.keys())

    for key in keys:

        raob = raob_dict.get(key)
        if raob is None:
            continue

        lat = key[0]
        lon = key[1]

        c_rng, l_rng = get_search_box(nav, lon, lat)
        if c_rng is None:
            continue

        in_cc = np.logical_and(cc > c_rng[0], cc < c_rng[1])
        in_ll = np.logical_and(ll > l_rng[0], ll < l_rng[1])
        in_box = np.logical_and(in_cc, in_ll)

        num_amvs = np.sum(in_box)
        if num_amvs == 0:
            continue
        match_dict[key] = param_nd[:, in_box]

    return match_dict
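
# Example usage sketch (raob_dct keyed by (lat, lon), raob_time in seconds since epoch;
# the directory is hypothetical):
#
#   amv_files = OPS('/data/abi_dmw/', amv_file_duration, band='14')
#   match_dict = match_amvs_to_raobs(raob_dct, raob_time, amv_files)
#   # each value is a 2D array whose rows follow get_out_parameters() and whose
#   # columns are the AMVs falling inside the search box around the raob location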


def create_file2(filename, raob_to_amv_dct, raob_dct, amv_files):
    keys = list(raob_to_amv_dct.keys())

    num_amvs = []
    num_levs = []
    times = []

    namvs = 0
    nlevs = 0
    for key in keys:
        param_nd = raob_to_amv_dct.get(key)
        num_amvs.append(param_nd.shape[1])
        namvs += param_nd.shape[1]

        prof = raob_dct.get(key)
        num_levs.append(prof.shape[0])
        nlevs += prof.shape[0]

        # TODO: need a time
        times.append(0.0)

    # amv_per_alus = len(aeolus_to_amv_dct)
    rootgrp = Dataset(filename, 'w', format='NETCDF4')
    dim_amvs = rootgrp.createDimension('amvs', size=namvs)
    dim_alus = rootgrp.createDimension('raobs', size=nlevs)
    dim_num_aeolus_prof = rootgrp.createDimension('num_raobs', size=len(raob_to_amv_dct))

    nc4_vars = []
    out_params = amv_files.get_out_parameters()
    meta_dict = amv_files.get_meta_dict()

    for pidx, param in enumerate(out_params):
        u, t = meta_dict.get(param)
        var = rootgrp.createVariable(param, t, ['amvs'])
        if u is not None:
            var.units = u
        nc4_vars.append(var)

    dist = rootgrp.createVariable('dist_to_raob', 'f4', ['amvs'])
    dist.units = 'km'

    num_amvs_per_raob = rootgrp.createVariable('num_amvs_per_raob', 'i4', ['num_raobs'])
    num_levs_per_raob = rootgrp.createVariable('num_levs_per_raob', 'i4', ['num_raobs'])
    prof_time = rootgrp.createVariable('time', 'f4', ['num_raobs'])
    # ---- Profile variables ---------------
    prf_lon = rootgrp.createVariable('raob_longitude', 'f4', ['num_raobs'])
    prf_lon.units = 'degrees east'
    prf_lat = rootgrp.createVariable('raob_latitude', 'f4', ['num_raobs'])
    prf_lat.units = 'degrees north'
    prof_time.units = 'seconds since 1970-01-01 00:00:00'

    prf_azm = rootgrp.createVariable('prof_azm', 'f4', ['raobs'])
    prf_azm.units = 'degree'
    prf_spd = rootgrp.createVariable('prof_spd', 'f4', ['raobs'])
    prf_spd.units = 'm s-1'
    prf_prs = rootgrp.createVariable('prof_pres', 'f4', ['raobs'])
    prf_prs.units = 'hPa'
    prf_tmp = rootgrp.createVariable('prof_temp', 'f4', ['raobs'])
    prf_tmp.units = 'K'
    # --------------------------------------

    i_a = 0
    i_c = 0
    for idx, key in enumerate(keys):
        namvs = num_amvs[idx]
        nlevs = num_levs[idx]
        i_b = i_a + namvs
        i_d = i_c + nlevs

        prof = raob_dct.get(key)
        prf_azm[i_c:i_d] = prof[:, 2]
        prf_spd[i_c:i_d] = prof[:, 3]
        prf_prs[i_c:i_d] = prof[:, 0]
        prf_tmp[i_c:i_d] = prof[:, 1]
        i_c += nlevs

        plat = key[0]
        plon = key[1]
        prf_lat[idx] = plat
        prf_lon[idx] = plon

        param_nd = raob_to_amv_dct.get(key)
        for pidx, param in enumerate(out_params):
            nc4_vars[pidx][i_a:i_b] = param_nd[pidx, :]
        dist[i_a:i_b] = haversine_np(plon, plat, param_nd[0, :], param_nd[1, :])
        i_a += namvs

    num_amvs_per_raob[:] = num_amvs
    num_levs_per_raob[:] = num_levs
    prof_time[:] = times

    rootgrp.close()


def bulk_stats(filename):
    pass


def analyze2(raob_to_amv_dct, raob_dct):
    keys = list(raob_to_amv_dct.keys())

    amvs_list = []
    bf_list = []
    for key in keys:
        rlat = key[0]
        rlon = key[1]

        raob = raob_dct.get(key)
        nlevs = raob.shape[0]
        raob_prs = raob[:, 0]
        raob_spd = raob[:, 2]
        raob_dir = raob[:, 3]
        amvs = raob_to_amv_dct.get(key)
        amvs_list.append(amvs)
        num_amvs = amvs.shape[1]
        for i in range(num_amvs):
            amv_lon = amvs[0, i]
            amv_lat = amvs[1, i]
            amv_prs = amvs[4, i]
            amv_spd = amvs[5, i]
            amv_dir = amvs[6, i]

            bf = best_fit(amv_spd, amv_dir, amv_prs, amv_lat, amv_lon, raob_spd, raob_dir, raob_prs)
            bf_list.append(bf)
            if bf[3] == 0:
                bspd, bdir = spd_dir_from_uv(bf[0], bf[1])
                #print(amv_spd, bspd, amv_dir, bdir)

    amvs = np.concatenate(amvs_list, axis=1)
    amvs = np.transpose(amvs, axes=[1, 0])
    bfs = np.stack(bf_list, axis=0)

    good_amvs = amvs
    num_good = good_amvs.shape[0]
    didx = 6
    sidx = 5
    pidx = 4

    print('Number of AMVs: {0:d}'.format(num_good))
    spd_min = good_amvs[:, sidx].min()
    spd_max = good_amvs[:, sidx].max()
    print('spd min/max/mean: {0:.2f}  {1:.2f}  {2:.2f}'.format(spd_min, spd_max, np.average(good_amvs[:, sidx])))

    p_min = good_amvs[:, pidx].min()
    p_max = good_amvs[:, pidx].max()
    print('pres min/max/mean: {0:.2f}  {1:.2f}  {2:.2f}'.format(p_min, p_max, np.average(good_amvs[:, pidx])))

    low = good_amvs[:, pidx] >= 700
    mid = np.logical_and(good_amvs[:, pidx] < 700, good_amvs[:, pidx] > 400)
    hgh = good_amvs[:, pidx] <= 400

    n_low = np.sum(low)
    n_mid = np.sum(mid)
    n_hgh = np.sum(hgh)

    print('% low: {0:.2f}'.format(100.0*(n_low/num_good)))
    print('% mid: {0:.2f}'.format(100.0*(n_mid/num_good)))
    print('% hgh: {0:.2f}'.format(100.0*(n_hgh/num_good)))
    print('---------------------------')

    print('Low Spd min/max/mean: {0:.2f}  {1:.2f}  {2:.2f}'.format(good_amvs[low, sidx].min(), good_amvs[low, sidx].max(), good_amvs[low,sidx].mean()))
    print('Low Press min/max/mean: {0:.2f}  {1:.2f}  {2:.2f}'.format(good_amvs[low, pidx].min(), good_amvs[low, pidx].max(), good_amvs[low, pidx].mean()))
    print('---------------------------')

    print('Mid Spd min/max/mean: {0:.2f}  {1:.2f}  {2:.2f}'.format(good_amvs[mid, sidx].min(), good_amvs[mid, sidx].max(), good_amvs[mid, sidx].mean()))
    print('Mid Press min/max/mean: {0:.2f}  {1:.2f}  {2:.2f}'.format(good_amvs[mid, pidx].min(), good_amvs[mid, pidx].max(), good_amvs[mid, pidx].mean()))
    print('---------------------------')

    print('Hgh Spd min/max/mean: {0:.2f}  {1:.2f}  {2:.2f}'.format(good_amvs[hgh, sidx].min(), good_amvs[hgh, sidx].max(), good_amvs[hgh, sidx].mean()))
    print('Hgh Press min/max/mean: {0:.2f}  {1:.2f}  {2:.2f}'.format(good_amvs[hgh, pidx].min(), good_amvs[hgh, pidx].max(), good_amvs[hgh, pidx].mean()))

    bin_size = 200.0
    vld_bf = bfs[:, 3] == 0
    keep_idxs = vld_bf

    amv_p = good_amvs[keep_idxs, pidx]
    bf_p = bfs[keep_idxs, 2]
    diff = amv_p - bf_p
    mad = np.average(np.abs(diff))
    bias = np.average(diff)
    print('********************************************************')
    print('Number of good best fits: ', bf_p.shape[0])
    print('press, MAD: {0:.2f}'.format(mad))
    print('press, bias: {0:.2f}'.format(bias))
    pd_std = np.std(diff)
    pd_mean = np.mean(diff)
    print('press bias/rms: {0:.2f}  {1:.2f} '.format(pd_mean, np.sqrt(pd_mean**2 + pd_std**2)))
    print('------------------------------------------')

    bin_ranges = get_press_bin_ranges(50, 1050, bin_size=bin_size)
    bin_pres = bin_data_by(diff, amv_p, bin_ranges)

    amv_spd = good_amvs[keep_idxs, sidx]
    amv_dir = good_amvs[keep_idxs, didx]
    bf_spd, bf_dir = spd_dir_from_uv(bfs[keep_idxs, 0], bfs[keep_idxs, 1])

    diff = amv_spd * units('m/s') - bf_spd
    spd_mad = np.average(np.abs(diff))
    spd_bias = np.average(diff)
    print('spd, MAD: {0:.2f}'.format(spd_mad))
    print('spd, bias: {0:.2f}'.format(spd_bias))
    spd_mean = np.mean(diff)
    spd_std = np.std(diff)
    print('spd MAD/bias/rms: {0:.2f}  {1:.2f}  {2:.2f}'.format(np.average(np.abs(diff)), spd_mean, np.sqrt(spd_mean**2 + spd_std**2)))
    print('-----------------')
    bin_spd = bin_data_by(diff, amv_p, bin_ranges)

    dir_diff = direction_difference(amv_dir, bf_dir.magnitude)
    print('dir, MAD: {0:.2f}'.format(np.average(np.abs(dir_diff))))
    print('dir bias: {0:.2f}'.format(np.average(dir_diff)))
    print('-------------------------------------')
    bin_dir = bin_data_by(dir_diff, amv_p, bin_ranges)

    amv_u, amv_v = uv_from_spd_dir(good_amvs[keep_idxs, sidx], good_amvs[keep_idxs, didx])
    u_diffs = amv_u - (bfs[keep_idxs, 0] * units('m/s'))
    v_diffs = amv_v - (bfs[keep_idxs, 1] * units('m/s'))

    vd = np.sqrt(u_diffs**2 + v_diffs**2)
    vd_mean = np.mean(vd)
    vd_std = np.std(vd)
    print('VD bias/rms: {0:.2f}  {1:.2f}'.format(vd_mean, np.sqrt(vd_mean**2 + vd_std**2)))
    print('------------------------------------------')

    x_values = []
    num_pres = []
    num_spd = []
    num_dir = []
    for i in range(len(bin_ranges)):
        x_values.append(np.average(bin_ranges[i]))
        num_pres.append(bin_pres[i].shape[0])
        num_spd.append(bin_spd[i].shape[0])
        num_dir.append(bin_dir[i].shape[0])

    #return x_values, bin_pres, num_pres, bin_spd, num_spd, bin_dir, num_dir

    return amvs, bfs
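
# Example usage sketch (match_dict from match_amvs_to_raobs, raob_dct keyed by (lat, lon)):
#
#   amvs, bfs = analyze2(match_dict, raob_dct)
#   # amvs: (num_amvs, num_params) array in get_out_parameters() order;
#   # bfs: one best_fit result per AMV, where bfs[:, 3] == 0 flags a usable best fit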


# imports the S4 NOAA output
# filename: full path as a string, '/home/user/filename'
# returns a dict: time -> list of profiles (a profile is a list of levels)
def get_aeolus_time_dict(filename, lon360=False, do_sort=True):
    time_dict = {}

    with open(filename) as file:
        while True:
            prof_hdr = file.readline()
            if not prof_hdr:
                break
            toks = prof_hdr.split()

            yr = int(float(toks[0]))
            mon = int(float(toks[1]))
            dy = int(float(toks[2]))
            hr = int(float(toks[3]))
            mn = int(float(toks[4]))
            ss = int(float(toks[5]))
            lon = float(toks[6])
            lat = float(toks[7])
            nlevs = int(toks[8])

            if lon360:
                if lon < 0:
                    lon += 360.0
            else:
                if lon > 180.0:
                    lon -= 360.0

            dto = datetime.datetime(year=yr, month=mon, day=dy, hour=hr, minute=mn, second=ss)
            dto = dto.replace(tzinfo=timezone.utc)
            timestamp = dto.timestamp()

            prof = []
            if timestamp not in time_dict:
                time_dict[timestamp] = [prof]
            else:
                time_dict[timestamp].append(prof)

            for k in range(nlevs):
                line = file.readline()
                toks = line.split()
                lvlidx = int(toks[0])
                hhh = float(toks[1]) * 1000.0
                hht = float(toks[2]) * 1000.0
                hhb = float(toks[3]) * 1000.0
                err = float(toks[4])
                azm = float(toks[5])
                ws = float(toks[6])
                length = float(toks[7])  # renamed to avoid shadowing the builtin len

                tup = (lat, lon, hhh, hht, hhb, azm, ws)

                prof.append(tup)

    if do_sort:
        keys = np.array(list(time_dict.keys()))
        keys.sort()
        keys = keys.tolist()

        sorted_time_dict = {}

        for key in keys:
            sorted_time_dict[key] = time_dict.get(key)
        time_dict = sorted_time_dict

    return time_dict
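
# The S4 NOAA text format parsed above: each profile is a header line
#   year month day hour minute second lon lat nlevs
# followed by nlevs level lines
#   level_index hhh(km) hht(km) hhb(km) err azm wind_speed length
# Heights are converted to meters; only (lat, lon, hhh, hht, hhb, azm, ws) are kept per level.
#
# Example usage sketch (path is hypothetical):
#   t_dct = get_aeolus_time_dict('/data/aeolus/mie1day.out.2020-04-28')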


# make each profile at a timestamp a numpy array
def time_dict_to_nd(time_dict):
    keys = list(time_dict.keys())
    for key in keys:
        vals = time_dict[key]
        if vals is not None:
            nda = np.array(vals[0])  # only take one profile per second
            time_dict[key] = nda

    return time_dict


def concat(t_dct_0, t_dct_1):
    keys_0 = list(t_dct_0.keys())
    nda_0 = np.array(keys_0)

    keys_1 = list(t_dct_1.keys())
    nda_1 = np.array(keys_1)

    comm_keys, comm0, comm1 = np.intersect1d(nda_0, nda_1, return_indices=True)

    comm_keys = comm_keys.tolist()

    for key in comm_keys:
        t_dct_1.pop(key)
    t_dct_0.update(t_dct_1)

    return t_dct_0


def get_aeolus_time_dict_s(files_path, lon360=False, do_sort=True, chan='mie'):
    ftimes = []
    fnames = glob.glob(files_path + chan + '1day.out.*')
    time_dct = {}
    for pathname in fnames:
        fname = os.path.split(pathname)[1]
        toks = fname.split('.')
        dstr = toks[2]
        dto = datetime.datetime.strptime(dstr, '%Y-%m-%d').replace(tzinfo=timezone.utc)
        ts = dto.timestamp()
        ftimes.append(ts)
        time_dct[ts] = pathname

    sorted_filenames = []
    ftimes.sort()
    for t in ftimes:
        sorted_filenames.append(time_dct.get(t))

    dct_s = []
    for fname in sorted_filenames:
        a_dct = get_aeolus_time_dict(fname, lon360=lon360, do_sort=do_sort)
        dct_s.append(a_dct)

    t_dct = dct_s[0]

    for dct in dct_s[1:]:
        concat(t_dct, dct)

    return t_dct


def time_dict_to_cld_layers(time_dict):
    time_dict_layers = {}

    keys = list(time_dict.keys())
    for key in keys:
        prof_s = time_dict[key]
        layers = []
        prof = prof_s[0]

        if len(prof) == 1:
            tup = prof[0]
            layers.append((tup[0], tup[1], tup[3], tup[4]))
            time_dict_layers[key] = layers
            continue

        top = -9999.9
        last_bot = -9999.9
        tup = None
        for i in range(len(prof)):
            tup = prof[i]

            if i == 0:
                top = tup[3]
                bot = tup[4]
                last_bot = bot
            else:
                if math.fabs(last_bot - tup[3]) > 10.0:
                    layers.append((tup[0], tup[1], top, last_bot))
                    top = tup[3]
                last_bot = tup[4]

        layers.append((tup[0], tup[1], top, tup[4]))

        time_dict_layers[key] = layers

    return time_dict_layers
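
# Worked example of the merging rule above (heights in meters, values hypothetical):
# range bins (top, bot) = (9000, 8500), (8500, 8000), (6000, 5500). Consecutive bins
# whose bottom matches the next top (within 10 m) merge, giving layers (9000, 8000)
# and (6000, 5500), each stored as (lat, lon, top, bot).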


def time_dict_to_nd_2(time_dict):
    keys = list(time_dict.keys())
    for key in keys:
        vals = time_dict[key]
        if vals is not None:
            time_dict[key] = np.stack(vals)

    return time_dict


def get_cloud_layers_dict(filename, lon360=False):
    a_d = get_aeolus_time_dict(filename, lon360=lon360)
    c_d = time_dict_to_cld_layers(a_d)
    cld_lyr_dct = time_dict_to_nd_2(c_d)
    return cld_lyr_dct


def get_cloud_layers_dict_s(aeolus_files_dir, lon360=False):
    a_d = get_aeolus_time_dict_s(aeolus_files_dir, lon360=lon360, do_sort=True, chan='mie')
    cld_lyr_dct = time_dict_to_cld_layers(a_d)
    cld_lyr_dct = time_dict_to_nd_2(cld_lyr_dct)
    return cld_lyr_dct


# TODO: incomplete stub; walks the matched profiles but does not yet compute best fits
def run_amv_aeolus_best_fit(match_dict, aeolus_dict):
    keys = list(match_dict.keys())

    for key in keys:
        profs = aeolus_dict.get(key)
        layers = profs[0]
        if layers is None:
            continue
        lat = layers[0, 0]
        lon = layers[0, 1]

    return None


def get_search_box(nav, lon, lat):
    cc, ll = nav.earth_to_lc(lon, lat)
    if cc is None:
        return None, None

    c_rng = [cc - half_width, cc + half_width]
    l_rng = [ll - half_width, ll + half_width]

    if c_rng[0] < 0:
        c_rng[0] = 0

    if l_rng[0] < 0:
        l_rng[0] = 0

    if c_rng[1] >= num_elems:
        c_rng[1] = num_elems - 1

    if l_rng[1] >= num_lines:
        l_rng[1] = num_lines - 1

    return c_rng, l_rng
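
# Example (values hypothetical): with half_width = 25, a profile navigating to
# element/line (100, 10) gives c_rng = [75, 125] and l_rng = [0, 35] after the
# lower line bound is clamped at the grid edge.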


# aeolus_dict: time -> profile/layer array (see time_dict_to_nd or get_cloud_layers_dict_s)
# amv_files_path: directory containing AMVs, '/home/user/amvdir/'
# amv_files: AMVFiles instance describing the AMV product (navigation and parameters)
# return dict: aeolus time -> 2D array of AMV parameters (see get_out_parameters)
def match_amvs_to_aeolus(aeolus_dict, amv_files_path, amv_source='OPS', band='14', amv_files=None):
    nav = amv_files.get_navigation()
    amv_params = amv_files.get_parameters()
    match_dict = {}

    keys = list(aeolus_dict.keys())

    last_f_idx = -1
    for key in keys:
        fname, ftime, f_idx = amv_files.get_file_containing_time(key)
        if f_idx is None:
            continue
        profs = aeolus_dict.get(key)
        if profs is None:
            continue

        layers = profs

        lat = layers[0, 0]
        lon = layers[0, 1]

        c_rng, l_rng = get_search_box(nav, lon, lat)
        if c_rng is None:
            continue

        if f_idx != last_f_idx:
            last_f_idx = f_idx
            ds = Dataset(fname)

            amv_lons = ds[amv_files.lon_name][:]
            amv_lats = ds[amv_files.lat_name][:]
            cc = ds[amv_files.elem_name][:]
            ll = ds[amv_files.line_name][:]
            # cc, ll = nav.earth_to_lc_s(amv_lons, amv_lats)

            param_s = []
            param_s.append(amv_lons)
            param_s.append(amv_lats)
            param_s.append(cc)
            param_s.append(ll)
            for param in amv_params:
                if param == 'V_3D':
                    param_s.append(ds[param][:, 0])
                    param_s.append(ds[param][:, 1])
                else:
                    param_s.append(ds[param][:])

            ds.close()

        in_cc = np.logical_and(cc > c_rng[0], cc < c_rng[1])
        in_ll = np.logical_and(ll > l_rng[0], ll < l_rng[1])
        in_box = np.logical_and(in_cc, in_ll)

        num_amvs = np.sum(in_box)
        if num_amvs == 0:
            continue
        # dist = haversine_np(lon, lat, amv_lons[in_box], amv_lats[in_box])
        param_nd = np.vstack(param_s)
        param_nd = param_nd[:, in_box]
        match_dict[key] = param_nd

    return match_dict


# full path as string filename to create, '/home/user/newfilename'
# aeolus_to_amv_dct: output from match_amvs_to_aeolus
# aeolus_dct: output from get_aeolus_time_dict
# amv_files: container representing specific AMV product info
def create_file(filename, aeolus_to_amv_dct, aeolus_dct, amv_files, cld_lyr=False):
    keys = list(aeolus_to_amv_dct.keys())

    num_amvs = []
    num_levs = []
    times = []

    namvs = 0
    nlevs = 0
    for key in keys:
        param_nd = aeolus_to_amv_dct.get(key)
        num_amvs.append(param_nd.shape[1])
        namvs += param_nd.shape[1]

        prof = aeolus_dct.get(key)
        num_levs.append(prof.shape[0])
        nlevs += prof.shape[0]

        times.append(key)

    amv_per_alus = len(aeolus_to_amv_dct)
    rootgrp = Dataset(filename, 'w', format='NETCDF4')
    dim_amvs = rootgrp.createDimension('amvs', size=namvs)
    dim_alus = rootgrp.createDimension('profs', size=nlevs)
    dim_num_aeolus_prof = rootgrp.createDimension('num_aeolus_profs', size=len(aeolus_to_amv_dct))

    nc4_vars = []
    out_params = amv_files.get_out_parameters()
    meta_dict = amv_files.get_meta_dict()

    for pidx, param in enumerate(out_params):
        u, t = meta_dict.get(param)
        var = rootgrp.createVariable(param, t, ['amvs'])
        if u is not None:
            var.units = u
        nc4_vars.append(var)

    dist = rootgrp.createVariable('dist_to_prof', 'f4', ['amvs'])
    dist.units = 'km'

    num_amvs_per_prof = rootgrp.createVariable('num_amvs_per_prof', 'i4', ['num_aeolus_profs'])
    num_levs_per_prof = rootgrp.createVariable('num_levs_per_prof', 'i4', ['num_aeolus_profs'])
    prof_time = rootgrp.createVariable('time', 'f4', ['num_aeolus_profs'])
    # ---- Profile variables ---------------
    prf_lon = rootgrp.createVariable('prof_longitude', 'f4', ['num_aeolus_profs'])
    prf_lon.units = 'degrees east'
    prf_lat = rootgrp.createVariable('prof_latitude', 'f4', ['num_aeolus_profs'])
    prf_lat.units = 'degrees north'
    prof_time.units = 'seconds since 1970-01-01 00:00:00'

    if not cld_lyr:
        prf_azm = rootgrp.createVariable('prof_azm', 'f4', ['profs'])
        prf_azm.units = 'degree'
        prf_spd = rootgrp.createVariable('prof_spd', 'f4', ['profs'])
        prf_spd.units = 'm s-1'
    prf_hht = rootgrp.createVariable('prof_hht', 'f4', ['profs'])
    prf_hht.units = 'meter'
    prf_hhb = rootgrp.createVariable('prof_hhb', 'f4', ['profs'])
    prf_hhb.units = 'meter'
    # --------------------------------------

    i_a = 0
    i_c = 0
    for idx, key in enumerate(keys):
        namvs = num_amvs[idx]
        nlevs = num_levs[idx]
        i_b = i_a + namvs
        i_d = i_c + nlevs

        prof = aeolus_dct.get(key)
        if not cld_lyr:
            prf_hht[i_c:i_d] = prof[:, 3]
            prf_hhb[i_c:i_d] = prof[:, 4]
            prf_azm[i_c:i_d] = prof[:, 5]
            prf_spd[i_c:i_d] = prof[:, 6]
        else:
            prf_hht[i_c:i_d] = prof[:, 2]
            prf_hhb[i_c:i_d] = prof[:, 3]
        i_c += nlevs

        plat = prof[0, 0]
        plon = prof[0, 1]
        prf_lat[idx] = plat
        prf_lon[idx] = plon

        param_nd = aeolus_to_amv_dct.get(key)
        for pidx, param in enumerate(out_params):
            nc4_vars[pidx][i_a:i_b] = param_nd[pidx, :]
        dist[i_a:i_b] = haversine_np(plon, plat, param_nd[0, :], param_nd[1, :])
        i_a += namvs

    num_amvs_per_prof[:] = num_amvs
    num_levs_per_prof[:] = num_levs
    prof_time[:] = times

    rootgrp.close()


# aeolus_files_dir: S4 NOAA txt output files
# amv_files_dir: G16/17 AMV product files
# outfile: pathname for the Netcdf match file
def create_amv_to_aeolus_match_file(aeolus_files_dir, amv_files_dir, outfile=None, amv_source='OPS', band='14', chan='mie'):
    if chan == 'mie':
        a_d = get_cloud_layers_dict_s(aeolus_files_dir)
    else:
        a_d = get_aeolus_time_dict_s(aeolus_files_dir, chan=chan)
        a_d = time_dict_to_nd(a_d)

    amv_files = None
    if amv_source == 'CARR':
        amv_files = CarrStereo(amv_files_dir, 60, band)

    m_d = match_amvs_to_aeolus(a_d, amv_files_dir, amv_source, band, amv_files)

    if outfile is not None:
        cld_lyr = False
        if chan == 'mie':
            cld_lyr = True
        create_file(outfile, m_d, a_d, amv_files, cld_lyr=cld_lyr)
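
# Example usage sketch (directories and output path are hypothetical; only
# amv_source='CARR' constructs an AMVFiles instance in the current code):
#
#   create_amv_to_aeolus_match_file('/data/aeolus_txt/', '/data/carr_stereo/',
#                                   outfile='/data/aeolus_carr_match.nc',
#                                   amv_source='CARR', band='14', chan='mie')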


# match_file: pathname for the product file
# dt_str_0: start time (YYYY-MM-DD_HH:MM)
# dt_str_1: end time (YYYY-MM-DD_HH:MM)
# amv_var_names: list of AMV parameters (see match_file ncdump) to subset
# returns three xarray.DataArrays (profs, amvs, prof_locs):
#   profs[nprofs, max_num_levs_per_prof, num_params],
#   amvs[nprofs, max_num_amvs_per_prof, num_params],
#   prof_locs[nprofs, (lon, lat, num_amvs, num_levels)]
def subset_by_time(match_file, dt_str_0, dt_str_1, amv_var_names):
    rootgrp = Dataset(match_file, 'r', format='NETCDF4')
    all_dims = rootgrp.dimensions
    t_var = rootgrp['time']

    n_profs = len(all_dims['num_aeolus_profs'])
    n_amvs_per_prof = rootgrp['num_amvs_per_prof'][:]
    n_levs_per_prof = rootgrp['num_levs_per_prof'][:]

    a_nc_vars = []
    for vname in amv_var_names:
        a_nc_vars.append(rootgrp[vname])
    nvars = len(a_nc_vars)

    mf_vars = list(rootgrp.variables.keys())

    p_lon_v = rootgrp['prof_longitude']
    p_lat_v = rootgrp['prof_latitude']
    p_vars = []
    p_var_names = []
    if 'prof_hhb' in mf_vars:
        p_vars.append(rootgrp['prof_hhb'])
        p_var_names.append('layer_bot')
    if 'prof_hht' in mf_vars:
        p_vars.append(rootgrp['prof_hht'])
        p_var_names.append('layer_top')
    if 'prof_spd' in mf_vars:
        p_vars.append(rootgrp['prof_spd'])
        p_var_names.append('speed')
    if 'prof_azm' in mf_vars:
        p_vars.append(rootgrp['prof_azm'])
        p_var_names.append('azimuth')
    npvars = len(p_vars)

    dto = datetime.datetime.strptime(dt_str_0, '%Y-%m-%d_%H:%M').replace(tzinfo=timezone.utc)
    t_0 = dto.timestamp()
    dto = datetime.datetime.strptime(dt_str_1, '%Y-%m-%d_%H:%M').replace(tzinfo=timezone.utc)
    t_1 = dto.timestamp()

    if t_1 < t_0:
        t_0, t_1 = t_1, t_0

    times = t_var[:]
    time_idxs = np.arange(n_profs)
    valid = np.logical_and(times >= t_0, times < t_1)
    time_idxs = time_idxs[valid]
    n_times = time_idxs.shape[0]

    lons = p_lon_v[:]
    lats = p_lat_v[:]

    prf_idx_start = np.sum(n_levs_per_prof[0:time_idxs[0]])
    amv_idx_start = np.sum(n_amvs_per_prof[0:time_idxs[0]])

    mx_namvs = np.max(n_amvs_per_prof[time_idxs[0]:time_idxs[0]+n_times])
    mx_nlevs = np.max(n_levs_per_prof[time_idxs[0]:time_idxs[0]+n_times])

    amvs = np.zeros((n_times, mx_namvs, nvars))
    profs = np.zeros((n_times, mx_nlevs, npvars))
    amvs.fill(-999.0)
    profs.fill(-999.0)

    accum_prf = prf_idx_start
    accum_amv = amv_idx_start
    for idx, t_i in enumerate(time_idxs):
        n_amvs = n_amvs_per_prof[t_i]
        n_levs = n_levs_per_prof[t_i]

        a = accum_amv
        b = accum_amv + n_amvs

        c = accum_prf
        d = accum_prf + n_levs

        for k in range(nvars):
            amvs[idx, 0:n_amvs, k] = a_nc_vars[k][a:b]

        for k in range(npvars):
            profs[idx, 0:n_levs, k] = p_vars[k][c:d]

        accum_amv += n_amvs
        accum_prf += n_levs

    coords = {'num_profs': times[time_idxs], 'num_params': p_var_names}
    prof_da = xr.DataArray(profs, coords=coords, dims=['num_profs', 'max_num_levels', 'num_params'])

    coords = {'num_profs': times[time_idxs], 'num_params': amv_var_names}
    amvs_da = xr.DataArray(amvs, coords=coords, dims=['num_profs', 'max_num_amvs', 'num_params'])

    prof_locs_da = xr.DataArray(np.column_stack([lons[time_idxs], lats[time_idxs], n_amvs_per_prof[time_idxs], n_levs_per_prof[time_idxs]]),
                                coords=[times[time_idxs], ['longitude', 'latitude', 'num_amvs', 'num_levels']],
                                dims=['num_profs', 'space'])

    return prof_da, amvs_da, prof_locs_da
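
# Example usage sketch (the match file path is hypothetical; 'H_3D' and 'dist_to_prof'
# must be included in amv_var_names for analyze() below):
#
#   prof_da, amvs_da, prof_locs_da = subset_by_time(
#       '/data/aeolus_carr_match.nc', '2020-04-28_00:00', '2020-04-29_00:00',
#       ['Lon', 'Lat', 'V_3D_u', 'V_3D_v', 'H_3D', 'pres', 'dist_to_prof'])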


def analyze(prof_da, amvs_da, prof_locs_da, dist_threshold=5.0):
    # sort amvs by distance to profile
    dst = amvs_da.sel(num_params='dist_to_prof')
    s_i = np.argsort(dst, axis=1)
    s_i = s_i.values
    for k in range(amvs_da.shape[0]):
        amvs_da[k] = amvs_da[k, s_i[k]]

    # sort profiles by level highest to lowest
    top = prof_da.sel(num_params='layer_top')
    s_i = np.argsort(top, axis=1)
    s_i = s_i.values
    for k in range(prof_da.shape[0]):
        prof_da[k] = prof_da[k, s_i[k, ::-1]]

    # analyze single cloud layer profiles
    one_lyr = prof_locs_da.sel(space='num_levels') == 1
    one_lyr_profs = prof_da[one_lyr]
    one_lyr_amvs = amvs_da[one_lyr]
    print('number of one layer profs: ', one_lyr_profs.shape[0])

    hgt_vld = one_lyr_amvs.sel(num_params='H_3D') > 0
    hgt_vld = hgt_vld.values
    has_vld = hgt_vld.sum(1) > 0

    one_lyr_amvs = one_lyr_amvs[has_vld]
    one_lyr_profs = one_lyr_profs[has_vld]
    print('number of one layer profs with at least one valid AMV height', one_lyr_profs.shape[0])

    # compare profile highest cloud layer top to closest within 5km
    dst = one_lyr_amvs.sel(num_params='dist_to_prof')
    close = (np.logical_and(dst > 0.0, dst < dist_threshold)).values
    close = close.sum(1) > 0

    one_lyr_amvs = one_lyr_amvs[close]
    one_lyr_profs = one_lyr_profs[close]
    num_one_lyr_profs = one_lyr_profs.shape[0]
    print('number of one layer profs with at least one AMV within threshold: ', num_one_lyr_profs)

    cnt = 0
    prof_bot = one_lyr_profs.sel(num_params='layer_bot')
    prof_top = one_lyr_profs.sel(num_params='layer_top')
    for k in range(num_one_lyr_profs):
        dst = one_lyr_amvs[k, :, ].sel(num_params='dist_to_prof')
        b = np.logical_and(dst > 0.0, dst < dist_threshold)
        h_3d = one_lyr_amvs[k, :].sel(num_params='H_3D')
        h_3d = h_3d[b]
        vld = h_3d > 0
        h_3d = h_3d[vld]
        if len(h_3d) > 0:
            in_lyr = np.logical_and(h_3d > prof_bot[k, 0], h_3d < prof_top[k, 0])
            cnt += np.sum(in_lyr)

    print('fraction hits single cloud layer: ', cnt/num_one_lyr_profs)

    # Do calculations for single and multi-layer combined
    hgt_vld = amvs_da.sel(num_params='H_3D') > 0
    hgt_vld = hgt_vld.values
    has_vld = hgt_vld.sum(1) > 0

    amvs_da = amvs_da[has_vld]
    prof_da = prof_da[has_vld]
    prof_locs_da = prof_locs_da[has_vld]
    print('number of profs with at least one valid AMV height', prof_da.shape[0])

    # compare profile highest cloud layer top to closest within 5km
    dst = amvs_da.sel(num_params='dist_to_prof')
    close = (np.logical_and(dst > 0.0, dst < dist_threshold)).values
    close = close.sum(1) > 0

    amvs_da = amvs_da[close]
    prof_da = prof_da[close]
    prof_locs_da = prof_locs_da[close]
    num_profs = prof_da.shape[0]
    print('number of profs with at least one AMV within 5km: ', num_profs)

    cnt = 0
    prof_bot = prof_da.sel(num_params='layer_bot')
    prof_top = prof_da.sel(num_params='layer_top')
    for k in range(num_profs):
        dst = amvs_da[k, :, ].sel(num_params='dist_to_prof')
        b = np.logical_and(dst > 0.0, dst < dist_threshold)
        h_3d = amvs_da[k, :].sel(num_params='H_3D')
        h_3d = h_3d[b]
        vld = h_3d > 0
        h_3d = h_3d[vld]
        if len(h_3d) > 0:
            nlevs = prof_locs_da[k].values.astype(int)[3]
            for j in range(nlevs):
                in_lyr = np.logical_and(h_3d > prof_bot[k, j], h_3d < prof_top[k, j])
                cnt += np.sum(in_lyr)

    print('fraction hits multi layer: ', cnt/num_profs)
    return one_lyr_profs, one_lyr_amvs
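
# Example usage sketch (continuing from the subset_by_time example above; the
# distance threshold is in km):
#
#   one_lyr_profs, one_lyr_amvs = analyze(prof_da, amvs_da, prof_locs_da, dist_threshold=5.0)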