Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

25 commits on source · 29 files changed: +1980 −37

Files

@@ -22,11 +22,11 @@ def read_channel_properties(chan_prop_file):
             break
     offset = i
     size = len(lines) - offset
-    mask = np.empty((size,), np.bool)
+    mask = np.empty((size,), bool)
     wn = np.empty((size,), np.float64)
     nedts = np.empty((size,), np.float64)
     for i in range(offset, len(lines)):
-        mask[i - offset] = np.bool(lines[i][76] == '0')
+        mask[i - offset] = bool(lines[i][76] == '0')
         wn[i - offset] = np.float64(lines[i][6:14])
         nedts[i - offset] = np.float64(lines[i][26:32])
     return wn, mask, nedts
Original line number Original line Diff line number Diff line
@@ -4,7 +4,14 @@ import numpy as np
def low_res_wavenumbers():
def low_res_wavenumbers():
    return np.concatenate([wn_low_res[b] for b in ['lw', 'mw', 'sw']])
    return np.concatenate([wn_low_res[b] for b in ['lw', 'mw', 'sw']])


def full_res_wavenumbers():
    return np.concatenate([wn_full_res[b] for b in ['lw', 'mw', 'sw']])

wn_low_res = {'lw': np.linspace(648.75, 1096.25, 717),
wn_low_res = {'lw': np.linspace(648.75, 1096.25, 717),
              'mw': np.linspace(1207.5, 1752.5, 437),
              'mw': np.linspace(1207.5, 1752.5, 437),
              'sw': np.linspace(2150, 2555, 163)}
              'sw': np.linspace(2150, 2555, 163)}


wn_full_res = {'lw': np.linspace(648.75, 1096.25, 717),
            'mw': np.linspace(1207.5, 1752.5, 869),
            'sw': np.linspace(2150, 2555, 637)}
+21 −0
#!/bin/bash
set -e

# run this from the top level of a crisahi flo link tree

for granule in */*/*/crisahi.*; do
    echo -n "$granule "
    cp $granule ./
    granule=$(basename $granule)
    tmp=tmp.$granule
    python -m intercal.crisahi.same_path $granule $tmp
    if [[ -e $tmp ]]; then
        ncks -O --mk_rec_dmn obs $tmp $tmp
        ncpdq -O -a obs,band $tmp $tmp
    fi
    rm $granule
done
ncrcat tmp.crisahi.* crisahi.nc
rm tmp.crisahi.*
ncpdq -O -a band,obs crisahi.nc crisahi.nc
@@ -7,18 +7,17 @@ import h5py
 import netCDF4
 import numpy as np
 from pkg_resources import resource_filename
-import pyhdf.SD
-from intercal.crisahi import ahi_srf
+from intercal import ahi_srf
 
 bands = [9, 10, 12, 13, 14, 15, 16]
 
 def main():
-    colloc_file, gcrso_file = sys.argv[1:3]
-    ahi_files = [pair for pair in grouper(sys.argv[3:], 2)]
+    colloc_file, gcrso_file, ahi_geo_file = sys.argv[1:4]
+    ahi_files = [pair for pair in grouper(sys.argv[4:], 2)]
     ahi_idx = arrange_follower_indexes_by_master(read_collocation(colloc_file),
                                                  master_dims=[60, 30, 9])
     gcrso = read_gcrso(gcrso_file)
-    sensor_zenith, sensor_azimuth = determine_sensor_angles(ahi_idx)
+    sensor_zenith, sensor_azimuth = determine_sensor_angles(ahi_geo_file, ahi_idx)
     output = Output(gcrso_file, sensor_zenith, sensor_azimuth)
     for i, band in enumerate(bands):
         ahi_1, ahi_2 = read_ahi(ahi_files[i][0]), read_ahi(ahi_files[i][1])
@@ -30,17 +29,14 @@ def main():
         output(band, t, rad, rad_std, bt, bt_std)
 
 def read_collocation(file_path):
-    sd = pyhdf.SD.SD(file_path)
-    num_master_dims = 3
-    num_follower_dims = 2
-    num_master_idxs, max_followers_per_master = sd.datasets()['Follower_Index_1'][1]
-    master_idxs = np.empty([num_master_dims, num_master_idxs], np.int32)
-    follower_idxs = np.empty([num_follower_dims, num_master_idxs, max_followers_per_master],
-                             np.int32)
-    for i in range(num_master_dims):
-        master_idxs[i] = sd.select('Master_Index_{}'.format(num_master_dims - i))[:] - 1
-    for i in range(num_follower_dims):
-        follower_idxs[i] = sd.select('Follower_Index_{}'.format(num_follower_dims - i))[:,:] - 1
+    d = netCDF4.Dataset(file_path)
+    num_master_dims = len(d.dimensions['master_dim'])
+    num_follower_dims = len(d.dimensions['follower_dim'])
+    num_master_idxs = len(d.dimensions['master_obs'])
+    max_followers_per_master = len(d.dimensions['follower_obs'])
+    master_idxs = np.ma.masked_array(d['master_index'][:]).filled(0).transpose([1, 0]) - 1
+    follower_idxs = np.ma.masked_array(d['follower_index'][:]).filled(0).transpose([2, 0, 1]) - 1
+    d.close()
     return {'num_master_dims': num_master_dims, 'num_follower_dims': num_follower_dims,
             'num_master_idxs': num_master_idxs,
             'max_followers_per_master': max_followers_per_master,
@@ -60,8 +56,8 @@ def read_ahi(file_path):
     return {'time': line_time_offset.base_time + line_time_offset[:],
             'radiance': d.variables['RAD'][:]}
 
-def determine_sensor_angles(ahi_idx):
-    d = netCDF4.Dataset(resource_filename(__name__, 'AHI_geolocation2km.nc'))
+def determine_sensor_angles(ahi_geo_file, ahi_idx):
+    d = netCDF4.Dataset(ahi_geo_file)
     sz = d.variables['sensor_zenith'][:]
     sa = d.variables['sensor_azimuth'][:]
     ahi_nearest_idx = tuple(ahi_idx[:,:,:,:,0])
@@ -34,13 +34,13 @@ def cat(dir_path):
 basemap = Basemap(projection='geos', lon_0=140.7, resolution='c')
 
 def make_mask(arrays):
-    mask = np.ones(arrays['cris_lat'].shape, np.bool)
+    mask = np.ones(arrays['cris_lat'].shape, bool)
     mask &= (arrays['ahi_sen_zen'] != -999.)
     mask &= (arrays['ahi_sen_zen'] <= 60.)
     return mask
 
 def make_band_mask(arrays, band_idx):
-    mask = np.ones(arrays['cris_lat'].shape, np.bool)
+    mask = np.ones(arrays['cris_lat'].shape, bool)
     mask &= (arrays['ahi_sen_zen'] != -999.)
     mask &= (arrays['ahi_sen_zen'] <= 60.)
     mask &= (arrays['cris_bt'][band_idx] >= 160.)
@@ -5,7 +5,7 @@ import sys
 import h5py
 import netCDF4
 import numpy as np
-from intercal import ahi_srf
+from intercal.crisahi import ahi_srf
 from intercal.util import wn2wl, rad_wn2wl
 
 ahi_bands = [9, 10, 12, 13, 14, 15, 16]
+41 −0

"""Filter a crisahi granule to only contain data where sensor vectors align"""

import sys
import netCDF4
import numpy as np
from intercal.crisahi.cat import path_diff as calc_path_diff

path_diff_threshold = 5.  # degrees

def main():
    in_file, out_file = sys.argv[1:]
    data = read_granule(in_file)
    path_diff = calc_path_diff(data['cris_lat'], data['cris_lon'], data['cris_sen_zen'],
                               data['cris_sen_azm'], data['ahi_sen_zen'], data['ahi_sen_azm'])
    mask = (path_diff <= path_diff_threshold)
    if np.any(mask):
        new_data = {var: data[var][...,mask] for var in data}
        write_granule(new_data, out_file)
    print('Kept {} CrIS-AHI matchups'.format(mask.sum()))

def read_granule(in_file):
    d = netCDF4.Dataset(in_file)
    data = {var: d[var][...] for var in d.variables}
    data['cris_time'] = np.broadcast_arrays(data['cris_time'][...,None], data['cris_lat'])[0]
    return data

def write_granule(data, out_file):
    d = netCDF4.Dataset(out_file, 'w')
    num_bands, num_obs = data['cris_bt'].shape
    d.createDimension('band', num_bands)
    d.createDimension('obs', num_obs)
    dims = {1: ['obs'], 2: ['band', 'obs']}
    for var, arr in data.items():
        d.createVariable(var, arr.dtype, dims[arr.ndim], zlib=True)
        d[var][...] = arr
    d.close()

if __name__ == '__main__':
    main()
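
A note on the [..., mask] indexing in main() above: the ellipsis absorbs any leading band dimension, so the same expression filters the trailing obs axis of both the 1-D and 2-D variables in a granule. A minimal sketch of the idea (illustrative, not part of the diff):

import numpy as np

mask = np.array([True, False, True])
np.arange(3)[..., mask].shape                # 1-D (obs,)      -> (2,)
np.arange(6).reshape(2, 3)[..., mask].shape  # 2-D (band, obs) -> (2, 2)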
+113 −0

netcdf sndrahi {{

dimensions:
    band = {band};
    master_obs = {master_obs};
    follower_obs = {follower_obs};
    master_dim = {master_dim};
    follower_dim = {follower_dim};
    input = {input};
    filename_checksum = 2;

variables:

    string band_name(band);
    band_name:long_name = "AHI Band";

    string input_file(input, filename_checksum);
    input_file:info = "Filename and MD5 checksum of each input";

group: master {{
variables:

    double time(master_obs);
    time:long_name = "Master observation time";
    time:units = "seconds since 1993-01-01 00:00";
    time:info = "TAI93 format; leap seconds must be accounted for to convert to UTC";
    time:standard_name = "time";

    double bt(band, master_obs);
    bt:long_name = "Master brightness temperature";
    bt:units = "K";
    bt:standard_name = "brightness_temperature";

    float lat(master_obs);
    lat:long_name = "Master latitude";
    lat:units = "degrees_north";
    float lat:valid_range = -90, 90;
    lat:standard_name = "latitude";

    float lon(master_obs);
    lon:long_name = "Master longitude";
    lon:units = "degrees_east";
    float lon:valid_range = -180, 180;
    lon:standard_name = "longitude";

    float sat_zen(master_obs);
    sat_zen:long_name = "Master satellite zenith angle";
    sat_zen:units = "degree";
    float sat_zen:valid_range = 0, 90;
    sat_zen:standard_name = "sensor_zenith_angle";

    float sat_azi(master_obs);
    sat_azi:long_name = "Master satellite azimuth angle";
    sat_azi:units = "degree";
    sat_azi:info = "Positive clockwise, zero at north";
    float sat_azi:valid_range = -180, 180;
    sat_azi:standard_name = "sensor_azimuth_angle";

    float sol_zen(master_obs);
    sol_zen:long_name = "Master solar zenith angle";
    sol_zen:units = "degree";
    float sol_zen:valid_range = 0, 180;
    sol_zen:standard_name = "solar_zenith_angle";

    float sol_azi(master_obs);
    sol_azi:long_name = "Master solar azimuth angle";
    sol_azi:units = "degree";
    float sol_azi:valid_range = -180, 180;
    sol_azi:standard_name = "solar_azimuth_angle";

    int obs_idx(master_obs, master_dim);
    obs_idx:long_name = "Master data array indexes"; }}

group: follower {{
variables:

    double time(master_obs);
    time:long_name = "Follower mean observation time";
    time:units = "seconds since 1993-01-01 00:00";
    time:info = "TAI93 format; leap seconds must be accounted for to convert to UTC";
    time:standard_name = "time";

    double bt(band, master_obs);
    bt:long_name = "Follower brightness temperature";
    bt:info = "Computed from mean radiance of collocated follower observations";
    bt:units = "K";
    bt:standard_name = "brightness_temperature";

    double bt_dev(band, master_obs);
    bt_dev:long_name = "Follower brightness temperature deviation";
    bt_dev:info = "Computed from radiance standard deviation of collocated follower observations";
    bt_dev:units = "K";

    float sat_zen(master_obs);
    sat_zen:long_name = "Follower satellite zenith angle";
    sat_zen:units = "degree";
    float sat_zen:valid_range = 0, 90;
    sat_zen:standard_name = "sensor_zenith_angle";

    float sat_azi(master_obs);
    sat_azi:long_name = "Follower satellite azimuth angle";
    sat_azi:units = "degree";
    sat_azi:info = "Positive clockwise, zero at north";
    float sat_azi:valid_range = -180, 180;
    sat_azi:standard_name = "sensor_azimuth_angle";

    int obs_count(master_obs);
    obs_count:long_name = "Number of collocated follower observations";

    int obs_idx(master_obs, follower_obs, follower_dim);
    obs_idx:long_name = "Follower data array indexes"; }} }}
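
The doubled braces above mark this as a Python str.format template: {{ }} escape literal CDL braces while {band}, {master_obs}, etc. are filled in at run time. A hedged sketch of the intended use (the file names and dimension sizes are assumptions, not from the diff), compiling the filled template with the standard ncgen tool:

import subprocess

with open('sndrahi.cdl.tmpl') as f:  # assumed template file name
    cdl = f.read().format(band=7, master_obs=1800, follower_obs=100,
                          master_dim=3, follower_dim=2, input=16)
with open('sndrahi.cdl', 'w') as f:
    f.write(cdl)
subprocess.check_call(['ncgen', '-o', 'sndrahi.nc', 'sndrahi.cdl'])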
+15 −0

"""Create a level 1 match file for hyperspectral sounder data versus AHI"""

from argparse import ArgumentParser
from intercal.fn import parse as parse_filename

def main():
    parser = ArgumentParser(description=__doc__)
    parser.add_argument('colloc_file')
    parser.add_argument('l1a_files', nargs='+', help='Sounder and AHI level 1 data files')
    args = parser.parse_args()

if __name__ == '__main__':
    main()
+39 −0

import numpy as np
import pyhdf.HDF
import pyhdf.SD
import pyhdf.VS

class AirsGranule(object):

    num_fors_per_scan = 90
    num_fovs_per_for = 1

    has_imaginary_radiance = False

    num_files = 1

    def __init__(self, filename):
        self.sd = pyhdf.SD.SD(filename)
        self.wavenumber = self.read_wavenumber(filename)

    def read_wavenumber(self, filename):
        num_channels = 2378
        nested_list = pyhdf.HDF.HDF(filename).vstart().attach('nominal_freq').read(num_channels)
        return np.array(nested_list).flatten()

    @property
    def num_scans(self):
        return self.sd.datasets()['radiances'][1][0]

    def read_latitude(self, latitude):
        latitude[:,:,0] = self.sd.select('Latitude')[:]

    def read_longitude(self, longitude):
        longitude[:,:,0] = self.sd.select('Longitude')[:]

    def read_radiance(self, radiance, imaginary):
        if imaginary:
            raise ValueError('no imaginary radiances for AIRS')
        radiance[:,:,0,:] = self.sd.select('radiances')[:]
+63 −0

import h5py
import numpy as np

class CrisGranule(object):

    num_fors_per_scan = 30
    num_fovs_per_for = 9

    wavenumber = np.concatenate([np.linspace(648.75, 1096.25, 717),
                                 np.linspace(1207.5, 1752.5, 437),
                                 np.linspace(2150, 2555, 163)])

    has_imaginary_radiance = True

    num_files = 2

    def __init__(self, gcrso_file, scris_file):

        self.gcrso_file = gcrso_file
        self.scris_file = scris_file

    @property
    def wavenumber(self):
        bands = ['lw', 'mw', 'sw']
        ranges = [(650., 1095.), (1210., 1750.), (2155., 2550.)]
        guards = 2
        with h5py.File(self.scris_file, 'r') as scris:
            points = [scris['/All_Data/CrIS-SDR_All/ES_Real{}'.format(band.upper())].shape[-1]
                          for band in bands]
        wn = []
        for band, rng, sz in zip(bands, ranges, points):
            spacing = (rng[1] - rng[0]) / (sz - 1 - 2*guards)
            wn.append(np.linspace(rng[0] - guards*spacing, rng[1] + guards*spacing, sz))
        return np.concatenate(wn)

    @property
    def num_scans(self):

        with h5py.File(self.gcrso_file, 'r') as gcrso:
            return len(gcrso['/All_Data/CrIS-SDR-GEO_All/Latitude'])

    def read_latitude(self, latitude):

        with h5py.File(self.gcrso_file, 'r') as gcrso:
            latitude[:,:,:] = gcrso['/All_Data/CrIS-SDR-GEO_All/Latitude'][:,:,:]

    def read_longitude(self, longitude):

        with h5py.File(self.gcrso_file, 'r') as gcrso:
            longitude[:,:,:] = gcrso['/All_Data/CrIS-SDR-GEO_All/Longitude'][:,:,:]

    def read_radiance(self, radiance, imaginary):

        with h5py.File(self.scris_file, 'r') as scris:
            group = scris['/All_Data/CrIS-SDR_All']
            wavenumber_idx_ini = 0
            for band in ['LW', 'MW', 'SW']:
                dataset = 'ES_' + {False: 'Real', True: 'Imaginary'}[imaginary] + band
                wavenumber_idx_fin = wavenumber_idx_ini + group[dataset].shape[-1]
                radiance[:,:,:,wavenumber_idx_ini:wavenumber_idx_fin] = group[dataset]
                wavenumber_idx_ini = wavenumber_idx_fin
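
The guard-channel arithmetic in the wavenumber property reproduces the nominal grids used elsewhere in the package; a worked check for the LW band (illustrative, not part of the diff):

points, lo, hi, guards = 717, 650.0, 1095.0, 2
spacing = (hi - lo) / (points - 1 - 2 * guards)  # 445 / 712 = 0.625 cm^-1
lo - guards * spacing  # 648.75, matching np.linspace(648.75, 1096.25, 717)
hi + guards * spacing  # 1096.25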
        
+101 −0

import functools

import coda
import numpy as np

class IasiGranule(object):

    wavenumber = np.linspace(645.0, 2760.0, 8461)
    num_fors_per_scan = 30
    num_fovs_per_for = 4

    has_imaginary_radiance = False

    num_files = 1

    def __init__(self, filename):

        self.filename = filename

    @property
    def num_scans(self):

        with CodaHandle(self.filename) as handle:
            num_scans, = handle.get_size('MDR')
        return num_scans

    def read_latitude(self, latitude):

        self._read_geolocation('latitude', latitude)

    def read_longitude(self, longitude):

        self._read_geolocation('longitude', longitude)

    def read_radiance(self, radiance, imaginary):

        if imaginary:
            raise ValueError('Imaginary radiance not available for IASI')
        with CodaHandle(self.filename) as handle:
            scaled_radiances = (
                np.array([a for a in handle.fetch('MDR', -1, 'MDR', 'GS1cSpect')]))
            scale_factors = handle.fetch('GIADR_ScaleFactors')
            quality_flags = (
                np.array([a for a in handle.fetch('MDR', -1, 'MDR', 'GQisFlagQual')],
                         bool))
        self._unscale_radiances(scaled_radiances, scale_factors, radiance)
        self._set_low_quality_spectra_to_nan(quality_flags, radiance)

    def _read_geolocation(self, variable, output_array):

        latlon_idx = dict(latitude=1, longitude=0)[variable]
        with CodaHandle(self.filename) as handle:
            output_array[:,:,:] = [
                a for a in handle.fetch('MDR', -1, 'MDR', 'GGeoSondLoc',
                                        [-1, -1, latlon_idx])]

    def _unscale_radiances(self, scaled_radiances, scale_factors, radiances):

        num_scale_factors = scale_factors.IDefScaleSondNbScale
        for scale_factor_idx in range(num_scale_factors):
            scale_factor = scale_factors.IDefScaleSondScaleFactor[scale_factor_idx]
            wavenumber_idx_ini = (scale_factors.IDefScaleSondNsfirst[scale_factor_idx] -
                                      self._first_channel_number)
            wavenumber_idx_fin = (scale_factors.IDefScaleSondNslast[scale_factor_idx] -
                                      self._first_channel_number + 1)
            radiances[:,:,:,wavenumber_idx_ini:wavenumber_idx_fin] = (
                scaled_radiances[:,:,:,wavenumber_idx_ini:wavenumber_idx_fin] *
                    (10.0 ** -scale_factor * self._radiance_conversion_factor))

    def _set_low_quality_spectra_to_nan(self, quality_flags, radiances):

        radiances[quality_flags.any(axis=-1)] = np.nan

    @property
    def _first_channel_number(self):

        with CodaHandle(self.filename) as handle:
            return handle.fetch('MDR', 0, 'MDR', 'IDefNsfirst1b')

    # to convert from W / m^2 / sr / m^-1 to mW / m^2 / sr / cm^-1
    _radiance_conversion_factor = 1e5

class CodaHandle(object):

    def __init__(self, filename):

        self.handle = coda.open(filename)

    def __enter__(self):

        return self

    def __exit__(self, exc_type, exc_value, exc_trace):

        coda.close(self.handle)

    def __getattr__(self, attr):

        return functools.partial(getattr(coda, attr), self.handle)
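
CodaHandle.__getattr__ proxies the coda module API with the open handle bound as the first argument, so module-level calls and handle methods are interchangeable. A small sketch (the file name is illustrative):

with CodaHandle('IASI_xxx_1C_example.nat') as h:
    num_scans, = h.get_size('MDR')  # same as coda.get_size(handle, 'MDR')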
+126 −0

import argparse
from datetime import datetime
import os

import h5py
import numpy as np

from .airs import AirsGranule
from .cris import CrisGranule
from .iasi import IasiGranule
from .super_granule import SuperGranule
from .util import gcdist

def _parse_args():

    parser = argparse.ArgumentParser()
    parser.add_argument('platform_1')
    parser.add_argument('time_1', type=parse_datetime)
    parser.add_argument('platform_2')
    parser.add_argument('time_2', type=parse_datetime)
    parser.add_argument('latitude', type=float)
    parser.add_argument('longitude', type=float)
    parser.add_argument('max_distance', type=float)
    parser.add_argument('sensor', choices=['airs', 'iasi', 'cris', 'cris-fsr'])
    parser.add_argument('files', nargs='+', metavar='file')
    return parser.parse_args()

def parse_datetime(s):

    return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S.%f')

def _create_granule(sensor, files):

    granule_cls = {'airs': AirsGranule, 'iasi': IasiGranule,
                   'cris': CrisGranule, 'cris-fsr': CrisGranule}[sensor]
    return SuperGranule(granule_cls, files)

def _filter_by_proximity(granule, lat, lon, max_distance):

    distance = gcdist(granule.latitude, granule.longitude, lat, lon)
    return (distance <= max_distance).nonzero()

def _process_spectra(granule, radiance, scan_idx, for_idx, fov_idx,
                     output_file, dataset_prefix=None):

    radiance = radiance.astype(np.float64)
    R = _grab_spectra(granule, radiance, scan_idx, for_idx, fov_idx)
    n, sum, sum_of_squares = _aggregate_by_fov(granule, fov_idx, R)
    new_dataset_prefix = (
        'Radiance' if dataset_prefix is None else dataset_prefix + '_Radiance')
    _output(n, sum, sum_of_squares, output_file, 'FOV_' + new_dataset_prefix)
    _output(n.sum(axis=0), sum.sum(axis=0), sum_of_squares.sum(axis=0),
            output_file, new_dataset_prefix)

def _grab_spectra(granule, radiance, scan_idx, for_idx, fov_idx):

    flat_fov_idx = ((scan_idx * granule.num_fors_per_scan +
                        for_idx) * granule.num_fovs_per_for +
                        fov_idx)
    wn_idx = np.arange(granule.num_wavenumbers)
    flat_sample_idx = (flat_fov_idx[:,None] * granule.num_wavenumbers + wn_idx).flatten()
    return radiance.take(flat_sample_idx)

def _aggregate_by_fov(granule, fov_idx, radiance):

    wn_idx = np.arange(granule.num_wavenumbers)
    flat_output_idx = (fov_idx[:,None] * granule.num_wavenumbers + wn_idx).flatten()
    num_output_idxs = granule.num_fovs_per_for * granule.num_wavenumbers
    n = np.bincount(fov_idx, minlength=granule.num_fovs_per_for)
    r = (np.bincount(flat_output_idx, weights=radiance, minlength=num_output_idxs)
             .reshape([granule.num_fovs_per_for, granule.num_wavenumbers]))
    r2 = (np.bincount(flat_output_idx, weights=(radiance * radiance),
                      minlength=num_output_idxs)
              .reshape([granule.num_fovs_per_for, granule.num_wavenumbers]))
    return n, r, r2

def _output(n, sum, sum_of_squares, output_file, dataset_prefix):

    mean = sum / n[...,None]
    std = np.sqrt(sum_of_squares / n[...,None] - mean * mean)
    output_file.create_dataset(dataset_prefix + '_Count', data=n)
    output_file.create_dataset(dataset_prefix + '_Mean', data=mean)
    output_file.create_dataset(dataset_prefix + '_Std', data=std)

def unix_time(t):

    return (t - datetime(1970, 1, 1)).total_seconds()

def output_file_name(args):

    platform_1, platform_2 = sorted([args.platform_1, args.platform_2])
    earlier_time = min(args.time_1, args.time_2)
    return 'sno_{}_{}_{}_{}.h5'.format(platform_tag(platform_1).lower(),
                                       platform_tag(platform_2).lower(),
                                       args.sensor.lower(),
                                       earlier_time.strftime('%Y-%m-%dT%H-%M'))

def platform_tag(platform):
    tags = {'AQUA': 'Aqua', 'METOP-A': 'Metop-A', 'METOP-B': 'Metop-B', 'SUOMI NPP': 'NPP'}
    return tags[platform]

def main():

    args = _parse_args()
    granule = _create_granule(args.sensor, args.files)
    scan_idx, for_idx, fov_idx = (
        _filter_by_proximity(granule, args.latitude, args.longitude, args.max_distance))
    with h5py.File(output_file_name(args), 'w') as output_file:
        _process_spectra(granule, granule.radiance, scan_idx, for_idx, fov_idx,
                         output_file)
        if granule.has_imaginary_radiance:
            _process_spectra(
                granule, granule.imaginary_radiance, scan_idx, for_idx, fov_idx,
                output_file, 'Imaginary')
        output_file.create_dataset('{}_Time'.format(platform_tag(args.platform_1)),
                                   data=unix_time(args.time_1))
        output_file.create_dataset('{}_Time'.format(platform_tag(args.platform_2)),
                                   data=unix_time(args.time_2))
        output_file.create_dataset('SNO_Latitude', data=args.latitude)
        output_file.create_dataset('SNO_Longitude', data=args.longitude)
        output_file.create_dataset('Input_Files', data=[os.path.basename(f) for f in args.files])

if __name__ == '__main__':
    main()
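
The bincount trick in _aggregate_by_fov flattens (observation, wavenumber) pairs into a single index so one np.bincount call accumulates per-FOV sums of whole spectra. A toy-sized sketch (illustrative, not part of the diff):

import numpy as np

num_fovs, num_wn = 3, 4
fov_idx = np.array([0, 2, 0])                  # FOV of each selected obs
radiance = np.arange(len(fov_idx) * num_wn, dtype=np.float64)  # flat spectra
wn_idx = np.arange(num_wn)
flat_idx = (fov_idx[:, None] * num_wn + wn_idx).flatten()
sums = (np.bincount(flat_idx, weights=radiance, minlength=num_fovs * num_wn)
            .reshape([num_fovs, num_wn]))      # only rows 0 and 2 get data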

import numpy as np

class SuperGranule(object):

    def __init__(self, granule_cls, files):

        self.granules = [granule_cls(*args)
                             for args
                             in self._grouped(files, granule_cls.num_files)]

    @property
    def num_scans(self):

        return sum(g.num_scans for g in self.granules)

    @property
    def num_fors_per_scan(self):

        return self.granules[0].num_fors_per_scan

    @property
    def num_fovs_per_for(self):

        return self.granules[0].num_fovs_per_for

    @property
    def num_wavenumbers(self):

        return len(self.wavenumber)

    @property
    def wavenumber(self):

        return self.granules[0].wavenumber

    @property
    def latitude(self):

        if not hasattr(self, '_latitude'):
            self._latitude = self._get_geolocation('latitude')
        return self._latitude

    @property
    def longitude(self):

        if not hasattr(self, '_longitude'):
            self._longitude = self._get_geolocation('longitude')
        return self._longitude

    @property
    def radiance(self):

        if not hasattr(self, '_radiance'):
            self._radiance = self._get_radiance()
        return self._radiance

    @property
    def has_imaginary_radiance(self):

        return self.granules[0].has_imaginary_radiance

    @property
    def imaginary_radiance(self):

        if not hasattr(self, '_imaginary_radiance'):
            self._imaginary_radiance = self._get_radiance(imaginary=True)
        return self._imaginary_radiance

    def _get_geolocation(self, variable):

        output_array = (
            np.empty([self.num_scans, self.num_fors_per_scan, self.num_fovs_per_for],
                     np.float32))
        for granule, scan_idx_ini, scan_idx_fin in self._granules_with_indexes:
            read_method = getattr(granule, 'read_' + variable)
            read_method(output_array[scan_idx_ini:scan_idx_fin,:,:])
        return output_array

    def _get_radiance(self, imaginary=False):

        output_array = np.empty([self.num_scans, self.num_fors_per_scan,
                                 self.num_fovs_per_for, len(self.wavenumber)],
                                np.float32)
        for granule, scan_idx_ini, scan_idx_fin in self._granules_with_indexes:
            granule.read_radiance(output_array[scan_idx_ini:scan_idx_fin,:,:],
                                  imaginary)
        return output_array

    @property
    def _granules_with_indexes(self):

        scan_idx = 0
        for granule in self.granules:
            yield granule, scan_idx, scan_idx + granule.num_scans
            scan_idx += granule.num_scans

    def _grouped(self, iterable, n):

        return zip(*([iter(iterable)] * n))
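
The _grouped helper is the standard iterator-replication idiom: zipping n references to a single iterator yields consecutive n-tuples, which is how CrIS (num_files == 2) file lists are paired up. For example (illustrative):

files = ['g1.h5', 's1.h5', 'g2.h5', 's2.h5']
list(zip(*([iter(files)] * 2)))  # [('g1.h5', 's1.h5'), ('g2.h5', 's2.h5')]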

from unittest import TestCase

from intercal.iasi import Iasi, CodaFile

class IasiTest(TestCase):

    def test_latitude(self):

        lat = Iasi(EXAMPLE_IASI_FILE).latitude
        self.assertEqual(lat.shape, (22, 30, 4))
        self.assertAlmostEqual(lat[0,0,0], 150.069, places=3)

    def test_longitude(self):

        lon = Iasi(EXAMPLE_IASI_FILE).longitude
        self.assertEqual(lon.shape, (22, 30, 4))
        self.assertAlmostEqual(lon[0,0,0], -50.907, places=3)

    def test_radiance(self):

        rad = Iasi(EXAMPLE_IASI_FILE).radiance

class CodaFileTest(TestCase):

    def test_getattr(self):

        with CodaFile(EXAMPLE_IASI_FILE) as coda_file:
            self.assertEqual(coda_file.get_product_filename(), EXAMPLE_IASI_FILE)

EXAMPLE_IASI_FILE = ('/data/gregq/iasi/IASI_xxx_1C_M02_20121106234457Z_' +
                         '20121106234753Z_N_O_20121107004428Z__20121107004545')
+112 −0

"""Utility functions for inter-calibration"""

import numpy as np

# mean earth radius; km
r_earth_mean = 6371.0

# speed of light; m * s^-1
c = 2.99792458e8

# Planck constant; J * s
h = 6.62606957e-34

# Boltzmann constant; J * K^-1
k_B = 1.3806488e-23

# Planck function coefficients
c1 = 2.0 * h * c**2
c2 = h * c / k_B

def gcdist(lat_a, lon_a, lat_b, lon_b):

    """Great circle distance in km between lat lon pair"""

    lat_a, lon_a, lat_b, lon_b = (
        np.radians(x) for x in [lat_a, lon_a, lat_b, lon_b])
    tmp = (np.sin(lat_a) * np.sin(lat_b) +
               np.cos(lat_a) * np.cos(lat_b) * np.cos(lon_a - lon_b))
    tmp[tmp > 1.0] = 1.0
    return np.arccos(tmp) * r_earth_mean

def wl2wn(lambda_):

    """Convert from wavelength to wavenumber.

    lambda_ given in microns. Result in inverse centimeters.
    """

    return 1e4 / lambda_

def wn2wl(nu):

    """Convert from wavenumber to wavelength.

    nu given in inverse centimeters. Result in microns.
    """

    return 1e4 / nu

def planck_wl(lambda_, T):

    """Planck function for lambda_ in microns, T in Kelvin

    Output in W per m^2 per sr per micron.
    """

    lambda_ = lambda_ * 1e-6
    return c1 / lambda_**5 / (np.exp(c2 / (lambda_ * T)) - 1.0) * 1e-6

def planck_wn(nu, T):

    """Planck function for nu in inverse centimeters, T in Kelvin

    Output in mW per m^2 per sr per inverse centimeter.
    """

    nu = nu * 1e2
    return c1 * nu**3 / (np.exp(c2 * nu / T) - 1.0) * (1e2 * 1e3)

def inv_planck_wl(lambda_, B):

    """Inverse planck routine for wavelength in microns

    B is radiance in W per m^2 per sr per micron.
    """

    lambda_ = lambda_ * 1e-6
    B = B * 1e6
    return c2 / lambda_ / np.log(c1 / (lambda_**5 * B) + 1.0)

def inv_planck_wn(nu, B):

    """Inverse planck routine for wavenumber in inverse centimeters

    B is radiance in mW per m^2 per sr per inverse centimeter.
    """

    nu = nu * 1e2
    B = B * (1e-2 * 1e-3)
    return c2 * nu / np.log(c1 * nu**3 / B + 1.0)

def rad_wl2wn(lambda_, R):

    """Convert radiances from per-wavelength to per-wavenumber units

    lambda_ is in microns and R is in W per m^2 per sr per micron.
    Output is in mW per m^2 per sr per inverse centimeter.
    """

    return R * lambda_**2 * (1e3 * 1e-4)

def rad_wn2wl(nu, R):

    """Convert radiances from per-wavenumber to per-wavelength units

    nu is in inverse centimeters and R is in mW per m^2 per sr per
    inverse centimeter. Output is in W per m^2 per sr per micron.
    """

    return R * nu**2 * (1e-3 * 1e-4)
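
A quick round-trip check of the Planck pair above (illustrative, not part of the diff): inv_planck_wn should recover the input temperature exactly.

import numpy as np
from intercal.util import planck_wn, inv_planck_wn

nu = np.array([900.0])  # wavenumber, cm^-1
T = 280.0               # K
B = planck_wn(nu, T)    # mW / m^2 / sr / cm^-1
assert np.allclose(inv_planck_wn(nu, B), T)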

src/intercal/fn.py

0 → 100644
+220 −0

from datetime import datetime, timedelta
import os
import re

def parse(path):
    """Parse metadata from a satellite data file name"""
    file_name = os.path.basename(path)
    p, = [p for p in parsers if p.match(file_name)]
    return p.parse(file_name)

def parser(cls):
    """Decorator to register a filename parser"""
    parsers.append(cls())

parsers = []

@parser
class Iasi(object):

    def match(self, file_name):
        return re.search('^IASI_xxx_', file_name)

    def parse(self, file_name):
        def f():
            x = file_name.split('_')
            return {'sensor': 'IASI', 'level': x[2], 'platform': map_platform(x[3]),
                    'begin_time': parse_time(x[4]), 'end_time': parse_time(x[5]),
                    'processing_mode': x[6], 'disposition_mode': x[7],
                    'creation_time': parse_time(x[8])}
        def map_platform(p):
            return {'M02': 'METOP-A', 'M01': 'METOP-B'}[p]
        def parse_time(s):
            return datetime.strptime(s, '%Y%m%d%H%M%SZ')
        return f()

@parser
class Avhrr(object):

    def match(self, file_name):
        return re.search(r'^[A-Z]{3}\.[A-Z]{4}\..{2}\.D[0-9]{5}\.S[0-9]{4}\.E[0-9]{4}\.',
                         file_name)

    def parse(self, file_name):
        def f():
            x = file_name.split('.')
            begin_time, end_time = parse_times(x[3], x[4], x[5])
            return {'sensor': 'AVHRR', 'processing_center': x[0], 'data_type': x[1],
                    'platform': map_platform(x[2]), 'begin_time': begin_time,
                    'end_time': end_time, 'processing_block_id': x[6], 'source': x[7]}
        def parse_times(day_str, begin_str, end_str):
            begin_day = datetime.strptime(day_str, 'D%y%j')
            begin_time = begin_day + timedelta(hours=int(begin_str[1:3]),
                                               minutes=int(begin_str[3:5]))
            end_time = begin_day + timedelta(hours=int(end_str[1:3]),
                                             minutes=int(end_str[3:5]))
            if end_time < begin_time:
                end_time += timedelta(days=1)
            return begin_time, end_time
        def map_platform(p):
            return {'N': map_noaa_platform, 'M': map_metop_platform}[p[0]](p)
        def map_noaa_platform(p):
            suffixes_by_letter = {'A': '6', 'B': 'B', 'C': '7', 'D': '12', 'E': '8', 'F': '9',
                                'G': '10', 'H': '11', 'I': '13', 'J': '14', 'K': '15', 'L': '16',
                                'M': '17', 'N': '18', 'P': '19'}
            return 'NOAA ' + suffixes_by_letter[p[1]]
        def map_metop_platform(p):
            return {'M2': 'METOP-A', 'M1': 'METOP-B'}[p]
        return f()

@parser
class Modis(object):

    def match(self, file_name):
        return re.search(r'^M[OY]D.*\.A[0-9]{7}\.[0-9]{4}\.[0-9]{3}\.[0-9]{13}\.', file_name)

    def parse(self, file_name):
        def f():
            x = file_name.split('.')
            return {'sensor': 'MODIS', 'platform': platform(x[0]),
                    'product': product(x[0]), 'begin_time': begin_time(x[1], x[2]),
                    'collection': x[3], 'creation_time': creation_time(x[4])}
        def platform(s):
            return {'MOD': 'TERRA', 'MYD': 'AQUA'}[s[:3]]
        def product(s):
            return s.replace('MYD', 'MOD')
        def begin_time(d, t):
            return datetime.strptime(d + t, 'A%Y%j%H%M')
        def creation_time(s):
            return datetime.strptime(s, '%Y%j%H%M%S')
        return f()

@parser
class Iff(object):

    def match(self, file_name):
        return re.search(self.regex, file_name)

    def parse(self, file_name):
        m = re.search(self.regex, file_name)
        return {'product': m.group('product'),
                'platform': {'aqua': 'AQUA', 'npp': 'SUOMI NPP'}[m.group('platform')],
                'sensor': {'aqua': 'MODIS', 'npp': 'VIIRS'}[m.group('platform')],
                'begin_time': datetime.strptime(m.group('begin_time'), 'd%Y%m%d_t%H%M%S'),
                'creation_time': datetime.strptime(m.group('creation_time'), 'c%Y%m%d%H%M%S'),
                'format': m.group('format')}

    regex = (r'^(?P<product>IFF[A-Z]+)_(?P<platform>[a-z]+)_(?P<begin_time>d[0-9]{8}_t[0-9]{6})_' +
                 r'(?P<creation_time>c[0-9]{14})_ssec_dev\.(?P<format>(hdf|nc))$')

@parser
class Idps(object):

    def match(self, file_name):
        regex = (r'[A-Z0-9]{5}_[a-z0-9]{3}_d[0-9]{8}_t[0-9]{7}_e[0-9]{7}_b[0-9]{5}_' +
                     r'c[0-9]{20}_[a-z]{4}_[a-z]{3}\.')
        return re.search(regex, file_name)

    def parse(self, file_name):
        def f():
            parts = file_name.split('.')[0].split('_')
            begin_time, end_time = times(*parts[2:5])
            return {'sensor': self.product_sensors[parts[0]], 'product': parts[0],
                    'platform': platform(parts[1]), 'begin_time': begin_time,
                    'end_time': end_time, 'orbit_number': orbit_number(parts[5]),
                    'creation_time': creation_time(parts[6]), 'origin': parts[7],
                    'domain': parts[8]}
        def platform(s):
            return {'npp': 'SUOMI NPP'}[s]
        def times(day_str, begin_str, end_str):
            day = datetime.strptime(day_str, 'd%Y%m%d')
            def time(s):
                return timedelta(hours=int(s[1:3]), minutes=int(s[3:5]), seconds=int(s[5:7]),
                                 milliseconds=(100 * int(s[7:8])))
            begin_time = day + time(begin_str)
            end_time = day + time(end_str)
            return begin_time, end_time
        def orbit_number(s):
            return int(s[1:])
        def creation_time(s):
            return datetime.strptime(s, 'c%Y%m%d%H%M%S%f')
        return f()

    sensor_products = {}
    sensor_products['VIIRS'] = """
        AVAFO GAERO GCLDO GDNBO GITCO GMODO GMTCO IICMO IVAOT
        IVCBH IVCLT IVCOP IVCTP IVICC IVPCP IVPTP RNSCA RVIDU RVIRD RVIRS RVIRT
        RVITD SVDNB SVI01 SVI02 SVI03 SVI04 SVI05 SVM01 SVM02 SVM03 SVM04 SVM05
        SVM06 SVM07 SVM08 SVM09 SVM10 SVM11 SVM12 SVM13 SVM14 SVM15 SVM16 VAOOO
        VCBHO VCCLO VCEPO VCOTO VCTHO VCTPO VCTTO VISAO VIVIO VSSTO VSUMO""".split()
    sensor_products['CrIS'] = """
        GCRIO GCRSO IIROO RCRID RCRIH RCRII RCRIM RCRIT RCRIU REDRO SCRIS""".split()
    sensor_products['ATMS'] = """
        GATMO GATRO RATMD RATMM RATMS RATMT RATMW SATMR SATMS""".split()
    sensor_products['OMPS'] = """
        GONPO GOTCO SOMPS SOMTC""".split()
    product_sensors = {product: sensor
                           for sensor, products in sensor_products.items()
                           for product in products}

@parser
class Caliop(object):

    def match(self, file_name):
        return re.search(self.regex, file_name)

    def parse(self, file_name):
        m = re.search(self.regex, file_name)
        d = m.groupdict()
        d['platform'] = 'CALIPSO'
        d['sensor'] = 'CALIOP' if d['sensor'] == 'LID' else d['sensor']
        d['begin_time'] = datetime.strptime(d['begin_time'], '%Y-%m-%dT%H-%M-%SZ')
        return d

    regex = (r'^CAL_(?P<sensor>[A-Z]{3})_(?P<product>.*)-(?P<production_strategy>.*)-' +
                  r'(?P<version>V.*)\.(?P<begin_time>[-0-9T]{19}Z)(?P<day_night>D|N)\.hdf')

@parser
class Cloudsat(object):

    def match(self, file_name):
        return re.search(self.regex, file_name)

    def parse(self, file_name):
        m = re.search(self.regex, file_name)
        rv = m.groupdict()
        rv['platform'] = 'CLOUDSAT'
        rv['sensor'] = 'CLOUDSAT'
        rv['begin_time'] = datetime.strptime(rv['begin_time'], '%Y%j%H%M%S')
        rv['granule_num'] = int(rv['granule_num'])
        rv['epoch_num'] = int(rv['epoch_num'])
        return rv

    regex = (r'^(?P<begin_time>\d{13})_(?P<granule_num>\d{5})_CS_(?P<product>.*)_GRANULE_' +
                 r'(?P<processing_iteration>.*_R.*)_E(?P<epoch_num>\d{2}).hdf$')

@parser
class GeocatSeviri(object):

    def match(self, file_name):
        return re.search(self.regex, file_name)

    def parse(self, file_name):
        m = re.search(self.regex, file_name)
        rv = m.groupdict()
        rv['platform'] = rv['platform'].upper()
        rv['sensor'] = 'SEVIRI'
        return rv

    regex = r'^geocatNAV\.(?P<platform>Meteosat-\d+)\..{7}\.\d{6}\.hdf'

@parser
class Ahipack(object):

    def match(self, file_name):
        return file_name == 'AHIPACK'

    def parse(self, file_name):
        return {'sensor': 'AHI'}
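
Example use of the registry above, on the IASI file name that appears in the tests elsewhere in this diff:

from intercal.fn import parse

meta = parse('IASI_xxx_1C_M02_20121106234457Z_20121106234753Z_N_O_'
             '20121107004428Z__20121107004545')
meta['platform']    # 'METOP-A'
meta['begin_time']  # datetime(2012, 11, 6, 23, 44, 57)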
Original line number Original line Diff line number Diff line
@@ -5,11 +5,11 @@ from intercal.modis_srf import ModisSrfCatalog


class Airs2Modis(object):
class Airs2Modis(object):


    def __init__(self, chan_prop_path, srf_path):
    def __init__(self, chan_prop_path, srf_path, shift_id=0):


        self.channel_mask, self.wavenumber_domain = (
        self.channel_mask, self.wavenumber_domain = (
            self.parse_channel_props(chan_prop_path))
            self.parse_channel_props(chan_prop_path))
        self.srf_catalog = ModisSrfCatalog(srf_path)
        self.srf_catalog = ModisSrfCatalog(srf_path, shift_id=shift_id)


    def __call__(self, band, rad):
    def __call__(self, band, rad):


@@ -49,11 +49,11 @@ def read_airs_chan_data(filename):
             break
     offset = i
     size = len(lines) - offset
-    mask = np.empty((size,), np.bool)
+    mask = np.empty((size,), bool)
     wn = np.empty((size,), np.float64)
     nedts = np.empty((size,), np.float64)
     for i in range(offset, len(lines)):
-        mask[i - offset] = np.bool(lines[i][76] == '0')
+        mask[i - offset] = bool(lines[i][76] == '0')
         wn[i - offset] = np.float64(lines[i][6:14])
         nedts[i - offset] = np.float64(lines[i][26:32])
 
@@ -61,10 +61,10 @@ def read_airs_chan_data(filename):
 
 class Cris2Modis(object):
 
-    def __init__(self, srf_path):
+    def __init__(self, srf_path, shift_id=0, fsr=False):
 
-        self.srf_catalog = ModisSrfCatalog(srf_path)
-
+        self.srf_catalog = ModisSrfCatalog(srf_path, shift_id=shift_id)
+        self.fsr = fsr
     def __call__(self, band, rad_lw, rad_mw, rad_sw):
 
         nu = self.cris_wavenumber_domain()
@@ -81,6 +81,12 @@ class Cris2Modis(object):
 
     def cris_wavenumber_domain(self):
 
-        return np.concatenate([np.linspace(648.75, 1096.25, 717),
-                               np.linspace(1207.5, 1752.5, 437),
-                               np.linspace(2150, 2555, 163)],
+        if self.fsr:
+            return np.concatenate([np.linspace(648.75, 1096.25, 717),
+                                   np.linspace(1207.5, 1752.5, 869),
+                                   np.linspace(2150, 2555, 637)],
+                                  axis=-1)
+        else:
+            return np.concatenate([np.linspace(648.75, 1096.25, 717),
+                                   np.linspace(1207.5, 1752.5, 437),
+                                   np.linspace(2150, 2555, 163)],
+5 −0

from intercal.iasi_viirs.iasi_viirs import process_in_flo

main = process_in_flo
+115 −0

"""Support for EUMETSAT's Infrared Atmospheric Sounding Interferometer"""

from datetime import datetime, timedelta

import coda
import numpy as np

from intercal.iasi_viirs.sensor import Sensor

class Iasi(Sensor):

    """Class to facilitate access to an IASI L1C file"""

    sensor = 'IASI'
    primary_file_type = 'IASI_L1C'

    wavenumber = np.linspace(645.0, 2760.0, 8461)
    fors_per_scan = 30
    fovs_per_for = 4

    max_granule_duration = timedelta(minutes=3, seconds=5)

    @staticmethod
    def parse_filename(filename):

        """Pull timestamp out of an IASI L1C filename

        Return file type, timestamp tuple as expected by Sensor
        base class.
        """

        # FIXME: use Bruce's filename parsing from ingest. or perhaps
        # better yet factor it out into a util lib
        if not filename.startswith('IASI_xxx_1C'):
            raise ValueError('Bad filename prefix')
        begin_time = datetime.strptime(filename.split('_')[4], '%Y%m%d%H%M%SZ')
        return 'IASI_L1C', begin_time

    def shape_helper(self, filename):

        """Return shape of lat / lon arrays in given file"""

        handle = coda.open(filename)
        scans, = coda.get_size(handle, 'MDR')
        shape = coda.get_size(handle, 'MDR', 0, 'MDR', 'GGeoSondLoc')[:-1]
        coda.close(handle)
        assert tuple(shape) == (self.fors_per_scan, self.fovs_per_for)
        return [scans] + list(shape)

    def load_location_helper(self, files, lat, lon):

        """Read in lat / lon arrays"""

        h = coda.open(files['IASI_L1C'])
        loc = coda.fetch(h, 'MDR', -1, 'MDR', 'GGeoSondLoc')
        coda.close(h)
        lat[...] = [a[:,:,1] for a in loc]
        lon[...] = [a[:,:,0] for a in loc]

    def load_radiance_helper(self, band, files, rad):

        """Read in radiance spectra

        band argument should be None as this class reads in all
        radiances as a single band.
        """

        assert band is None

        handle = coda.open(files['IASI_L1C'])

        # read in channel info and verify it matches up with what
        # we expect
        delta_nu = 1e-2 * coda.fetch(handle, 'MDR', 0, 'MDR', 'IDefSpectDWn1b')
        first_chan = coda.fetch(handle, 'MDR', 0, 'MDR', 'IDefNsfirst1b')
        assert (first_chan - 1) * delta_nu == self.wavenumber[0]
        last_chan = coda.fetch(handle, 'MDR', 0, 'MDR', 'IDefNslast1b')
        assert (last_chan - 1) * delta_nu == self.wavenumber[-1]
        num_chans = last_chan - first_chan + 1
        assert num_chans == len(self.wavenumber)

        # read in scaled integers
        rad_scaled = coda.fetch(handle, 'MDR', -1, 'MDR', 'GS1cSpect')

        # apply scale factors and unit conversion (from SI units to
        # mW / (m^2 sr cm^-1))
        sf = coda.fetch(handle, 'GIADR_ScaleFactors')
        ch_idx_ini = 0
        for sf_idx in range(sf.IDefScaleSondNbScale):
            assert ch_idx_ini == sf.IDefScaleSondNsfirst[sf_idx] - first_chan
            ch_idx_fin = sf.IDefScaleSondNslast[sf_idx] - first_chan + 1
            factor = 10.0 ** -sf.IDefScaleSondScaleFactor[sf_idx]
            factor *= 1e5
            rad[:,:,:,ch_idx_ini:ch_idx_fin] = [
                rad_scaled[i][:,:,ch_idx_ini:ch_idx_fin] * factor
                    for i in range(len(rad_scaled))]
            ch_idx_ini = ch_idx_fin
        assert ch_idx_fin == num_chans

        # replace anything marked as bad in the quality flags with
        # NaN fill values
        qf = coda.fetch(handle, 'MDR', -1, 'MDR', 'GQisFlagQual')
        mask = np.array([a for a in qf], bool)
        rad[mask] = np.nan

        coda.close(handle)

    @property
    def radiance_shape(self):

        """Shape of radiance array"""

        return self.shape + [len(self.wavenumber)]
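
A worked check of the channel asserts in load_radiance_helper (the nominal IASI L1C values below follow from the asserts themselves; illustrative, not from the diff): the sampling is 0.25 cm^-1 and channel n sits at (n - 1) * 0.25 cm^-1.

delta_nu = 0.25
first_chan, last_chan = 2581, 11041  # nominal IDefNsfirst1b / IDefNslast1b
assert (first_chan - 1) * delta_nu == 645.0   # wavenumber[0]
assert (last_chan - 1) * delta_nu == 2760.0   # wavenumber[-1]
assert last_chan - first_chan + 1 == 8461     # len(wavenumber)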
+187 −0

import argparse
from calendar import timegm
from datetime import timedelta
from glob import glob
import os

import h5py
import numpy as np

from intercal import srfio
from intercal.iasi_viirs.iasi import Iasi
from intercal.iasi_viirs.viirs import Viirs, ViirsIBand
from intercal.srf import WavelengthSrf
from intercal.util import gcdist

def read_srf_dir(srf_dir):
    def f():
        if j1_srf_dir(srf_dir):
            return read_j1_srf_dir(srf_dir)
        else:
            return srfio.read_viirs_dir(srf_dir)
    def j1_srf_dir(srf_dir):
        return os.path.exists(os.path.join(srf_dir, 'M15_det'))
    return f()

def read_j1_srf_dir(srf_dir):
    def f():
        return {band: read_j1_srf(srf_file) for band, srf_file in srf_files(srf_dir)}
    def srf_files(srf_dir):
        for band_dir in os.listdir(srf_dir):
            band = band_dir.split('_')[0]
            srf_file, = glob(os.path.join(srf_dir, band_dir, '*_BA_*'))
            yield band, srf_file
    return f()

def read_j1_srf(srf_file):
    def f():
        table = np.array([[float(wl), float(rsr)] for band, wl, rsr in srf_file_data(srf_file)])
        wl = table[:,0] / 1000.  # convert from nm to microns
        resp = table[:,1]
        return WavelengthSrf(wl, resp)
    def srf_file_data(srf_file):
        with open(srf_file) as file_obj:
            for line in file_obj:
                if line[0] != '#':
                    yield line.split()
    return f()

def dump_planck_coeffs(filename, srfs, bands=('M12', 'M13', 'M14', 'M15', 'M16', 'I4', 'I5')):

    lambda_ = np.empty([len(bands)], np.float64)
    a = np.empty([len(bands), 3], np.float64)
    for band_idx, band in enumerate(bands):
        print(band)
        srf = srfs[band]
        srf.bright(np.array(0.0))
        lambda_[band_idx] = srf.centroid
        a[band_idx,:] = srf.monochrom_coeffs
        print('    Wavelength: {}'.format(srf.centroid))
        print('            a2: {}'.format(srf.monochrom_coeffs[0]))
        print('            a1: {}'.format(srf.monochrom_coeffs[1]))
        print('            a0: {}'.format(srf.monochrom_coeffs[2]))
        print('')
    with h5py.File(filename, 'w') as f:
        f.create_dataset('Wavelength', data=lambda_)
        f.create_dataset('Coefficients', data=a)

def plot_results(filename, band_idx, T_min=190.0, T_max=320.0):

    import matplotlib.pyplot as plt
    with h5py.File(filename, 'r') as f:
        iasi = f['IASI_Mean_T_b'][:,band_idx]
        viirs = f['VIIRS_Mean_T_b'][:,band_idx]
    plt.scatter(iasi, viirs)
    plt.plot([T_min, T_max], [T_min, T_max])
    plt.xlim(T_min, T_max)
    plt.ylim(T_min, T_max)
    plt.xlabel('IASI')
    plt.ylabel('VIIRS')

def process_in_flo():

    parser = argparse.ArgumentParser()
    parser.add_argument('lat', type=float)
    parser.add_argument('lon', type=float)
    parser.add_argument('iasi_time', type=float)
    parser.add_argument('viirs_time', type=float)
    parser.add_argument('srf_dir')
    parser.add_argument('out_filename')
    args = parser.parse_args()

    m_bands = ['M12', 'M13', 'M14', 'M15', 'M16']
    i_bands = ['I4', 'I5']
    bands = m_bands + i_bands
    
    lat, lon = (
        np.empty([1], np.float64) for i in range(2))
    iasi_t, viirs_t = (
        np.empty([1], np.int64) for i in range(2))
    iasi_n, viirs_n = (
        np.empty([1, len(bands)], np.int32) for i in range(2))
    iasi_mean_T_b, iasi_T_b_std, viirs_mean_T_b, viirs_T_b_std = (
        np.empty([1, len(bands)], np.float64) for i in range(4))

    filenames = os.listdir('.')
    iasi = Iasi.from_files(*filenames)
    viirs = Viirs.from_files(*filenames)
    viirs_i_band = ViirsIBand.from_files(*filenames)

    lat[0], lon[0] = args.lat, args.lon
    iasi_t[0] = args.iasi_time
    viirs_t[0] = args.viirs_time

    srfs = read_srf_dir(args.srf_dir)

    n, mu, sigma = process_sno(iasi, args.lat, args.lon, bands, srfs)
    iasi_n[0,:] = n
    iasi_mean_T_b[0,:] = mu
    iasi_T_b_std[0,:] = sigma

    n, mu, sigma = process_sno(viirs, args.lat, args.lon, m_bands, srfs)
    viirs_n[0,:len(m_bands)] = n
    viirs_mean_T_b[0,:len(m_bands)] = mu
    viirs_T_b_std[0,:len(m_bands)] = sigma

    n, mu, sigma = process_sno(viirs_i_band, args.lat, args.lon, i_bands, srfs)
    viirs_n[0,len(m_bands):] = n
    viirs_mean_T_b[0,len(m_bands):] = mu
    viirs_T_b_std[0,len(m_bands):] = sigma

    with h5py.File(args.out_filename, 'w') as out_file:
        d = lambda n, a: out_file.create_dataset(n, data=a[...,:])
        d('Latitude', lat)
        d('Longitude', lon)
        d('IASI_Time', iasi_t)
        d('IASI_N', iasi_n)
        d('IASI_Mean_T_b', iasi_mean_T_b)
        d('IASI_T_b_Std', iasi_T_b_std)
        d('VIIRS_Time', viirs_t)
        d('VIIRS_N', viirs_n)
        d('VIIRS_Mean_T_b', viirs_mean_T_b)
        d('VIIRS_T_b_Std', viirs_T_b_std)
        d('Band', np.array(bands))

def process_sno(sensor, sno_lat, sno_lon, bands, srfs, d_max=50.0):

    sensor_lat, sensor_lon = sensor.latitude, sensor.longitude
    dist = gcdist(sensor_lat, sensor_lon, sno_lat, sno_lon)
    mask = dist <= d_max

    if isinstance(sensor, Iasi):
        R = sensor.radiance()[mask]

    n, mu, sigma = [], [], []

    for band in bands:

        srf = srfs[band]

        if isinstance(sensor, Iasi):
            srf = srf.to_wn()
            R_band = srf.apply(sensor.wavenumber, R)

        else:
            R_band = sensor.radiance(band)[mask]

        # filter out NaN fill values
        R_band = R_band[np.logical_not(np.isnan(R_band))]

        # collect stats on brightness temperature
        mu_R = R_band.mean()
        sigma_R = R_band.std()
        n.append(len(R_band))
        mu.append(srf.bright(mu_R))
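        # propagate one radiance sigma through the nonlinear Planck
        # inversion to approximate the brightness temperature deviation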
        sigma.append(srf.bright(mu_R + sigma_R) - mu[-1])

    return n, mu, sigma

def sno_str(sno):

    return ' / '.join(t.strftime('%Y-%m-%d %H:%M') for t in [sno.t_1, sno.t_2])

def unix_time(t):

    return timegm(t.timetuple())
+299 −0

from glob import glob
import os

import numpy as np
# import sqlalchemy as sa

# from peate.ingest.fs import ingest_path
# import peate.ingest.model as ingest_model

def ingest_subdir(file_type, t):

    """Generate ingest-style subdirectories

    For use with Sensor.from_file_search.
    """

    return '%s/%s' % (t.strftime('%Y/%j'), file_type)

def product_subdir(dataset_ids):

    """Return function to generate flo-style subdirectories

    For use with Sensor.from_file_search. dataset_ids provides a
    mapping from file type to flo dataset ID.
    """

    def subdir(file_type, t):
        return '%s/%s' % (dataset_ids[file_type], t.strftime('%Y/%j'))
    return subdir

class Sensor(object):

    """Sensor base class

    Supports loading polar-orbiting sensor data either by time range
    (by fetching data from the ingest system) or via a collection of
    files. Supports concatenation of multiple granules. We'll call
    such a concatenated set of granules a super-granule.
    """

    @classmethod
    def from_file_search(cls, root_dir, begin_time, end_time=None,
                         subdir=ingest_subdir):

        """Initialize by looking for data files in the file system

        subdir is a callable that takes a file type and datetime and
        returns a path relative to root_dir that stores files for that
        file type and time.
        """

        if end_time is None:
            end_time = begin_time

        # we want to ensure that we get all granules with any data
        # from the provided time range, so adjust the given begin time
        # by the maximum granule size we ever expect to see
        begin_time -= cls.max_granule_duration

        # get a set of the subdirectories that we'll search through
        # (assumes the given time range crosses over a subdirectory
        # boundary at most once)
        subdirs = set()
        for t in [begin_time, end_time]:
            dir = os.path.join(root_dir, subdir(cls.primary_file_type, t))
            if os.path.exists(dir):
                subdirs.add(dir)

        # get sorted list of all full pathnames of our primary type
        files = sorted(os.path.join(d, f)
                           for d in subdirs for f in os.listdir(d))

        # build a file map to hand off to __init__
        file_map = {}
        for pathname in files:

            try:
                file_type, t = cls.parse_filename(os.path.basename(pathname))
            except ValueError:
                continue
            assert file_type == cls.primary_file_type

            # if we haven't reached the beginning of our range yet,
            # keep looking; if we're past the end, we're done
            if t < begin_time:
                continue
            if t > end_time:
                break

            # we have a file in our range of interest; look for
            # files of our other file types with the same timestamp
            file_map[t] = {cls.primary_file_type: pathname}
            for file_type in cls.other_file_types:
                dir = os.path.join(root_dir, subdir(file_type, t))
                g = glob('%s/%s' % (dir, cls.glob(file_type, t)))
                if g:
                    file_map[t][file_type], = g

        if not file_map:
            raise ValueError('no data')
        return cls(file_map)

    @classmethod
    def from_times(cls, begin_time, end_time=None):

        """Initialize using granules with data from [begin_time, end_time]
        
        If only begin_time is given than use just the granule that contains
        that point in time.
        """

        if end_time is None:
            end_time = begin_time

        # these dependencies are optional for the package as a whole,
        # so import them here rather than at module level
        import sqlalchemy as sa
        from peate.ingest.fs import ingest_path
        import peate.ingest.model as m

        # FIXME: need a central module for ingest DB stuff
        m.init(os.environ['INGEST_DB_URL'])

        # query for files of the needed types within our time range,
        # using a window function so we can easily pull the ones with
        # the most recent processing times
        rank_expr = (sa.func.row_number()
                         .over(partition_by=[m.File.begin_time,
                                             m.FileType.name],
                               order_by=m.File.processed_time.desc()))
        q = m.meta.Session.query(rank_expr, m.File.begin_time,
                                 m.FileType.name, m.File.name)
        q = q.filter(m.File.file_type_id == m.FileType.id)
        q = q.filter(m.FileType.name.in_(cls.all_file_types()))
        q = q.filter(m.File.begin_time <= end_time)
        q = q.filter(m.File.end_time >= begin_time)
        q = q.filter(m.File.state_id == 100)
        file_query = q

        # build the file map structure expected by __init__
        file_map = {}
        for rank, begin_time, file_type, filename in file_query:
            if rank != 1:
                continue
            pathname = ingest_path(filename, type_name=file_type,
                                   platform=cls.platform.lower(),
                                   sensor=cls.sensor.lower(),
                                   begin_time=begin_time)
            file_map.setdefault(begin_time, {})[file_type] = pathname

        # filter out any times for which we don't have the primary
        # file type
        file_map = {k: v for k, v in file_map.items()
                        if cls.primary_file_type in v}

        if not file_map:
            raise ValueError('no data')
        return cls(file_map)

    @classmethod
    def from_files(cls, *filenames):

        # use the parse_filename method defined by subclasses to
        # build the file map structure expected by __init__
        # TODO: check for existence / readability of these files
        # TODO: verify all timestamps have the primary file type
        file_map = {}
        for filename in filenames:
            try:
                file_type, begin_time = cls.parse_filename(os.path.basename(filename))
            except ValueError:
                continue
            file_map.setdefault(begin_time, {})[file_type] = filename
        if not file_map:
            raise ValueError('coverage problem: no data')

        return cls(file_map)

    def __init__(self, file_map):

        """Constructor; meant as a private interface

        The file_map argument is a mapping onto full pathnames
        indexed first by timestamp then by file type (using ingest
        database file type names).
        """

        # sorted list of timestamps
        self.times = sorted(file_map)

        # corresponding list of mappings indexed by file type
        self.files = [file_map.get(t, None) for t in self.times]

        # self.scans: list of number of scan lines in each granule
        # self.shape: shape of lat / lon arrays for our super-granule
        # note use of subclass method shape_helper
        self.scans = []
        last_shape = None
        for i in range(self.num_granules):
            shape = self.shape_helper(self.files[i][self.primary_file_type])
            assert last_shape is None or (shape[1:] == last_shape[1:])
            self.scans.append(shape[0])
        self.shape = [sum(self.scans)] + list(shape[1:])
            
        # radiance arrays indexed by band
        self.rad = {}

    @property
    def latitude(self):

        """Latitude array of our super-granule"""

        self.load_location()
        return self.lat

    @property
    def longitude(self):

        """Longitude array of our super-granule"""

        self.load_location()
        return self.lon

    def radiance(self, band=None):

        """Return super-granule radiance array for the given band

        The band argument is left out for sensors for which band is
        not a useful concept.
        """

        self.load_radiance(band)
        return self.rad[band]

    def load_location(self):

        """Load latitude and longitude if not done already"""

        if hasattr(self, 'lat'):
            return

        self.lat = np.empty(self.shape, np.float64)
        self.lon = np.empty(self.shape, np.float64)

        # use the subclass load_location_helper method to fill in
        # the arrays
        scan_idx_ini = 0
        for i in range(self.num_granules):
            scan_idx_fin = scan_idx_ini + self.scans[i]
            self.load_location_helper(self.files[i],
                                      self.lat[scan_idx_ini:scan_idx_fin],
                                      self.lon[scan_idx_ini:scan_idx_fin])
            scan_idx_ini = scan_idx_fin

    def load_radiance(self, band):

        """Load radiances for the given band if not done already"""

        if band in self.rad:
            return

        self.rad[band] = np.empty(self.radiance_shape, np.float64)

        # fill in the array using the subclass load_radiance_helper
        # method. any files that are missing will result in NaNs
        # being used as fill
        scan_idx_ini = 0
        for i in range(self.num_granules):
            scan_idx_fin = scan_idx_ini + self.scans[i]
            rad = self.rad[band][scan_idx_ini:scan_idx_fin]
            if self.files[i]:
                self.load_radiance_helper(band, self.files[i], rad)
            else:
                rad[...] = np.nan
            scan_idx_ini = scan_idx_fin

    @property
    def num_granules(self):

        """Number of granules in our super-granule"""

        return len(self.times)

    @property
    def radiance_shape(self):

        """Shape of radiance arrays; by default, same as lat / lon"""

        return self.shape

    @classmethod
    def all_file_types(cls):

        """Convenience method to get all file types"""

        return [cls.primary_file_type] + cls.other_file_types

    # subclasses must define primary_file_type (and max_granule_duration
    # if from_file_search is used) but may leave out other_file_types if
    # there are none. these names correspond to the file_types.name
    # column in the ingest DB
    other_file_types = []
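
# A minimal sketch of the subclass contract described above. The class
# name, file naming scheme, and array shapes are made up for
# illustration; the VIIRS classes elsewhere in this package are real
# implementations.
from datetime import datetime, timedelta

class _ExampleSensor(Sensor):

    platform = 'EXAMPLE'
    sensor = 'EXAMPLE'

    # file type names as used in the ingest DB / file map
    primary_file_type = 'GEO'
    other_file_types = ['RAD']

    # used by from_file_search to widen the search window
    max_granule_duration = timedelta(minutes=6)

    @staticmethod
    def parse_filename(filename):
        # e.g. 'GEO_20150111T100000.h5' -> ('GEO', datetime(2015, 1, 11, 10))
        file_type, stamp = filename.split('_', 1)
        return file_type, datetime.strptime(stamp, '%Y%m%dT%H%M%S.h5')

    @classmethod
    def glob(cls, file_type, t):
        # pattern for locating companion files with the same timestamp
        return '%s_%s.h5' % (file_type, t.strftime('%Y%m%dT%H%M%S'))

    def shape_helper(self, filename):
        # scans x fovs for one granule
        return (10, 100)

    def load_location_helper(self, files, lat, lon):
        lat[:, :] = 0.0
        lon[:, :] = 0.0

    def load_radiance_helper(self, band, files, rad):
        rad[:, :] = 1.0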
+148 −0

"""Support for the Visible Infrared Imaging Radiometer Suite"""

from datetime import datetime, timedelta

import h5py
import numpy as np

from intercal.iasi_viirs.sensor import Sensor

class Viirs(Sensor):

    """Class to facilitate access to VIIRS data

    NOTE: limited to IR M-bands thus far
    """

    platform = 'NPP'
    sensor = 'VIIRS'

    primary_file_type = 'GMTCO'
    other_file_types = ['SVM%02d' % i for i in range(1, 16 + 1)]

    scans_per_granule = 768
    fovs_per_scan = 3200

    @staticmethod
    def parse_filename(filename):

        """Pull file type and timestamp out of a VIIRS filename"""

        # FIXME: use Bruce's filename parsing from ingest. or perhaps
        # better yet factor it out into a util lib
        file_type = filename[:5]
        begin_time = (datetime.strptime(filename[10:27], 'd%Y%m%d_t%H%M%S') +
                          timedelta(seconds=(0.1 * float(filename[27]))))
        return file_type, begin_time

    def shape_helper(self, filename):

        """Return shape of lat / lon arrays in given file"""

        with h5py.File(filename, 'r') as handle:
            shape = handle['All_Data/VIIRS-MOD-GEO-TC_All/Latitude'].shape
        assert tuple(shape) == (self.scans_per_granule, self.fovs_per_scan)
        return shape

    def load_location_helper(self, files, lat, lon):

        """Read in lat / lon arrays"""

        with h5py.File(files['GMTCO'], 'r') as handle:
            g = handle['All_Data/VIIRS-MOD-GEO-TC_All']
            lat[:,:] = g['Latitude']
            lon[:,:] = g['Longitude']

    def load_radiance_helper(self, band, files, rad):

        """Read in radiance data for the given band"""

        # file type and HDF dataset name will depend on the
        # band type and number
        band_type, band_num = band[0], int(band[1:])

        filename = files['SV%s%02d' % (band_type, band_num)]

        # read the data, applying scale factors if they're given
        with h5py.File(filename, 'r') as handle:
            g = handle['All_Data/VIIRS-%s-SDR_All' % band]
            if 'RadianceFactors' in g:
                a1, a0 = g['RadianceFactors']
                rad[:,:] = a1 * g['Radiance'][:,:] + a0
                mask = rad > a1 * 65000 + a0
                rad[mask] = np.nan
            else:
                rad[:,:] = g['Radiance']
                mask = rad <= -999.0
                rad[mask] = np.nan

class ViirsIBand(Sensor):

    """Class to facilitate access to VIIRS I-band data"""

    platform = 'NPP'
    sensor = 'VIIRS'

    primary_file_type = 'GITCO'
    other_file_types = ['SVI%02d' % i for i in range(1, 5 + 1)]

    scans_per_granule = 1536
    fovs_per_scan = 6400

    @staticmethod
    def parse_filename(filename):

        """Pull file type and timestamp out of a VIIRS filename"""

        # FIXME: use Bruce's filename parsing from ingest. or perhaps
        # better yet factor it out into a util lib
        file_type = filename[:5]
        begin_time = (datetime.strptime(filename[10:27], 'd%Y%m%d_t%H%M%S') +
                          timedelta(seconds=(0.1 * float(filename[27]))))
        return file_type, begin_time

    def shape_helper(self, filename):

        """Return shape of lat / lon arrays in given file"""

        with h5py.File(filename, 'r') as handle:
            shape = handle['All_Data/VIIRS-IMG-GEO-TC_All/Latitude'].shape
        assert tuple(shape) == (self.scans_per_granule, self.fovs_per_scan)
        return shape

    def load_location_helper(self, files, lat, lon):

        """Read in lat / lon arrays"""

        with h5py.File(files['GITCO'], 'r') as handle:
            g = handle['All_Data/VIIRS-IMG-GEO-TC_All']
            lat[:,:] = g['Latitude']
            lon[:,:] = g['Longitude']

    def load_radiance_helper(self, band, files, rad):

        """Read in radiance data for the given band"""

        # file type and HDF dataset name will depend on the
        # band type and number
        band_type, band_num = band[0], int(band[1:])

        filename = files['SV%s%02d' % (band_type, band_num)]

        # read the data, applying scale factors if they're given
        with h5py.File(filename, 'r') as handle:
            g = handle['All_Data/VIIRS-%s-SDR_All' % band]
            if 'RadianceFactors' in g:
                a1, a0 = g['RadianceFactors']
                rad[:,:] = a1 * g['Radiance'][:,:] + a0
                mask = rad > a1 * 65000 + a0
                rad[mask] = np.nan
            else:
                rad[:,:] = g['Radiance']
                mask = rad <= -999.0
                rad[mask] = np.nan
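
# Usage sketch: build a Viirs super-granule from explicit files and pull
# out one band. The file names are real-looking placeholders borrowed
# from this package's test data, not files guaranteed to exist.
def _viirs_example():
    v = Viirs.from_files(
        'GMTCO_npp_d20150111_t1003524_e1005165_b16614_c20150111163319313784_noaa_ops.h5',
        'SVM15_npp_d20150111_t1003524_e1005165_b16614_c20150111163319313784_noaa_ops.h5')
    lat, lon = v.latitude, v.longitude    # (scans, fovs), concatenated
    rad = v.radiance('M15')               # NaN where data are missing
    return lat, lon, rad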
+113 −0

import numpy as np
from pyhdf import SD
from intercal.srf import WavelengthSrf
from intercal.util import wn2wl

# Spectral shifts from Eva Borbas 2017/06/15 & 2023/02/15 & 2025/09/17
#
# shift_id:
#     0: no shifts
#     1: Aqua C5/6
#     2: Terra C5/6
#     3: Aqua C6.1
#     4: Terra C6.1
#     5: Aqua C7
#     6: Terra C7
shifts = [
    {},
    {
        # Just for Aqua C5/6
        27: 5.0,
        28: 2.0,
        30: 0.0,
        34: 0.8,
        35: 0.8,
        36: 1.0
    },
    {
        # Just for Terra C5/6
        27: 4.0,
        28: 2.0,
        30: 1.0,
        34: 0.8,
        35: 0.8,
        36: 1.0
    },
    {
        # Just for Aqua C6.1
        27: 3.0,
        28: 1.0,
        30: -1.0,
        34: 0.8,
        35: 0.8,
        36: 1.0
    },
    {
        # Just for Terra C6.1
        25: 2.0,
        27: 0.0,
        28: 2.0,
        30: 1.0,
        34: 0.8,
        35: 0.8,
        36: 1.0
    },
    {
        # Just for Aqua C7
        27: 2.0,
        28: 1.0,
        29: 1.0,
        30: -2.0,
        34: 0.8,
        35: 0.8,
        36: 1.0
    },
    {
        # Just for Terra C7
        25: 2.0,
        27: -1.0,
        28: 3.0,
        29: 1.0,
        30: -1.0,
        34: 0.8,
        35: 0.8,
        36: 1.0
    }
]

class ModisSrfCatalog(object):

    def __init__(self, srf_path, shift_id=0):

        self.shift_id = shift_id
        self.srfs = self.read_srfs(srf_path)

    def get(self, band):

        return self.srfs[band]

    def read_srfs(self, srf_path):

        sd = SD.SD(srf_path)

        # work around a weird issue with modis_terra.srf.nc. indexing the
        # SDS with [:] returns [20 0 0 ...], but doing it this way works
        # fine. no idea why
        bands = list(sd.select('channel_list'))
        nu_begins = sd.select('begin_frequency')[:]
        nu_ends = sd.select('end_frequency')[:]

        srfs = {}
        for band, nu_begin, nu_end in zip(bands, nu_begins, nu_ends):
            # look up this band's spectral shift, defaulting to no shift
            # so one band's shift cannot leak into the next
            shift = shifts[self.shift_id].get(band, 0.0)
            F = sd.select('channel_%s_response' % band)[:][::-1]
            lambda_ = wn2wl(np.linspace(nu_begin + shift, nu_end + shift, len(F)))[::-1]
            srfs[band] = WavelengthSrf(lambda_, F)
        sd.end()

        return srfs
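
# Usage sketch, assuming an SRF file like the modis_terra.srf.nc
# mentioned above (the path and the radiance value are placeholders):
def _modis_srf_example():
    catalog = ModisSrfCatalog('modis_terra.srf.nc', shift_id=4)
    srf = catalog.get(31)   # band 31 SRF (no shift listed for band 31)
    return srf.bright(9.5)  # brightness temperature of an example radiance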
@@ -33,7 +33,7 @@ def read_viirs_dir(dir_path):
         srfs = {band(f): read_viirs_file(os.path.join(dir_path, f)) for f in rsr_files}
         return srfs
     def is_rsr_file(file_name):
-        return re.search('^NPP_VIIRS_NG_RSR_.*_BA\.dat$', file_name)
+        return re.search('^NPP_VIIRS_NG_RSR_', file_name)
     def band(file_name):
         return file_name.split('_')[4]
     return f()
+82 −0

from datetime import datetime
from unittest import TestCase
from collopy.fn import parse

class TestParsers(TestCase):

    def test_iasi(self):
        file_name = ('IASI_xxx_1C_M02_20111104233257Z_20111104233552Z_N_O_' + 
                        '20111105001850Z__20111105002056')
        info = {'sensor': 'IASI', 'platform': 'METOP-A', 'level': '1C',
                'begin_time': datetime(2011, 11, 4, 23, 32, 57),
                'end_time': datetime(2011, 11, 4, 23, 35, 52),
                'processing_mode': 'N', 'disposition_mode': 'O',
                'creation_time': datetime(2011, 11, 5, 0, 18, 50)}
        self.assertEqual(parse(file_name), info)

    def test_avhrr(self):
        file_name = 'NSS.GHRR.NK.D09019.S1938.E2119.B1169495.SV.level2.hdf'
        info = {'sensor': 'AVHRR', 'processing_center': 'NSS', 'platform': 'NOAA 15',
                'data_type': 'GHRR',
                'begin_time': datetime(2009, 1, 19, 19, 38),
                'end_time': datetime(2009, 1, 19, 21, 19),
                'processing_block_id': 'B1169495', 'source': 'SV'}
        self.assertEqual(parse(file_name), info)

    def test_avhrr_from_metop(self):
        file_name = 'NSS.GHRR.M2.D09019.S1938.E2119.B1169495.SV.level2.hdf'
        self.assertEqual(parse(file_name)['platform'], 'METOP-A')

    def test_modis(self):
        file_name = 'MYD03.A2015011.1000.006.2015012154939.hdf'
        info = {'sensor': 'MODIS', 'platform': 'AQUA', 'product': 'MOD03',
                'begin_time': datetime(2015, 1, 11, 10), 'collection': '006',
                'creation_time': datetime(2015, 1, 12, 15, 49, 39)}
        self.assertEqual(parse(file_name), info)
        
    def test_idps(self):
        file_name = ('GMTCO_npp_d20150111_t1003524_e1005165_b16614_' + 
                         'c20150111163319313784_noaa_ops.h5')
        info = {'sensor': 'VIIRS', 'product': 'GMTCO', 'platform': 'SUOMI NPP',
                'begin_time': datetime(2015, 1, 11, 10, 3, 52, 400000),
                'end_time': datetime(2015, 1, 11, 10, 5, 16, 500000),
                'orbit_number': 16614,
                'creation_time': datetime(2015, 1, 11, 16, 33, 19, 313784),
                'origin': 'noaa', 'domain': 'ops'}
        self.assertEqual(parse(file_name), info)

    def test_iff(self):
        file_name = 'IFFSDR_aqua_d20150111_t233000_c20150325133431_ssec_dev.hdf'
        info = {'product': 'IFFSDR', 'sensor': 'MODIS', 'platform': 'AQUA',
                'begin_time': datetime(2015, 1, 11, 23, 30),
                'creation_time': datetime(2015, 3, 25, 13, 34, 31),
                'format': 'hdf'}
        self.assertEqual(parse(file_name), info)

    def test_caliop(self):
        file_name = 'CAL_LID_L2_05kmAPro-Prov-V3-30.2015-09-03T18-43-43ZD.hdf'
        info = {'platform': 'CALIPSO', 'sensor': 'CALIOP', 'product': 'L2_05kmAPro',
                'production_strategy': 'Prov', 'version': 'V3-30',
                'begin_time': datetime(2015, 9, 3, 18, 43, 43), 'day_night': 'D'}
        self.assertEqual(parse(file_name), info)

    def test_cloudsat(self):
        file_name = '2013001104718_35539_CS_1B-CPR_GRANULE_P_R04_E06.hdf'
        info = {'platform': 'CLOUDSAT', 'sensor': 'CLOUDSAT', 'product': '1B-CPR',
                'begin_time': datetime(2013, 1, 1, 10, 47, 18), 'granule_num': 35539,
                'processing_iteration': 'P_R04', 'epoch_num': 6}
        self.assertEqual(parse(file_name), info)

    def test_geocat_seviri(self):
        file_name = 'geocatNAV.Meteosat-10.xxxxxxx.000001.hdf'
        info = {'platform': 'METEOSAT-10', 'sensor': 'SEVIRI'}
        self.assertEqual(parse(file_name), info)

    def test_ahipack(self):
        self.assertEqual(parse('AHIPACK'), {'sensor': 'AHI'})

    def test_handles_paths_by_taking_basename(self):
        file_name = '/path/to/MYD03.A2015011.1000.006.2015012154939.hdf'
        self.assertEqual(parse(file_name)['sensor'], 'MODIS')
+4 −4

 from setuptools import setup, find_packages

 setup(name='intercal',
-      version='0.1.1',
+      version='0.1.8_dev',
       packages=find_packages(),
       package_data={'intercal': ['*.nc']},
-      install_requires=['numpy', 'netcdf4', 'python-hdf4'],
+      install_requires=['numpy', 'netcdf4', 'pyhdf', 'h5py'],
-      zip_safe=False)
+      zip_safe=False,
+      entry_points={'console_scripts': ['iasi_viirs=intercal.iasi_viirs:main']})
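
The added entry_points declaration means that, once the package is installed, a console script named iasi_viirs should be available on the PATH, dispatching to intercal.iasi_viirs:main (assuming that module defines a main() function as declared).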