Compare revisions

Commits on Source (24)
Showing with 1026 additions and 32 deletions
@@ -22,11 +22,11 @@ def read_channel_properties(chan_prop_file):
             break
     offset = i
     size = len(lines) - offset
-    mask = np.empty((size,), np.bool)
+    mask = np.empty((size,), bool)
     wn = np.empty((size,), np.float64)
     nedts = np.empty((size,), np.float64)
     for i in range(offset, len(lines)):
-        mask[i - offset] = np.bool(lines[i][76] == '0')
+        mask[i - offset] = bool(lines[i][76] == '0')
         wn[i - offset] = np.float64(lines[i][6:14])
         nedts[i - offset] = np.float64(lines[i][26:32])
     return wn, mask, nedts
@@ -4,7 +4,14 @@ import numpy as np
 def low_res_wavenumbers():
     return np.concatenate([wn_low_res[b] for b in ['lw', 'mw', 'sw']])

+def full_res_wavenumbers():
+    return np.concatenate([wn_full_res[b] for b in ['lw', 'mw', 'sw']])
+
 wn_low_res = {'lw': np.linspace(648.75, 1096.25, 717),
               'mw': np.linspace(1207.5, 1752.5, 437),
               'sw': np.linspace(2150, 2555, 163)}
+wn_full_res = {'lw': np.linspace(648.75, 1096.25, 717),
+               'mw': np.linspace(1207.5, 1752.5, 869),
+               'sw': np.linspace(2150, 2555, 637)}
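A minimal sketch of what the new helper returns; the module path is an assumption, and the channel count follows from the linspace sizes above:

    from intercal.crisahi.cris_util import full_res_wavenumbers  # module path assumed

    wn = full_res_wavenumbers()
    assert wn.size == 717 + 869 + 637  # CrIS full spectral resolution: 2223 channels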
#!/bin/bash
set -e
# Run this from the top level of a crisahi flo link tree.
for granule in */*/*/crisahi.*; do
    echo -n "$granule "
    cp "$granule" ./
    granule=$(basename "$granule")
    tmp=tmp.$granule
    python -m intercal.crisahi.same_path "$granule" "$tmp"
    if [[ -e $tmp ]]; then
        # make obs the record dimension and move it first so ncrcat can
        # concatenate the filtered granules along it
        ncks -O --mk_rec_dmn obs "$tmp" "$tmp"
        ncpdq -O -a obs,band "$tmp" "$tmp"
    fi
    rm "$granule"
done
ncrcat tmp.crisahi.* crisahi.nc
rm tmp.crisahi.*
# restore the original band,obs dimension order
ncpdq -O -a band,obs crisahi.nc crisahi.nc
@@ -7,18 +7,17 @@ import h5py
 import netCDF4
 import numpy as np
 from pkg_resources import resource_filename
-import pyhdf.SD
-from intercal import ahi_srf
+from intercal.crisahi import ahi_srf

 bands = [9, 10, 12, 13, 14, 15, 16]

 def main():
-    colloc_file, gcrso_file = sys.argv[1:3]
-    ahi_files = [pair for pair in grouper(sys.argv[3:], 2)]
+    colloc_file, gcrso_file, ahi_geo_file = sys.argv[1:4]
+    ahi_files = [pair for pair in grouper(sys.argv[4:], 2)]
     ahi_idx = arrange_follower_indexes_by_master(read_collocation(colloc_file),
                                                  master_dims=[60, 30, 9])
     gcrso = read_gcrso(gcrso_file)
-    sensor_zenith, sensor_azimuth = determine_sensor_angles(ahi_idx)
+    sensor_zenith, sensor_azimuth = determine_sensor_angles(ahi_geo_file, ahi_idx)
     output = Output(gcrso_file, sensor_zenith, sensor_azimuth)
     for i, band in enumerate(bands):
         ahi_1, ahi_2 = read_ahi(ahi_files[i][0]), read_ahi(ahi_files[i][1])
@@ -30,17 +29,14 @@ def main():
         output(band, t, rad, rad_std, bt, bt_std)

 def read_collocation(file_path):
-    sd = pyhdf.SD.SD(file_path)
-    num_master_dims = 3
-    num_follower_dims = 2
-    num_master_idxs, max_followers_per_master = sd.datasets()['Follower_Index_1'][1]
-    master_idxs = np.empty([num_master_dims, num_master_idxs], np.int32)
-    follower_idxs = np.empty([num_follower_dims, num_master_idxs, max_followers_per_master],
-                             np.int32)
-    for i in range(num_master_dims):
-        master_idxs[i] = sd.select('Master_Index_{}'.format(num_master_dims - i))[:] - 1
-    for i in range(num_follower_dims):
-        follower_idxs[i] = sd.select('Follower_Index_{}'.format(num_follower_dims - i))[:,:] - 1
+    d = netCDF4.Dataset(file_path)
+    num_master_dims = len(d.dimensions['master_dim'])
+    num_follower_dims = len(d.dimensions['follower_dim'])
+    num_master_idxs = len(d.dimensions['master_obs'])
+    max_followers_per_master = len(d.dimensions['follower_obs'])
+    master_idxs = np.ma.masked_array(d['master_index'][:]).filled(0).transpose([1, 0]) - 1
+    follower_idxs = np.ma.masked_array(d['follower_index'][:]).filled(0).transpose([2, 0, 1]) - 1
+    d.close()
     return {'num_master_dims': num_master_dims, 'num_follower_dims': num_follower_dims,
             'num_master_idxs': num_master_idxs,
             'max_followers_per_master': max_followers_per_master,
@@ -60,8 +56,8 @@ def read_ahi(file_path):
     return {'time': line_time_offset.base_time + line_time_offset[:],
             'radiance': d.variables['RAD'][:]}

-def determine_sensor_angles(ahi_idx):
-    d = netCDF4.Dataset(resource_filename(__name__, 'AHI_geolocation2km.nc'))
+def determine_sensor_angles(ahi_geo_file, ahi_idx):
+    d = netCDF4.Dataset(ahi_geo_file)
     sz = d.variables['sensor_zenith'][:]
     sa = d.variables['sensor_azimuth'][:]
     ahi_nearest_idx = tuple(ahi_idx[:,:,:,:,0])
@@ -34,13 +34,13 @@ def cat(dir_path):
 basemap = Basemap(projection='geos', lon_0=140.7, resolution='c')

 def make_mask(arrays):
-    mask = np.ones(arrays['cris_lat'].shape, np.bool)
+    mask = np.ones(arrays['cris_lat'].shape, bool)
     mask &= (arrays['ahi_sen_zen'] != -999.)
     mask &= (arrays['ahi_sen_zen'] <= 60.)
     return mask

 def make_band_mask(arrays, band_idx):
-    mask = np.ones(arrays['cris_lat'].shape, np.bool)
+    mask = np.ones(arrays['cris_lat'].shape, bool)
     mask &= (arrays['ahi_sen_zen'] != -999.)
     mask &= (arrays['ahi_sen_zen'] <= 60.)
     mask &= (arrays['cris_bt'][band_idx] >= 160.)
@@ -5,7 +5,7 @@ import sys
 import h5py
 import netCDF4
 import numpy as np
-from intercal import ahi_srf
+from intercal.crisahi import ahi_srf
 from intercal.util import wn2wl, rad_wn2wl

 ahi_bands = [9, 10, 12, 13, 14, 15, 16]
"""Filter a crisahi granule to only contain data where sensor vectors align"""
import sys
import netCDF4
import numpy as np
from intercal.crisahi.cat import path_diff as calc_path_diff
path_diff_threshold = 5. # degrees
def main():
in_file, out_file = sys.argv[1:]
data = read_granule(in_file)
path_diff = calc_path_diff(data['cris_lat'], data['cris_lon'], data['cris_sen_zen'],
data['cris_sen_azm'], data['ahi_sen_zen'], data['ahi_sen_azm'])
mask = (path_diff <= path_diff_threshold)
if np.any(mask):
new_data = {var: data[var][...,mask] for var in data}
write_granule(new_data, out_file)
print 'Kept {} CrIS-AHI matchups'.format(mask.sum())
def read_granule(in_file):
d = netCDF4.Dataset(in_file)
data = {var: d[var][...] for var in d.variables}
data['cris_time'] = np.broadcast_arrays(data['cris_time'][...,None], data['cris_lat'])[0]
return data
def write_granule(data, out_file):
d = netCDF4.Dataset(out_file, 'w')
num_bands, num_obs = data['cris_bt'].shape
d.createDimension('band', num_bands)
d.createDimension('obs', num_obs)
dims = {1: ['obs'], 2: ['band', 'obs']}
for var, arr in data.items():
d.createVariable(var, arr.dtype, dims[arr.ndim], zlib=True)
d[var][...] = arr
d.close()
if __name__ == '__main__':
main()
netcdf sndrahi {{
dimensions:
    band = {band};
    master_obs = {master_obs};
    follower_obs = {follower_obs};
    master_dim = {master_dim};
    follower_dim = {follower_dim};
    input = {input};
    filename_checksum = 2;
variables:
    string band_name(band);
        band_name:long_name = "AHI Band";
    string input_file(input, filename_checksum);
        input_file:info = "Filename and MD5 checksum of each input";

group: master {{
variables:
    double time(master_obs);
        time:long_name = "Master observation time";
        time:units = "seconds since 1993-01-01 00:00";
        time:info = "TAI93 format; leap seconds must be accounted for to convert to UTC";
        time:standard_name = "time";
    double bt(band, master_obs);
        bt:long_name = "Master brightness temperature";
        bt:units = "K";
        bt:standard_name = "brightness_temperature";
    float lat(master_obs);
        lat:long_name = "Master latitude";
        lat:units = "degrees_north";
        float lat:valid_range = -90, 90;
        lat:standard_name = "latitude";
    float lon(master_obs);
        lon:long_name = "Master longitude";
        lon:units = "degrees_east";
        float lon:valid_range = -180, 180;
        lon:standard_name = "longitude";
    float sat_zen(master_obs);
        sat_zen:long_name = "Master satellite zenith angle";
        sat_zen:units = "degree";
        float sat_zen:valid_range = 0, 90;
        sat_zen:standard_name = "sensor_zenith_angle";
    float sat_azi(master_obs);
        sat_azi:long_name = "Master satellite azimuth angle";
        sat_azi:units = "degree";
        sat_azi:info = "Positive clockwise, zero at north";
        float sat_azi:valid_range = -180, 180;
        sat_azi:standard_name = "sensor_azimuth_angle";
    float sol_zen(master_obs);
        sol_zen:long_name = "Master solar zenith angle";
        sol_zen:units = "degree";
        float sol_zen:valid_range = 0, 180;
        sol_zen:standard_name = "solar_zenith_angle";
    float sol_azi(master_obs);
        sol_azi:long_name = "Master solar azimuth angle";
        sol_azi:units = "degree";
        float sol_azi:valid_range = -180, 180;
        sol_azi:standard_name = "solar_azimuth_angle";
    int obs_idx(master_obs, master_dim);
        obs_idx:long_name = "Master data array indexes"; }}

group: follower {{
variables:
    double time(master_obs);
        time:long_name = "Follower mean observation time";
        time:units = "seconds since 1993-01-01 00:00";
        time:info = "TAI93 format; leap seconds must be accounted for to convert to UTC";
        time:standard_name = "time";
    double bt(band, master_obs);
        bt:long_name = "Follower brightness temperature";
        bt:info = "Computed from mean radiance of collocated follower observations";
        bt:units = "K";
        bt:standard_name = "brightness_temperature";
    double bt_dev(band, master_obs);
        bt_dev:long_name = "Follower brightness temperature deviation";
        bt_dev:info = "Computed from radiance standard deviation of collocated follower observations";
        bt_dev:units = "K";
    float sat_zen(master_obs);
        sat_zen:long_name = "Follower satellite zenith angle";
        sat_zen:units = "degree";
        float sat_zen:valid_range = 0, 90;
        sat_zen:standard_name = "sensor_zenith_angle";
    float sat_azi(master_obs);
        sat_azi:long_name = "Follower satellite azimuth angle";
        sat_azi:units = "degree";
        sat_azi:info = "Positive clockwise, zero at north";
        float sat_azi:valid_range = -180, 180;
        sat_azi:standard_name = "sensor_azimuth_angle";
    int obs_count(master_obs);
        obs_count:long_name = "Number of collocated follower observations";
    int obs_idx(master_obs, follower_obs, follower_dim);
        obs_idx:long_name = "Follower data array indexes"; }} }}
"""Create a level 1 match file for hyperspectral sounder data versus AHI"""
from argparse import ArgumentParser
from intercal.fn import parse as parse_filename
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('colloc_file')
parser.add_argument('l1a_files', nargs='+', help='Sounder and AHI level 1 data files')
args = parser.parse_args()
if __name__ == '__main__':
main()
import numpy as np
import pyhdf.HDF
import pyhdf.SD
import pyhdf.VS

class AirsGranule(object):
    num_fors_per_scan = 90
    num_fovs_per_for = 1
    has_imaginary_radiance = False
    num_files = 1

    def __init__(self, filename):
        self.sd = pyhdf.SD.SD(filename)
        self.wavenumber = self.read_wavenumber(filename)

    def read_wavenumber(self, filename):
        num_channels = 2378
        nested_list = pyhdf.HDF.HDF(filename).vstart().attach('nominal_freq').read(num_channels)
        return np.array(nested_list).flatten()

    @property
    def num_scans(self):
        return self.sd.datasets()['radiances'][1][0]

    def read_latitude(self, latitude):
        latitude[:,:,0] = self.sd.select('Latitude')[:]

    def read_longitude(self, longitude):
        longitude[:,:,0] = self.sd.select('Longitude')[:]

    def read_radiance(self, radiance, imaginary):
        if imaginary:
            raise ValueError('no imaginary radiances for AIRS')
        radiance[:,:,0,:] = self.sd.select('radiances')[:]
import h5py
import numpy as np

class CrisGranule(object):
    num_fors_per_scan = 30
    num_fovs_per_for = 9
    has_imaginary_radiance = True
    num_files = 2

    def __init__(self, gcrso_file, scris_file):
        self.gcrso_file = gcrso_file
        self.scris_file = scris_file

    @property
    def wavenumber(self):
        # Infer each band's grid (including guard channels) from the spectrum
        # sizes in the SCRIS file, so nominal and full-spectral-resolution
        # granules both work.
        bands = ['lw', 'mw', 'sw']
        ranges = [(650., 1095.), (1210., 1750.), (2155., 2550.)]
        guards = 2
        with h5py.File(self.scris_file, 'r') as scris:
            points = [scris['/All_Data/CrIS-SDR_All/ES_Real{}'.format(band.upper())].shape[-1]
                      for band in bands]
        wn = []
        for band, rng, sz in zip(bands, ranges, points):
            spacing = (rng[1] - rng[0]) / (sz - 1 - 2*guards)
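            # e.g. the 717-point LW grid: (1095 - 650) / (717 - 1 - 2*2) = 0.625 cm^-1,
            # so with 2 guard points per end the grid spans 648.75 to 1096.25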
            wn.append(np.linspace(rng[0] - guards*spacing, rng[1] + guards*spacing, sz))
        return np.concatenate(wn)

    @property
    def num_scans(self):
        with h5py.File(self.gcrso_file, 'r') as gcrso:
            return len(gcrso['/All_Data/CrIS-SDR-GEO_All/Latitude'])

    def read_latitude(self, latitude):
        with h5py.File(self.gcrso_file, 'r') as gcrso:
            latitude[:,:,:] = gcrso['/All_Data/CrIS-SDR-GEO_All/Latitude'][:,:,:]

    def read_longitude(self, longitude):
        with h5py.File(self.gcrso_file, 'r') as gcrso:
            longitude[:,:,:] = gcrso['/All_Data/CrIS-SDR-GEO_All/Longitude'][:,:,:]

    def read_radiance(self, radiance, imaginary):
        with h5py.File(self.scris_file, 'r') as scris:
            group = scris['/All_Data/CrIS-SDR_All']
            wavenumber_idx_ini = 0
            for band in ['LW', 'MW', 'SW']:
                dataset = 'ES_' + {False: 'Real', True: 'Imaginary'}[imaginary] + band
                wavenumber_idx_fin = wavenumber_idx_ini + group[dataset].shape[-1]
                radiance[:,:,:,wavenumber_idx_ini:wavenumber_idx_fin] = group[dataset]
                wavenumber_idx_ini = wavenumber_idx_fin
import functools

import coda
import numpy as np

class IasiGranule(object):
    wavenumber = np.linspace(645.0, 2760.0, 8461)
    num_fors_per_scan = 30
    num_fovs_per_for = 4
    has_imaginary_radiance = False
    num_files = 1

    def __init__(self, filename):
        self.filename = filename

    @property
    def num_scans(self):
        with CodaHandle(self.filename) as handle:
            num_scans, = handle.get_size('MDR')
        return num_scans

    def read_latitude(self, latitude):
        self._read_geolocation('latitude', latitude)

    def read_longitude(self, longitude):
        self._read_geolocation('longitude', longitude)

    def read_radiance(self, radiance, imaginary):
        if imaginary:
            raise ValueError('Imaginary radiance not available for IASI')
        with CodaHandle(self.filename) as handle:
            scaled_radiances = (
                np.array([a for a in handle.fetch('MDR', -1, 'MDR', 'GS1cSpect')]))
            scale_factors = handle.fetch('GIADR_ScaleFactors')
            quality_flags = (
                np.array([a for a in handle.fetch('MDR', -1, 'MDR', 'GQisFlagQual')],
                         bool))
        self._unscale_radiances(scaled_radiances, scale_factors, radiance)
        self._set_low_quality_spectra_to_nan(quality_flags, radiance)

    def _read_geolocation(self, variable, output_array):
        latlon_idx = dict(latitude=1, longitude=0)[variable]
        with CodaHandle(self.filename) as handle:
            output_array[:,:,:] = [
                a for a in handle.fetch('MDR', -1, 'MDR', 'GGeoSondLoc',
                                        [-1, -1, latlon_idx])]

    def _unscale_radiances(self, scaled_radiances, scale_factors, radiances):
        num_scale_factors = scale_factors.IDefScaleSondNbScale
        for scale_factor_idx in range(num_scale_factors):
            scale_factor = scale_factors.IDefScaleSondScaleFactor[scale_factor_idx]
            wavenumber_idx_ini = (scale_factors.IDefScaleSondNsfirst[scale_factor_idx] -
                                  self._first_channel_number)
            wavenumber_idx_fin = (scale_factors.IDefScaleSondNslast[scale_factor_idx] -
                                  self._first_channel_number + 1)
            radiances[:,:,:,wavenumber_idx_ini:wavenumber_idx_fin] = (
                scaled_radiances[:,:,:,wavenumber_idx_ini:wavenumber_idx_fin] *
                (10.0 ** -scale_factor * self._radiance_conversion_factor))

    def _set_low_quality_spectra_to_nan(self, quality_flags, radiances):
        radiances[quality_flags.any(axis=-1)] = np.nan

    @property
    def _first_channel_number(self):
        with CodaHandle(self.filename) as handle:
            return handle.fetch('MDR', 0, 'MDR', 'IDefNsfirst1b')

    # to convert from W / m^2 / sr / m^-1 to mW / m^2 / sr / cm^-1
    _radiance_conversion_factor = 1e5

class CodaHandle(object):
    def __init__(self, filename):
        self.handle = coda.open(filename)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_trace):
        coda.close(self.handle)

    def __getattr__(self, attr):
        return functools.partial(getattr(coda, attr), self.handle)
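# Note: CodaHandle.__getattr__ pre-binds the open handle to any coda module
# function, so handle.fetch('MDR') is shorthand for coda.fetch(handle.handle, 'MDR').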
import argparse
from datetime import datetime
import os

import h5py
import numpy as np

from .airs import AirsGranule
from .cris import CrisGranule
from .iasi import IasiGranule
from .super_granule import SuperGranule
from .util import gcdist

def _parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('platform_1')
    parser.add_argument('time_1', type=parse_datetime)
    parser.add_argument('platform_2')
    parser.add_argument('time_2', type=parse_datetime)
    parser.add_argument('latitude', type=float)
    parser.add_argument('longitude', type=float)
    parser.add_argument('max_distance', type=float)
    parser.add_argument('sensor', choices=['airs', 'iasi', 'cris', 'cris-fsr'])
    parser.add_argument('files', nargs='+', metavar='file')
    return parser.parse_args()

def parse_datetime(s):
    return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S.%f')

def _create_granule(sensor, files):
    granule_cls = {'airs': AirsGranule, 'iasi': IasiGranule,
                   'cris': CrisGranule, 'cris-fsr': CrisGranule}[sensor]
    return SuperGranule(granule_cls, files)

def _filter_by_proximity(granule, lat, lon, max_distance):
    distance = gcdist(granule.latitude, granule.longitude, lat, lon)
    return (distance <= max_distance).nonzero()

def _process_spectra(granule, radiance, scan_idx, for_idx, fov_idx,
                     output_file, dataset_prefix=None):
    radiance = radiance.astype(np.float64)
    R = _grab_spectra(granule, radiance, scan_idx, for_idx, fov_idx)
    n, sum, sum_of_squares = _aggregate_by_fov(granule, fov_idx, R)
    new_dataset_prefix = (
        'Radiance' if dataset_prefix is None else dataset_prefix + '_Radiance')
    _output(n, sum, sum_of_squares, output_file, 'FOV_' + new_dataset_prefix)
    _output(n.sum(axis=0), sum.sum(axis=0), sum_of_squares.sum(axis=0),
            output_file, new_dataset_prefix)

def _grab_spectra(granule, radiance, scan_idx, for_idx, fov_idx):
    flat_fov_idx = ((scan_idx * granule.num_fors_per_scan +
                     for_idx) * granule.num_fovs_per_for +
                    fov_idx)
    wn_idx = np.arange(granule.num_wavenumbers)
    flat_sample_idx = (flat_fov_idx[:,None] * granule.num_wavenumbers + wn_idx).flatten()
    return radiance.take(flat_sample_idx)

def _aggregate_by_fov(granule, fov_idx, radiance):
    wn_idx = np.arange(granule.num_wavenumbers)
    flat_output_idx = (fov_idx[:,None] * granule.num_wavenumbers + wn_idx).flatten()
    num_output_idxs = granule.num_fovs_per_for * granule.num_wavenumbers
    n = np.bincount(fov_idx, minlength=granule.num_fovs_per_for)
    r = (np.bincount(flat_output_idx, weights=radiance, minlength=num_output_idxs)
         .reshape([granule.num_fovs_per_for, granule.num_wavenumbers]))
    r2 = (np.bincount(flat_output_idx, weights=(radiance * radiance),
                      minlength=num_output_idxs)
          .reshape([granule.num_fovs_per_for, granule.num_wavenumbers]))
    return n, r, r2

def _output(n, sum, sum_of_squares, output_file, dataset_prefix):
    mean = sum / n[...,None]
    std = np.sqrt(sum_of_squares / n[...,None] - mean * mean)
    output_file.create_dataset(dataset_prefix + '_Count', data=n)
    output_file.create_dataset(dataset_prefix + '_Mean', data=mean)
    output_file.create_dataset(dataset_prefix + '_Std', data=std)
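# _output applies the one-pass identity Var[x] = E[x^2] - E[x]^2 to the
# accumulated sums; adequate for the small counts here, though it can lose
# precision when the variance is tiny relative to the mean.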
def unix_time(t):
    return (t - datetime(1970, 1, 1)).total_seconds()

def output_file_name(args):
    platform_1, platform_2 = sorted([args.platform_1, args.platform_2])
    earlier_time = min(args.time_1, args.time_2)
    return 'sno_{}_{}_{}_{}.h5'.format(platform_tag(platform_1).lower(),
                                       platform_tag(platform_2).lower(),
                                       args.sensor.lower(),
                                       earlier_time.strftime('%Y-%m-%dT%H-%M'))

def platform_tag(platform):
    tags = {'AQUA': 'Aqua', 'METOP-A': 'Metop-A', 'METOP-B': 'Metop-B', 'SUOMI NPP': 'NPP'}
    return tags[platform]

def main():
    args = _parse_args()
    granule = _create_granule(args.sensor, args.files)
    scan_idx, for_idx, fov_idx = (
        _filter_by_proximity(granule, args.latitude, args.longitude, args.max_distance))
    with h5py.File(output_file_name(args), 'w') as output_file:
        _process_spectra(granule, granule.radiance, scan_idx, for_idx, fov_idx,
                         output_file)
        if granule.has_imaginary_radiance:
            _process_spectra(
                granule, granule.imaginary_radiance, scan_idx, for_idx, fov_idx,
                output_file, 'Imaginary')
        output_file.create_dataset('{}_Time'.format(platform_tag(args.platform_1)),
                                   data=unix_time(args.time_1))
        output_file.create_dataset('{}_Time'.format(platform_tag(args.platform_2)),
                                   data=unix_time(args.time_2))
        output_file.create_dataset('SNO_Latitude', data=args.latitude)
        output_file.create_dataset('SNO_Longitude', data=args.longitude)
        output_file.create_dataset('Input_Files', data=[os.path.basename(f) for f in args.files])

if __name__ == '__main__':
    main()
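A hypothetical invocation; the module path is a guess, the file names are elided, and the argument order follows _parse_args above:

    python -m intercal.sno.main 'SUOMI NPP' 2012-11-06T23:44:57.000000 \
        'METOP-A' 2012-11-06T23:47:00.000000 45.0 150.0 10.0 cris \
        GCRSO_npp_....h5 SCRIS_npp_....h5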
import numpy as np

class SuperGranule(object):
    def __init__(self, granule_cls, files):
        self.granules = [granule_cls(*args)
                         for args
                         in self._grouped(files, granule_cls.num_files)]

    @property
    def num_scans(self):
        return sum(g.num_scans for g in self.granules)

    @property
    def num_fors_per_scan(self):
        return self.granules[0].num_fors_per_scan

    @property
    def num_fovs_per_for(self):
        return self.granules[0].num_fovs_per_for

    @property
    def num_wavenumbers(self):
        return len(self.wavenumber)

    @property
    def wavenumber(self):
        return self.granules[0].wavenumber

    @property
    def latitude(self):
        if not hasattr(self, '_latitude'):
            self._latitude = self._get_geolocation('latitude')
        return self._latitude

    @property
    def longitude(self):
        if not hasattr(self, '_longitude'):
            self._longitude = self._get_geolocation('longitude')
        return self._longitude

    @property
    def radiance(self):
        if not hasattr(self, '_radiance'):
            self._radiance = self._get_radiance()
        return self._radiance

    @property
    def has_imaginary_radiance(self):
        return self.granules[0].has_imaginary_radiance

    @property
    def imaginary_radiance(self):
        if not hasattr(self, '_imaginary_radiance'):
            self._imaginary_radiance = self._get_radiance(imaginary=True)
        return self._imaginary_radiance

    def _get_geolocation(self, variable):
        output_array = (
            np.empty([self.num_scans, self.num_fors_per_scan, self.num_fovs_per_for],
                     np.float32))
        for granule, scan_idx_ini, scan_idx_fin in self._granules_with_indexes:
            read_method = getattr(granule, 'read_' + variable)
            read_method(output_array[scan_idx_ini:scan_idx_fin,:,:])
        return output_array

    def _get_radiance(self, imaginary=False):
        output_array = np.empty([self.num_scans, self.num_fors_per_scan,
                                 self.num_fovs_per_for, len(self.wavenumber)],
                                np.float32)
        for granule, scan_idx_ini, scan_idx_fin in self._granules_with_indexes:
            granule.read_radiance(output_array[scan_idx_ini:scan_idx_fin,:,:],
                                  imaginary)
        return output_array

    @property
    def _granules_with_indexes(self):
        scan_idx = 0
        for granule in self.granules:
            yield granule, scan_idx, scan_idx + granule.num_scans
            scan_idx += granule.num_scans

    def _grouped(self, iterable, n):
        return zip(*([iter(iterable)] * n))
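# The zip-of-one-iterator idiom in _grouped chunks the flat file list into
# per-granule tuples, e.g. _grouped(['g1', 's1', 'g2', 's2'], 2)
# -> [('g1', 's1'), ('g2', 's2')] (as a zip iterator in Python 3).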
from unittest import TestCase

from intercal.iasi import Iasi, CodaFile

class IasiTest(TestCase):
    def test_latitude(self):
        lat = Iasi(EXAMPLE_IASI_FILE).latitude
        self.assertEqual(lat.shape, (22, 30, 4))
        self.assertAlmostEqual(lat[0,0,0], 150.069, places=3)

    def test_longitude(self):
        lon = Iasi(EXAMPLE_IASI_FILE).longitude
        self.assertEqual(lon.shape, (22, 30, 4))
        self.assertAlmostEqual(lon[0,0,0], -50.907, places=3)

    def test_radiance(self):
        rad = Iasi(EXAMPLE_IASI_FILE).radiance

class CodaFileTest(TestCase):
    def test_getattr(self):
        with CodaFile(EXAMPLE_IASI_FILE) as coda_file:
            self.assertEqual(coda_file.get_product_filename(), EXAMPLE_IASI_FILE)

EXAMPLE_IASI_FILE = ('/data/gregq/iasi/IASI_xxx_1C_M02_20121106234457Z_' +
                     '20121106234753Z_N_O_20121107004428Z__20121107004545')
"""Utility functions for inter-calibration"""
import numpy as np
# mean earth radius; km
r_earth_mean = 6371.0
# speed of light; m * s^-1
c = 2.99792458e8
# Planck constant; J * s
h = 6.62606957e-34
# Boltzmann constant; J * K^-1
k_B = 1.3806488e-23
# Planck function coefficients
c1 = 2.0 * h * c**2
c2 = h * c / k_B
def gcdist(lat_a, lon_a, lat_b, lon_b):
"""Great circle distance in km between lat lon pair"""
lat_a, lon_a, lat_b, lon_b = (
np.radians(x) for x in [lat_a, lon_a, lat_b, lon_b])
tmp = (np.sin(lat_a) * np.sin(lat_b) +
np.cos(lat_a) * np.cos(lat_b) * np.cos(lon_a - lon_b))
tmp[tmp > 1.0] = 1.0
return np.arccos(tmp) * r_earth_mean
def wl2wn(lambda_):
"""Convert from wavelength to wavenumber.
lambda_ given in microns. Result in inverse centimeters.
"""
return 1e4 / lambda_
def wn2wl(nu):
"""Convert from wavenumber to wavelength.
nu given in inverse centimeters. Result in microns.
"""
return 1e4 / nu
def planck_wl(lambda_, T):
"""Planck function for lambda_ in microns, T in Kelvin
Output in W per m^2 per sr per micron.
"""
lambda_ = lambda_ * 1e-6
return c1 / lambda_**5 / (np.exp(c2 / (lambda_ * T)) - 1.0) * 1e-6
def planck_wn(nu, T):
"""Planck function for nu in inverse centimeters, T in Kelvin
Output in mW per m^2 per sr per inverse centimeter.
"""
nu = nu * 1e2
return c1 * nu**3 / (np.exp(c2 * nu / T) - 1.0) * (1e2 * 1e3)
def inv_planck_wl(lambda_, B):
"""Inverse planck routine for wavelength in microns
B is radiance in W per m^2 per sr per micron.
"""
lambda_ = lambda_ * 1e-6
B = B * 1e6
return c2 / lambda_ / np.log(c1 / (lambda_**5 * B) + 1.0)
def inv_planck_wn(nu, B):
"""Inverse planck routine for wavenumber in inverse centimeters
B is radiance in mW per m^2 per sr per inverse centimeter.
"""
nu = nu * 1e2
B = B * (1e-2 * 1e-3)
return c2 * nu / np.log(c1 * nu**3 / B + 1.0)
def rad_wl2wn(lambda_, R):
"""Convert radiances from per-wavelength to per-wavenumber units
lambda_ is in microns and R is in W per m^2 per sr per micron.
Output is in mW per m^2 per sr per inverse centimeter.
"""
return R * lambda_**2 * (1e3 * 1e-4)
def rad_wn2wl(nu, R):
"""Convert radiances from per-wavenumber to per-wavelength units
nu is in inverse centimeters and R is in mW per m^2 per sr per
inverse centimeter. Output is in W per m^2 per sr per micron.
"""
return R * nu**2 * (1e-3 * 1e-4)
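A quick round-trip check of the Planck pair above (the values are illustrative; the module path matches the intercal.util imports elsewhere in this changeset):

    import numpy as np
    from intercal.util import planck_wn, inv_planck_wn, wn2wl

    nu = np.array([900.0])            # cm^-1; about 11.1 microns (wn2wl(900))
    B = planck_wn(nu, 280.0)          # mW / m^2 / sr / cm^-1
    assert np.allclose(inv_planck_wn(nu, B), 280.0)   # inverse recovers T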
from datetime import datetime, timedelta
import os
import re

def parse(path):
    """Parse metadata from a satellite data file name"""
    file_name = os.path.basename(path)
    p, = [p for p in parsers if p.match(file_name)]
    return p.parse(file_name)
def parser(cls):
    """Decorator to register a filename parser and return it unchanged"""
    parsers.append(cls())
    return cls

parsers = []
@parser
class Iasi(object):
    def match(self, file_name):
        return re.search('^IASI_xxx_', file_name)

    def parse(self, file_name):
        def f():
            x = file_name.split('_')
            return {'sensor': 'IASI', 'level': x[2], 'platform': map_platform(x[3]),
                    'begin_time': parse_time(x[4]), 'end_time': parse_time(x[5]),
                    'processing_mode': x[6], 'disposition_mode': x[7],
                    'creation_time': parse_time(x[8])}
        def map_platform(p):
            return {'M02': 'METOP-A', 'M01': 'METOP-B'}[p]
        def parse_time(s):
            return datetime.strptime(s, '%Y%m%d%H%M%SZ')
        return f()

@parser
class Avhrr(object):
    def match(self, file_name):
        return re.search(r'^[A-Z]{3}\.[A-Z]{4}\..{2}\.D[0-9]{5}\.S[0-9]{4}\.E[0-9]{4}\.',
                         file_name)

    def parse(self, file_name):
        def f():
            x = file_name.split('.')
            begin_time, end_time = parse_times(x[3], x[4], x[5])
            return {'sensor': 'AVHRR', 'processing_center': x[0], 'data_type': x[1],
                    'platform': map_platform(x[2]), 'begin_time': begin_time,
                    'end_time': end_time, 'processing_block_id': x[6], 'source': x[7]}
        def parse_times(day_str, begin_str, end_str):
            begin_day = datetime.strptime(day_str, 'D%y%j')
            begin_time = begin_day + timedelta(hours=int(begin_str[1:3]),
                                               minutes=int(begin_str[3:5]))
            end_time = begin_day + timedelta(hours=int(end_str[1:3]),
                                             minutes=int(end_str[3:5]))
            if end_time < begin_time:
                end_time += timedelta(days=1)
            return begin_time, end_time
        def map_platform(p):
            return {'N': map_noaa_platform, 'M': map_metop_platform}[p[0]](p)
        def map_noaa_platform(p):
            suffixes_by_letter = {'A': '6', 'B': 'B', 'C': '7', 'D': '12', 'E': '8', 'F': '9',
                                  'G': '10', 'H': '11', 'I': '13', 'J': '14', 'K': '15', 'L': '16',
                                  'M': '17', 'N': '18', 'P': '19'}
            return 'NOAA ' + suffixes_by_letter[p[1]]
        def map_metop_platform(p):
            return {'M2': 'METOP-A', 'M1': 'METOP-B'}[p]
        return f()

@parser
class Modis(object):
    def match(self, file_name):
        return re.search(r'^M[OY]D.*\.A[0-9]{7}\.[0-9]{4}\.[0-9]{3}\.[0-9]{13}\.', file_name)

    def parse(self, file_name):
        def f():
            x = file_name.split('.')
            return {'sensor': 'MODIS', 'platform': platform(x[0]),
                    'product': product(x[0]), 'begin_time': begin_time(x[1], x[2]),
                    'collection': x[3], 'creation_time': creation_time(x[4])}
        def platform(s):
            return {'MOD': 'TERRA', 'MYD': 'AQUA'}[s[:3]]
        def product(s):
            return s.replace('MYD', 'MOD')
        def begin_time(d, t):
            return datetime.strptime(d + t, 'A%Y%j%H%M')
        def creation_time(s):
            return datetime.strptime(s, '%Y%j%H%M%S')
        return f()

@parser
class Iff(object):
    def match(self, file_name):
        return re.search(self.regex, file_name)

    def parse(self, file_name):
        m = re.search(self.regex, file_name)
        return {'product': m.group('product'),
                'platform': {'aqua': 'AQUA', 'npp': 'SUOMI NPP'}[m.group('platform')],
                'sensor': {'aqua': 'MODIS', 'npp': 'VIIRS'}[m.group('platform')],
                'begin_time': datetime.strptime(m.group('begin_time'), 'd%Y%m%d_t%H%M%S'),
                'creation_time': datetime.strptime(m.group('creation_time'), 'c%Y%m%d%H%M%S'),
                'format': m.group('format')}

    regex = (r'^(?P<product>IFF[A-Z]+)_(?P<platform>[a-z]+)_(?P<begin_time>d[0-9]{8}_t[0-9]{6})_' +
             r'(?P<creation_time>c[0-9]{14})_ssec_dev\.(?P<format>(hdf|nc))$')

@parser
class Idps(object):
    def match(self, file_name):
        regex = (r'[A-Z0-9]{5}_[a-z0-9]{3}_d[0-9]{8}_t[0-9]{7}_e[0-9]{7}_b[0-9]{5}_' +
                 r'c[0-9]{20}_[a-z]{4}_[a-z]{3}\.')
        return re.search(regex, file_name)

    def parse(self, file_name):
        def f():
            parts = file_name.split('.')[0].split('_')
            begin_time, end_time = times(*parts[2:5])
            return {'sensor': self.product_sensors[parts[0]], 'product': parts[0],
                    'platform': platform(parts[1]), 'begin_time': begin_time,
                    'end_time': end_time, 'orbit_number': orbit_number(parts[5]),
                    'creation_time': creation_time(parts[6]), 'origin': parts[7],
                    'domain': parts[8]}
        def platform(s):
            return {'npp': 'SUOMI NPP'}[s]
        def times(day_str, begin_str, end_str):
            day = datetime.strptime(day_str, 'd%Y%m%d')
            def time(s):
                return timedelta(hours=int(s[1:3]), minutes=int(s[3:5]), seconds=int(s[5:7]),
                                 milliseconds=(100 * int(s[7:8])))
            begin_time = day + time(begin_str)
            end_time = day + time(end_str)
            return begin_time, end_time
        def orbit_number(s):
            return int(s[1:])
        def creation_time(s):
            return datetime.strptime(s, 'c%Y%m%d%H%M%S%f')
        return f()

    sensor_products = {}
    sensor_products['VIIRS'] = """
        AVAFO GAERO GCLDO GDNBO GITCO GMODO GMTCO IICMO IVAOT
        IVCBH IVCLT IVCOP IVCTP IVICC IVPCP IVPTP RNSCA RVIDU RVIRD RVIRS RVIRT
        RVITD SVDNB SVI01 SVI02 SVI03 SVI04 SVI05 SVM01 SVM02 SVM03 SVM04 SVM05
        SVM06 SVM07 SVM08 SVM09 SVM10 SVM11 SVM12 SVM13 SVM14 SVM15 SVM16 VAOOO
        VCBHO VCCLO VCEPO VCOTO VCTHO VCTPO VCTTO VISAO VIVIO VSSTO VSUMO""".split()
    sensor_products['CrIS'] = """
        GCRIO GCRSO IIROO RCRID RCRIH RCRII RCRIM RCRIT RCRIU REDRO SCRIS""".split()
    sensor_products['ATMS'] = """
        GATMO GATRO RATMD RATMM RATMS RATMT RATMW SATMR SATMS""".split()
    sensor_products['OMPS'] = """
        GONPO GOTCO SOMPS SOMTC""".split()
    product_sensors = {product: sensor
                       for sensor, products in sensor_products.items()
                       for product in products}

@parser
class Caliop(object):
    def match(self, file_name):
        return re.search(self.regex, file_name)

    def parse(self, file_name):
        m = re.search(self.regex, file_name)
        d = m.groupdict()
        d['platform'] = 'CALIPSO'
        d['sensor'] = 'CALIOP' if d['sensor'] == 'LID' else d['sensor']
        d['begin_time'] = datetime.strptime(d['begin_time'], '%Y-%m-%dT%H-%M-%SZ')
        return d

    regex = (r'^CAL_(?P<sensor>[A-Z]{3})_(?P<product>.*)-(?P<production_strategy>.*)-' +
             r'(?P<version>V.*)\.(?P<begin_time>[-0-9T]{19}Z)(?P<day_night>D|N)\.hdf')

@parser
class Cloudsat(object):
    def match(self, file_name):
        return re.search(self.regex, file_name)

    def parse(self, file_name):
        m = re.search(self.regex, file_name)
        rv = m.groupdict()
        rv['platform'] = 'CLOUDSAT'
        rv['sensor'] = 'CLOUDSAT'
        rv['begin_time'] = datetime.strptime(rv['begin_time'], '%Y%j%H%M%S')
        rv['granule_num'] = int(rv['granule_num'])
        rv['epoch_num'] = int(rv['epoch_num'])
        return rv

    regex = (r'^(?P<begin_time>\d{13})_(?P<granule_num>\d{5})_CS_(?P<product>.*)_GRANULE_' +
             r'(?P<processing_iteration>.*_R.*)_E(?P<epoch_num>\d{2}).hdf$')

@parser
class GeocatSeviri(object):
    def match(self, file_name):
        return re.search(self.regex, file_name)

    def parse(self, file_name):
        m = re.search(self.regex, file_name)
        rv = m.groupdict()
        rv['platform'] = rv['platform'].upper()
        rv['sensor'] = 'SEVIRI'
        return rv

    regex = r'^geocatNAV\.(?P<platform>Meteosat-\d+)\..{7}\.\d{6}\.hdf'

@parser
class Ahipack(object):
    def match(self, file_name):
        return file_name == 'AHIPACK'

    def parse(self, file_name):
        return {'sensor': 'AHI'}
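How the registry dispatches, using the IASI filename from the test file above:

    from intercal.fn import parse

    meta = parse('IASI_xxx_1C_M02_20121106234457Z_20121106234753Z_N_O_'
                 '20121107004428Z__20121107004545')
    assert meta['platform'] == 'METOP-A'
    assert meta['begin_time'].isoformat() == '2012-11-06T23:44:57'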
@@ -5,11 +5,11 @@ from intercal.modis_srf import ModisSrfCatalog
 class Airs2Modis(object):
-    def __init__(self, chan_prop_path, srf_path):
+    def __init__(self, chan_prop_path, srf_path, shift_id=0):
         self.channel_mask, self.wavenumber_domain = (
             self.parse_channel_props(chan_prop_path))
-        self.srf_catalog = ModisSrfCatalog(srf_path)
+        self.srf_catalog = ModisSrfCatalog(srf_path, shift_id=shift_id)

     def __call__(self, band, rad):
@@ -49,11 +49,11 @@ def read_airs_chan_data(filename):
             break
     offset = i
     size = len(lines) - offset
-    mask = np.empty((size,), np.bool)
+    mask = np.empty((size,), bool)
    wn = np.empty((size,), np.float64)
     nedts = np.empty((size,), np.float64)
     for i in range(offset, len(lines)):
-        mask[i - offset] = np.bool(lines[i][76] == '0')
+        mask[i - offset] = bool(lines[i][76] == '0')
         wn[i - offset] = np.float64(lines[i][6:14])
         nedts[i - offset] = np.float64(lines[i][26:32])
@@ -61,12 +61,12 @@ def read_airs_chan_data(filename):
 class Cris2Modis(object):
-    def __init__(self, srf_path):
-        self.srf_catalog = ModisSrfCatalog(srf_path)
+    def __init__(self, srf_path, shift_id=0, fsr=False):
+        self.srf_catalog = ModisSrfCatalog(srf_path, shift_id=shift_id)
+        self.fsr = fsr

     def __call__(self, band, rad_lw, rad_mw, rad_sw):
         nu = self.cris_wavenumber_domain()
         rad_nu = np.concatenate([rad_lw, rad_mw, rad_sw], axis=-1)
@@ -81,7 +81,13 @@ class Cris2Modis(object):
     def cris_wavenumber_domain(self):
-        return np.concatenate([np.linspace(648.75, 1096.25, 717),
+        if self.fsr:
+            return np.concatenate([np.linspace(648.75, 1096.25, 717),
+                                   np.linspace(1207.5, 1752.5, 869),
+                                   np.linspace(2150, 2555, 637)],
+                                  axis=-1)
+        else:
+            return np.concatenate([np.linspace(648.75, 1096.25, 717),
                                    np.linspace(1207.5, 1752.5, 437),
                                    np.linspace(2150, 2555, 163)],
                                   axis=-1)
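A hedged sketch of the new fsr switch; the catalog path, band number, and radiance arrays are illustrative:

    c2m = Cris2Modis('modis_srf_catalog.h5', shift_id=0, fsr=True)
    rad_modis = c2m(31, rad_lw, rad_mw, rad_sw)  # CrIS FSR spectra in, MODIS-band-equivalent radiance out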