Eva Schiffer authored: adding the possibility of turning off both time and other categories independently; cleaning up what's printed when you ask for the version
viirs_io.py
#!/usr/bin/env python
# encoding: utf-8
"""
Handle parsing input from VIIRS files.
:author: Eva Schiffer (evas)
:contact: evas@ssec.wisc.edu
:organization: Space Science and Engineering Center (SSEC)
:copyright: Copyright (c) 2015 University of Wisconsin SSEC. All rights reserved.
:date: Nov 2015
:license: GNU GPLv3
:revision: $Id$
"""
__docformat__ = "restructuredtext en"
import sys
import logging
import numpy
from stg.constants import *
import stg.viirs_guidebook as viirs_guidebook
import stg.hdf_io_util as hdf_io_util
LOG = logging.getLogger(__name__)
def open_file (file_path) :
"""given a file path that is a viirs file, open it
"""
return hdf_io_util.open_file(file_path)
def close_file (file_object) :
"""given a file object, close it
"""
hdf_io_util.close_file(file_object)
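# Illustrative usage sketch, not part of the original module (the granule path below is
# hypothetical). open_file and close_file simply delegate to hdf_io_util, so a typical
# read follows an open / use / close pattern:
#
#     file_object = open_file("/path/to/viirs_granule.h5")
#     try:
#         pass  # read variables via load_variable_from_file(..., file_object=file_object)
#     finally:
#         close_file(file_object)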
def load_aux_data (file_path, minimum_scan_angle, file_object=None) :
"""load the auxiliary data and process the appropriate masks from it
"""
return hdf_io_util.load_aux_data(file_path, minimum_scan_angle,
file_object=file_object,
lat_name=viirs_guidebook.LATITUDE_NAME,
lon_name=viirs_guidebook.LONGITUDE_NAME,
solar_zen_ang_name=viirs_guidebook.SOLAR_ZENITH_NAME,
sensor_zen_ang_name=viirs_guidebook.SENSOR_ZENITH_NAME,
sat_alt_in_km=viirs_guidebook.VIIRS_ALT_IN_KM,
scan_line_time_name=viirs_guidebook.SCAN_LINE_TIME_NAME,
pressure_var_name=viirs_guidebook.CLOUD_TOP_PRESS_NAME,
eff_emiss_var_name=viirs_guidebook.CLOUD_EMISS_NAME,
load_var_fn=load_variable_from_file,)
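# Illustrative sketch, not part of the original module (the path and angle limit below
# are hypothetical). load_aux_data wires the VIIRS-specific variable names from
# viirs_guidebook into the generic hdf_io_util loader, so a caller only supplies the
# file and a minimum scan angle:
#
#     aux_data = load_aux_data("/path/to/viirs_granule.h5", 60.0)
#     # the returned aux_data can then be passed to determine_data_sets (below) to
#     # separate day and night pixels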
def load_variable_from_file (variable_name,
file_path=None, file_object=None,
fill_value_name=viirs_guidebook.FILL_VALUE_ATTR_NAME,
scale_name=viirs_guidebook.SCALE_ATTR_NAME,
offset_name=viirs_guidebook.ADD_OFFSET_ATTR_NAME,
data_type_for_output=DEFAULT_DATA_TYPE,
zero_cutoff_exceptions=viirs_guidebook.NAVIGATION_VAR_NAMES) :
"""load a given variable from a file path or file object
"""
return hdf_io_util.load_variable_from_file(variable_name,
file_path=file_path, file_object=file_object,
fill_value_name=fill_value_name,
scale_name=scale_name,
offset_name=offset_name,
range_min_name=viirs_guidebook.RANGE_MIN_NAME,
range_max_name=viirs_guidebook.RANGE_MAX_NAME,
scaled_min_name=viirs_guidebook.SCALED_MIN_NAME,
scaled_max_name=viirs_guidebook.SCALED_MAX_NAME,
data_type_for_output=data_type_for_output,
zero_cutoff_exceptions=zero_cutoff_exceptions,
unscale_data_fn=unscale_data,
subsampling_fn=subsample_every_nth_pt,) # TODO, do we need this to be the default?
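# Illustrative sketch, not part of the original module (the file path is hypothetical).
# load_variable_from_file needs the in-file variable name plus either a file path or an
# already open file object; the fill, scale, and offset attribute names default to the
# values defined in viirs_guidebook:
#
#     cloud_press = load_variable_from_file(viirs_guidebook.CLOUD_TOP_PRESS_NAME,
#                                           file_path="/path/to/viirs_granule.h5")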
def get_abstract_data_sets (do_separate_day_night=True) :
"""get a basic dictionary with space for each of the expected data sets
"""
return hdf_io_util.get_abstract_data_sets(do_separate_day_night)
def determine_data_sets(aux_data, do_separate_day_night=True) :
"""separate viirs data into day and night sets if desired
Each data set is defined by a constant name, a mask to select that set, the scan line times, lon, and lat data
for that set
"""
return hdf_io_util.determine_data_sets(aux_data, do_separate_day_night=do_separate_day_night)
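# Illustrative sketch, not part of the original module. determine_data_sets returns a
# dictionary of data sets (day and night when do_separate_day_night is True); the exact
# key constants and per-set contents are defined in hdf_io_util, so this loop is only a
# sketch:
#
#     data_sets = determine_data_sets(aux_data, do_separate_day_night=True)
#     for set_name, set_info in data_sets.items():
#         pass  # process each set (mask, scan line times, lon, and lat) separately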
def unscale_data (data, fill_mask=None,
scale_factor=None, offset=None,
range_min=None, range_max=None, scaled_min=None, scaled_max=None) :
"""give scaled data and the needed constants, return the raw form of the data
Data is modified in place and fill values will not be changed.
There are two types of scaling. When the scale_factor or offset are provided the
following unscaling will be used (if a scale factor or offset is given as None
or not given it will not be applied) :
final_data = (scale_factor * input_data) + offset
If the range and scaled attributes are all given as non-None, then they will instead
be applied as follows:
final_data = ((input_data - scaled_min) * (scaled_max - scaled_min)) * (range_max - range_min) + range_min
If constants are provided for both forms of scaling, scale factor and offset will be preferred.
"""
to_return = data
# invert our fill mask or generate an "include everything" mask
not_fill_mask = ~fill_mask if fill_mask is not None else numpy.ones(data.shape, dtype=bool)
# if we found a scale use it to scale the data
if (scale_factor is not None) and (scale_factor != 1.0) :
LOG.debug("Applying scale: " + str(scale_factor))
to_return[not_fill_mask] *= scale_factor
# if we have an offset use it to offset the data
if (offset is not None) and (offset != 0.0) :
LOG.debug("Applying offset: " + str(offset))
to_return[not_fill_mask] += offset
# apply the ranges if we have them
if scale_factor is None and offset is None :
if (range_min is not None and range_max is not None and scaled_min is not None and scaled_max is not None) :
LOG.debug("Applying scaled range (" + str(scaled_min) + ", " + str(scaled_max)
+ ") and range (" + str(range_min) + ", " + str(range_max) + ").")
to_return[not_fill_mask] = ((to_return[not_fill_mask] - scaled_min) / (scaled_max - scaled_min)) * (range_max - range_min) + range_min
return to_return
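# Worked example of the two unscaling forms handled above (illustrative numbers, not
# taken from any real file):
#   scale/offset form:  with scale_factor=0.01 and offset=273.15, an input value of
#                       1000 becomes (0.01 * 1000) + 273.15 = 283.15
#   range form:         with scaled_min=0, scaled_max=65535, range_min=0.0, and
#                       range_max=100.0, an input value of 32768 becomes
#                       ((32768 - 0) / (65535 - 0)) * (100.0 - 0.0) + 0.0, or about 50.0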
def subsample_every_nth_pt (data, rate=5) :
"""subsample every nth pt, where n is rate; default is every 5th pt
"""
assert(rate > 0)
to_return = data
if len(to_return.shape) == 1 :
to_return = to_return[::rate].copy()
elif len(to_return.shape) == 2 :
to_return = to_return[::rate, ::rate].copy()
else :
LOG.debug("Data to be subsampled is not 1 or 2 dimensional. Data will not be subsampled.")
return to_return
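# Illustrative sketch, not part of the original module. With the default rate of 5, a
# 2-D array keeps every 5th point along both axes; for example:
#
#     full_res = numpy.zeros((768, 3200))          # roughly a VIIRS M-band granule shape
#     reduced  = subsample_every_nth_pt(full_res)  # reduced.shape == (154, 640)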
def main():
import optparse
#from pprint import pprint
usage = """
%prog [options] filename1.hdf
"""
parser = optparse.OptionParser(usage)
parser.add_option('-v', '--verbose', dest='verbosity', action="count", default=0,
help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG')
parser.add_option('-r', '--no-read', dest='read_hdf', action='store_false', default=True,
help="don't read or look for the hdf file, only analyze the filename")
(options, args) = parser.parse_args()
levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
logging.basicConfig(level = levels[min(3, options.verbosity)])
LOG.info("Currently no command line tests are set up for this module.")
if __name__ == '__main__':
sys.exit(main())