Geoff Cureton authored
Initial commit of single-granule and global mosaic scripts which plot VIIRS RGB with atmospherically corrected reflectances. git-svn-id: https://svn.ssec.wisc.edu/repos/geoffc/Python@294 f45efd6a-04c6-4179-886d-a06c9f63a260
grid_VIIRS_RGB.py 45.67 KiB
#!/usr/bin/env python
# encoding: utf-8
"""
grid_VIIRS_RGB.py
The purpose of this script is to remap the various VIIRS RGB composites
onto an equal-angle grid.
Minimum commandline...
python grid_VIIRS_RGB.py -g GEOGLOB --red_glob=REDGLOB --green_glob=GREENGLOB --blue_glob=BLUEGLOB -p RGBPROD
Created by Geoff Cureton on 2011-07-11.
Copyright (c) 2011 University of Wisconsin SSEC. All rights reserved.
"""
file_Date = '$Date$'
file_Revision = '$Revision$'
file_Author = '$Author$'
file_HeadURL = '$HeadURL$'
file_Id = '$Id$'
__author__ = 'G.P. Cureton <geoff.cureton@ssec.wisc.edu>'
__version__ = '$Id$'
__docformat__ = 'Epytext'
import os, sys, logging, traceback
from os import path,uname,environ
import string
import re
import uuid
import shlex, subprocess
from subprocess import CalledProcessError, call
from shutil import rmtree
from glob import glob
import optparse as optparse
import ast
from time import time
from datetime import datetime,timedelta
#import string, sys
#from glob import glob
#from os import path,uname
#from time import time
#from datetime import datetime
import numpy as np
from numpy import ma as ma
import scipy as scipy
import matplotlib
import matplotlib.cm as cm
from matplotlib.colors import ListedColormap
from matplotlib.figure import Figure
matplotlib.use('Agg')
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
#matplotlib.use('WXAgg')
#from matplotlib.backends.backend_wxagg import FigureCanvasAgg as FigureCanvas
# This must come *after* the backend is specified.
import matplotlib.pyplot as ppl
from mpl_toolkits.basemap import Basemap
import Image as Image
import tables as pytables
from tables import exceptions as pyEx
from HDF5 import getobj
import VIIRS as VIIRS
from utils.snapToGrid import SnapToGrid
# every module should have a LOG object
sourcename= file_Id.split(" ")
LOG = logging.getLogger(sourcename[1])
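# Note: this assumes svn keyword expansion has filled file_Id with something
# like '$Id: grid_VIIRS_RGB.py 294 ... $', so that splitting on spaces yields
# the script name at index 1; with an unexpanded '$Id$' the lookup would fail.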
###################################
# Common Setup #
###################################
shrink = 1
###################################
# Get File Lists #
###################################
def npp_Files(NPP_glob,NPP_path,preFix):
'''
Returns sorted, matched lists of the geolocation (GMODO) files and the
corresponding NPP product files. Geolocation files with no matching
product file are skipped.
'''
viirsGeo_list = glob(path.abspath(path.expanduser(NPP_glob)))
viirsGeo_list.sort()
geoList = []
nppList = []
for files in viirsGeo_list :
NPP_arr = string.split(path.basename(files),"_")
globStr = "%s/%s*%s_%s*.h5" % (path.expanduser(NPP_path),preFix,NPP_arr[2],NPP_arr[3])
nppFiles = glob(globStr)
if (np.shape(nppFiles)[0] != 0) :
geoList.append(files)
nppList.append(path.abspath(path.expanduser(nppFiles[0])))
else :
print " ... no match found for %s" % ( files )
pass
geoList.sort()
nppList.sort()
return geoList,nppList
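# Note: npp_Files is not called elsewhere in this script; granuleFiles below
# is the file-matching routine actually used by main().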
def granuleFiles(geoGlob,redGlob,greenGlob,blueGlob):
'''
Returns sorted lists of the geolocation and product files.
'''
colourList = ['red','green','blue']
geoDir = path.dirname(path.abspath(path.expanduser(geoGlob)))
redDir = path.dirname(path.abspath(path.expanduser(redGlob)))
greenDir = path.dirname(path.abspath(path.expanduser(greenGlob)))
blueDir = path.dirname(path.abspath(path.expanduser(blueGlob)))
print "Initial geoGlob = ",geoGlob
print "Initial prodGlob = ",redGlob
print "Initial prodGlob = ",greenGlob
print "Initial prodGlob = ",blueGlob
geoGlob = path.basename(path.abspath(path.expanduser(geoGlob)))
redGlob = path.basename(path.abspath(path.expanduser(redGlob)))
greenGlob = path.basename(path.abspath(path.expanduser(greenGlob)))
blueGlob = path.basename(path.abspath(path.expanduser(blueGlob)))
#geoPrefix = string.split(geoGlob,'_')[0]
#redPrefix = string.split(redGlob,'_')[0]
#greenPrefix = string.split(greenGlob,'_')[0]
#bluePrefix = string.split(blueGlob,'_')[0]
geoPrefix = geoGlob[:5]
redPrefix = redGlob[:5]
greenPrefix = greenGlob[:5]
bluePrefix = blueGlob[:5]
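# The first five characters of a VIIRS HDF5 filename are the product prefix
# (e.g. 'GMTCO' for M-band geolocation or 'SVM05' for the M5 SDR), so the
# [:5] slice recovers the prefix used to build the per-granule globs below.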
print "geoDir = ",geoDir
print "redDir = ",redDir
print "greenDir = ",greenDir
print "blueDir = ",blueDir
print "geoGlob = ",geoGlob
print "redGlob = ",redGlob
print "greenGlob = ",greenGlob
print "blueGlob = ",blueGlob
print "geoPrefix = ",geoPrefix
print "redPrefix = ",redPrefix
print "greenPrefix = ",greenPrefix
print "bluePrefix = ",bluePrefix
geoList_in = glob("%s/%s" % (geoDir,geoGlob))
geoList_in.sort()
geoList = []
redList = []
greenList = []
blueList = []
# Loop through the files in the master list...
for files in geoList_in :
# Date and time stamp of the master file...
field_arr = string.split(path.basename(files),"_")
dateStamp = field_arr[2]
timeStamp = field_arr[3]
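# In the usual NPP filename convention the underscore-delimited fields are
# prefix, platform ('npp'), date ('dYYYYMMDD') and granule start time
# ('tHHMMSSS'), so fields 2 and 3 give the stamps used to pair the channel
# files with this geolocation file.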
#geoFileGlob="%s/%s%s_%s*.h5" % (geoDir,geoPrefix,dateStamp,timeStamp)
redFileGlob="%s/%s*%s_%s*.h5" % (redDir,redPrefix,dateStamp,timeStamp)
greenFileGlob="%s/%s*%s_%s*.h5" % (greenDir,greenPrefix,dateStamp,timeStamp)
blueFileGlob="%s/%s*%s_%s*.h5" % (blueDir,bluePrefix,dateStamp,timeStamp)
redFileGlob = string.replace(redFileGlob,"**","*")
greenFileGlob = string.replace(greenFileGlob,"**","*")
blueFileGlob = string.replace(blueFileGlob,"**","*")
#print "dateStamp = ",dateStamp
#print "timeStamp = ",timeStamp
##print "geoFileGlob = ",geoFileGlob
#print "redFileGlob = ",redFileGlob
#print "greenFileGlob = ",greenFileGlob
#print "blueFileGlob = ",blueFileGlob
redFile = glob(redFileGlob)
greenFile = glob(greenFileGlob)
blueFile = glob(blueFileGlob)
if (np.shape(redFile)[0] != 0 and np.shape(greenFile)[0] != 0 and np.shape(blueFile)[0] != 0) :
geoList.append(files)
redList.append(redFile[0])
greenList.append(greenFile[0])
blueList.append(blueFile[0])
#print "geoFile = ",files
#print "redFile = ",redFile[0]
#print "greenFile = ",greenFile[0]
#print "blueFile = ",blueFile[0]
else :
print " ... no match found for %s, appending %s" % ( files, files)
pass
#for geoFile,prodFile in zip(geoList,prodList):
#print geoFile,prodFile
return geoList,redList,greenList,blueList
###################################
# Grid Files #
###################################
def grid_RGB(geoList,redList,greenList,blueList,prod,prodName=None,isRadiance=False,oldGridObj=None):
'''
Returns the gridded SDR dataset
'''
print ">>>>>>>>>>>>>>>>>>"
print " Gridding SDR "
print ">>>>>>>>>>>>>>>>>>"
try :
reload(VIIRS.viirsSDR)
reload(VIIRS.ViirsData)
del(viirsSdrObj)
del(latArr)
del(lonArr)
del(sdrArr)
except :
pass
print "Creating viirsSdrObj..."
viirsSdrObj = VIIRS.viirsSDR.viirsSDR()
print "done"
print ""
# Create the gridding object
if (oldGridObj != None) :
gridObj = oldGridObj
###
gridLat = gridObj.gridLat
gridLon = gridObj.gridLon
###
else :
gridObj = SnapToGrid()
gridObj.gridLat = gridLat
gridObj.gridLon = gridLon
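# Note: this else branch assumes gridLat and gridLon already exist at module
# scope (they are not defined locally here). As called from main() below, a
# pre-populated grid object is always passed in via oldGridObj, so in practice
# only the first branch is exercised.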
if (prodName == None) :
prodName = '%s' % (prod)
else :
pass
# Determine the correct fillValue
trimObj = VIIRS.ViirsData.ViirsTrimTable()
fillVal = trimObj.sdrTypeFill['ONBOARD_PT_FILL']['float32']
fileIdxFillVal = trimObj.sdrTypeFill['ONBOARD_PT_FILL']['int32']
idxFillVal = trimObj.sdrTypeFill['ONBOARD_PT_FILL']['int64']
print "fillVal = ",fillVal
print "fileIdxFillVal = ",fileIdxFillVal
print "idxFillVal = ",idxFillVal
eps = 1.e-6
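# eps is the absolute tolerance used below when matching floating-point fill
# values with ma.masked_inside.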
# Initialise the output data arrays with fill data
print "prodName = %s" %(prodName)
#gridObj.dataFileList[prodName] = []
gridObj.gridData[prodName] = np.ones(np.shape(gridLat),dtype=np.float32) * fillVal
gridObj.gridData['red'] = np.ones(np.shape(gridLat),dtype=np.float32) * fillVal
gridObj.gridData['green'] = np.ones(np.shape(gridLat),dtype=np.float32) * fillVal
gridObj.gridData['blue'] = np.ones(np.shape(gridLat),dtype=np.float32) * fillVal
gridObj.grid2GranFileIdx = np.ones(np.shape(gridLat),dtype=np.int32) * fileIdxFillVal
gridObj.grid2GranIdx = np.ones(np.shape(gridLat),dtype=np.int64) * idxFillVal
gridData = np.ones(np.shape(gridLat),dtype=np.float32) * fillVal
grid2GranFileIdx = np.ones(np.shape(gridLat),dtype=np.int32) * fileIdxFillVal
grid2GranIdx = np.ones(np.shape(gridLat),dtype=np.int64) * idxFillVal
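# gridData holds the remapped channel values, grid2GranFileIdx records which
# input granule populated each grid cell, and grid2GranIdx records the pixel
# index within that granule (corrected below for the fill-value compression).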
rgbKeys = ['lat','lon','lat_0','lon_0','red','green','blue','redName','greenName','blueName']
RGBdict = {}
t1 = time()
for grans in np.arange(len(geoList)):
print "\n>>>>>>>>>>>>\n\nIngesting granule %d ..." % (grans)
geoFile = geoList[grans]
redFile = redList[grans]
greenFile = greenList[grans]
blueFile = blueList[grans]
RGBfiles = {'red':redFile,'green':greenFile,'blue':blueFile}
for chans,chanFileName in zip(['red','green','blue'],['redName','greenName','blueName']):
print "\n>>>>>>>>>>>>>>>>>>>>>>>>"
print "Ingesting %s channel ..." % (chans)
print ">>>>>>>>>>>>>>>>>>>>>>>>\n"
retArr = viirsSdrObj.ingest(geoFile,RGBfiles[chans],isRadiance,1)
print "done\n"
try :
RGBdict['lat'] = viirsSdrObj.Lat[:,:]
RGBdict['lon'] = viirsSdrObj.Lon[:,:]
RGBdict[chans] = viirsSdrObj.ViirsSDR[:,:]
RGBdict[chanFileName] = RGBfiles[chans]
except Exception, err:
print "%s" % (str(err))
sys.exit(1)
print "shape( RGBdict['lat'] ) = %s" % (repr(np.shape( RGBdict['lat'])))
print "shape( RGBdict['lon'] ) = %s" % (repr(np.shape( RGBdict['lon'])))
print "shape( RGBdict['%s'] ) = %s" % (chans,repr(np.shape( RGBdict[chans])))
# Determine masks for each fill type, for the red channel
sdrFillMasks = {}
for fillType in trimObj.sdrTypeFill.keys() :
fillValue = trimObj.sdrTypeFill[fillType][RGBdict[chans].dtype.name]
if 'float' in fillValue.__class__.__name__ :
sdrFillMasks[fillType] = ma.masked_inside(RGBdict[chans],fillValue-eps,fillValue+eps).mask
if (sdrFillMasks[fillType].__class__.__name__ != 'ndarray') :
sdrFillMasks[fillType] = None
elif 'int' in fillValue.__class__.__name__ :
sdrFillMasks[fillType] = ma.masked_equal(RGBdict[chans],fillValue).mask
if (sdrFillMasks[fillType].__class__.__name__ != 'ndarray') :
sdrFillMasks[fillType] = None
else :
print "Dataset was neither int not float... a worry"
pass
print ""
for fillType in trimObj.sdrTypeFill.keys() :
if sdrFillMasks[fillType] is not None :
print "There are %d pixels for %s" % (sdrFillMasks[fillType].sum(),fillType)
print ""
# Construct the total mask from all of the various fill values
totalMask = ma.array(np.zeros(RGBdict[chans].shape,dtype=np.bool))
for fillType in trimObj.sdrTypeFill.keys() :
if sdrFillMasks[fillType] is not None :
totalMask = totalMask * ma.array(np.zeros(RGBdict[chans].shape,dtype=np.bool),\
mask=sdrFillMasks[fillType])
# Apply the pixel trim mask to all arrays, and compress...
try :
data = ma.array(RGBdict[chans], mask=totalMask.mask)
lats = ma.array(RGBdict['lat'], mask=totalMask.mask)
lons = ma.array(RGBdict['lon'], mask=totalMask.mask)
except ma.core.MaskError :
print ">> error: Mask Error, probably mismatched geolocation and product array sizes, aborting..."
sys.exit(1)
print "shape(lats) = ", np.shape(lats)
print "shape(lons) = ", np.shape(lons)
print "shape(data) = ",np.shape(data)
# Because we are sending a compressed version of the data array to the
# gridding method, apply the mask to an index vector, and compress.
# Returned dataIdx should address this vector to get the correct
# indices in the original data array.
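# Illustrative example: if a granule had six pixels and pixels 1 and 4 were
# fill, dataIdxOrig compresses to [0, 2, 3, 5]; an index k returned by the
# gridder against the compressed arrays then maps back to original pixel
# dataIdxOrig[k] (see the grid2GranIdx correction below).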
dataIdxOrig = np.arange(data.size,dtype=np.int64)
print "Original shape(dataIdxOrig) = ",np.shape(dataIdxOrig)
dataIdxOrig = ma.array(dataIdxOrig,mask=totalMask.mask)
dataIdxOrig = dataIdxOrig.compressed()
print "Compressed shape(dataIdxOrig) = ",np.shape(dataIdxOrig)
# Get gridded data for just this granule...
gridData = np.ones(np.shape(gridLat),dtype=np.float32) * fillVal
grid2GranFileIdx = np.ones(np.shape(gridLat),dtype=np.int32) * fileIdxFillVal
grid2GranIdx = np.ones(np.shape(gridLat),dtype=np.int64) * idxFillVal
# If the compressed dataset is empty (i.e.: our data was all fill), skip this
# granule...
if (dataIdxOrig.shape[0]==0) :
print ">> warning: This granule is all fill, skipping..."
else :
gridList = gridObj.snapGrid_ctypes(
lats.compressed(), lons.compressed(), data.compressed(),
gridLat, gridLon, gridData, grid2GranIdx)
newGridData,newGrid2GranIdx = gridList[0],gridList[1]
print "shape(newGridData) = ", np.shape(newGridData)
print "shape(newGrid2GranIdx) = ", np.shape(newGrid2GranIdx)
# The gridcells populated by this granule
newGridCells = np.where(newGrid2GranIdx > idxFillVal)
# Correct grid2GranIdx for the granule compression
grid2GranIdx[newGridCells] = dataIdxOrig[grid2GranIdx[newGridCells]]
# Copy result for this granule to the aggregate gridObj.gridData
print gridObj.gridData.keys()
gridObj.gridData[chans][newGridCells] = newGridData[newGridCells]
gridObj.grid2GranIdx[newGridCells] = newGrid2GranIdx[newGridCells]
gridObj.grid2GranFileIdx[newGridCells] = grans
# Append the data file names to dataFileList
#print "appending %s to dataFileList" % (RGBfiles[chans])
#gridObj.dataFileList[chans].append(RGBfiles[chans])
# Append the geolocation file names to geoFileList
#if (oldGridObj == None) :
#gridObj.geoFileList.append(geoList[grans])
#except Exception, err:
#print "%s" % (str(err))
t2 = time()
elapsedTime = t2-t1
print "\n>>> Gridding %d granules took %f seconds\n" % (len(geoList),elapsedTime)
return gridObj
def viirs_rgb(gridObj,rgbProd,imageMap,imageMapName=None):
R = ma.masked_less(gridObj.gridData['red'],-1.5)
G = ma.masked_less(gridObj.gridData['green'],-1.5)
B = ma.masked_less(gridObj.gridData['blue'],-1.5)
gridMask = R.mask
print "min RGBdict['red'] = %f" % (np.min(R))
print "min RGBdict['green'] = %f" % (np.min(G))
print "min RGBdict['blue'] = %f" % (np.min(B))
print "max RGBdict['red'] = %f" % (np.max(R))
print "max RGBdict['green'] = %f" % (np.max(G))
print "max RGBdict['blue'] = %f" % (np.max(B))
bandMin = max(np.min(R),np.min(G),np.min(B))
bandMax = max(np.max(R),np.max(G),np.max(B))
band_R_scaled = 255. * (R - np.min(R)) / np.max(R - np.min(R))
band_G_scaled = 255. * (G - np.min(G)) / np.max(G - np.min(G))
band_B_scaled = 255. * (B - np.min(B)) / np.max(B - np.min(B))
gridRows = gridObj.gridData['red'].shape[0]
gridCols = gridObj.gridData['red'].shape[1]
newimg = np.zeros((gridRows,gridCols,3),dtype=np.float32)
newimg[:,:,0] = band_R_scaled[:,:]
newimg[:,:,1] = band_G_scaled[:,:]
newimg[:,:,2] = band_B_scaled[:,:]
# Make an image object from a numpy array
im = Image.fromarray(newimg.astype(np.uint8))
# Limits for differential scaling
if rgbProd == 'TrueColor' or rgbProd == 'Vegetation' :
# True Colour
input = np.array([0, 30, 60, 120, 190, 255],dtype=np.float32)
output = np.array([0, 110, 160, 210, 240, 255],dtype=np.float32) # Original scaling
#output = np.array([0, 180, 200, 220, 240, 255],dtype=np.float32) # New scaling
elif rgbProd == 'SnowIce' :
# Snow/Ice
input = np.array([0, 25, 55, 100, 255],dtype=np.float32)
output = np.array([0, 90, 140, 175, 255],dtype=np.float32)
elif rgbProd == 'CloudPhase' :
newimg[:,:,1] = 255. - band_B_scaled
input = np.array([0, 30, 60, 120, 190, 255],dtype=np.float32)
output = np.array([0, 30, 60, 120, 190, 255],dtype=np.float32) # New scaling
else :
print "error: invalid band combination. Try 'TrueColor','SnowIce','Vegetation'"
return 1
inputRange = input[1 :]-input[:-1]
outputRange = output[1 :]-output[:-1]
inputMin,inputMax = input[:-1], input[1 :]
outputMin,outputMax = output[:-1], output[1 :]
slope = (outputMax-outputMin)/(inputMax-inputMin)
intercept = outputMax - (slope*inputMax)
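# Worked example for the TrueColor breakpoints: the segment [30, 60) maps to
# [110, 160), so slope = 50/30 ~ 1.67 and intercept = 160 - (50/30)*60 = 60;
# an input count of 45 is therefore stretched to (50/30)*45 + 60 = 135.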
scaledImg = np.zeros((gridRows,gridCols,3),dtype=np.float32)
print "Scaling red channel..."
for pair in np.arange(np.size(inputRange)):
inputIdx = np.where((newimg[:,:,0]>=inputMin[pair])*(newimg[:,:,0]<inputMax[pair]))
rows,cols = inputIdx[0],inputIdx[1]
print "min,max of unscaled red channel:",np.min(newimg[rows,cols,0]),np.max(newimg[rows,cols,0])
scaledImg[rows,cols,0] = slope[pair] * newimg[rows,cols,0] + intercept[pair]
print "min,max of scaled red channel:",np.min(newimg[rows,cols,0]),np.max(newimg[rows,cols,0])
print "\nScaling green channel..."
for pair in np.arange(np.size(inputRange)):
inputIdx = np.where((newimg[:,:,1]>=inputMin[pair])*(newimg[:,:,1]<inputMax[pair]))
rows,cols = inputIdx[0],inputIdx[1]
print "min,max of unscaled green channel:",np.min(newimg[rows,cols,1]),np.max(newimg[rows,cols,1])
scaledImg[rows,cols,1] = slope[pair] * newimg[rows,cols,1] + intercept[pair]
print "min,max of scaled green channel:",np.min(newimg[rows,cols,1]),np.max(newimg[rows,cols,1])
print "\nScaling blue channel..."
for pair in np.arange(np.size(inputRange)):
inputIdx = np.where((newimg[:,:,2]>=inputMin[pair])*(newimg[:,:,2]<inputMax[pair]))
rows,cols = inputIdx[0],inputIdx[1]
print "min,max of unscaled blue channel:",np.min(newimg[rows,cols,2]),np.max(newimg[rows,cols,2])
scaledImg[rows,cols,2] = slope[pair] * newimg[rows,cols,2] + intercept[pair]
print "min,max of scaled blue channel:",np.min(newimg[rows,cols,2]),np.max(newimg[rows,cols,2])
RGB_gridMask = np.zeros((gridRows,gridCols,3),dtype=np.bool)
RGB_gridMask[:,:,0] = gridMask[:,:]
RGB_gridMask[:,:,1] = gridMask[:,:]
RGB_gridMask[:,:,2] = gridMask[:,:]
trimObj = VIIRS.ViirsData.ViirsTrimTable()
gridDataType = 'float32'
#gridDataType = 'uint8'
scaledImg = scaledImg.astype(gridDataType)
onboard_pt_value = trimObj.sdrTypeFill['ONBOARD_PT_FILL'][gridDataType]
scaledImg_masked = np.zeros((gridRows,gridCols,3),dtype=gridDataType)
scaledImg_masked = ma.array(scaledImg,mask=RGB_gridMask,fill_value=onboard_pt_value)
scaledImg = scaledImg_masked.filled()
gridObj.gridData[rgbProd] = scaledImg
scaledImg_masked_flip = scaledImg_masked[::-1,:,:]
if imageMap :
im = Image.fromarray(scaledImg_masked_flip.astype('uint8'))
LOG.info("Initial imageMapName = %r" % (imageMapName))
if imageMapName == None :
imageMapName = 'Viirs_imagemap_truecolor.png'
else :
imageMapName = string.replace(imageMapName,'.png','_imagemap.png')
LOG.info("Saving imagemap image to %r" % (imageMapName))
im.save(imageMapName)
return gridObj
#def mollPlot_RGB(gridLat,gridLon,gridData,lon_0=0., \
def mollPlot_RGB(gridObj,rgbProd,lon_0=0., \
vmin=0.,vmax=1.,scale=None, \
titleStr=r'MODIS COT: Jan 2003 025', \
pngDpi=300, cmap=None, pointSize=0.15, mapRes='c', mapProj='cyl', \
pngName='MYD06_L2.AYYYYDDD.hhmm.ccc.flo.png'):
gridLat = gridObj.gridLat[:,:]
gridLon = gridObj.gridLon[:,:]
gridData = gridObj.gridData[rgbProd][:,:,:]
gridData = ma.masked_less(gridData,-800.)
figWidth,figHeight = 5.,3.2 # inches
#figWidth,figHeight = 5.,5.4 # inches
# Create figure with default size, and create canvas to draw on
fig = Figure(figsize=((figWidth,figHeight)))
canvas = FigureCanvas(fig)
# Create main axes instance, leaving room for colorbar at bottom,
# and also get the Bbox of the axes instance
ax_rect = [0.02, 0.22, 0.96 , 0.80 ] # [left,bottom,width,height]
ax = fig.add_axes(ax_rect)
# Create Basemap instance
# set 'ax' keyword so pylab won't be imported.
print "mapProj = ",mapProj
print "gridLat.shape = ",gridLat.shape
print "gridLon.shape = ",gridLon.shape
print "gridData.shape = ",gridData.shape
m = Basemap(projection=mapProj,lon_0=lon_0,ax=ax,fix_aspect=True,resolution=mapRes)
#ll_lat,ur_lat=np.min(gridLat),np.max(gridLat)
#ll_lon,ur_lon=np.min(gridLon),np.max(gridLon)
#print "ll_lat,ur_lat = ",ll_lat,ur_lat
#print "ll_lon,ur_lon = ",ll_lon,ur_lon
#m = Basemap(projection=mapProj,llcrnrlon=ll_lon, llcrnrlat=ll_lat, urcrnrlon=ur_lon, urcrnrlat=ur_lat,ax=ax,resolution=mapRes)
x,y=m(np.array(gridLon),np.array(gridLat))
# Some map style configuration stuff
# FIXME : Provide coastline width and lat/lon limits as options, so we can
# FIXME : do close-in work without the results looking shite.
m.drawmapboundary(linewidth=0.1,fill_color='black')
m.drawcoastlines(linewidth=0.05,color='gray')
m.fillcontinents(ax=ax,color='gray',lake_color='black',zorder=0)
#m.drawparallels(np.arange(-90.,120.,30.),color='white')
#m.drawmeridians(np.arange(0.,420.,60.),color='white')
# If our array is all fill, repopulate a couple of points with the plot limits and plot
# a tiiiiny point.
unmaskedCells = len(np.where(np.ravel(gridData.mask)==False)[0])
if (unmaskedCells == 0):
# Red
gridData.data[0,0,0],gridData.mask[0,0,0] = 0.,False
gridData.data[0,1,0],gridData.mask[0,1,0] = 255.,False
# Green
gridData.data[0,0,1],gridData.mask[0,0,1] = 0.,False
gridData.data[0,1,1],gridData.mask[0,1,1] = 255.,False
# Blue
gridData.data[0,0,2],gridData.mask[0,0,2] = 0.,False
gridData.data[0,1,2],gridData.mask[0,1,2] = 255.,False
pointSize = 0.001
# Plot the granule data
print "shape of gridData = ",gridData.shape
gridMask = gridData.mask[:,:,0]
print "shape of gridMask = ",gridMask.shape
# Mask the geolocation
x = ma.array(np.ravel(x),mask=gridMask)
y = ma.array(np.ravel(y),mask=gridMask)
# Scale channels to be 0..1
R = gridData[:,:,0].astype('float')/255.
G = gridData[:,:,1].astype('float')/255.
B = gridData[:,:,2].astype('float')/255.
# Ravel the channel arrays
R = np.ravel(R)
G = np.ravel(G)
B = np.ravel(B)
# Mask the RGB arrays
R = ma.array(R,mask=gridMask)
G = ma.array(G,mask=gridMask)
B = ma.array(B,mask=gridMask)
# Compress the geolocation and RGB arrays
x = x.compressed()
y = y.compressed()
R = R.compressed()
G = G.compressed()
B = B.compressed()
# Construct the data array
gridData = np.transpose(np.array([R,G,B]))
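# matplotlib's scatter accepts an (N, 3) array of per-point RGB values in
# 0..1 for the 'c' argument, so each grid cell below is drawn in its own
# composited colour rather than through a colormap.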
#cs = m.scatter(x,y,s=pointSize,c=gridData,axes=ax,faceted=False,vmin=vmin,vmax=vmax)
cs = m.scatter(x,y,s=pointSize,c=gridData,axes=ax,edgecolors='none',vmin=0.,vmax=1.)
# RGB information at the bottom of the plot.
timeString = 'Creation date: %s' %(datetime.strftime(datetime.utcnow(),"%Y-%m-%d %H:%M:%S Z"))
fig.text(0.98, 0.01, timeString,fontsize=5, color='gray',ha='right',va='bottom',alpha=0.9)
productString = titleStr
fig.text(0.5, 0.15, productString,fontsize=10, color='black',ha='center',va='bottom',alpha=1.0)
# FIXME : This fails when I am doing an average because the input file(s) do not have the
# FIXME : required prefixes.
try :
channelDict = {'red':string.split(path.basename(gridObj.dataFileList['red'][0]),'_')[0][2:],\
'green':string.split(path.basename(gridObj.dataFileList['green'][0]),'_')[0][2:],\
'blue':string.split(path.basename(gridObj.dataFileList['blue'][0]),'_')[0][2:]}
RGBstring = "R : %s (%s $\mu\mathrm{m}$); G : %s (%s $\mu\mathrm{m}$); B : %s (%s $\mu\mathrm{m}$)" % \
(channelDict['red'],\
str(VIIRS.ViirsData.ViirsBandCenter[channelDict['red']]),\
channelDict['green'],\
str(VIIRS.ViirsData.ViirsBandCenter[channelDict['green']]),\
channelDict['blue'],\
str(VIIRS.ViirsData.ViirsBandCenter[channelDict['blue']])
)
except :
channelDict = {'red': "Red",\
'green': "Green",\
'blue': "Blue"}
RGBstring = "R : %s (%s $\mu\mathrm{m}$); G : %s (%s $\mu\mathrm{m}$); B : %s (%s $\mu\mathrm{m}$)" % \
(channelDict['red'],\
"",\
channelDict['green'],\
"",\
channelDict['blue'],\
"")
fig.text(0.5, 0.07, RGBstring,fontsize=9, color='black',ha='center',va='bottom',alpha=1.0)
# Redraw the figure
canvas.draw()
# save image
canvas.print_figure(pngName,dpi=pngDpi)
def makeGridIdxPng(gridFile,idxType='file',pngDpi=200) :
try :
del(gridObj)
except :
pass
gridObj = SnapToGrid.fileToGridObj(gridFile)
pngFile = string.replace(gridFile,".h5","_"+idxType+"Idx.png")
print "pngFile: ",pngFile
if idxType=='file' :
idxData = gridObj.grid2GranFileIdx
vmin,vmax = 0,len(gridObj.geoFileList)
cbTitle = 'File Index'
elif idxType=='gran' :
idxData = gridObj.grid2GranIdx
vmin,vmax = 0,np.max(idxData)
cbTitle = 'Granule Index'
mollPlot_Cld(gridObj.gridLat,gridObj.gridLon,idxData,lon_0=0., \
titleStr=cbTitle , \
vmin=vmin,vmax=vmax,pngDpi=pngDpi, \
pngName=pngFile)
del(gridObj)
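# Note: mollPlot_Cld, called by makeGridIdxPng above and makeMODpng below, is
# not defined or imported in this script; these helpers appear to be carried
# over from a related gridding script and assume that routine exists elsewhere.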
def makeMODpng(gridFile,prodName,vmin,vmax,scale,pngDpi,cbTitle, \
cmap=None, pointSize=0.15, mapRes='c',mapProj='cyl',pngName=None) :
try :
del(gridObj)
except :
pass
gridObj = SnapToGrid.fileToGridObj(gridFile)
if (pngName == None) :
pngName = string.replace(gridFile,".h5","."+prodName+".png")
else :
pass
print "pngName: ",pngName
if scale=='log' :
print "Log scaling the data"
gridData = np.log10(gridObj.gridData[prodName])
else :
gridData = gridObj.gridData[prodName]
mollPlot_Cld(gridObj.gridLat,gridObj.gridLon,gridData,lon_0=0., \
vmin=vmin,vmax=vmax,scale=scale, \
titleStr=cbTitle, \
pngDpi=pngDpi,cmap=cmap, pointSize=pointSize, mapRes=mapRes, \
mapProj=mapProj, pngName=pngName)
del(gridObj)
def makeGridAverage(gridFiles,avgGridFile,prodName=None) :
# Create a new gridObj
print "Creating new grid object..."
newGridObj = SnapToGrid()
# Loop through the grid files
gridFiles.sort()
for gridFile in gridFiles :
try :
del(gridObj)
except :
pass
print "gridFile: ",gridFile
try :
gridObj = SnapToGrid.fileToGridObj(gridFile)
# Add this gridFile to the new grid object geo file list
newGridObj.geoFileList.append(gridFile)
# Set the dataset(s) names
if (prodName != None) :
dataSets = [prodName]
else :
dataSets = gridObj.gridData.keys()
# Loop through the datasets in this file
for dset in dataSets:
print "Dataset: ",dset
# Add the grid filename to the filelist for this dataset
try :
newGridObj.dataFileList[dset].append(gridFile)
except KeyError :
newGridObj.dataFileList[dset] = []
newGridObj.dataFileList[dset].append(gridFile)
# Is this the first array?
try :
vars()[dset+'_sum'].shape
except KeyError :
#vars()[dset+'_sum'] = np.zeros(gridObj.gridLat.shape,dtype=float)
#vars()[dset+'_count_total'] = np.zeros(gridObj.gridLat.shape,dtype=int)
vars()[dset+'_sum'] = np.zeros(gridObj.gridData[dset].shape,dtype=float)
vars()[dset+'_count_total'] = np.zeros(gridObj.gridData[dset].shape,dtype=int)
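# The vars() dictionary is used as a dynamic namespace for the per-dataset
# accumulators (e.g. 'red_sum', 'red_count_total'). This appears to work in
# CPython because the constructed names are never real local variables, but
# writing through vars() inside a function is not guaranteed behaviour.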
gridLat = gridObj.gridLat
gridLon = gridObj.gridLon
newGridObj.gridLat = gridLat
newGridObj.gridLon = gridLon
# Set the masked elements to zero
print "Setting masked elements to zero."
vars()[dset+'_gridData_masked'] = ma.masked_less(gridObj.gridData[dset],-800.)
vars()[dset+'_gridData_count']= np.array((vars()[dset+'_gridData_masked'].mask!=True),dtype=int)
vars()[dset+'_gridData_filled'] = ma.filled(vars()[dset+'_gridData_masked'],0.)
print "Shape of %s is %r" %(dset+'_sum',vars()[dset+'_sum'].shape)
print "Shape of %s is %r" %(dset+'_count_total',vars()[dset+'_count_total'].shape)
print "Shape of %s is %r" %(dset+'_gridData_masked',vars()[dset+'_gridData_masked'].shape)
print "Shape of %s is %r" %(dset+'_gridData_count',vars()[dset+'_gridData_count'].shape)
print "Shape of %s is %r" %(dset+'_gridData_filled',vars()[dset+'_gridData_filled'].shape)
# Accumulate to the sum and count arrays for this dataset
print "Accumulating the sum."
vars()[dset+'_sum'] = vars()[dset+'_sum'] + vars()[dset+'_gridData_filled']
vars()[dset+'_count_total'] = vars()[dset+'_count_total'] + vars()[dset+'_gridData_count']
try :
del(vars()[dset+'_gridData_masked'])
del(vars()[dset+'_gridData_count'])
del(vars()[dset+'_gridData_filled'])
except :
pass
# Delete the grid object for this file
try :
del(gridObj)
except :
pass
except Exception, err:
print "Problem with file %s\n%s" % (gridFile,err)
# Loop through the datasets, calculating the averages and copying to gridObj
try :
for dset in dataSets:
print "Assigning %s to new grid object" % (dset)
vars()[dset+'_avg'] = vars()[dset+'_sum'] / vars()[dset+'_count_total']
# Fill any NaN or Inf values with the correct fill value...
nanMask = np.isnan(vars()[dset+'_avg'])
nanInf = np.isinf(vars()[dset+'_avg'])
totalMask = np.bitwise_or(nanMask,nanInf)
vars()[dset+'_avg'] = ma.masked_array(vars()[dset+'_avg'],mask=totalMask)
vars()[dset+'_avg'] = ma.filled(vars()[dset+'_avg'],-999.7)
newGridObj.gridData[dset] = vars()[dset+'_avg']
except Exception, err:
print "Problem with dataset %s\n%s" % (dset,err)
# Copy the new gridObj to a file
#print "Writing new grid object to file %s" % (avgGridFile)
#newGridObj.gridObjToFile(gridFile=avgGridFile)
return newGridObj
###################################################
# Main Function #
###################################################
def main():
prodChoices=['TrueColor','SnowIce','CloudPhase','Vegetation']
mapRes = ['c','l','i']
mapProjections = {
'moll' : 'Mollweide',
'robin' : 'Robinson',
'sinu' : 'Sinusoidal',
'cyl' : 'Equidistant Cylindrical',
#'merc' : 'Mercator',
#'mill' : 'Miller',
'gall' : 'Gall Stereographic',
'mbtfpq' : 'McBryde-Thomas'
}
description = \
'''
%prog remaps VIIRS RGB composites (TrueColor, SnowIce, CloudPhase or Vegetation) onto an equal-angle grid, and optionally averages and plots the gridded result.
'''
usage = "usage: %prog [mandatory args] [options]"
version = "%prog NCT3"
parser = optparse.OptionParser(description=description,usage=usage,version=version)
# Mandatory arguments
mandatoryGroup = optparse.OptionGroup(parser, "Mandatory Arguments",
"At a minimum these arguments must be specified")
mandatoryGroup.add_option('-g','--geo_glob',
action="store",
dest="geoGlob" ,
type="string",
help="The glob wildcard specifying the MODIS geolocation files")
mandatoryGroup.add_option('--red_glob',
action="store",
dest="redGlob",
type="string",
help="The full path of the VIIRS SDR file corresponding to the red channel.")
mandatoryGroup.add_option('--green_glob',
action="store",
dest="greenGlob",
type="string",
help="The full path of the VIIRS SDR file corresponding to the green channel.")
mandatoryGroup.add_option('--blue_glob',
action="store",
dest="blueGlob",
type="string",
help="The full path of the VIIRS SDR file corresponding to the blue channel.")
mandatoryGroup.add_option('-p','--product',
action="store",
dest="rgbProd",
type="choice",
choices=prodChoices,
help='''The VIIRS RGB product.\n\n
Possible values are...
%s
''' % (prodChoices.__str__()[1:-1]))
parser.add_option_group(mandatoryGroup)
# Optional arguments
optionalGroup = optparse.OptionGroup(parser, "Extra Options",
"These options may be used to customize plot characteristics.")
optionalGroup.add_option('-r','--svn_revision',
action="store",
dest="svnRevision",
default=string.split(__version__," ")[2],
type="string",
help="The Subversion revision number/tag of this script")
optionalGroup.add_option('--radiance',
action="store_true",
dest="isRadiance",
help="Show radiance for the VIIRS SDR.")
optionalGroup.add_option('-R','--svn_repo_path',
action="store",
dest="svnRepoPath",
default="https://svn.ssec.wisc.edu/repos/geoffc/Python/VIIRS/"+path.basename(sys.argv[0]),
type="string",
help="The full Subversion repository path of this script [default is %default].")
optionalGroup.add_option('-d','--dpi',
action="store",
dest="dpi",
default='200.',
type="float",
help="The resolution in dots per inch of the output png file. [default: %default]")
optionalGroup.add_option('--dtheta',
action="store",
dest="degInc",
default='1.',
type="float",
help="The resolution in degrees of the lat/lon grids. [default: %default]")
optionalGroup.add_option('-S','--stride',
action="store",
dest="stride",
default='1',
type="int",
help="Sample every STRIDE pixels in the VIIRS product. [default: %default]")
optionalGroup.add_option('-P','--pointSize',
action="store",
dest="pointSize",
default='0.15',
type="float",
help="Size of the plot point used to represent each pixel. [default: %default]")
optionalGroup.add_option('-m','--map_res',
action="store",
dest="mapRes",
default='c',
type="choice",
choices=mapRes,
help="The map coastline resolution. Possible values are 'c' (coarse),'l' (low) and 'i' (intermediate). [default: %default]")
optionalGroup.add_option('--map_proj',
action="store",
dest="mapProj",
default='cyl',
type="choice",
choices=mapProjections.keys(),
help='''The map projection. Possible values are...\n\n
%s. [default: '%s']
''' % ( ', '.join(["'%s' (%s)"%(a,b) for a,b in zip(mapProjections.keys(),mapProjections.values())]),'cyl' ))
optionalGroup.add_option('-a','--map_annotation',
action="store",
dest="mapAnn",
#default='',
type="string",
help="The map legend describing the dataset being shown. [default: IPPROD]")
optionalGroup.add_option('-o','--output_file',
action="store",
dest="outputFile",
default="outGrid.h5",
type="string",
help="The full path of the output HDF5 file. [default: %default]")
optionalGroup.add_option('--png-file',
action="store_true",
dest="makePngFile",
help="Generate a png plot of the grid file.")
optionalGroup.add_option('--average',
action="store_true",
dest="makeAverage",
help="Average a series of previously generated grid files.")
optionalGroup.add_option('--latMax',
action="store",
default='90.',
type="float",
dest="latMax",
help="Maximum value of latitude.")
optionalGroup.add_option('--latMin',
action="store",
default='-90.',
type="float",
dest="latMin",
help="Minimum value of latitude.")
optionalGroup.add_option('--lonMax',
action="store",
default='179.5',
type="float",
dest="lonMax",
help="Maximum value of longitude.")
optionalGroup.add_option('--lonMin',
action="store",
default='-180.',
type="float",
dest="lonMin",
help="Minimum value of longitude.")
optionalGroup.add_option('--image_map',
action="store_true",
dest="createImageMap",
help="Produce an unprojected image file of the RGB scene.")
optionalGroup.add_option('-v', '--verbose',
dest='verbosity',
action="count",
default=0,
help='each occurrence increases verbosity 1 level from ERROR: -v=WARNING -vv=INFO -vvv=DEBUG')
parser.add_option_group(optionalGroup)
# Parse the arguments from the command line
(options, args) = parser.parse_args()
# Copy the Product name to the map annotation
if options.mapAnn == None :
options.mapAnn = options.rgbProd
else :
pass
options.mapAnn = string.replace(options.mapAnn,'\\n','\n')
# Set up the logging levels
levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
output_logFile = "plot_viirs_RGB_crefl.py.log"
try :
logFormat = '(%(levelname)s): %(filename)s ; %(module)s::%(funcName)s ; line %(lineno)d -> %(message)s'
#logging.basicConfig(format=logFormat, filename=output_logFile, filemode='w', level = levels[options.verbosity])
logging.basicConfig(format=logFormat, level = levels[options.verbosity])
except IndexError :
print "ERROR : Invalid verbosity flag. Only -v, -vv, -vvv allowed, exiting..."
sys.exit(1)
verbosity = '-'
for v in range(options.verbosity) :
verbosity += 'v'
# Check that all of the mandatory options are given. If one or more
# are missing, print error message and exit...
mandatories = ['geoGlob','redGlob','greenGlob','blueGlob','rgbProd']
mand_errors = ["Missing mandatory argument [-g GEOGLOB | --geo_glob=GEOGLOB]",
"Missing mandatory argument [ --red_glob=REDGLOB]",
"Missing mandatory argument [ --green_glob=GREENGLOB]",
"Missing mandatory argument [ --blue_glob=BLUEGLOB]",
"Missing mandatory argument [-p RGBPROD | --product=RGBPROD]"
]
isMissingMand = False
for m,m_err in zip(mandatories,mand_errors):
if not options.__dict__[m]:
isMissingMand = True
print m_err
if isMissingMand :
parser.error("Incomplete mandatory arguments, aborting...")
#prodFileName='''%s\n%s REV:%s''' % (path.basename(options.ipFile),options.svnRepoPath,str(options.svnRevision))
# Get list of the matching geolocation and product files
geoList,redList,greenList,blueList = granuleFiles(options.geoGlob,options.redGlob,options.greenGlob,options.blueGlob)
#print "The geo and prod lists are..."
#for geoFile,redFile,greenFile,blueFile in zip(geoList,redList,greenList,blueList):
#print path.basename(geoFile), \
#path.basename(redFile),path.basename(greenFile),path.basename(blueFile)
# Check that we have some files to work with
if (len(geoList)==0 or len(redList)==0 or len(greenList)==0 or len(blueList)==0) :
print "There are no files with the globs \n%s or \n%s or \n%s or \n%s, exiting..." % (options.geoGlob,\
options.redGlob,options.greenGlob,options.blueGlob)
sys.exit(1)
# Get the current path to output and files
if options.outputFile is not None :
outDir = path.dirname(options.outputFile)
gridFile = options.outputFile
pngFile = string.replace(gridFile,'.h5','.png')
print "outDir = ",outDir
print "gridFile = ",gridFile
print "pngFile = ",pngFile
else :
outDir = path.abspath(path.curdir)
gridFile = options.outputFile
pngFile = string.replace(gridFile,'.h5','.png')
print "outDir = ",outDir
print "gridFile = ",gridFile
print "pngFile = ",pngFile
# Either make an average of previously generated grid files, or generate a new grid file
if options.makeAverage :
print "We are making an average..."
# Average a set of previously generated grid files
gridObj = makeGridAverage(redList,gridFile,prodName=options.rgbProd)
else :
print "We are not making an average..."
# Initialise the grid arrays and grid object
degInc = options.degInc
#grids = np.mgrid[-90.:90.+degInc:degInc,-180:180.:degInc]
print "latMin,latMax = ",options.latMin,options.latMax
print "lonMin,lonMax = ",options.lonMin,options.lonMax
grids = np.mgrid[options.latMin:options.latMax+degInc:degInc,options.lonMin:options.lonMax:degInc]
gridLat,gridLon = grids[0],grids[1]
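# np.mgrid builds 2-D latitude and longitude arrays at degInc spacing: rows run
# from latMin to latMax inclusive, columns from lonMin up to (but excluding)
# lonMax, so the defaults (-90..90, -180..179.5, 1 degree) give a 181 x 360 grid.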
gridObj = SnapToGrid()
gridObj.gridLat = gridLat
gridObj.gridLon = gridLon
# Append the geolocation and product file names to grid object
# FIXME : Is this duplicated in the grid method ?
gridObj.dataFileList['red'] = []
gridObj.dataFileList['green'] = []
gridObj.dataFileList['blue'] = []
for geoFile,redFile,greenFile,blueFile in zip(geoList,redList,greenList,blueList) :
gridObj.geoFileList.append(geoFile)
gridObj.dataFileList['red'].append(redFile)
gridObj.dataFileList['green'].append(greenFile)
gridObj.dataFileList['blue'].append(blueFile)
# Call the required gridding routine
gridObj = grid_RGB(geoList,redList,greenList,blueList,'sdr',prodName=options.rgbProd,oldGridObj=gridObj)
# Make the TrueColour array and copy to the grid object
gridObj = viirs_rgb(gridObj,options.rgbProd,options.createImageMap,imageMapName=pngFile)
# Copy the gridObj object to a HDF5 file.
gridObj.gridObjToFile(gridFile=gridFile)
# Make a plot if desired
if options.makePngFile :
print "mapAnn= ",options.mapAnn
print "mapProj= ",options.mapProj
dpi = options.dpi
mapAnn = options.mapAnn
pointSize = options.pointSize
mapRes = options.mapRes
mapProj = options.mapProj
#mollPlot_RGB(gridObj.gridLat,gridObj.gridLon,gridObj.gridData[options.rgbProd], \
mollPlot_RGB(gridObj,options.rgbProd,titleStr=mapAnn,\
pngDpi=dpi, pointSize=pointSize, mapRes=mapRes, \
mapProj=mapProj, pngName=pngFile)
print "Exiting..."
sys.exit(0)
if __name__ == '__main__':
main()