diff --git a/pyglance/glance/compare.py b/pyglance/glance/compare.py
index daad73e6fb79f6ff4300db8d00d4baf740505a4b..95bc2e480c63c5c0e8d4ab8d7a6d62b2a05261fc 100644
--- a/pyglance/glance/compare.py
+++ b/pyglance/glance/compare.py
@@ -11,6 +11,7 @@ Copyright (c) 2009 University of Wisconsin SSEC. All rights reserved.
 
 import os, sys, logging, re, subprocess, datetime
 import imp as imp
+from pprint import pprint, pformat
 from numpy import *
 import pkg_resources
 from pycdf import CDFError
@@ -19,12 +20,12 @@ from urllib import quote
 
 import glance.io as io
 import glance.delta as delta
+import glance.data as dataobj
 import glance.plot as plot
 import glance.report as report
 import glance.stats as statistics
-import glance.data as dataobj
-import glance.collocation as collocation
 import glance.plotcreatefns as plotcreate
+import glance.collocation as collocation
 
 LOG = logging.getLogger(__name__)
 
@@ -939,11 +940,11 @@ def colocateToFile_library_call(a_path, b_path, var_list=[ ],
     # handle the longitude and latitude colocation
     LOG.info("Colocating raw longitude and latitude information")
     aColocationInfomation, bColocationInformation, totalNumberOfMatchedPoints = \
-        collocation.create_colocation_mapping_within_epsilon((lon_lat_data['a']['lon'], lon_lat_data['a']['lat']),
-                                                          (lon_lat_data['b']['lon'], lon_lat_data['b']['lat']),
-                                                          runInfo['lon_lat_epsilon'],
-                                                          invalidAMask=lon_lat_data['a']['inv_mask'],
-                                                          invalidBMask=lon_lat_data['b']['inv_mask'])
+        collocation.create_colocation_mapping_within_epsilon((lon_lat_data['a']['lon'], lon_lat_data['a']['lat']),
+                                                             (lon_lat_data['b']['lon'], lon_lat_data['b']['lat']),
+                                                             runInfo['lon_lat_epsilon'],
+                                                             invalidAMask=lon_lat_data['a']['inv_mask'],
+                                                             invalidBMask=lon_lat_data['b']['inv_mask'])
     (colocatedLongitude, colocatedLatitude, (numMultipleMatchesInA, numMultipleMatchesInB)), \
     (unmatchedALongitude, unmatchedALatitude), \
     (unmatchedBLongitude, unmatchedBLatitude) = \
@@ -1196,9 +1197,7 @@ def reportGen_library_call (a_path, b_path, var_list=[ ],
         varRunInfo['time'] = datetime.datetime.ctime(datetime.datetime.now()) # todo is this needed?
         didPass, epsilon_failed_fraction, \
                  non_finite_fail_fraction, \
-                 r_squared_value = _check_pass_or_fail(varRunInfo,
-                                                       variable_stats,
-                                                       defaultValues)
+                 r_squared_value = _check_pass_or_fail(varRunInfo, variable_stats, defaultValues)
         varRunInfo['did_pass'] = didPass
         
         # based on the settings and whether the variable passsed or failed,
@@ -1387,12 +1386,10 @@ def stats_library_call(afn, bfn, var_list=[ ],
         else:
             amiss,bmiss = missing,missing
         LOG.debug('comparing %s with epsilon %s and missing %s,%s' % (name,epsilon,amiss,bmiss))
-        aval = aData
-        bval = bData
         print >> output_channel, '-'*32
         print >> output_channel, name
         print >> output_channel, ''
-        lal = list(statistics.summarize(aval,bval,epsilon,(amiss,bmiss)).items())
+        lal = list(statistics.summarize(aData, bData, epsilon, (amiss,bmiss)).items())
         lal.sort()
         for dictionary_title, dict_data in lal:
             print >> output_channel, '%s' % dictionary_title
@@ -1748,6 +1745,7 @@ python -m glance
 
     if (not args) or (args[0] not in commands):
         parser.print_help()
+        # TODO more descriptions?
        help()
         return 9
     else:
diff --git a/pyglance/glance/data.py b/pyglance/glance/data.py
index a6bd5fbeeb40a723ebaebf415ec3a51b8c44e2ac..b138e74006775b9428143fed4adda3e651146065 100644
--- a/pyglance/glance/data.py
+++ b/pyglance/glance/data.py
@@ -148,6 +148,15 @@ class DiffInfoObject (object) :
         (if both a value and percent are present, two epsilon tests will be done)
     """
 
+    # Upcasts to be used in difference computation to avoid overflow. Currently only unsigned
+    # ints are upcast.
+    # FUTURE: handle uint64s as well (there is no int128, so might have to detect overflow)
+    DATATYPE_UPCASTS = {
+                        np.uint8:  np.int16,
+                        np.uint16: np.int32,
+                        np.uint32: np.int64
+                        }
+
     def __init__(self, aDataObject, bDataObject, epsilonValue=0.0, epsilonPercent=None) :
         """
 
@@ -165,15 +174,6 @@ class DiffInfoObject (object) :
         self.diff_data_object = DiffInfoObject.analyze(aDataObject, bDataObject,
                                                        epsilonValue, epsilonPercent)
 
-    # Upcasts to be used in difference computation to avoid overflow. Currently only unsigned
-    # ints are upcast.
-    # FUTURE: handle uint64s as well (there is no int128, so might have to detect overflow)
-    DATATYPE_UPCASTS = {
-                        np.uint8:  np.int16,
-                        np.uint16: np.int32,
-                        np.uint32: np.int64
-                        }
-
     @staticmethod
     def _get_shared_type_and_fill_value(data1, data2, fill1=None, fill2=None) :
         """
diff --git a/pyglance/glance/figures.py b/pyglance/glance/figures.py
index 83a554d938d9c20dd63ec17eac155d81d1a43acb..537d9f2418bdb65713993e42940621606eda7858 100644
--- a/pyglance/glance/figures.py
+++ b/pyglance/glance/figures.py
@@ -46,7 +46,7 @@ greenColorMap = colors.LinearSegmentedColormap('greenColorMap', greenColorMapDat
 # todo, the use of the offset here is covering a problem with
 # contourf hiding data exactly at the end of the range and should
 # be removed if a better solution can be found
-def _make_range(data_a, invalid_a_mask, num_intervals, offset_to_range=0.0, data_b=None, invalid_b_mask=None) :
+def _make_range(data_a, valid_a_mask, num_intervals, offset_to_range=0.0, data_b=None, valid_b_mask=None) :
     """
     get an array with numbers representing the bounds of a set of ranges
     that covers all the data present in data_a
@@ -55,13 +55,13 @@ def _make_range(data_a, invalid_a_mask, num_intervals, offset_to_range=0.0, data
     if the b data is passed, a total range that encompasses both sets of data will be used
     """
 
-    minVal = delta.min_with_mask(data_a, ~invalid_a_mask)
-    maxVal = delta.max_with_mask(data_a, ~invalid_a_mask)
+    minVal = delta.min_with_mask(data_a, valid_a_mask)
+    maxVal = delta.max_with_mask(data_a, valid_a_mask)
 
     # if we have a second set of data, include it in the min/max calculations
     if (data_b is not None) :
-        minVal = min(delta.min_with_mask(data_b, ~invalid_b_mask), minVal)
-        maxVal = max(delta.max_with_mask(data_b, ~invalid_b_mask), maxVal)
+        minVal = min(delta.min_with_mask(data_b, valid_b_mask), minVal)
+        maxVal = max(delta.max_with_mask(data_b, valid_b_mask), maxVal)
 
     minVal = minVal - offset_to_range
     maxVal = maxVal + offset_to_range
@@ -318,7 +318,7 @@ def create_mapped_figure(data, latitude, longitude, baseMapInstance, boundingAxe
     # this is controllable with the "dataRanges" parameter for discrete data display
     if not (data is None) :
         if dataRanges is None :
-            dataRanges = _make_range(data, invalidMask, 50, offset_to_range=offsetToRange)
+            dataRanges = _make_range(data, ~invalidMask, 50, offset_to_range=offsetToRange)
         else: # make sure the user range will not discard data TODO, find a better way to handle this
             dataRanges[0] = dataRanges[0] - offsetToRange
             dataRanges[len(dataRanges) - 1] = dataRanges[len(dataRanges) - 1] + offsetToRange
@@ -340,7 +340,7 @@ def create_mapped_figure(data, latitude, longitude, baseMapInstance, boundingAxe
     # show a generic color bar
     doLabelRanges = False
     if not (data is None) :
-        cbar = colorbar(format='%.3f')
+        cbar = colorbar(format='%.3g')
 
         # if there are specific requested labels, add them
         if not (dataRangeNames is None) :
@@ -433,7 +433,7 @@ def create_simple_figure(data, figureTitle, invalidMask=None, tagData=None, colo
     # draw our data
     im = imshow(cleanData, **kwargs)
     # make a color bar
-    cbar = colorbar(format='%.3f')
+    cbar = colorbar(format='%.3g')
 
     # and some informational stuff
     axes.set_title(figureTitle)
diff --git a/pyglance/glance/plotcreatefns.py b/pyglance/glance/plotcreatefns.py
index a98bb2c994327a7b23b02e2a1a149766b40244a0..38ae2342008c600d7c918cc96e76b7b87c40a573 100644
--- a/pyglance/glance/plotcreatefns.py
+++ b/pyglance/glance/plotcreatefns.py
@@ -84,6 +84,11 @@ def select_projection(boundingAxes) :
     # a wider range of projections.
     projToUse = 'cyl'
 
+    # TODO, the cylindrical projections now have some sort of bizarre behavior where they
+    # show crazy things in the empty space in soundings data. instead we are moving back to
+    # conics for the moment and additional testing has been added to widen the viewing window
+    #projToUse = 'lcc'
+
     # how big is the field of view?
     longitudeRange  = abs(boundingAxes[1] - boundingAxes[0])
     latitudeRange   = abs(boundingAxes[3] - boundingAxes[2])
@@ -128,6 +133,51 @@ def _make_axis_and_basemap(lonLatDataDict, goodInAMask, goodInBMask, shouldUseSh
     baseMapInstance, fullAxis = maps.create_basemap(lonLatDataDict['common']['lon'],
                                                     lonLatDataDict['common']['lat'],
                                                     fullAxis, select_projection(fullAxis))
+    """ TODO, this doesn't work, but we will need something eventually
+    if (projection is 'lcc') :
+        # TODO this is a hack to make sure all my data is visible in a lcc projection
+        # otherwise the conic projection may cause part of the data to curve
+        # out of the field of view
+        # at some point in the future this should be integrated in a more elegant way
+
+        # preprocess a copy of our lon/lat data
+        lonACopy = lonLatDataDict['a']['lon'].copy()
+        lonACopy[~goodInAMask] = maps.badLonLat
+        latACopy = lonLatDataDict['a']['lat'].copy()
+        latACopy[~goodInAMask] = maps.badLonLat
+        lonBCopy = lonLatDataDict['b']['lon'].copy()
+        lonBCopy[~goodInBMask] = maps.badLonLat
+        latBCopy = lonLatDataDict['b']['lat'].copy()
+        latBCopy[~goodInBMask] = maps.badLonLat
+
+        # find out where the longitude and latitude data would be in x and y
+        xTempA, yTempA = baseMapInstance(lonACopy, latACopy)
+        xTempB, yTempB = baseMapInstance(lonBCopy, latBCopy)
+        maxX = max(max(xTempA[goodInAMask]), max(xTempB[goodInBMask]))
+        minX = min(min(xTempA[goodInAMask]), min(xTempB[goodInBMask]))
+        maxY = max(max(yTempA[goodInAMask]), max(yTempB[goodInBMask]))
+        minY = min(min(yTempA[goodInAMask]), min(yTempB[goodInBMask]))
+
+        # the corners of a bounding box (starting at the upper right going clockwise)
+        cornerX = [maxX, maxX, minX, minX]
+        cornerY = [maxY, minY, minY, maxY]
+
+        # now where is this in the lon / lat space?
+        newLon, newLat = baseMapInstance(cornerX, cornerY, inverse=True)
+        newLon = np.array(newLon)
+        newLat = np.array(newLat)
+        # use this to make a new axis that will include all the data
+        borderAxis = get_visible_axes(newLon, newLat, ones(newLon.shape, dtype=bool))
+        fullAxis = borderAxis
+        #fullAxis = [min(borderAxis[0], fullAxis[0]), max(borderAxis[1], fullAxis[1]),
+        #            min(borderAxis[2], fullAxis[2]), max(borderAxis[3], fullAxis[3])]
+
+        # make our new and improved basemap
+        baseMapInstance, fullAxis = maps.create_basemap(lonLatDataDict['common']['lon'],
+                                                        lonLatDataDict['common']['lat'],
+                                                        fullAxis, projection)
+    """
+
     # figure out the shared range for A and B's data, by default don't share a range
     sharedRange = None
     if (shouldUseSharedRangeForOriginal) :
@@ -316,6 +366,7 @@ class MappedContourPlotFunctionFactory (PlottingFunctionFactory) :
         assert(goodInAMask is not None)
         assert(goodInBMask is not None)
 
+        # TODO, do I also need to encorporate the lon/lat invalid masks with the good masks?
         fullAxis, baseMapInstance, sharedRange = _make_axis_and_basemap(lonLatDataDict,
                                                                         goodInAMask, goodInBMask,
                                                                         shouldUseSharedRangeForOriginal,
@@ -490,7 +541,9 @@ class MappedQuiverPlotFunctionFactory (PlottingFunctionFactory) :
         assert(goodInAMask is not None)
         assert(goodInBMask is not None)
 
-        fullAxis, baseMapInstance, _ = _make_axis_and_basemap(lonLatDataDict, goodInAMask, goodInBMask, variableDisplayName=variableDisplayName)
+        # TODO, do I also need to encorporate the lon/lat invalid masks with the good masks?
+        fullAxis, baseMapInstance, _ = _make_axis_and_basemap(lonLatDataDict, goodInAMask, goodInBMask,
+                                                              variableDisplayName=variableDisplayName)
 
 
         # make the plotting functions
@@ -776,9 +829,11 @@ class BinTupleAnalysisFunctionFactory (PlottingFunctionFactory) :
         assert(tupleIndex < len(aData.shape))
 
         # reorder and reshape our data into the [bin][case][tuple] form
-        reorderMapObject = delta.BinTupleMapping(aData.shape, binIndexNumber=binIndex, tupleIndexNumber=tupleIndex)
-        aData = reorderMapObject.reorder_for_bin_tuple(aData)
-        bData = reorderMapObject.reorder_for_bin_tuple(bData)
+        reorderMapObject = delta.BinTupleMapping(aData.shape,
+                                                 binIndexNumber=binIndex,
+                                                 tupleIndexNumber=tupleIndex)
+        aData       = reorderMapObject.reorder_for_bin_tuple(aData)
+        bData       = reorderMapObject.reorder_for_bin_tuple(bData)
         goodInAMask = reorderMapObject.reorder_for_bin_tuple(goodInAMask)
         goodInBMask = reorderMapObject.reorder_for_bin_tuple(goodInBMask)
         absDiffData = reorderMapObject.reorder_for_bin_tuple(absDiffData)
@@ -804,7 +859,7 @@ class BinTupleAnalysisFunctionFactory (PlottingFunctionFactory) :
                                                            goodInBothMask[binNumber][caseNumber])
 
             # make the basic histogram for this binNumber
            dataForHistogram = rmsDiffValues[np.isfinite(rmsDiffValues)] # remove any invalid data "nan" values
-            dataForHistogram = rmsDiffValues[isfinite(rmsDiffValues)] # remove any invalid data "nan" values
+            dataForHistogram = rmsDiffValues[np.isfinite(rmsDiffValues)] # remove any invalid data "nan" values
             if ('do_plot_histogram' not in doPlotSettingsDict) or (doPlotSettingsDict['do_plot_histogram']) :
                 def make_histogram(binNumber=binNumber, dataForHistogram=dataForHistogram):
                     return figures.create_histogram(dataForHistogram, numHistogramSections,
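
Note on the data.py change: DATATYPE_UPCASTS is moved up to class level on DiffInfoObject, ahead of __init__, so the table is available when differences are computed. Per its comment, the point is to promote unsigned integer data to a wider signed type before subtracting, so a - b cannot wrap around. The sketch below is illustrative only and not part of glance; the safe_difference helper and example values are hypothetical, but the upcast table mirrors the one added in the diff.

import numpy as np

# Same upcast table the diff adds to DiffInfoObject: unsigned ints are promoted
# to the next larger signed type so the difference cannot overflow.
DATATYPE_UPCASTS = {
    np.uint8:  np.int16,
    np.uint16: np.int32,
    np.uint32: np.int64,
}

def safe_difference(a_data, b_data):
    """Hypothetical helper (not in glance): upcast unsigned arrays before subtracting."""
    shared_type = np.result_type(a_data, b_data).type
    upcast_type = DATATYPE_UPCASTS.get(shared_type, shared_type)
    return a_data.astype(upcast_type) - b_data.astype(upcast_type)

a = np.array([3, 10], dtype=np.uint8)
b = np.array([5,  4], dtype=np.uint8)
print(a - b)                  # [254   6]  -- plain uint8 subtraction wraps around
print(safe_difference(a, b))  # [ -2   6]  -- computed in int16, sign is preserved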
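
Similarly, the figures.py change flips _make_range to take valid masks (valid_a_mask / valid_b_mask) and negates invalidMask once at the call site, instead of negating inside the helper. A rough sketch of that calling convention follows; min_with_mask and max_with_mask here are hypothetical stand-ins for delta.min_with_mask / delta.max_with_mask, and the range construction is only an approximation of what _make_range returns.

import numpy as np

def min_with_mask(data, valid_mask):
    # stand-in: minimum over only the points marked valid
    return data[valid_mask].min()

def max_with_mask(data, valid_mask):
    # stand-in: maximum over only the points marked valid
    return data[valid_mask].max()

def make_range(data, valid_mask, num_intervals, offset_to_range=0.0):
    # sketch of the new convention: the caller passes ~invalidMask directly
    min_val = min_with_mask(data, valid_mask) - offset_to_range
    max_val = max_with_mask(data, valid_mask) + offset_to_range
    return np.linspace(min_val, max_val, num_intervals + 1)

data        = np.array([1.0, np.nan, 5.0, 9.0])
invalidMask = ~np.isfinite(data)
print(make_range(data, ~invalidMask, 4))   # bounds from finite points only: [1. 3. 5. 7. 9.]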