diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..da13992faa7ff5bc07fec2cf22bb5f33e493f479
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+*.swp
+
+
+__pycache__
diff --git a/main.py b/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..2836c5971f522439a795a51028b078582f7aeac6
--- /dev/null
+++ b/main.py
@@ -0,0 +1,40 @@
+import tests
+import ruamel_yaml as yml
+import numpy as np
+
+
+def main():
+
+    rad1 = [[255, 260, 265, 248, 223],
+            [278, 285, 270, 268, 256],
+            [275, 273, 266, 254, 259]]
+    rad2 = [[270, 273, 271, 268, 265],
+            [277, 286, 275, 277, 269],
+            [280, 281, 272, 270, 267]]
+    thresh_file = '/home/pveglio/mvcm_leo/thresholds/new_thresholds.mvcm.snpp.v1.0.0.yaml'
+
+    # Read the threshold definitions from the YAML file
+    with open(thresh_file) as f:
+        text = f.read()
+
+    rad1 = np.array(rad1)
+    rad2 = np.array(rad2)
+
+    confidence = np.zeros((2, rad1.shape[0], rad1.shape[1]))
+
+    # Both tests use thresholds from the Daytime_Ocean section
+    thresholds = yml.safe_load(text)
+    thr_11um = thresholds['Daytime_Ocean']
+    thr_11_4diff = thresholds['Daytime_Ocean']
+
+    confidence[0, :, :] = tests.test_11um(rad1, thr_11um['bt11'])
+    confidence[1, :, :] = tests.test_11_4diff(rad1, rad2, thr_11_4diff['test11_4lo'])
+
+    print(f'Confidence[0,:,:]: \n {confidence[0, :, :]}')
+    print(f'Confidence[1,:,:]: \n {confidence[1, :, :]}')
+
+    return confidence
+
+
+if __name__ == "__main__":
+    main()
diff --git a/read_data.py b/read_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..90037500d114439738d51760654cb6f9b1a62bad
--- /dev/null
+++ b/read_data.py
@@ -0,0 +1,32 @@
+import xarray as xr
+
+
+def read_data(sensor: str, l1b_filename: str, geo_filename: str):
+
+    data = xr.open_dataset(l1b_filename, group='observation_data', decode_cf=False)
+    in_data = xr.Dataset()
+    if sensor.lower() == 'viirs':
+        for band in list(data.variables):
+            # Bands stored as reflectance: apply the scale factor (and bias correction when present)
+            if 'reflectance' in data[band].long_name:
+                if hasattr(data[band], 'VCST_scale_factor'):
+                    scale_factor = data[band].VCST_scale_factor * data[band].bias_correction
+                else:
+                    scale_factor = data[band].radiance_scale_factor
+                in_data[band] = (('number_of_lines', 'number_of_pixels'),
+                                 data[band].values * scale_factor)
+
+            # Bands stored as radiance: look up brightness temperature from the band's LUT
+            elif 'radiance' in data[band].long_name:
+                in_data[band] = (('number_of_lines', 'number_of_pixels'),
+                                 data[f'{band}_brightness_temperature_lut'].values[data[band].values])
+            else:
+                pass
+
+    data = xr.open_dataset(geo_filename, group='geolocation_data')
+
+    in_data = in_data.merge(data)
+
+    return in_data
+
+
diff --git a/tests.py b/tests.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5070ee1d7adbd3a8813235be3c6e606a1beea82
--- /dev/null
+++ b/tests.py
@@ -0,0 +1,233 @@
+import numpy as np
+
+
+_test_rad = np.random.randint(25, size=[6, 8])
+# _test_thr = [15, 10, 5, 1, 1]  # alternative thresholds with hicut > locut
+_test_thr = [5, 10, 15, 1, 1]
+
+
+def test_11um(rad, threshold):
+
+    # TODO: guard against NaN radiances and honor threshold[4] before applying the test
+    confidence = conf_test(rad, threshold)
+
+    return confidence
+
+
+def test_11_4diff(rad1, rad2, threshold):
+
+    raddiff = rad1 - rad2
+    confidence = conf_test(raddiff, threshold)
+
+    return confidence
+
+
+def simple_threshold_test(rad, threshold):
+    return conf_test(rad, threshold)
+
+
+class CloudMaskTests(object):
+
+    def __init__(self, scene, radiance, coefficients):
+        self.scene = scene
+        self.radiance = radiance
+        self.coefficients = coefficients
+
+    def select_coefficients(self):
+        pass
+
+    def test_G1(self):
+        pass
+
+    def test_G2(self):
+        pass
+
+    def test_G3(self):
+        pass
+
+    def test_G4(self):
+        pass
+
+    def overall_confidence(self):
+        pass
+
+
+def conf_test(rad=_test_rad, thr=_test_thr):
+    '''
+    Assuming a linear function between the minimum and maximum confidence levels, the plot
+    below shows how the confidence (y axis) is computed as a function of radiance (x axis).
+    This case illustrates alpha < gamma; if alpha > gamma the plot is mirrored and the
+    confidence decreases with increasing radiance.
+                                  gamma
+    c  1                      ________
+    o  |                     /
+    n  |                    /
+    f  |                   /
+    i  |          beta    /
+    d 1/2 .............../
+    e  |                /
+    n  |               /
+    c  |              /
+    e  0 ____________/
+       |           alpha
+       --------- radiance ---------->
+    '''
+
+    coeff = np.power(2, (thr[3] - 1))
+    hicut = thr[0]
+    beta = thr[1]
+    locut = thr[2]
+    power = thr[3]
+    radshape = rad.shape
+    rad = rad.reshape((rad.shape[0]*rad.shape[1]))
+    c = np.zeros(rad.shape)
+
+    if hicut > locut:
+        gamma = thr[0]
+        alpha = thr[2]
+        flipped = False
+    else:
+        gamma = thr[2]
+        alpha = thr[0]
+        flipped = True
+
+    # Rad between alpha and beta
+    range_ = 2. * (beta - alpha)
+    s1 = (rad[rad <= beta] - alpha) / range_
+    if not flipped:
+        c[rad <= beta] = coeff * np.power(s1, power)
+    else:
+        c[rad <= beta] = 1. - (coeff * np.power(s1, power))
+
+    # Rad between beta and gamma
+    range_ = 2. * (beta - gamma)
+    s1 = (rad[rad > beta] - gamma) / range_
+    if not flipped:
+        c[rad > beta] = 1. - (coeff * np.power(s1, power))
+    else:
+        c[rad > beta] = coeff * np.power(s1, power)
+
+    # Rad outside alpha-gamma interval
+    if not flipped:
+        c[rad > gamma] = 1
+        c[rad < alpha] = 0
+    else:
+        c[rad > gamma] = 0
+        c[rad < alpha] = 1
+
+    c[c > 1] = 1
+    c[c < 0] = 0
+
+    confidence = c.reshape(radshape)
+
+    return confidence
+
+
+def conf_test_dble(rad, coeffs):
+    '''
+    Double-sided version of conf_test: the confidence ramps between two sets of limits.
+                 gamma1                           gamma2
+    c  1 ________                                ________
+    o  |         \                              /
+    n  |          \                            /
+    f  |           \                          /
+    i  |            \   beta1        beta2   /
+    d 1/2            \....|            |..../
+    e  |              \                    /
+    n  |               \                  /
+    c  |                \                /
+    e  0                 \______________/
+       |              alpha1         alpha2
+       --------------------- radiance ------------------------->
+    '''
+
+    hicut = [coeffs[0], coeffs[1]]
+    locut = [coeffs[2], coeffs[3]]
+    midpt = [coeffs[4], coeffs[5]]
+    power = coeffs[6]
+
+    gamma1 = hicut[0]
+    gamma2 = hicut[1]
+    alpha1 = locut[0]
+    alpha2 = locut[1]
+    beta1 = midpt[0]
+    beta2 = midpt[1]
+
+    coeff = np.power(2, (power - 1))
+    radshape = rad.shape
+    rad = rad.reshape((rad.shape[0]*rad.shape[1]))
+    c = np.zeros(rad.shape)
+
+    # Find if interval between inner cutoffs passes or fails test
+    if alpha1 - gamma1 > 0:
+
+        # Value is within range of lower set of limits
+        range_ = 2 * (beta1 - alpha1)
+        s1 = (rad[(rad <= alpha1) & (rad >= beta1)] - alpha1) / range_
+        c[(rad <= alpha1) & (rad >= beta1)] = coeff * np.power(s1, power)
+
+        range_ = 2 * (beta1 - gamma1)
+        s1 = (rad[(rad <= alpha1) & (rad < beta1)] - gamma1) / range_
+        c[(rad <= alpha1) & (rad < beta1)] = coeff * np.power(s1, power)
+
+        # Value is within range of upper set of limits
+        range_ = 2 * (beta2 - alpha2)
+        s1 = (rad[(rad > alpha1) & (rad <= beta2)] - alpha2) / range_
+        c[(rad > alpha1) & (rad <= beta2)] = coeff * np.power(s1, power)
+
+        range_ = 2 * (beta2 - gamma2)
+        s1 = (rad[(rad > alpha1) & (rad > beta2)] - gamma2) / range_
+        c[(rad > alpha1) & (rad > beta2)] = coeff * np.power(s1, power)
+
+        # Inner region fails test
+        # Check for value beyond function range
+        c[(rad > alpha1) & (rad < alpha2)] = 0
+        c[(rad < gamma1) | (rad > gamma2)] = 1
+
+    else:
+
+        # Value is within range of lower set of limits
+        range_ = 2 * (beta1 - alpha1)
+        s1 = (rad[(rad <= gamma1) & (rad <= beta1)] - alpha1) / range_
+        c[(rad <= gamma1) & (rad <= beta1)] = coeff * np.power(s1, power)
+
+        range_ = 2 * (beta1 - gamma1)
+        s1 = (rad[(rad <= gamma1) & (rad > beta1)] - gamma1) / range_
+        c[(rad <= gamma1) & (rad > beta1)] = coeff * np.power(s1, power)
+
+        # Value is within range of upper set of limits
+        range_ = 2 * (beta2 - alpha2)
+        s1 = (rad[(rad > gamma1) & (rad >= beta2)] - alpha2) / range_
+        c[(rad > gamma1) & (rad >= beta2)] = coeff * np.power(s1, power)
+
+        range_ = 2 * (beta2 - gamma2)
+        s1 = (rad[(rad > gamma1) & (rad < beta2)] - gamma2) / range_
+        c[(rad > gamma1) & (rad < beta2)] = coeff * np.power(s1, power)
+
+        # Inner region passes test
+        # Check for value beyond function range
+        c[(rad > gamma1) & (rad < gamma2)] = 1
+        c[(rad < alpha1) | (rad > alpha2)] = 0
+
+    c[c > 1] = 1
+    c[c < 0] = 0
+
+    confidence = c.reshape(radshape)
+
+    return confidence
+
+
+def test():
+    rad = np.random.randint(50, size=[4, 8])
+    # coeffs = [5, 42, 20, 28, 15, 35, 1]  # 7-element set for conf_test_dble
+    # coeffs = [20, 28, 5, 42, 15, 35, 1]
+    coeffs = [35, 15, 20, 1, 1]
+    # confidence = conf_test_dble(rad, coeffs)
+    confidence = test_11um(rad, coeffs)
+    print(rad)
+    print('\n')
+    print(confidence)
+
+
+if __name__ == "__main__":
+    test()
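
Usage sketch (illustrative only, not part of the changes above): the radiance values, variable names, and thresholds below are made up; the ordering [hicut, midpt, locut, power] is simply how conf_test indexes thr[0]..thr[3], and the fifth element is not used by conf_test itself.

    import numpy as np

    import tests

    # Made-up 11um brightness temperatures (K) for a small 2x4 chunk of pixels
    bt11 = np.array([[250., 260., 270., 280.],
                     [255., 265., 275., 285.]])

    # Made-up thresholds in the order conf_test reads them:
    # hicut, midpt, locut, power (plus a fifth, currently unused entry)
    thr_bt11 = [267., 270., 273., 1., 1.]

    # hicut < locut, so conf_test takes its "flipped" branch and the confidence
    # ramps from 1 for bt11 <= 267 down to 0 for bt11 >= 273 (0.5 at the midpoint)
    confidence = tests.test_11um(bt11, thr_bt11)
    print(confidence)

conf_test_dble works the same way but expects seven coefficients, [hicut1, hicut2, locut1, locut2, midpt1, midpt2, power], and builds the double-sided ramp sketched in its docstring.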