Commit ed45e299 authored by Paolo Veglio

code now saves fill values at night for high res run. Reynolds SST removed from inputs

parent a378b809
"""Main function for MVCM.""" """Main function for MVCM."""
import argparse import argparse
import gc
import logging import logging
import os import os
...@@ -13,7 +14,9 @@ from rich.logging import RichHandler ...@@ -13,7 +14,9 @@ from rich.logging import RichHandler
from ruamel.yaml import YAML from ruamel.yaml import YAML
import mvcm.read_data as rd import mvcm.read_data as rd
import mvcm.temp_spectral_tests as tst
# import mvcm.temp_spectral_tests as tst
import mvcm.spectral_tests as tst
import mvcm.utility_functions as utils import mvcm.utility_functions as utils
from mvcm.constants import SceneConstants, SensorConstants from mvcm.constants import SceneConstants, SensorConstants
from mvcm.restoral import Restoral from mvcm.restoral import Restoral
@@ -42,7 +45,7 @@ def main(
     geos_ocean: str = "",
     geos_constants: str = "",
     ndvi_file: str = "",
-    sst_file: str = "",
+    # sst_file: str = "",
     eco_file: str = "",
     out_file: str = "",
     exe_path: str = "/home/pveglio/mvcm/mvcm",
@@ -50,7 +53,7 @@ def main(
     *,
     debug: bool = False,
     hires_only: bool = False,
-    mvcm_threshold_file: str | None,
+    mvcm_threshold_file: str,
 ) -> None:
     """MVCM main function.
@@ -122,7 +125,7 @@ def main(
         "GEOS_ocean": f"{geos_ocean}",
         "GEOS_constants": f"{geos_constants}",
         "NDVI": f"{ndvi_file}",
-        "SST": f"{sst_file}",
+        "SST": "",
         "ECO": f"{eco_file}",
         "ANC_DIR": f"{data_path}",
     }
@@ -146,6 +149,17 @@ def main(
                 vnp02[b]
             except KeyError:
                 logger.info(f"Band {b} not found in file. No output will be written.")
+                with Dataset(file_names["IMG03"]) as f:
+                    attrs = {attr: f.getncattr(attr) for attr in f.ncattrs()}
+                    shape = f["geolocation_data/latitude"][:].shape
+                save_output(
+                    None,
+                    attrs,
+                    out_file,
+                    compression=5,
+                    debug=debug,
+                    shape=shape,
+                )
                 return
         logger.info(f"All bands found in file {file_names['IMG02']}. The code will run.")
     else:
@@ -155,6 +169,17 @@ def main(
                 vnp02[b]
             except KeyError:
                 logger.info(f"Band {b} not found in file. No output will be written.")
+                with Dataset(file_names["MOD03"]) as f:
+                    attrs = {attr: f.getncattr(attr) for attr in f.ncattrs()}
+                    shape = f["geolocation_data/latitude"][:].shape
+                save_output(
+                    None,
+                    attrs,
+                    out_file,
+                    compression=5,
+                    debug=debug,
+                    shape=shape,
+                )
                 return
         logger.info(f"All bands found in file {file_names['MOD02']}. The code will run.")
@@ -168,7 +193,7 @@ def main(
     # viirs_data = xr.open_dataset("input_data.nc")
     logger.info(f"Memory usage #3: {proc.memory_info().rss / 1e6} MB")
-    cmin_3d = np.ones((18, viirs_data.M11.shape[0], viirs_data.M11.shape[1]), dtype=np.float32)
+    # cmin_3d = np.ones((18, viirs_data.M11.shape[0], viirs_data.M11.shape[1]), dtype=np.float32)
     # cmin_3d = xr.ones_like(viirs_data.latitude, dtype=np.float32, chunks="auto")
     logger.info(f"Memory usage #4: {proc.memory_info().rss / 1e6} MB")
@@ -206,7 +231,8 @@ def main(
         "qa": np.zeros(viirs_data.latitude.shape, dtype=np.ubyte),
         # "qa": xr.zeros_like(viirs_data.latitude, dtype=np.ubyte, chunks="auto"),
     }
-    i = 0
+    cmin = np.ones(viirs_data.latitude.shape, dtype=np.float32)
     viirs_data["locut"] = np.full_like(
         viirs_data.latitude.shape, fill_value=np.nan, dtype=np.float32
@@ -241,7 +267,7 @@ def main(
     # cmin_g3 = xr.ones_like(viirs_data.latitude, dtype=np.float32, chunks="auto")
     # cmin_g4 = xr.ones_like(viirs_data.latitude, dtype=np.float32, chunks="auto")
     # cmin_g5 = xr.ones_like(viirs_data.latitude, dtype=np.float32, chunks="auto")
-    cmin_temp = xr.ones_like(viirs_data.latitude, dtype=np.float32, chunks="auto")
+    # cmin_temp = xr.ones_like(viirs_data.latitude, dtype=np.float32, chunks="auto")
     cmin_g1 = np.ones(viirs_data.latitude.shape, dtype=np.float32)
     cmin_g2 = np.ones(viirs_data.latitude.shape, dtype=np.float32)
@@ -290,7 +316,7 @@ def main(
         cmin_g4, bits["15"] = my_scene.test_1_38um_high_clouds("M09", cmin_g4, bits["15"])
         # Group 5
-        # cmin_g5, bits["16"] = my_scene.thin_cirrus_4_12um_BTD_test("M13-M16", cmin_g5, bits["16"])
+        cmin_g5, bits["16"] = my_scene.thin_cirrus_4_12um_BTD_test("M13-M16", cmin_g5, bits["16"])
         # logger.debug(f"Memory: {tracemalloc.get_traced_memory()}")
         bit = {}
@@ -302,17 +328,21 @@ def main(
         # restoral_bits[b] = utils.restoral_flag(bits[f"{b}"])
         # # if utils.group_count(bit) != 0:
-        cmin = np.power(
-            cmin_g1 * cmin_g2 * cmin_g3 * cmin_g4 * cmin_g5, 1 / utils.group_count(qabit)
-        )
-        cmin_3d[i, :, :] = cmin
-        # cmin_3d = np.minimum(cmin_3d, cmin)
-        i += 1
+        # cmin = np.power(
+        #     cmin_g1 * cmin_g2 * cmin_g3 * cmin_g4 * cmin_g5, 1 / utils.group_count(qabit)
+        # )
+        cmin = np.minimum(
+            np.power(cmin_g1 * cmin_g2 * cmin_g3 * cmin_g4 * cmin_g5, 1 / utils.group_count(qabit)),
+            cmin,
+        )
+        # cmin_3d[i, :, :] = cmin
+        # # cmin_3d = np.minimum(cmin_3d, cmin)
+        # i += 1
     logger.info(f"Memory usage #7: {proc.memory_info().rss / 1e6} MB")
-    cmin = np.min(cmin_3d, axis=0)
+    # cmin = np.min(cmin_3d, axis=0)
     # cmin = cmin_3d
     # pixel_type = xr.open_dataset("pixels_data.nc")
     restore = Restoral(data=viirs_data, thresholds=thresholds, scene_flags=pixel_type)
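Note: the rewritten confidence aggregation replaces the (18, x, y) cmin_3d buffer and its index i with a single 2-D running minimum updated once per scene iteration, which is where the memory saving comes from. A self-contained sketch of the same pattern with synthetic inputs (the shapes, iteration count, and uniform random confidences are illustrative only):

import numpy as np

rng = np.random.default_rng(0)
shape = (768, 3200)                      # illustrative granule size
cmin = np.ones(shape, dtype=np.float32)  # start fully confident clear, as above

for _ in range(3):                       # stand-in for the loop over scene types
    # five per-group confidences, standing in for cmin_g1 ... cmin_g5
    groups = [rng.uniform(0.5, 1.0, shape).astype(np.float32) for _ in range(5)]
    n_groups = 5                         # stand-in for utils.group_count(qabit)
    scene_conf = np.power(np.prod(groups, axis=0), 1.0 / n_groups)
    cmin = np.minimum(scene_conf, cmin)  # keep the lowest confidence seen so far

# cmin now holds what np.min(cmin_3d, axis=0) used to produce, without
# allocating the (n_scenes, x, y) array.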
@@ -579,7 +609,7 @@ def main(
         "bands": {"dims": ("x", "y", "nbands"), "data": all_bands},
         "ecosystem": {"dims": ("x", "y"), "data": viirs_data.ecosystem.values},
         "ndvi": {"dims": ("x", "y"), "data": viirs_data.ndvi.values},
-        "sst": {"dims": ("x", "y"), "data": viirs_data.sst.values},
+        # "sst": {"dims": ("x", "y"), "data": viirs_data.sst.values},
         "tpw": {"dims": ("x", "y"), "data": viirs_data.geos_tpw.values},
         "snow_fraction": {"dims": ("x", "y"), "data": viirs_data.geos_snow_fraction.values},
         "ice_fraction": {"dims": ("x", "y"), "data": viirs_data.geos_ice_fraction.values},
@@ -648,7 +678,7 @@ if __name__ == "__main__":
     parser.add_argument("--ocean", help="file name of GEOS-IT ocean parameters")
     parser.add_argument("--constants", help="file name of GEOS-IT constants")
     parser.add_argument("--ndvi", help="NDVI file name")
-    parser.add_argument("--sst", help="Sea surface temperature file name")
+    # parser.add_argument("--sst", help="Sea surface temperature file name")
     parser.add_argument("--eco", help="Ecosystem file")
     parser.add_argument("-o", "--out", help="output file name")
     parser.add_argument(
@@ -684,7 +714,7 @@ if __name__ == "__main__":
     geos_ocean = args.ocean
     constants = args.constants
     ndvi_file = args.ndvi
-    sst_file = args.sst
+    # sst_file = args.sst
     eco_file = args.eco
     out_file = args.out
     verbose = args.verbose or False
@@ -705,7 +735,7 @@ if __name__ == "__main__":
         geos_ocean=geos_ocean,
         geos_constants=constants,
         ndvi_file=ndvi_file,
-        sst_file=sst_file,
+        # sst_file=sst_file,
         eco_file=eco_file,
         out_file=out_file,
         verbose=verbose,
...
"""Write the output data in a netcdf file.""" """Write the output data in a netcdf file."""
import logging import logging
from os.path import basename from os.path import basename
from subprocess import run from subprocess import run
...@@ -25,7 +26,13 @@ logger = logging.getLogger(__name__) ...@@ -25,7 +26,13 @@ logger = logging.getLogger(__name__)
def save_output( def save_output(
data: xr.Dataset, attrs: dict, fname: str, compression: int, *, debug: bool = False data: xr.Dataset | None,
attrs: dict,
fname: str,
compression: int,
*,
debug: bool = False,
shape: tuple = (0, 0),
) -> None: ) -> None:
"""Save the output data in a netcdf file. """Save the output data in a netcdf file.
...@@ -38,6 +45,9 @@ def save_output( ...@@ -38,6 +45,9 @@ def save_output(
""" """
logger.info("Running save_output() function...\n") logger.info("Running save_output() function...\n")
if data is None:
data = create_empty_dataset(shape)
scan_line_attributes = { scan_line_attributes = {
"scan_start_time": { "scan_start_time": {
"dims": ("number_of_scans"), "dims": ("number_of_scans"),
...@@ -310,6 +320,30 @@ def save_output( ...@@ -310,6 +320,30 @@ def save_output(
logger.info("Output written to netcdf file successfully") logger.info("Output written to netcdf file successfully")
+def create_empty_dataset(lat_shape: tuple) -> xr.Dataset:
+    """Create an empty dataset."""
+    output_dict = {
+        "latitude": {"dims": ("x", "y"), "data": np.full(lat_shape, -9999)},
+        "longitude": {"dims": ("x", "y"), "data": np.full(lat_shape, -9999)},
+        "sensor_zenith": {"dims": ("x", "y"), "data": np.full(lat_shape, -9999)},
+        "sensor_azimuth": {"dims": ("x", "y"), "data": np.full(lat_shape, -9999)},
+        "solar_zenith": {"dims": ("x", "y"), "data": np.full(lat_shape, -9999)},
+        "solar_azimuth": {"dims": ("x", "y"), "data": np.full(lat_shape, -9999)},
+        "confidence": {"dims": ("x", "y"), "data": np.full(lat_shape, -9999)},
+        "cloud_mask": {
+            "dims": ("bits", "x", "y"),
+            "data": np.full((8, lat_shape[0], lat_shape[1]), -9999),
+        },
+        "quality_assurance": {
+            "dims": ("x", "y", "qa"),
+            "data": np.full((lat_shape[0], lat_shape[1], 10), -9999),
+        },
+        "integer_cloud_mask": {"dims": ("x", "y"), "data": np.full(lat_shape, -9999)},
+    }
+    return xr.Dataset.from_dict(output_dict)
 def run_mvcm(
     file_names: dict,
     thresholds_file: str,
...
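Note: a quick way to sanity-check the new empty-dataset path is to build the fill-value dataset directly; the variable names, dimensions, and the -9999 fill value come from create_empty_dataset() above, while the (768, 3200) shape is an arbitrary stand-in for the geolocation shape read from IMG03/MOD03:

# assumes create_empty_dataset() from the module shown above is importable
empty = create_empty_dataset((768, 3200))

assert empty["latitude"].shape == (768, 3200)
assert empty["cloud_mask"].shape == (8, 768, 3200)
assert empty["quality_assurance"].shape == (768, 3200, 10)
assert int(empty["integer_cloud_mask"][0, 0]) == -9999  # every variable is fill values
# save_output(None, attrs, out_file, compression=5, shape=(768, 3200)) follows the same path.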