Commit ffcd06fe authored by tomrink

snapshot...

parent de80f19a
@@ -7,9 +7,9 @@ import xarray as xr
 import pandas as pd
 import rasterio
 from PIL import Image
-import matplotlib.pyplot as plt
 import matplotlib.image as mpimg
 import h5py
+from util.GFSDataset import GFSData
 from util.util import get_grid_values_all
 from util.gfs_reader import get_volume, volume_np_to_xr
 from util.geos_nav import GEOSNavigation
@@ -48,8 +48,8 @@ def get_contrail_mask_image(image, thresh=0.157):
 def extract(mask_image, image_ts, clavrx_path):
     # The GFS file closest to image_ts
     gfs_file, _, _ = gfs_files.get_file(image_ts)
-    xr_dataset = xr.open_dataset(gfs_file)
+    # Open the CLAVRx file
     clvrx_h5f = h5py.File(clavrx_path, 'r')
     cloud_top_press = get_grid_values_all(clvrx_h5f, 'cld_press_acha').flatten()
     clvrx_lons = get_grid_values_all(clvrx_h5f, 'longitude').flatten()
@@ -78,30 +78,32 @@ def extract(mask_image, image_ts, clavrx_path):
     lon_range = [np.min(contrail_lons), np.max(contrail_lons)]
     lat_range = [np.min(contrail_lats), np.max(contrail_lats)]

-    uwind_3d = get_volume(xr_dataset, 'u-wind', 'm s-1', lon_range=lon_range, lat_range=lat_range)
-    vwind_3d = get_volume(xr_dataset, 'v-wind', 'm s-1', lon_range=lon_range, lat_range=lat_range)
-    temp_3d = get_volume(xr_dataset, 'temperature', 'degK', lon_range=lon_range, lat_range=lat_range)
-    rh_3d = get_volume(xr_dataset, 'rh', '%', lon_range=lon_range, lat_range=lat_range)
-
-    uwind_3d = uwind_3d.transpose('Pressure', 'Latitude', 'Longitude')
-    vwind_3d = vwind_3d.transpose('Pressure', 'Latitude', 'Longitude')
-    temp_3d = temp_3d.transpose('Pressure', 'Latitude', 'Longitude')
-    rh_3d = rh_3d.transpose('Pressure', 'Latitude', 'Longitude')
-
-    horz_shear_3d = shearing_deformation(uwind_3d, vwind_3d)
-    static_3d = static_stability(temp_3d.coords['Pressure'] * units.hPa, temp_3d)
-    horz_wind_spd_3d = wind_speed(uwind_3d, vwind_3d)
-
-    # This one's a bit more work: `first_derivative` only returns a ndarray with no units, so we use the
-    # helper function to create a DataArray and add units via metpy's pint support
-    vert_shear_3d = first_derivative(horz_wind_spd_3d, axis=0, x=temp_3d.coords['Pressure'])
-    vert_shear_3d = volume_np_to_xr(vert_shear_3d, ['Pressure', 'Latitude', 'Longitude'], lon_range=lon_range, lat_range=lat_range)
-    vert_shear_3d = vert_shear_3d / units.hPa
-
-    # Form a new Dataset for interpolation below
-    da_dct = {'horz_shear_3d': horz_shear_3d, 'static_3d': static_3d, 'horz_wind_spd_3d': horz_wind_spd_3d,
-              'vert_shear_3d': vert_shear_3d, 'temp_3d': temp_3d, 'rh_3d': rh_3d}
-    xr_ds = xr.Dataset(da_dct)
+    # create an instance of the file data accessor and initialize the region of interest
+    with GFSData(gfs_file, lon_range=lon_range, lat_range=lat_range) as gfs_dataset:
+        uwind_3d = gfs_dataset.get_volume('u-wind', 'm s-1')
+        vwind_3d = gfs_dataset.get_volume('v-wind', 'm s-1')
+        temp_3d = gfs_dataset.get_volume('temperature', 'degK')
+        rh_3d = gfs_dataset.get_volume('rh', '%')
+
+        uwind_3d = uwind_3d.transpose('Pressure', 'Latitude', 'Longitude')
+        vwind_3d = vwind_3d.transpose('Pressure', 'Latitude', 'Longitude')
+        temp_3d = temp_3d.transpose('Pressure', 'Latitude', 'Longitude')
+        rh_3d = rh_3d.transpose('Pressure', 'Latitude', 'Longitude')
+
+        horz_shear_3d = shearing_deformation(uwind_3d, vwind_3d)
+        static_3d = static_stability(temp_3d.coords['Pressure'] * units.hPa, temp_3d)
+        horz_wind_spd_3d = wind_speed(uwind_3d, vwind_3d)
+
+        # This one's a bit more work: `first_derivative` only returns a ndarray with no units, so we use the
+        # helper function to create a DataArray and add units via metpy's pint support
+        vert_shear_3d = first_derivative(horz_wind_spd_3d, axis=0, x=temp_3d.coords['Pressure'])
+        vert_shear_3d = gfs_dataset.volume_np_to_xr(vert_shear_3d, ['Pressure', 'Latitude', 'Longitude'])
+        vert_shear_3d = vert_shear_3d / units.hPa
+
+        # Form a new Dataset for interpolation below
+        da_dct = {'horz_shear_3d': horz_shear_3d, 'static_3d': static_3d, 'horz_wind_spd_3d': horz_wind_spd_3d,
+                  'vert_shear_3d': vert_shear_3d, 'temp_3d': temp_3d, 'rh_3d': rh_3d}
+        xr_ds = xr.Dataset(da_dct)

     all_list = []
     levels_dict = {press_bins[key]: [] for key in bins_dict.keys()}
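For readers without util/GFSDataset.py at hand, the following is a minimal sketch of what the GFSData context manager used above could look like. It assumes the class simply wraps xr.open_dataset() and delegates to the existing get_volume and volume_np_to_xr helpers with a stored region of interest; the method signatures mirror how they are called in this diff, but the real implementation may differ.

import xarray as xr
from util.gfs_reader import get_volume, volume_np_to_xr


class GFSData:
    """Hypothetical sketch: open a GFS file for a fixed lon/lat region and
    release it automatically when the with-block exits."""

    def __init__(self, gfs_file, lon_range=None, lat_range=None):
        self.gfs_file = gfs_file
        self.lon_range = lon_range
        self.lat_range = lat_range
        self.xr_dataset = None

    def __enter__(self):
        # open the underlying file once; every get_volume() call reuses it
        self.xr_dataset = xr.open_dataset(self.gfs_file)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # close the file handle even if an exception was raised inside the block
        if self.xr_dataset is not None:
            self.xr_dataset.close()
        return False

    def get_volume(self, name, units):
        # delegate to the module-level helper, pinning the stored region of interest
        return get_volume(self.xr_dataset, name, units,
                          lon_range=self.lon_range, lat_range=self.lat_range)

    def volume_np_to_xr(self, data_np, dim_names):
        # wrap a bare ndarray back into a DataArray on the same regional grid
        return volume_np_to_xr(data_np, dim_names,
                               lon_range=self.lon_range, lat_range=self.lat_range)

This removes the explicit xr_dataset.close() call from extract(): the file is now released when the with-block ends, even if an exception is raised while the derived fields are being computed.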
@@ -147,13 +149,11 @@ def extract(mask_image, image_ts, clavrx_path):
                           columns=["pressure_level", "pressure", "lat", "lon", "temperature", "relative_humidity",
                                    "horz_shear_deform", "static_stability", "horz_wind_speed", "vert_wind_shear"])
-    xr_dataset.close()
-
     return all_df


-def analyze(dataFrame, column, value):
-    result_df = dataFrame[dataFrame[column] == value]  # get rows where column has a certain value
+def analyze(pd_df, column, value):
+    result_df = pd_df[pd_df[column] == value]  # get rows where column has a certain value
     mean = result_df.mean()  # calculate mean for other columns
     stddev = result_df.std()  # calculate standard deviation for other columns
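The diff truncates analyze() after the std() line. As a self-contained illustration of the pattern it implements (filter rows on one column, then summarize the remaining columns), here is a sketch with a tiny made-up DataFrame; the return statement, the demo values, and the 300 hPa example are assumptions, not part of the commit.

import pandas as pd


def analyze(pd_df, column, value):
    result_df = pd_df[pd_df[column] == value]  # get rows where column has a certain value
    mean = result_df.mean()                    # calculate mean for other columns
    stddev = result_df.std()                   # calculate standard deviation for other columns
    return mean, stddev                        # assumed return; the diff cuts off here


# made-up data shaped like the DataFrame returned by extract()
demo_df = pd.DataFrame({
    "pressure_level": [300, 300, 250],
    "temperature": [228.5, 229.1, 224.7],
    "relative_humidity": [62.0, 71.5, 55.3],
})
mean_300, std_300 = analyze(demo_df, "pressure_level", 300)
print(mean_300["temperature"], std_300["temperature"])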