Skip to content
Snippets Groups Projects
Commit c1f5d51e authored by Matthew Westphall's avatar Matthew Westphall
Browse files

updated netCDF4 datatypes to match ARM standard

parent 0cae1fd7
No related branches found
No related tags found
No related merge requests found
This diff is collapsed.
...@@ -4,7 +4,6 @@ import logging ...@@ -4,7 +4,6 @@ import logging
import pandas as pd import pandas as pd
from datetime import datetime as dt from datetime import datetime as dt
from aosstower.l00 import parser from aosstower.l00 import parser
import avg_database
from netCDF4 import Dataset from netCDF4 import Dataset
import numpy as np import numpy as np
import platform import platform
...@@ -14,6 +13,18 @@ import calc ...@@ -14,6 +13,18 @@ import calc
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
#create the '_mean','_low','_high' file structure
def make_mean_dict(source_dict):
    """Expand each variable key into '_high', '_mean', and '_low' variants.

    For every key in *source_dict*, three suffixed keys are produced, each
    mapped to the same entry as the original key.

    NOTE(review): all three suffixed keys alias the *same* value object
    (no copy is made) — if these entries are lists that get mutated
    downstream, the mutation is shared; confirm that is intended.
    """
    return {
        key + suffix: value
        for key, value in source_dict.items()
        for suffix in ('_high', '_mean', '_low')
    }
mean_database = make_mean_dict(parser.database)
def filterArray(array, valid_min, valid_max): def filterArray(array, valid_min, valid_max):
qcControl = [] qcControl = []
...@@ -67,8 +78,8 @@ def createVariables(ncFile, firstStamp, chunksizes, zlib, database=parser.databa ...@@ -67,8 +78,8 @@ def createVariables(ncFile, firstStamp, chunksizes, zlib, database=parser.databa
'lon': [np.float32, None, float(-999), '-180L', 'longitude', None, 'degrees_east', '180L', None], 'lon': [np.float32, None, float(-999), '-180L', 'longitude', None, 'degrees_east', '180L', None],
'lat': [np.float32, None, float(-999), '-90L', 'latitude', None, 'degrees_north', '90L', None], 'lat': [np.float32, None, float(-999), '-90L', 'latitude', None, 'degrees_north', '90L', None],
'alt': [np.float32, None, float(-999), None, 'height', 'vertical distance', 'm', None, None], 'alt': [np.float32, None, float(-999), None, 'height', 'vertical distance', 'm', None, None],
'base_time': [np.float32, None, float(-999), None, 'time', btln, btu, None, None], 'base_time': [np.int32, None, float(-999), None, 'time', btln, btu, None, None],
'time_offset': [np.float32, 'time', float(-999), None, 'time', tln, tu, None, None], 'time_offset': [np.float64, 'time', float(-999), None, 'time', tln, tu, None, None],
'station_name': ['c', 'max_len_station_name', '-', None, None, 'station name', None, None, 'timeseries_id'], 'station_name': ['c', 'max_len_station_name', '-', None, None, 'station name', None, None, 'timeseries_id'],
'time': [np.float32, 'time', float(-999), None, None, "Time offset from epoch", "seconds since 1970-01-01 00:00:00Z", None, None, None] 'time': [np.float32, 'time', float(-999), None, None, "Time offset from epoch", "seconds since 1970-01-01 00:00:00Z", None, None, None]
} }
...@@ -312,7 +323,7 @@ def writeVars(ncFile, frame, database=parser.database): ...@@ -312,7 +323,7 @@ def writeVars(ncFile, frame, database=parser.database):
baseTimeValue = baseTimeValue.total_seconds() baseTimeValue = baseTimeValue.total_seconds()
#create time numpy #create time numpy
timeNumpy = np.empty(len(stamps), dtype='float32') timeNumpy = np.empty(len(stamps), dtype='float64')
counter = 0 counter = 0
...@@ -328,7 +339,7 @@ def writeVars(ncFile, frame, database=parser.database): ...@@ -328,7 +339,7 @@ def writeVars(ncFile, frame, database=parser.database):
fileVar = ncFile.variables fileVar = ncFile.variables
fileVar['base_time'].assignValue(baseTimeValue) fileVar['base_time'].assignValue(baseTimeValue)
fileVar['time_offset'][:] = timeNumpy fileVar['time_offset'][:] = timeNumpy
fileVar['time'][:] = timeNumpy + baseTimeValue fileVar['time'][:] = timeNumpy
#write coordinate var values to file #write coordinate var values to file
#alt might not be right, need to verify #alt might not be right, need to verify
...@@ -497,7 +508,7 @@ def main(): ...@@ -497,7 +508,7 @@ def main():
logging.basicConfig(level=level) logging.basicConfig(level=level)
database = avg_database.AOSS_VARS if args.interval else parser.database database = mean_database if args.interval else parser.database
if(args.start_time and args.end_time): if(args.start_time and args.end_time):
result = createGiantNetCDF(args.start_time, args.end_time, args.input_files, args.output[0], args.zlib, args.chunk_size, result = createGiantNetCDF(args.start_time, args.end_time, args.input_files, args.output[0], args.zlib, args.chunk_size,
args.interval, database) args.interval, database)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment