import os
import sys
import logging
import pandas as pd
from datetime import datetime as dt
from aosstower.l00 import parser
from netCDF4 import Dataset
import numpy as np
import platform
from aosstower import station
LOG = logging.getLogger(__name__)
# The purpose of this function is to create the dimensions
# for the nc file: an unlimited 'time' dimension and a fixed-width
# dimension for the station name
def createDimensions(ncFile):
    ncFile.createDimension('time', None)
    ncFile.createDimension('max_len_station_name', 32)
def createVariables(ncFile, firstStamp, chunksizes, zlib):
#base_time long name
btln = 'base time as unix timestamp'
#base time units
btu = 'seconds since 1970-01-01 00:00:00'
#base time string
bts = firstStamp.strftime('%Y-%m-%d 00:00:00Z')
#time long name
tln = 'time offset from base_time'
#time units
tu = 'seconds since ' + firstStamp.strftime('%Y-%m-%d 00:00:00Z')
    #fields: type, dimension, fill, valid_min, std_name, long_name, units, valid_max, cf_role
    coordinates = {
        'lon': [np.float32, None, float(-999), -180.0, 'longitude', None, 'degrees_east', 180.0, None],
        'lat': [np.float32, None, float(-999), -90.0, 'latitude', None, 'degrees_north', 90.0, None],
        'alt': [np.float32, None, float(-999), None, 'height', 'vertical distance', 'm', None, None],
        'base_time': [np.float32, None, float(-999), None, 'time', btln, btu, None, None],
        'time_offset': [np.float32, 'time', float(-999), None, 'time', tln, tu, None, None],
        'station_name': ['c', 'max_len_station_name', '-', None, None, 'station name', None, None, 'timeseries_id'],
        'time': [np.float32, 'time', float(-999), None, None, 'Time offset from epoch', 'seconds since 1970-01-01 00:00:00Z', None, None],
    }
    for key, attr in coordinates.items():
        if attr[1]:
            #station_name is a char array; cap its chunk size at the
            #32-character dimension length
            if attr[1] == 'max_len_station_name' and chunksizes and chunksizes[0] > 32:
                variable = ncFile.createVariable(key, attr[0], dimensions=(attr[1],), fill_value=attr[2], zlib=zlib, chunksizes=[32])
            else:
                variable = ncFile.createVariable(key, attr[0], dimensions=(attr[1],), fill_value=attr[2], zlib=zlib, chunksizes=chunksizes)
        else:
            #scalar coordinate: no dimensions, so no chunk sizes either
            variable = ncFile.createVariable(key, attr[0], fill_value=attr[2], zlib=zlib)
        #create var attributes
        if key == 'alt':
            variable.positive = 'up'
            variable.axis = 'Z'
        if attr[3] is not None:
            variable.valid_min = attr[3]
            variable.valid_max = attr[7]
        if attr[4]:
            variable.standard_name = attr[4]
        if attr[5]:
            variable.long_name = attr[5]
        if attr[6]:
            variable.units = attr[6]
        if attr[8]:
            variable.cf_role = attr[8]
        if key == 'base_time':
            variable.string = bts
    #create a float32 time-series variable for each database field
    for entry in parser.database:
        if entry == 'stamp':
            continue
        varTup = parser.database[entry]
        variable = ncFile.createVariable(entry, np.float32,
            dimensions=('time',), fill_value=float(-999), zlib=zlib, chunksizes=chunksizes)
        variable.standard_name = varTup[1]
        variable.description = varTup[3]
        variable.units = varTup[4]
#create global attributes
ncFile.source = 'surface observation'
ncFile.institution = 'UW SSEC'
ncFile.featureType = 'timeSeries'
ncFile.data_level = 'a0'
ncFile.datastream = 'aoss.tower.nc.la0.v00'
ncFile.software_version = '00'
#generate history
ncFile.history = ' '.join(platform.uname()) + " " + os.path.basename(__file__)
return ncFile
def minuteAverages(frame):
    #group each record by the minute it falls in and average
    frame['minute'] = [ts.replace(second=0) for ts in frame.index]
    newFrame = frame.groupby('minute').mean()
    newFrame.index.names = ['']
    columns = list(newFrame.columns.values)
    #wind speed and direction cannot be averaged as plain scalars
    #(direction wraps at 360 degrees), so drop them from the averages
    if 'wind_speed' in columns:
        del newFrame['wind_speed']
    if 'wind_direction' in columns:
        del newFrame['wind_direction']
    del frame['minute']
    return newFrame.fillna(-99999)
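# For example (a sketch with made-up values): rows stamped 00:00:15 and
# 00:00:45 with air_temp 1.0 and 3.0 collapse into one 00:00:00 row with
# air_temp 2.0.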
# Read the level_00 ASCII records in the given file into a DataFrame
# indexed by timestamp, one column per database field
def readData(filename):
    dictData = {}
    getFrames = list(parser.read_frames(filename))
    for frame in getFrames:
        if 'stamp' not in frame:
            continue
        stamp = frame['stamp']
        del frame['stamp']
        dictData[stamp] = frame
    return pd.DataFrame(dictData).transpose().replace(-99999, np.nan)
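# e.g. (hypothetical path): readData('rig_tower.2016-01-01.ascii') returns
# a frame indexed by observation stamp with -99999 sentinels read back as NaN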
def writeVars(ncFile, frame):
stamps = list(frame.index)
baseDTObj = dt.strptime(str(stamps[0]).split(' ')[0], '%Y-%m-%d')
#find out how much time elapsed
#since the origin to the start of the day
#in seconds
baseTimeValue = baseDTObj - dt(1970,1,1)
baseTimeValue = baseTimeValue.total_seconds()
    #create time numpy
    timeNumpy = np.empty(len(stamps), dtype='float32')
    #write each stamp's offset from base_time
    for counter, stamp in enumerate(stamps):
        stampObj = dt.strptime(str(stamp), '%Y-%m-%d %H:%M:%S')
        timeNumpy[counter] = (stampObj - baseDTObj).total_seconds()
fileVar = ncFile.variables
fileVar['base_time'].assignValue(baseTimeValue)
fileVar['time_offset'][:] = timeNumpy
fileVar['time'][:] = timeNumpy + baseTimeValue
#write coordinate var values to file
#alt might not be right, need to verify
fileVar['lon'].assignValue(station.LONGITUDE)
fileVar['lat'].assignValue(station.LATITUDE)
fileVar['alt'].assignValue(328)
    #station name (may change)
    stationName = "AOSS Tower"
#transfer station name into array of chars
statChars = list(stationName)
statNumpy = np.asarray(statChars)
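    #NETCDF4_CLASSIC has no variable-length string type, so the name is
    #written one character per element of the 32-char dimension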
#write station name to file
fileVar['station_name'][0:len(statNumpy)] = statNumpy
#writes data into file
for varName in frame:
dataList = frame[varName].tolist()
dataArray = np.asarray(dataList)
fileVar[varName][:] = dataArray
return ncFile
# The purpose of this method is to take a start and an end date, a list
# of input filenames, and an output filename, and create a netCDF file
# from the data they select
# @param start - start datetime object (or None)
# @param end - end datetime object (or None)
# @param inputFiles - list of input filenames
# @param outputName - filename of the output netCDF file
def createGiantNetCDF(start, end, inputFiles, outputName, zlib, chunkSize):
    default = False
    if chunkSize:
        chunksizes = [chunkSize]
    else:
        default = True
    #read and combine every input file, then average to the minute
    frames = [readData(filename) for filename in inputFiles]
    frames = [f for f in frames if not f.empty]
    if not frames:
        return False
    frame = minuteAverages(pd.concat(frames))
    if start and end:
        frame = frame[start.strftime('%Y-%m-%d %H:%M:%S'): end.strftime('%Y-%m-%d %H:%M:%S')]
    if default:
        #default chunking: one chunk spanning the whole time dimension
        chunksizes = [len(list(frame.index))]
    firstStamp = dt.strptime(str(list(frame.index)[0]), '%Y-%m-%d %H:%M:%S')
    ncFile = Dataset(outputName, 'w', format='NETCDF4_CLASSIC')
    createDimensions(ncFile)
    ncFile = createVariables(ncFile, firstStamp, chunksizes, zlib)
    ncFile.inputFiles = ', '.join(inputFiles)
    ncFile = writeVars(ncFile, frame)
    ncFile.close()
    return True
def createMultiple(filenames, outputFilenames, zlib, chunkSize):
    if outputFilenames and len(filenames) != len(outputFilenames):
        print('USAGE: number of output filenames must equal number of input filenames when start and end times are not specified')
        sys.exit(1)
    #create one netCDF file per input file
    results = []
    for idx, filename in enumerate(filenames):
        results.append(createGiantNetCDF(None, None, [filename], outputFilenames[idx], zlib, chunkSize))
print(results)
allFalse = True
for result in results:
if result == True:
print('here')
print(result)
allFalse = False
if allFalse == True:
raise IOError('All ASCII files were empty')
#The purpose of this method is to take a string in the format
# YYYY-mm-ddTHH:MM:SS and convert that to a datetime object
# used in coordination with argparse -s and -e params
# @param datetime string
# @return datetime object
def _dt_convert(datetime_str):
    #parse datetime string, return datetime object
    try:
        return dt.strptime(datetime_str, '%Y-%m-%dT%H:%M:%S')
    except ValueError:
        #fall back to a date-only string
        return dt.strptime(datetime_str, '%Y-%m-%d')
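# e.g. _dt_convert('2016-01-02T03:04:05') -> datetime(2016, 1, 2, 3, 4, 5)
# and _dt_convert('2016-01-02') -> datetime(2016, 1, 2, 0, 0)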
def main():
import argparse
#argparse description
parser = argparse.ArgumentParser(description="Convert level_00 aoss tower data to level_a0")
#argparse verbosity info
parser.add_argument('-v', '--verbose', action="count", default=int(os.environ.get("VERBOSITY", 2)),
dest='verbosity',
help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
#argparse start and end times
    parser.add_argument('-s', '--start-time', type=_dt_convert,
        help="start time of the netCDF file; if only -s is given, a netCDF file covering " +
        "just that day is created. Formats allowed: 'YYYY-MM-DDTHH:MM:SS', 'YYYY-MM-DD'")
    parser.add_argument('-e', '--end-time', type=_dt_convert,
        help="end time of the netCDF file. Formats allowed: 'YYYY-MM-DDTHH:MM:SS', 'YYYY-MM-DD'")
    parser.add_argument('-cs', '--chunk-size', type=int, help='chunk size for the netCDF file')
parser.add_argument('-z', '--zlib', action='store_true', help='compress netCDF file with zlib')
parser.add_argument("input_files", nargs="+",
help="aoss_tower level_00 paths")
parser.add_argument('-o', '--output', required=True, nargs="+", help="filename pattern or filename. " +
"Should be along the lines of <filepath>/aoss_tower.YYYY-MM-DD.nc")
args = parser.parse_args()
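    #example invocations (hypothetical paths):
    #  nc.py -z -s 2016-01-02 rig_tower.2016-01-02.ascii -o aoss_tower.2016-01-02.nc
    #  nc.py day1.ascii day2.ascii -o day1.nc day2.nc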
levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    level = levels[min(3, args.verbosity)]
logging.basicConfig(level=level)
    if args.start_time and args.end_time:
        result = createGiantNetCDF(args.start_time, args.end_time, args.input_files, args.output[0], args.zlib, args.chunk_size)
        if not result:
            raise IOError('An empty ASCII file was found')
    elif args.start_time:
        #a start time without an end time means a file covering just that day
        end_time = args.start_time.replace(hour=23, minute=59, second=59)
        result = createGiantNetCDF(args.start_time, end_time, args.input_files, args.output[0], args.zlib, args.chunk_size)
        if not result:
            raise IOError('An empty ASCII file was found')
    elif args.end_time:
        print('USAGE: start time must be specified when end time is specified')
    else:
        #no time range given: create one output file per input file
        createMultiple(args.input_files, args.output, args.zlib, args.chunk_size)

if __name__ == '__main__':
    main()