io.py 29.8 KB
Newer Older
(no author)'s avatar
(no author) committed
1
2
3
4
5
6
7
8
9
#!/usr/bin/env python
# encoding: utf-8
"""
I/O routines supporting reading a number of file formats.

Created by rayg Apr 2009.
Copyright (c) 2009 University of Wisconsin SSEC. All rights reserved.
"""

10
import os, logging
(no author)'s avatar
   
(no author) committed
11
12
13
import numpy as np

LOG = logging.getLogger(__name__)
(no author)'s avatar
(no author) committed
14

(no author)'s avatar
   
(no author) committed
15
16
17
18
19
20
21
22
23
try:
    import pyhdf
    from pyhdf.SD import SD,SDC, SDS, HDF4Error
except:
    LOG.info('no pyhdf module available for HDF4')
    pyhdf = None
    SD = SDC = SDS = object
    HDF4Error = EnvironmentError
    
24
25
try:
    import h5py
26
    from h5py import h5d
27
except ImportError:
(no author)'s avatar
   
(no author) committed
28
29
    LOG.info('no h5py module available for reading HDF5')
    h5py = None
(no author)'s avatar
(no author) committed
30

(no author)'s avatar
   
(no author) committed
31
32
33
34
35
36
37
38
39
try:    
    import pycdf
    from pycdf import CDF, NC, strerror
except:
    LOG.info('no pycdf module available')
    pycdf = None
    CDF = NC = object
    def strerror(*args):
        return 'no pycdf module installed'
(no author)'s avatar
(no author) committed
40

(no author)'s avatar
(no author) committed
41
42
43
44
45
46
47
try:
    import dmv as dmvlib
    LOG.info('loaded dmv module for AERI data file access')
except ImportError:
    LOG.info('no AERI dmv data file format module')
    dmvlib = None

(no author)'s avatar
   
(no author) committed
48
49
50
51
52
53
54
try:
    import adl_blob
    LOG.info('adl_blob module found for JPSS ADL data file access')
except ImportError:
    LOG.info('no adl_blob format handler available')
    adl_blob = None

55
UNITS_CONSTANT = "units"
(no author)'s avatar
(no author) committed
56

57
58
59
fillValConst1 = '_FillValue'
fillValConst2 = 'missing_value'

60
61
62
63
64
65
66
67
68
69
70
class IOUnimplimentedError(Exception):
    """Exception raised when a requested io operation is not yet available.

        msg  -- explanation of the problem
    """

    def __init__(self, msg):
        # keep the explanation around so it can be shown to the user
        self.msg = msg

    def __str__(self):
        return self.msg

(no author)'s avatar
(no author) committed
71
72
73
74
75
76
class hdf(SD):
    """wrapper for HDF4 dataset for comparison
    __call__ yields sequence of variable names
    __getitem__ returns individual variables ready for slicing to numpy arrays
    """

    def __init__(self, filename, allowWrite=False):
        """Open the HDF4 file at filename, read-only unless allowWrite is True."""
        if pyhdf is None:
            LOG.error('pyhdf is not installed and is needed in order to read hdf4 files')
            assert(pyhdf is not None)
        mode = SDC.READ
        if allowWrite:
            mode = mode | SDC.WRITE
        super(self.__class__, self).__init__(filename, mode)

    def __call__(self):
        "yield names of variables to be compared"
        return self.datasets().keys()

    # this returns a numpy array with a copy of the full, scaled
    # data for this variable, if the data type must be changed to allow
    # for scaling it will be (so the return type may not reflect the
    # type found in the original file)
    def __getitem__(self, name):
        # defaults
        scale_factor = 1.0
        add_offset = 0.0
        data_type = None
        scaling_method = None

        # get the variable object and use it to
        # get our raw data and scaling info
        variable_object = self.get_variable_object(name)
        raw_data_copy = variable_object[:]

        try :
            # TODO, this currently won't work with geocat data, work around it for now
            scale_factor, scale_factor_error, add_offset, add_offset_error, data_type = SDS.getcal(variable_object)
        except HDF4Error:
            # no calibration record; load just the scale factor and add offset attributes
            temp_attributes = variable_object.attributes()
            if ('add_offset' in temp_attributes) :
                add_offset = temp_attributes['add_offset']
                data_type = np.dtype(type(add_offset))
            if ('scale_factor' in temp_attributes) :
                scale_factor = temp_attributes['scale_factor']
                data_type = np.dtype(type(scale_factor))
            if ('scaling_method' in temp_attributes) :
                scaling_method = temp_attributes['scaling_method']
        SDS.endaccess(variable_object)

        # don't do lots of work if we don't need to scale things
        if (scale_factor == 1.0) and (add_offset == 0.0) :
            return raw_data_copy

        # at the moment geocat has several scaling methods that don't match the normal standards for hdf
        """
        please see constant.f90 for a more up to date version of this information:
            INTEGER(kind=int1) :: NO_SCALE              ! 0
            INTEGER(kind=int1) :: LINEAR_SCALE          ! 1
            INTEGER(kind=int1) :: LOG_SCALE             ! 2
            INTEGER(kind=int1) :: SQRT_SCALE            ! 3 
        """
        if (scaling_method == 0) :
            return raw_data_copy
        if not ((scaling_method is None) or (int(scaling_method) <= 1)) :
            LOG.warning('Scaling method of \"' + str(scaling_method) + '\" will be ignored in favor of hdf standard method. '
                        + 'This may cause problems with data consistency')

        # if we don't have a data type something strange has gone wrong
        assert(not (data_type is None))

        # get information about where the data is the missing value
        missing_val = self.missing_value(name)
        # FIX: np.bool was removed from numpy (1.24+); the builtin bool is the supported dtype spelling
        missing_mask = np.zeros(raw_data_copy.shape, dtype=bool)
        missing_mask[raw_data_copy == missing_val] = True

        # create the scaled version of the data, leaving missing values untouched
        scaled_data_copy = np.array(raw_data_copy, dtype=data_type)
        scaled_data_copy[~missing_mask] = (scaled_data_copy[~missing_mask] * scale_factor) + add_offset #TODO, type truncation issues?

        return scaled_data_copy

    def get_variable_object(self, name):
        """return the pyhdf SDS object for the named variable"""
        return self.select(name)

    def missing_value(self, name):
        """return the variable's _FillValue attribute, or None if it has none"""
        variable_object = self.select(name)

        to_return = None
        if hasattr(variable_object, fillValConst1) :
            to_return = getattr(variable_object, fillValConst1, None)
        SDS.endaccess(variable_object)

        return to_return

    def create_new_variable(self, variablename, missingvalue=None, data=None, variabletocopyattributesfrom=None):
        """
        create a new variable with the given name
        optionally set the missing value (fill value) and data to those given

        the created variable will be returned, or None if a variable could not
        be created
        """

        raise IOUnimplimentedError('Unable to create variable in hdf file, this functionality is not yet available.')

        return None

    def add_attribute_data_to_variable(self, variableName, newAttributeName, newAttributeValue) :
        """
        if the attribute exists for the given variable, set it to the new value
        if the attribute does not exist for the given variable, create it and set it to the new value
        """

        raise IOUnimplimentedError('Unable add attribute to hdf file, this functionality is not yet available.')

        return

    def get_variable_attributes (self, variableName) :
        """
        returns all the attributes associated with a variable name
        """

        return self.get_variable_object(variableName).attributes()

    def get_attribute(self, variableName, attributeName) :
        """
        returns the value of the attribute if it is available for this variable, or None
        """
        toReturn = None
        temp_attributes = self.get_variable_attributes(variableName)

        if attributeName in temp_attributes :
            toReturn = temp_attributes[attributeName]

        return toReturn

    def get_global_attribute(self, attributeName) :
        """
        returns the value of a global attribute if it is available or None
        """

        toReturn = None

        if attributeName in self.attributes() :
            toReturn = self.attributes()[attributeName]

        return toReturn
(no author)'s avatar
(no author) committed
226

(no author)'s avatar
(no author) committed
227
228
229
230
231
232
class nc(CDF):
    """wrapper for NetCDF3/4/opendap dataset for comparison
    __call__ yields sequence of variable names
    __getitem__ returns individual variables ready for slicing to numpy arrays
    """

    def __init__(self, filename, allowWrite=False):
        """Open the NetCDF file at filename, read-only unless allowWrite is True."""
        if pycdf is None:
            LOG.error('pycdf is not installed and is needed in order to read NetCDF files')
            assert(pycdf is not None)

        mode = NC.NOWRITE
        if allowWrite :
            mode = NC.WRITE

        super(self.__class__, self).__init__(filename, mode)

    def __call__(self):
        "yield names of variables to be compared"
        return self.variables().keys()

    # this returns a numpy array with a copy of the full, scaled
    # data for this variable, if the data type must be changed to allow
    # for scaling it will be (so the return type may not reflect the
    # type found in the original file)
    def __getitem__(self, name):
        # defaults
        scale_factor = 1.0
        add_offset = 0.0
        data_type = np.float32 # TODO temporary

        # get the variable object and use it to
        # get our raw data and scaling info
        variable_object = self.get_variable_object(name)
        raw_data_copy = variable_object[:]
        # load the scale factor and add offset
        temp_attributes = variable_object.attributes()
        if ('scale_factor' in temp_attributes) :
            scale_factor = temp_attributes['scale_factor']
        if ('add_offset' in temp_attributes) :
            add_offset = temp_attributes['add_offset']
        # todo, does cdf have an equivalent of endaccess to close the variable?

        # don't do lots of work if we don't need to scale things
        if (scale_factor == 1.0) and (add_offset == 0.0) :
            return raw_data_copy

        # get information about where the data is the missing value
        missing_val = self.missing_value(name)
        # FIX: np.bool was removed from numpy (1.24+); the builtin bool is the supported dtype spelling
        missing_mask = np.zeros(raw_data_copy.shape, dtype=bool)
        missing_mask[raw_data_copy == missing_val] = True

        # create the scaled version of the data, leaving missing values untouched
        scaled_data_copy = np.array(raw_data_copy, dtype=data_type)
        scaled_data_copy[~missing_mask] = (scaled_data_copy[~missing_mask] * scale_factor) + add_offset #TODO, type truncation issues?

        return scaled_data_copy

    def get_variable_object(self, name):
        """return the pycdf variable object for the named variable"""
        return self.var(name)

    def missing_value(self, name):
        """return the variable's fill value (_FillValue, falling back to
        missing_value), or None if neither attribute is present"""

        variable_object = self.var(name)

        to_return = None
        if hasattr(variable_object, fillValConst1) \
           or \
           hasattr(variable_object, fillValConst2) :
            to_return = getattr(variable_object, fillValConst1,
                                getattr(variable_object, fillValConst2, None))

        return to_return

    def create_new_variable(self, variablename, missingvalue=None, data=None, variabletocopyattributesfrom=None):
        """
        create a new variable with the given name
        optionally set the missing value (fill value) and data to those given

        the created variable will be returned, or None if a variable could not
        be created
        """

        self.redef()

        # if the variable already exists, stop with a warning
        # NOTE(review): the early returns below leave the file in define mode
        # (no matching enddef()) — confirm whether pycdf tolerates this
        if variablename in self.variables().keys() :
            LOG.warning("New variable name requested (" + variablename + ") is already present in file. " +
                        "Skipping generation of new variable.")
            return None

        # if we have no data we won't be able to determine the data type to create the variable
        if (data is None) or (len(data) <= 0) :
            LOG.warning("Data type for new variable (" + variablename + ") could not be determined. " +
                        "Skipping generation of new variable.")
            return None

        dataType = None
        if np.issubdtype(data.dtype, int) :
            dataType = NC.INT
            #print("Picked INT")
        # TODO, at the moment the fill type is forcing me to use a double, when sometimes I want a float
        #elif np.issubdtype(data.dtype, np.float32) :
        #    dataType = NC.FLOAT
        #    print("Picked FLOAT")
        elif np.issubdtype(data.dtype, float) :
            dataType = NC.DOUBLE
            #print("Picked DOUBLE")
        # what do we do if it's some other type?

        # create and set all the dimensions
        dimensions = [ ]
        for dimensionNum, dimSize in enumerate(data.shape) :
            dimensions.append(self.def_dim(variablename + '-index' + str(dimensionNum), dimSize))

        # create the new variable
        newVariable = self.def_var(variablename, dataType, tuple(dimensions))

        # if a missing value was given, use that
        if missingvalue is not None :
            newVariable._FillValue = missingvalue

        # if we have a variable to copy attributes from, do so
        if variabletocopyattributesfrom is not None :
            tocopyfrom = self.get_variable_object(variabletocopyattributesfrom)
            attributes = tocopyfrom.attributes()
            for attribute, value in attributes.items() :
                setattr(newVariable, attribute, value)

        self.enddef()

        # if data was given, use that
        if data is not None :
            newVariable.put(data.tolist())

        return newVariable

    def add_attribute_data_to_variable(self, variableName, newAttributeName, newAttributeValue) :
        """
        if the attribute exists for the given variable, set it to the new value
        if the attribute does not exist for the given variable, create it and set it to the new value
        """
        variableObject = self.get_variable_object(variableName)

        self.redef()

        setattr(variableObject, newAttributeName, newAttributeValue)

        self.enddef()

        return

    def get_variable_attributes (self, variableName) :
        """
        returns all the attributes associated with a variable name
        """

        return self.get_variable_object(variableName).attributes()

    def get_attribute(self, variableName, attributeName) :
        """
        returns the value of the attribute if it is available for this variable, or None
        """
        toReturn = None

        temp_attributes = self.get_variable_attributes(variableName)

        if attributeName in temp_attributes :
            toReturn = temp_attributes[attributeName]

        return toReturn

    def get_global_attribute(self, attributeName) :
        """
        returns the value of a global attribute if it is available or None
        """

        toReturn = None

        if attributeName in self.attributes() :
            toReturn = self.attributes()[attributeName]

        return toReturn
417

418

(no author)'s avatar
(no author) committed
419
420
421
# aliases: files with .nc4 / .cdf suffixes are opened with the same NetCDF wrapper class
nc4 = nc
cdf = nc

422
423
# TODO remove
#FIXME_IDPS = [ '/All_Data/CrIS-SDR_All/ES' + ri + band for ri in ['Real','Imaginary'] for band in ['LW','MW','SW'] ] 
424

(no author)'s avatar
(no author) committed
425
class h5(object):
    """wrapper for HDF5 datasets
    """
    _h5 = None  # the underlying h5py.File handle

    def __init__(self, filename, allowWrite=False):
        """Open the HDF5 file at filename, read-only unless allowWrite is True."""
        mode = 'r'
        if allowWrite :
            mode = 'r+'
        if h5py is None:
            LOG.error('h5py module is not installed and is needed in order to read h5 files')
            assert(h5py is not None)
        self._h5 = h5py.File(filename, mode)

    def __call__(self):
        """return the full paths of all readable datasets in the file"""

        variableList = [ ]
        def testFn (name, obj) :
            # only collect datasets (not groups) that can still report a dtype
            if isinstance(obj, h5py.Dataset) :
                try :
                    tempType = obj.dtype # this is required to provoke a type error for closed data sets

                    variableList.append(name)
                except TypeError :
                    LOG.debug('TypeError prevents the use of variable ' + name
                              + '. This variable will be ignored')

        self._h5.visititems(testFn)

        LOG.debug('variables from visiting h5 file structure: ' + str(variableList))

        return(variableList)

    @staticmethod
    def trav(h5,pth):
        """walk a '/'-separated path down into an h5py file/group, returning
        the object found (empty path components are skipped)"""
        # FIX: reduce is not a builtin in Python 3; import it explicitly
        from functools import reduce
        return reduce( lambda x,a: x[a] if a else x, pth.split('/'), h5)

    # this returns a numpy array with a copy of the full, scaled
    # data for this variable, if the data type must be changed to allow
    # for scaling it will be (so the return type may not reflect the
    # type found in the original file)
    def __getitem__(self, name):
        # defaults
        scale_factor = 1.0
        add_offset = 0.0
        data_type = np.float32 # TODO temporary

        # get the variable object and use it to
        # get our raw data and scaling info
        variable_object = self.get_variable_object(name)
        raw_data_copy = variable_object[:]

        # load the scale factor and add offset
        if ('scale_factor' in variable_object.attrs) :
            scale_factor = variable_object.attrs['scale_factor']
        if ('add_offset' in variable_object.attrs) :
            add_offset = variable_object.attrs['add_offset']

        # don't do lots of work if we don't need to scale things
        if (scale_factor == 1.0) and (add_offset == 0.0) :
            return raw_data_copy

        # get information about where the data is the missing value
        missing_val = self.missing_value(name)
        # FIX: np.bool was removed from numpy (1.24+); the builtin bool is the supported dtype spelling
        missing_mask = np.zeros(raw_data_copy.shape, dtype=bool)
        missing_mask[raw_data_copy == missing_val] = True

        # create the scaled version of the data, leaving missing values untouched
        scaled_data_copy = np.array(raw_data_copy, dtype=data_type)
        scaled_data_copy[~missing_mask] = (scaled_data_copy[~missing_mask] * scale_factor) + add_offset #TODO, type truncation issues?

        return scaled_data_copy

    def get_variable_object(self,name):
        """return the h5py Dataset object at the given path in the file"""
        return h5.trav(self._h5, name)

    def missing_value(self, name):
        """return the dataset's defined fill value, or None if none is set"""

        toReturn = None

        # get the missing value if it has been set
        variableObject = self.get_variable_object(name)
        pListObj = variableObject.id.get_create_plist()
        fillValueStatus = pListObj.fill_value_defined()
        if (h5d.FILL_VALUE_DEFAULT is fillValueStatus) or (h5d.FILL_VALUE_USER_DEFINED is fillValueStatus) :
            temp = np.array((1), dtype=variableObject.dtype)
            pListObj.get_fill_value(temp)
            toReturn = temp

        return toReturn

    def create_new_variable(self, variablename, missingvalue=None, data=None, variabletocopyattributesfrom=None):
        """
        create a new variable with the given name
        optionally set the missing value (fill value) and data to those given

        the created variable will be returned, or None if a variable could not
        be created
        """

        raise IOUnimplimentedError('Unable to create variable in hdf 5 file, this functionality is not yet available.')

        return None

    def add_attribute_data_to_variable(self, variableName, newAttributeName, newAttributeValue) :
        """
        if the attribute exists for the given variable, set it to the new value
        if the attribute does not exist for the given variable, create it and set it to the new value
        """

        raise IOUnimplimentedError('Unable to add attribute to hdf 5 file, this functionality is not yet available.')

        return

    def get_variable_attributes (self, variableName) :
        """
        returns all the attributes associated with a variable name
        """

        return self.get_variable_object(variableName).attrs

    def get_attribute(self, variableName, attributeName) :
        """
        returns the value of the attribute if it is available for this variable, or None
        """
        toReturn = None

        temp_attrs = self.get_variable_attributes(variableName)

        if (attributeName in temp_attrs) :
            toReturn = temp_attrs[attributeName]

        return toReturn

    def get_global_attribute(self, attributeName) :
        """
        returns the value of a global attribute if it is available or None
        """

        toReturn = None

        if attributeName in self._h5.attrs :
            toReturn = self._h5.attrs[attributeName]

        return toReturn
(no author)'s avatar
(no author) committed
581

(no author)'s avatar
(no author) committed
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653



class aeri(object):
    """wrapper for AERI RNC/SUM/CXS/etc datasets
    __call__ lists variable names; __getitem__ loads one variable as a numpy array
    """
    _dmv = None
    _vectors = { }
    _scalars = { }

    @staticmethod
    def _meta_mapping(fp):
        # build a short-name -> meta-id lookup for the file's scalar metadata
        ids = fp.metaIDs()
        names = [fp.queryMetaDescString(1, id_, fp.SHORTNAME) for id_ in ids]
        assert len(ids) == len(names)
        return dict(zip(names, ids))

    def _inventory(self):
        fp = self._dmv
        assert(fp is not None)
        # map short names to ids for both the vector and scalar variables
        self._vectors = { fp.queryVectorDescString(n, fp.SHORTNAME): n for n in fp.vectorIDs() }
        self._scalars = self._meta_mapping(fp)

    def __init__(self, filename, allowWrite=False):
        # AERI files are read-only through this wrapper
        assert(allowWrite==False)
        if dmvlib is None:
            LOG.error('cannot open AERI files without dmv module being available')
            return
        self._dmv = dmvlib.dmv()
        rc = self._dmv.openFile(filename)
        if rc!=0:
            LOG.error("unable to open file, rc=%d" % rc)
            self._dmv = None
        else:
            self._inventory()

    def __call__(self):
        "names of the variables available for comparison"
        return list(self._vectors.keys()) + list(self._scalars.keys())

    def __getitem__(self, name):
        fp = self._dmv
        assert(fp is not None)
        # the DMV_RECORDS environment variable can cap the record count
        if 'DMV_RECORDS' in os.environ:
            nrecs = int(os.environ['DMV_RECORDS'])
            LOG.warning('overriding dmv record count to %d' % nrecs)
        else:
            nrecs = self._dmv.recordCount()
        recrange = range(1, nrecs+1)
        if name in self._vectors:
            vid = self._vectors[name]
            return np.array([ fp.vectorDepValues(rec, vid) for rec in recrange ])
        if name in self._scalars:
            return np.array(fp.metaValueMatrix(recrange, [self._scalars[name]]))
        raise LookupError('cannot find variable %s' % name)

    def get_variable_object(self,name):
        # AERI access goes through the dmv API; no per-variable object exists
        return None

    def missing_value(self, name):
        # AERI data uses NaN to mark missing values
        return float('nan')

    def create_new_variable(self, variablename, missingvalue=None, data=None, variabletocopyattributesfrom=None):
        """
        create a new variable with the given name
        optionally set the missing value (fill value) and data to those given

        the created variable will be returned, or None if a variable could not
        be created
        """

        raise IOUnimplimentedError('Unable to create variable in aeri file, this functionality is not yet available.')

        return None

    def add_attribute_data_to_variable(self, variableName, newAttributeName, newAttributeValue) :
        """
        if the attribute exists for the given variable, set it to the new value
        if the attribute does not exist for the given variable, create it and set it to the new value
        """

        raise IOUnimplimentedError('Unable to add attribute to aeri file, this functionality is not yet available.')

        return

    def get_variable_attributes (self, variableName) :
        """
        returns all the attributes associated with a variable name
        """
        toReturn = { }

        # TODO
        LOG.warn('Glance does not yet support attribute retrieval in AERI files. None will be used.')

        return toReturn

    def get_attribute(self, variableName, attributeName) :
        """
        returns the value of the attribute if it is available for this variable, or None
        """
        toReturn = None

        # TODO
        LOG.warn('Glance does not yet support attribute retrieval in AERI files. None will be used.')

        return toReturn

    def get_global_attribute(self, attributeName) :
        """
        returns the value of a global attribute if it is available or None
        """

        toReturn = None

        # TODO
        LOG.warn('Glance does not yet support attribute retrieval in AERI files. None will be used.')

        return toReturn
(no author)'s avatar
(no author) committed
703
704
705
706
707

# handle the variety of file suffixes by building aliases to aeri class
# NOTE: 'sum' and 'csv' shadow builtin/stdlib names within this module
cxs = rnc = cxv = csv = spc = sum = uvs = aeri


(no author)'s avatar
   
(no author) committed
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
def _search_xml(pathname):
    xs = '.xml'
    yield pathname + xs
    yield os.path.splitext(pathname)[0] + xs
    yield pathname.replace('-', '_') + xs
    yield os.path.splitext(pathname)[0].replace('-', '_') + xs

class jpss_adl(object):
    """wrapper for JPSS ADL BLOBs 
    This is a somewhat unique case in that the BLOB loader requires both an XML path and a BLOB path.
    In this case, it is assumed that a softlinked pathname.xml exists for a given pathname.
    FORMAT=jpss_adl glance stats truth/ATMS-FSDR.BE ATMS-FSDR
    """
    _blob = None  # the mapped adl_blob object, None if mapping failed

    def __init__(self, filename, allowWrite=False):
        assert(allowWrite==False)
        # find an existing .xml layout file among the candidate names
        for xmlname in _search_xml(filename):
            if not os.path.exists(xmlname): 
                continue
            LOG.info('using %s for %s' % (xmlname, filename))
            break
        if not os.path.exists(xmlname):
            LOG.error(xmlname + ' needs to provide layout for ' + filename)
            return            
        if adl_blob is None:
            LOG.error('cannot open JPSS ADL files without adl_blob module in $PYTHONPATH')
            return
        # choose the byte order from the filename suffix, defaulting to native
        if filename.lower().endswith('.be'):
            endian = adl_blob.BIG_ENDIAN
        # FIX: this branch previously re-tested '.be', so '.le' files silently
        # fell through to NATIVE_ENDIAN
        elif filename.lower().endswith('.le'):
            endian = adl_blob.LITTLE_ENDIAN
        else:
            endian = adl_blob.NATIVE_ENDIAN
        LOG.debug('endianness of %s is %s' % (filename, endian))
        self._blob = adl_blob.map(xmlname, filename, writable=False, endian=endian)        

    def __call__(self):
        "names of the fields available in the blob"
        fieldnames = [name for name,field in self._blob._fields_]
        return fieldnames

    def __getitem__(self, name):
        """return the named field as a numpy array (singletons are wrapped)"""
        field = getattr(self._blob, name)
        if not hasattr(field,'_length_'): # FUTURE: is this rigorous? 
            LOG.info('creating numpy array out of singleton value for %s' % name)
            return np.array([field])
        return np.array(field)

    def get_variable_object(self,name):
        # blob access goes through attribute lookup; no per-variable object exists
        return None

    def missing_value(self, name):
        # ADL blobs use NaN to mark missing values
        return float('nan')

    def create_new_variable(self, variablename, missingvalue=None, data=None, variabletocopyattributesfrom=None):
        """
        create a new variable with the given name
        optionally set the missing value (fill value) and data to those given

        the created variable will be returned, or None if a variable could not
        be created
        """

        raise IOUnimplimentedError('Unable to create variable in JPSS ADL file, this functionality is not yet available.')

        return None

    def add_attribute_data_to_variable(self, variableName, newAttributeName, newAttributeValue) :
        """
        if the attribute exists for the given variable, set it to the new value
        if the attribute does not exist for the given variable, create it and set it to the new value
        """

        raise IOUnimplimentedError('Unable to add attribute to JPSS ADL file, this functionality is not yet available.')

        return

    def get_variable_attributes (self, variableName) :
        """
        returns all the attributes associated with a variable name
        """
        toReturn = { }

        # TODO
        LOG.warn('Glance does not yet support attribute retrieval in JPSS ADL files. None will be used.')

        return toReturn

    def get_attribute(self, variableName, attributeName) :
        """
        returns the value of the attribute if it is available for this variable, or None
        """
        toReturn = None

        # TODO
        LOG.warn('Glance does not yet support attribute retrieval in JPSS ADL files. None will be used.')

        return toReturn

    def get_global_attribute(self, attributeName) :
        """
        returns the value of a global attribute if it is available or None
        """

        toReturn = None

        # TODO
        LOG.warn('Glance does not yet support attribute retrieval in JPSS ADL files. None will be used.')

        return toReturn
(no author)'s avatar
   
(no author) committed
818
819
820



(no author)'s avatar
(no author) committed
821

822
def open(pathname, allowWrite=False):
    """Open a data file with the wrapper class matching its suffix.

    The lowercased file suffix selects one of the wrapper classes defined in
    this module (hdf, nc/nc4/cdf, h5, the aeri aliases, jpss_adl, ...).  When
    the suffix is missing or unknown, the FORMAT environment variable is used
    instead.

    pathname   -- path of the file to open
    allowWrite -- also open the file for writing when True

    Raises ValueError when no handler can be determined.
    NOTE: this intentionally shadows the builtin open() within this module.
    """
    suffix = os.path.splitext(pathname)[1][1:].lower()
    if (not suffix) or (suffix not in globals()):
        suffix = os.environ.get('FORMAT', None)
        # FIX: previously an unknown suffix with no/invalid FORMAT crashed with
        # an opaque KeyError/TypeError from globals()[suffix]; fail clearly
        if (suffix is None) or (suffix not in globals()):
            raise ValueError('unable to determine format for file: %s' % (pathname,))
        LOG.info('overriding unknown load format to "%s"' % suffix)
    cls = globals()[suffix]
    return cls(pathname, allowWrite=allowWrite)
(no author)'s avatar
(no author) committed
829
830
831
832
833
834



# when run as a script, execute this module's doctests
if __name__=='__main__':
    import doctest
    doctest.testmod()