#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division
from builtins import str
from builtins import range
from past.utils import old_div
import sys
import os
import datetime
import glob
import netCDF4
import numpy as np
import numpy.ma as ma
import logging
import warnings
from collections import OrderedDict

logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

def readFile(fname):
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        data={}
        #list_var=('owiLon','owiLat','owiHeading','owiEcmwfWindSpeed','owiWindSpeed_IPF', 'owiPreProcessing/estimWindDir200', 'owiPreProcessing/MixWindDir200', 'owiPreProcessing/CoefMixWindDir200', 'owiEcmwfWindDirection','owiWindDirection_mod1','owiWindSpeed_mod1','owiWindDirection_mod2','owiWindSpeed_mod2','owiWindDirection_mod3','owiWindSpeed_mod3','owiPreProcessing/FilterBinary200','owiLandFlag','owiIncidenceAngle')
        try:
            nc=netCDF4.Dataset(fname)
        except Exception as e:
            logger.warning('skipping unreadable %s : %s' % (fname, str(e)))
            return None
        
        # read owiLon to get valid shape
        data['owiLon']=nc.variables['owiLon'][::]
        refDims=nc.variables['owiLon'].dimensions
        # read groups
        for groupName in [ None ] + list(nc.groups.keys()):
            if groupName:
                group=nc.groups[groupName]
                groupNamePref="%s/" % groupName
            else:
                group=nc
                groupNamePref=""
                
            groupAttrs=group.ncattrs()
            for varName in group.variables:
                var=group[varName]
                dims=var.dimensions
                if dims == refDims:
                    data["%s%s" % (groupNamePref,varName)] = var[::]
                else:
                    # some vars are 3D: 
                    try:
                        if dims[1:] == refDims:
                            values3D=var[::]
                            for islice in range(np.shape(values3D)[0]):
                                value=values3D[islice,:,:]
                                try:
                                    # try to find sliceName from varName and ncattrs
                                    sliceName=group.getncattr([ groupAttr for groupAttr in groupAttrs if groupAttr in varName ][0]).split(', ')[islice]
                                except:
                                    sliceName="%d" % islice
                                data["%s%s/%s" % (groupNamePref,varName, sliceName)]=value 
                    except:
                        pass
        
        nc.close()
    
    return data
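
# Hedged usage sketch (illustrative only; the file path is hypothetical, the variable
# names are those referenced elsewhere in this module):
#
#     data = readFile('/path/to/s1-owi-l2-file.nc')
#     if data is not None:
#         wind_speed = data['owiWindSpeed']                 # 2D array, same shape as data['owiLon']
#         nrcs_detrend = data.get('owiPreProcessing/ND_2')  # variable read from a sub-group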

def readMeta(fname):
    data={}
    #list_var=('owiLon','owiLat','owiHeading','owiEcmwfWindSpeed','owiWindSpeed_IPF', 'owiPreProcessing/estimWindDir200', 'owiPreProcessing/MixWindDir200', 'owiPreProcessing/CoefMixWindDir200', 'owiEcmwfWindDirection','owiWindDirection_mod1','owiWindSpeed_mod1','owiWindDirection_mod2','owiWindSpeed_mod2','owiWindDirection_mod3','owiWindSpeed_mod3','owiPreProcessing/FilterBinary200','owiLandFlag','owiIncidenceAngle')
    attrs = ['long_name','units','valid_range','flag_values','flag_meanings']
    try:
        nc=netCDF4.Dataset(fname)
    except Exception as e:
        logger.warning('skipping unreadable %s : %s' % (fname, str(e)))
        return None
    
    # read globals attr
    for attr in nc.ncattrs():
        data[attr]=nc.getncattr(attr)
    
    refDims=nc.variables['owiLon'].dimensions
            
    # read groups
    for groupName in [ None ] + list(nc.groups.keys()):
        if groupName:
            group=nc.groups[groupName]
            groupNamePref="%s/" % groupName
        else:
            group=nc
            groupNamePref=""
            
        groupAttrs=group.ncattrs()
        for varName in group.variables:
            var=group[varName]
            data["%s%s" % (groupNamePref,varName)]={}
            for _attr in attrs:
                try:
                    data["%s%s" % (groupNamePref,varName)][_attr]=getattr(var,_attr)
                except:
                    data["%s%s" % (groupNamePref,varName)][_attr]=None
            # compatibility old version
            data["%s%s" % (groupNamePref,varName)]['description'] = data["%s%s" % (groupNamePref,varName)]['long_name']

            dims=var.dimensions
            if dims != refDims:
                
                # some vars are 3D: 
                try:
                    if dims[1:] == refDims:
                        for islice in range(var.shape[0]):
                            try:
                                # try to find sliceName from varName and ncattrs
                                sliceName=group.getncattr([ groupAttr for groupAttr in groupAttrs if groupAttr in varName ][0]).split(', ')[islice]
                            except:
                                sliceName="%d" % islice
                            data["%s%s/%s" % (groupNamePref,varName, sliceName)]={}
                            # populate the per-slice entry created above (not the base variable again)
                            for _attr in attrs:
                                try:
                                    data["%s%s/%s" % (groupNamePref,varName,sliceName)][_attr]=getattr(var,_attr)
                                except:
                                    data["%s%s/%s" % (groupNamePref,varName,sliceName)][_attr]=None
                            # compatibility old version
                            data["%s%s/%s" % (groupNamePref,varName,sliceName)]['description'] = data["%s%s/%s" % (groupNamePref,varName,sliceName)]['long_name']

                except:
                    pass
    
    nc.close()

    return data
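
# Hedged usage sketch (illustrative only; the path is hypothetical, the keys follow
# the structure built above):
#
#     meta = readMeta('/path/to/s1-owi-l2-file.nc')
#     if meta is not None:
#         units = meta['owiWindSpeed']['units']           # per-variable attributes
#         descr = meta['owiWindSpeed']['description']     # alias of 'long_name'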

def writeLight(fname,data,date,meta=None):
    """lightweigth file format writer    
    specs from https://github.com/Unidata/EC-netCDF-CF/blob/master/swath/swath.adoc#image-swath
    xarray reader exemple:
      * only root group:
        xarray.open_dataset(fname).transpose().squeeze() ####.rename_dims({'X':'x','Y':'y'})
      * all groups:
        xarray.merge([xarray.open_dataset(fname),xarray.open_dataset(fname,group='preprocessing')])
      
      
    """
    # version number
    names_mapping = OrderedDict([
        ('owiLon'    , 'lon'),
        ('owiLat'    , 'lat'),
        ('owiWindSpeed' , 'wind_speed'),
        ('owiWindDirection' , 'wind_to_direction'),
#        ('owiWindQuality' , 'quality_flag'),
        ('owiMask' , 'mask_flag'),
        ('owiPreProcessing/ND_2','nrcs_detrend_co'), 
        ('owiPreProcessing/ND_2_cross' , 'nrcs_detrend_cr'),
        # the heterogeneity mask entries below should be commented out once sarwing v3.0 is stable
        ('owiPreProcessing/FilterBinary_1' , 'heterogeneity_co_h'),
        ('owiPreProcessing/FilterBinary_2' , 'heterogeneity_co_m'),
        ('owiPreProcessing/FilterBinary_3' , 'heterogeneity_co_l'),
        ('owiPreProcessing/FilterBinary_1_cross' , 'heterogeneity_cr_h'),
        ('owiPreProcessing/FilterBinary_2_cross' , 'heterogeneity_cr_m'),
        ('owiPreProcessing/FilterBinary_3_cross' , 'heterogeneity_cr_l'),
        
        ('owiWindFilter' , 'heterogeneity_mask'), 
        ('owiPreProcessing/wind_streaks_orientation_stddev' , 'wind_streaks_orientation_stddev'),
        ('owiPreProcessing/wind_streaks_orientation' , 'wind_streaks_orientation'), 
        ('owiNrcs' , 'nrcs_co'),
        ('owiNrcs_cross' , 'nrcs_cr'),
        ('owiElevationAngle' , 'elevation_angle'),
        ('owiIncidenceAngle' , 'incidence_angle'),
        ])
    # global attrs
    global_attrs = [ 'sourceProduct' , 'missionName', 'polarisation', 'footprint','l2ProcessingUtcTime']
    attrs = ['long_name','units','valid_range','flag_values','flag_meanings']
    # overwrite meta
    meta_over = {
        'wind_speed' : {
            'units' : "m/s",
            'long_name' : "Ocean 10m Wind speed from co- and cross- polarization"
        },
        'lon' : {
            'standard_name' : 'longitude',
            'units'         : 'degrees_east'
        },
        'lat' : {
            'standard_name' : 'latitude',
            'units'         : 'degrees_north'
        },
        'wind_to_direction': {
            'long_name'     : 'Wind to direction (oceanographic convention)',
            'units'         : 'degrees',
#            'usage_level'   : "basic"
            }
    }
    # missing input will be forced to NaN/not implemented
    meta_not_implemented = {
        #'wind_streaks_orientation_stddev' : None
    }
    x,y = data['owiLon'].T.shape
    nc=netCDF4.Dataset(fname, 'w', format='NETCDF4')
    nc.createDimension('time',1)
    nc.createDimension('y',y)
    nc.createDimension('x',x)
    nc.Conventions = 'CF-1.6'
    nc.title = 'SAR ocean surface wind field'
    nc.institution = 'IFREMER/CLS'
    nc.reference = 'Mouche Alexis, Chapron Bertrand, Knaff John, Zhao Yuan, Zhang Biao, Combot Clement (2019). Copolarized and Cross‐Polarized SAR Measurements for High‐Resolution Description of Major Hurricane Wind Structures: Application to Irma Category 5 Hurricane. Journal Of Geophysical Research-oceans, 124(6), 3905-3922. https://doi.org/10.1029/2019JC015056'
    nc.measurementDate = date.strftime("%Y-%m-%dT%H:%M:%SZ")
    #nc.version = "???" # TODO : add version string
    for global_attr in global_attrs:
        try:
            setattr(nc, global_attr, meta[global_attr])
        except:
            logger.warning('skipping missing attr %s' % global_attr)
    time = nc.createVariable('time',np.int64,dimensions=('time'))
    time.standard_name = 'time'
    epoch = datetime.datetime(1970, 1, 1)
    time.units = "seconds since %s" % epoch.strftime("%Y-%m-%d %H:%M:%S")
    time.calendar = "gregorian"
    #time[::]  = int(date.timestamp())
    time[::] = (date - epoch).total_seconds() # portable alternative to date.timestamp(), which needs python >= 3.3
    for in_name in names_mapping.keys():
        out_name = names_mapping[in_name]
        if in_name in data:
            value = data[in_name].T
            try:
                fill_value=data[in_name].fill_value
            except:
                fill_value=None
            # transpose dims for correct netcdf ordering
            ncvar = nc.createVariable(out_name,value.dtype,dimensions=('time','y','x'),fill_value=fill_value)
        
            for _attr in attrs:           
                try:
                    try:
                        setattr(ncvar,_attr,meta_over[in_name][_attr])
                    except:
                        if meta[in_name][_attr] is not None:
                            setattr(ncvar,_attr, meta[in_name][_attr])
                except:
                    pass
            ncvar[::]=value.T
        else:
            if out_name in meta_not_implemented:
                logger.warning('Forcing not implemented var %s' % out_name)
                ncvar = nc.createVariable(out_name,np.float32,dimensions=('time','y','x'),fill_value=np.nan)
                setattr(ncvar,"long_name","Not yet impemented")
            
            else:
                logger.warning('skipping missing var %s' % in_name)
                continue
        if in_name not in ['owiLon','owiLat']:
            ncvar.coordinates="time lat lon"
        
        if out_name in meta_over:
            for tag in meta_over[out_name]:
                ncvar.setncattr(tag, meta_over[out_name][tag])
        
    #unable to set crs for gdal     
    #crs = nc.createVariable("crs",int)
    #crs.grid_mapping_name = "latitude_longitude"
    #crs.semi_major_axis = 6378137 
    #crs.inverse_flattening = 298.2572235604902
    #crs.longitude_of_prime_meridian = 0.0 
    
    
    
    nc.close()
    
    return 
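
# Hedged usage sketch chaining the helpers in this module (paths are hypothetical; the
# output variable names follow names_mapping):
#
#     l2_path = '/path/to/s1-owi-l2-file.nc'
#     writeLight('/tmp/owi_light_sw.nc', readFile(l2_path), getDateFromFname(l2_path),
#                meta=readMeta(l2_path))
#     # the result can then be opened as shown in the docstring, e.g.
#     # xarray.open_dataset('/tmp/owi_light_sw.nc').wind_speed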


def nclight2gridded(filein,fileout):
    import numpy as np
    import xarray as xr
    import rioxarray as rxr
    import geo_shapely
    from pyproj.transformer import Transformer
    from shapely.geometry import Point
    from scipy import interpolate
    from scipy import ndimage
    
    meta_over = {
        'x' : {
            'standard_name' : "projection_x_coordinate",
            'long_name' : 'Easting',
            'units'     : 'm'
            },
        'y' : {
            'standard_name' : "projection_y_coordinate",
            'long_name' : 'Northing',
            'units'     : 'm'
            },
        }
    
    ds_sw = xr.open_dataset(filein,mask_and_scale=False)
    x_center = ds_sw.x.size // 2
    y_center = ds_sw.y.size // 2
    
    lon_center = float(ds_sw.lon[0,y_center,x_center])
    lat_center = float(ds_sw.lat[0,y_center,x_center])

    # get aeqd_crs at image center
    crs= geo_shapely.get_aeqd_crs(Point(lon_center,lat_center))
     
    # convert lon/lat in crs
    fromlonlat=Transformer.from_crs("epsg:4326",crs,always_xy=True) # always_xy=False is by default in pyproj 6+
    x_proj,y_proj=fromlonlat.transform(ds_sw.lon.values,ds_sw.lat.values)
    y_proj = y_proj.astype(float)
    x_proj = x_proj.astype(float)
    
    resolution = 1000 # in meters

    # compute grid size     
    x_size = int(np.ceil(np.max([-x_proj.min(),x_proj.max()])/resolution))*2+1
    y_size = int(np.ceil(np.max([-y_proj.min(),y_proj.max()])/resolution))*2+1
    
    
    # grid coordinates
    # do not use linspace: do it manually, so computing the inverse mapping is easy
    #x = np.linspace(-(x_size-1)/2.,(x_size-1)/2.,num=x_size) * resolution
    #y = np.linspace(-(y_size-1)/2.,(y_size-1)/2.,num=y_size) * resolution
    x = (np.arange(x_size)-int((x_size-1)/2)) * resolution
    y = (np.arange(y_size)-int((y_size-1)/2)) * resolution
    # so given an x (resp y), in projection meters, we can have grid index with x / resolution
    x_idx = np.round((x_proj / resolution) + (x_size-1)/2 ).astype(int)
    y_idx = np.round((y_proj / resolution) + (y_size-1)/2 ).astype(int)
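    # worked example of the mapping above (assuming resolution stays 1000 m): a point at
    # x_proj = -2499.0 m gives np.round(-2.499) + (x_size-1)/2, i.e. the cell two columns
    # left of the grid centre, while x_proj = 0 always maps to the centre column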
    
    # add x_proj and y_proj to original ds_sw, in valid coordinates (ie rounded at resolution)
    #ds_sw['x_proj'] = xr.DataArray(x_proj, dims=['time','Y', 'X'])
    #ds_sw['y_proj'] = xr.DataArray(y_proj, dims=['time','Y', 'X'])
    
    # get lon/lat for each grid cell
    #x_grid,y_grid = np.meshgrid(x,y)
    x_grid,y_grid = np.meshgrid(x,y)
    tolonlat=Transformer.from_crs(crs,"epsg:4326",always_xy=True)
    lon,lat=tolonlat.transform(x_grid,y_grid)
    
    # add time dim
    lon = lon.reshape([1] + list(lon.shape)).astype(np.float32)
    lat = lat.reshape([1] + list(lat.shape)).astype(np.float32)
    
    # generate dataarray
    da_lon = xr.DataArray(lon,coords=[ds_sw.time, y, x], dims=['time','y', 'x'])
    da_lat = xr.DataArray(lat,coords=[ds_sw.time, y, x], dims=['time','y', 'x'])
    
    # xarray dataset
    ds_gd = xr.Dataset({'lon' : da_lon,'lat' : da_lat})
    ds_gd.attrs = ds_sw.attrs
    
    # if resolution is too small, some gaps will occur in result
    # so we find gaps to be able to fill them later
    np_var = np.zeros(da_lon.shape[1:])
    # fill with 1 where the data will go
    np_var[y_idx,x_idx] = 1
    # mask is boolean grid where we need to interp
    mask = np_var == 0
    # xx and yy are all index
    xx, yy = np.meshgrid(np.arange(np_var.shape[1]), np.arange(np_var.shape[0]))
    # x_valid and y_valid are index of valid data
    #x_valid = xx[~mask]
    #y_valid = yy[~mask]
    # binary_closing and data to find isolated points that need interpolation
    mask_interp = ndimage.binary_closing(~mask) & mask
    # coordinates where interpolation is needed
    x_interp = xx[mask_interp]
    y_interp = yy[mask_interp]
    
    # variable loop
    for var in list((set(ds_sw.coords.keys()) | set(ds_sw.keys())).difference(set(['time']))):
        # set up dataarray. dtype is forced to np.float64, because we use Nan for interpolation
        #ds_sw[var].values.dtype
        da_var = xr.DataArray(np.full(da_lon.shape, ds_sw[var]._FillValue, dtype=np.float64),coords=[ds_sw.time,y, x], dims=['time','y', 'x'])
        # fill dataarray
        da_var.values[0,y_idx,x_idx] = ds_sw[var].values
        # save nan, as they  are going to be interpolated
        nan_mask = np.isnan(da_var.values)
        da_var.values[nan_mask] = ds_sw[var]._FillValue # not really needed
        try:
            da_var.values[0,y_interp,x_interp] = np.nan
            da_var = da_var.ffill(dim='x',limit=2)
            da_var = da_var.ffill(dim='y',limit=2)
        except Exception as e:
            logging.warning("unable to fill nan on %s: %s" % (var,str(e)))
        # restore nan
        da_var.values[nan_mask] = np.nan
        # set original dtype
        da_var = da_var.astype(ds_sw[var].values.dtype)
        da_var.attrs = ds_sw[var].attrs
        da_var.rio.set_spatial_dims('x','y',inplace=True)
        ds_gd[var] = da_var
        
    for var in list(set(ds_gd.coords.keys()) | set(ds_gd.keys())):
        if var in meta_over:
            # python 3 only (merge dict )
            # ds_gd[var].attrs = { **ds_sw[var].attrs,**meta_over[var] }
            # python 2 compat:
            ds_gd[var].attrs.update(meta_over[var])
    
                
    ds_gd.rio.set_spatial_dims('x','y', inplace=True)
    ds_gd.rio.write_crs(crs, inplace=True)
    ds_gd.to_netcdf(fileout,format='NETCDF4_CLASSIC')
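
# Hedged usage sketch (paths are hypothetical): regrid a swath light file produced by
# writeLight onto the 1 km AEQD grid built above.
#
#     nclight2gridded('/tmp/owi_light_sw.nc', '/tmp/owi_light_gd.nc')
#     # the output carries projected x/y coordinates and a CRS written by rioxarray,
#     # and can be opened with xarray.open_dataset('/tmp/owi_light_gd.nc')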

def getAttribute(fname, attr):
    try:
        nc=netCDF4.Dataset(fname)
    except Exception as e:
        logger.warning('skipping unreadable %s : %s' % (fname, str(e)))
        return None
    
    if attr in nc.ncattrs():
        return getattr(nc, attr)
    return None

def getFootprint(fname):
    try:
        nc=netCDF4.Dataset(fname)
    except Exception as e:
        logger.warning('skipping unreadable %s : %s' % (fname, str(e)))
        return None
    
    if "footprint" in nc.ncattrs():
        return nc.footprint
    else:
        return None    
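
# Hedged usage sketch for the two attribute helpers above (the path is hypothetical):
#
#     mission = getAttribute('/path/to/s1-owi-l2-file.nc', 'missionName')
#     footprint = getFootprint('/path/to/s1-owi-l2-file.nc')   # None if the attribute is absent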
    
    
def getDateFromFname(fname):
    tmp=os.path.basename(fname).split('-')
    startDate=datetime.datetime.strptime(tmp[4],"%Y%m%dt%H%M%S")
    stopDate=datetime.datetime.strptime(tmp[5],"%Y%m%dt%H%M%S")
    date=startDate+old_div((stopDate-startDate),2)
    return date
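
# Hedged example (the filename is hypothetical but follows the dash-separated pattern
# parsed above, with start/stop dates in fields 4 and 5):
#
#     getDateFromFname('s1a-iw-owi-cc-20190101t120000-20190101t120030-012345-ABCDEF.nc')
#     # -> datetime.datetime(2019, 1, 1, 12, 0, 15), the mid-acquisition time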

def getDataAtCoord(data,lon,lat):
    import geo
    dataAtCoord={}
    #logger.info("lon : %s" % str(lon))
    landFlag=data['owiLandFlag']
    landFlag[landFlag>0]=1
    Mask = landFlag
    # Mask[Mask>0]=1
    # Mask1=data['owiLandFlag']
    # Mask2=np.logical_not(data['FilterBinary200'])
    lons = ma.array(data['owiLon'],mask=Mask)
    lats = ma.array(data['owiLat'],mask=Mask)
    # lons = ma.array(data['owiLon'],mask=Mask2)
    # lats = ma.array(data['owiLat'],mask=Mask2)
    # lons = data['owiLon']
    # lats = data['owiLat']
    dist,bearing=geo.haversine(lon, lat,lons, lats)
    mindist=np.min(dist)
    imindist=np.argmin(dist)
    if mindist > 20: #
        logger.warn("Buoy to far from sar image (%s km)" % mindist)
        return None
    data['dist_km']=mindist

    for key in data:
        try:
            dataAtCoord[key]=np.ravel(data[key])[imindist]
        except:
            dataAtCoord[key]=data[key]

        # convert dir in image conv to oceano convention
        # if ("Dir" in key) and (not key.startswith("DirectionModifie")) and (not key.startswith("Coef")) :
        if ("Dir200" in key) and (not key.startswith("DirectionModifie")) and (not key.startswith("Coef")) :
            if dataAtCoord[key] >180:
                dataAtCoord[key]=dataAtCoord[key]-180
            else :
                dataAtCoord[key]=dataAtCoord[key]+180
    return dataAtCoord
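
# Hedged usage sketch (coordinates are hypothetical): extract the pixel nearest to a
# buoy position from a file read with readFile.
#
#     data = readFile('/path/to/s1-owi-l2-file.nc')
#     at_buoy = getDataAtCoord(data, -38.5, 45.2)      # lon, lat in degrees
#     if at_buoy is not None:
#         speed = at_buoy['owiWindSpeed']
#         dist = at_buoy['dist_km']                    # km to the nearest unmasked pixel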


# Convert an L2C file system path into the corresponding L2M file system path
# INPUT:
#     path : a full L2C path, or an iterable of such paths (string or list of strings)
#     resolution : resolution wanted for the L2M path
# Return:
#    A path or a list of paths (same type as the path parameter) containing the L2M paths
def L2CtoL2M(path, resolution):
    if type(path) == str:
        pathList = [path]
    else:
        pathList = path

    pathList = [p.replace("L2C", "L2M").replace("_CC_", "_CM_").replace("-cc-", "-cm-") for p in pathList]
    pathList = [p.split("/") for p in pathList]
    for p in pathList:
        filenameSplit = p[-1].split("-")
        # Set the field which indicates the file resolution to the needed resolution.
        # Adding leading zeros to keep field length
        filenameSplit[-2] = str(resolution).zfill(len(filenameSplit[-2]))
        p[-1] = "-".join(filenameSplit)
    pathList = ["/".join(p) for p in pathList]

    if type(path) == str:
        return pathList[0]
    else:
        return pathList
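
# Hedged example (placeholder path; only the L2C->L2M substitutions and the rewrite of
# the second-to-last dash-separated field are illustrated):
#
#     L2CtoL2M('/some/dir/L2C/xxx-cc-yyy-05000-zzz.nc', 25000)
#     # -> '/some/dir/L2M/xxx-cm-yyy-25000-zzz.nc'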


# Convert an L2C file system path into the corresponding L2C light / L2M light file system path
# INPUT:
#     path (string or list of string) : a full L2C path, or an iterable of such paths
#     resolution (int) : resolution wanted for the L2M path. If not provided, the L2C light path is returned
# Return:
#    A path or a list of paths (same type as the path parameter) containing the L2C light / L2M light paths
def L2CtoLight(path, resolution=None , mode='sw'):
    if type(path) == str:
        pathList = [path]
    else:
        pathList = path

    pathsOut = []
    for p in pathList:
        base = os.path.dirname(p)
        if resolution:
            addPath = "post_processing/nclight_L2M"
        else:
            addPath = "post_processing/nclight"
        newPath = os.path.join(base, addPath)
        newNcFile = os.path.splitext(os.path.basename(p))[0] + "_%s.nc" % mode
        if resolution:
            newNcFile = newNcFile.replace("-cc-", "-cm-")
            filenameSplit = newNcFile.split("-")
            # Set the field which indicates the file resolution to the needed resolution.
            # Adding leading zeros to keep field length
            filenameSplit[-2] = str(resolution).zfill(len(filenameSplit[-2]))
            newNcFile = "-".join(filenameSplit)
        fullNcPath = os.path.join(newPath, newNcFile)
        pathsOut.append(fullNcPath)

    if type(path) == str:
        return pathsOut[0]
    else:
        return pathsOut
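
# Hedged example (same placeholder path layout as above; the 'gd' mode string is only
# illustrative):
#
#     L2CtoLight('/some/dir/L2C/xxx-cc-yyy-05000-zzz.nc')
#     # -> '/some/dir/L2C/post_processing/nclight/xxx-cc-yyy-05000-zzz_sw.nc'
#     L2CtoLight('/some/dir/L2C/xxx-cc-yyy-05000-zzz.nc', resolution=25000, mode='gd')
#     # -> '/some/dir/L2C/post_processing/nclight_L2M/xxx-cm-yyy-25000-zzz_gd.nc'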