warnings.filterwarnings("ignore")
from cmr import CollectionQuery, GranuleQuery
from pyhdf.SD import SD, SDC
import scipy.io as sio
from itertools import groupby
from subprocess import check_output, call

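# The module header is not fully shown in this listing; the imports below are a
# plausible reconstruction inferred from the names used later (np, pd, nc, h5py,
# requests, datetime, time, glob, re, os, urlparse, json, warnings). The Dict
# container used throughout is assumed to come from a project utility module
# (a dict subclass allowing attribute access such as files.geo) and is therefore
# not imported here.
import datetime, glob, json, os, re, time, urlparse, warnings
import requests
import h5py
import numpy as np
import pandas as pd
import netCDF4 as nc
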
def search_api(sname,bbox,time,maxg=50,platform="",version=""):
    """
    API search of the different satellite granules

    :param sname: short name
    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param platform: string with the platform
    :param version: string with the version
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    search = api.parameters(
    search = api.parameters(
    search = api.parameters(
    search = api.parameters(
    print "%s gets %s hits in this range" % (sname,sh)
        print "The number of hits %s is larger than the limit %s." % (sh,maxg)
        print "Use a reduced bounding box or a reduced time interval."
    granules = api.get(sh)

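# Hedged usage sketch for search_api (not part of the original module): the bounding
# box is a list of (lon,lat) vertices and time is an ISO interval, as built in
# retrieve_af_data below; the values here are made up for illustration.
#   bbox = [(-113.85,46.1),(-113.85,45.4),(-113.2,45.4),(-113.2,46.1),(-113.85,46.1)]
#   time = ("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z")
#   mod14 = search_api("MOD14",bbox,time,maxg=10,platform="Terra")
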
def search_archive(url,prod,time,grans):
    """
    Archive search of the different satellite granules

    :param url: base url of the archive domain
    :param prod: string of product with version, for instance: '5000/VNP09'
    :param time: time interval (init_time,final_time)
    :param grans: granules of the geolocation metadata
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-01-03
    """
    ids=['.'.join(k['producer_granule_id'].split('.')[1:3]) for k in grans] # satellite ids in bounding box
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    fold=np.unique(['%d/%03d' % (date.timetuple().tm_year,date.timetuple().tm_yday) for date in dates])
    urls=[url+'/'+prod+'/'+f for f in fold]
        js=requests.get(u+'.json').json()
            arg=np.argwhere(np.array(ids)=='.'.join(j['name'].split('.')[1:3]))
                g.links=[{'href':u+'/'+g.name}]
                g.time_start=grans[ar]["time_start"]
                g.time_end=grans[ar]["time_end"]
    except Exception as e:
        print "warning: some JSON request failed"
    print "%s gets %s hits in this range" % (prod.split('/')[-1],len(granules))

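# Worked example of the folder layout built above (illustration only): for
# time=("2018-12-05T00:00:00Z","2018-12-06T00:00:00Z") and prod='5000/VNP09',
# fold is ['2018/339','2018/340'] (year/day-of-year) and each entry of urls looks
# like url+'/5000/VNP09/2018/339', whose '.json' directory listing is then queried.
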
def get_meta(bbox,time,maxg=50,burned=False):
    """
    Get all the metadata from the API for all the necessary products

    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :return granules: dictionary with the metadata of all the products

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    #MOD14: MODIS Terra fire data
    granules.MOD14=search_api("MOD14",bbox,time,maxg,"Terra")
    #MOD03: MODIS Terra geolocation data
    granules.MOD03=search_api("MOD03",bbox,time,maxg,"Terra","6")
    #MOD09: MODIS Atmospherically corrected surface reflectance
    #granules.MOD09=search_api("MOD09",bbox,time,maxg,"Terra","6")
    #MYD14: MODIS Aqua fire data
    granules.MYD14=search_api("MYD14",bbox,time,maxg,"Aqua")
    #MYD03: MODIS Aqua geolocation data
    granules.MYD03=search_api("MYD03",bbox,time,maxg,"Aqua","6")
    #MYD09: MODIS Atmospherically corrected surface reflectance
    #granules.MYD09=search_api("MYD09",bbox,time,maxg,"Terra","6")
    #VNP14: VIIRS fire data, res 750m
    granules.VNP14=search_api("VNP14",bbox,time,maxg)
    #VNP03MODLL: VIIRS geolocation data, res 750m
    granules.VNP03=search_api("VNP03MODLL",bbox,time,maxg)
    #VNP14hi: VIIRS fire data, res 375m
    #granules.VNP14hi=search_api("VNP14IMGTDL_NRT",bbox,time,maxg)

    #VNP09: VIIRS Atmospherically corrected surface reflectance
    url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
    prod="5000/VNP09" # product
    granules.VNP09=search_archive(url,prod,time,granules.VNP03)

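# Hedged sketch of the result (not part of the original module): granules is assumed
# to be a Dict keyed by product, each value being the list of granule metadata
# returned by search_api or search_archive, e.g.
#   granules = get_meta(bbox,time,maxg=10)
#   print granules.keys()  # e.g. ['MOD14','MOD03','MYD14','MYD03','VNP14','VNP03','VNP09']
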
def group_files(path,reg):
    """
    Group the geolocation (03) and fire (14) files of a specific product in a path

    :param path: path to the geolocation (03) and fire (14) files
    :param reg: string with the first 3 characters specifying the product (MOD, MYD or VNP)
    :return files: list of pairs with geolocation (03) and fire (14) file names in the path of the specific product

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=[Dict({'geo':k}) for k in glob.glob(path+'/'+reg+'03*')]
    filesf=glob.glob(path+'/'+reg+'14*')
    filesr=glob.glob(path+'/'+reg+'09*')
    for k,g in enumerate(files):
        mmf=re.split("/",g.geo)
        mm=mmf[-1].split('.')
        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
    for k,g in enumerate(files):
        mmf=re.split("/",g.geo)
        mm=mmf[-1].split('.')
        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:

def group_all(path):
    """
    Combine all the geolocation (03) and fire (14) files in a path

    :param path: path to the geolocation (03) and fire (14) files
    :return files: dictionary of products with a list of pairs with geolocation (03) and fire (14) file names in the path

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    modf=group_files(path,'MOD')
    mydf=group_files(path,'MYD')
    vif=group_files(path,'VNP')

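# Illustration (hypothetical file names): group_files(".",'MOD') pairs each MOD03
# geolocation file with the MOD14 fire file sharing the same acquisition key, e.g.
#   Dict({'geo': 'MOD03.A2018260.2130.006.2018261011317.hdf',
#         'fire': 'MOD14.A2018260.2130.006.2018261023808.hdf'})
# and, when present, the matching MOD09 reflectance file under the 'ref' key.
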
def read_modis_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for MODIS products (MOD or MYD)

    :param files: pair with geolocation (03) and fire (14) file names for MODIS products (MOD or MYD)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    # Satellite information
    N=1354 # Number of columns (maximum sample number)
    h=705. # Altitude of the satellite in km
    p=1. # Nadir pixel resolution in km
    # Reading MODIS files
    hdfg=SD(files.geo,SDC.READ)
    hdff=SD(files.fire,SDC.READ)
    # Creating all the objects
    lat_obj=hdfg.select('Latitude')
    lon_obj=hdfg.select('Longitude')
    fire_obj=hdff.select('fire mask')
    lat_fire_obj=hdff.select('FP_latitude')
    lon_fire_obj=hdff.select('FP_longitude')
    brig_fire_obj=hdff.select('FP_T21')
    sample_fire_obj=hdff.select('FP_sample')
    conf_fire_obj=hdff.select('FP_confidence')
    t31_fire_obj=hdff.select('FP_T31')
    frp_fire_obj=hdff.select('FP_power')
    # Geolocation and mask information
    ret.lat=lat_obj.get()
    ret.lon=lon_obj.get()
    ret.fire=fire_obj.get()
    # Fire detected information
    flats=lat_fire_obj.get()
    flons=lon_fire_obj.get()
    fll=np.logical_and(np.logical_and(np.logical_and(flons>bounds[0],flons<bounds[1]),flats>bounds[2]),flats<bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=brig_fire_obj.get()[fll]
    ret.brig_fire=np.array([])
    ret.sat_fire=hdff.Satellite
    ret.conf_fire=conf_fire_obj.get()[fll]
    ret.conf_fire=np.array([])
    ret.t31_fire=t31_fire_obj.get()[fll]
    ret.t31_fire=np.array([])
    ret.frp_fire=frp_fire_obj.get()[fll]
    ret.frp_fire=np.array([])
    sf=sample_fire_obj.get()[fll]
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    lats=np.reshape(ret.lat,np.prod(ret.lat.shape))
    lons=np.reshape(ret.lon,np.prod(ret.lon.shape))
    ll=np.logical_and(np.logical_and(np.logical_and(lons>bounds[0],lons<bounds[1]),lats>bounds[2]),lats<bounds[3])
    fire=np.reshape(ret.fire,np.prod(ret.fire.shape))
    nf=np.logical_or(fire == 3, fire == 5) # fire mask classes 3 (non-fire water) and 5 (non-fire land)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.reshape(sample,np.prod(sample.shape))
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p)

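# Hedged usage sketch (illustrative values, not part of the original module): files is
# one pair produced by group_files and bounds the (lonmin,lonmax,latmin,latmax) tuple
# used for clipping.
#   bounds = (-113.85,-113.2,45.4,46.1)
#   f = Dict({'geo':'MOD03.A2018260.2130.006.xxx.hdf','fire':'MOD14.A2018260.2130.006.xxx.hdf'})
#   ret = read_modis_files(f,bounds)
#   # ret.lat_fire/ret.lon_fire hold the clipped detections; ret.lat_nofire/ret.lon_nofire
#   # hold the clear-ground pixels (fire mask classes 3 and 5).
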
def read_viirs_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for VIIRS products (VNP)

    :param files: pair with geolocation (03) and fire (14) file names for VIIRS products (VNP)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    # Satellite information
    N=3200 # Number of columns (maximum sample number)
    h=828. # Altitude of the satellite in km
    alpha=np.array([0,31.59,44.68,56.06])/180*np.pi
    #p=(0.75+0.75/2+0.75/3)/3 # Nadir pixel resolution in km (mean in 3 different sections)
    p=np.array([0.75,0.75/2,0.75/3])
    # Reading VIIRS files
    h5g=h5py.File(files.geo,'r')
    ncf=nc.Dataset(files.fire,'r')
    # Geolocation and mask information
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ret.fire=np.array(ncf.variables['fire mask'][:])
    # Fire detected information
    flats=np.array(ncf.variables['FP_latitude'][:])
    flons=np.array(ncf.variables['FP_longitude'][:])
    fll=np.logical_and(np.logical_and(np.logical_and(flons>bounds[0],flons<bounds[1]),flats>bounds[2]),flats<bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])[fll]
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])[fll]
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])[fll]
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])[fll]
    sf=np.array(ncf.variables['FP_sample'][:])[fll]
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p,alpha)
    lats=np.reshape(ret.lat,np.prod(ret.lat.shape))
    lons=np.reshape(ret.lon,np.prod(ret.lon.shape))
    ll=np.logical_and(np.logical_and(np.logical_and(lons>bounds[0],lons<bounds[1]),lats>bounds[2]),lats<bounds[3])
    fire=np.reshape(ret.fire,np.prod(ret.fire.shape))
    nf=np.logical_or(fire == 3, fire == 5) # fire mask classes 3 (non-fire water) and 5 (non-fire land)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.reshape(sample,np.prod(sample.shape))
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p,alpha)
    # Reflectance data for burned scar algorithm
    if 'ref' in files.keys():
        # Read reflectance data
        hdf=SD(files.ref,SDC.READ)
        M7=hdf.select('750m Surface Reflectance Band M7') # 0.86 µm
        M8=hdf.select('750m Surface Reflectance Band M8') # 1.24 µm
        M10=hdf.select('750m Surface Reflectance Band M10') # 1.61 µm
        M11=hdf.select('750m Surface Reflectance Band M11') # 2.25 µm
        bands.M7=M7.get()*1e-4
        bands.M8=M8.get()*1e-4
        bands.M10=M10.get()*1e-4
        bands.M11=M11.get()*1e-4
        # Burned scar mask using the burned scar granule algorithm
        ret.burned=burned_algorithm(bands)
        # Close reflectance file

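# Hedged usage sketch (illustrative file names, not part of the original module): the
# optional 'ref' entry triggers the burned scar branch above.
#   f = Dict({'geo':'VNP03MODLL.A2018260.2130.001.xxx.h5','fire':'VNP14.A2018260.2130.001.xxx.nc'})
#   f.ref = 'VNP09.A2018260.2130.001.xxx.hdf'   # optional reflectance granule
#   ret = read_viirs_files(f,bounds)            # ret.burned is set only when 'ref' is present
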
def read_viirs375_files(path,bounds):
    """
    Read the geolocation and fire information from VIIRS CSV files (fire_archive_*.csv and/or fire_nrt_*.csv)

    :param path: path where the CSV files are located
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-10-23
    """
    # Opening files if they exist
    f1=glob.glob(path+'/fire_archive_*.csv')
    f2=glob.glob(path+'/fire_nrt_*.csv')
        df1=pd.read_csv(f1[0])
        df2=pd.read_csv(f2[0])
        dfs=pd.concat([df1,df2],sort=True,ignore_index=True)
        dfs=pd.read_csv(f2[0])

    # In the case something exists, read all the information from the CSV files
    dfs=dfs[(dfs['longitude']>bounds[0]) & (dfs['longitude']<bounds[1]) & (dfs['latitude']>bounds[2]) & (dfs['latitude']<bounds[3])]
    date=np.array(dfs['acq_date'])
    time=np.array(dfs['acq_time'])
    dfs['time']=np.array(['%s_%04d' % (date[k],time[k]) for k in range(len(date))])
    dfs['time']=pd.to_datetime(dfs['time'], format='%Y-%m-%d_%H%M')
    dfs['datetime']=dfs['time']
    dfs=dfs.set_index('time')
    for group_name, df in dfs.groupby(pd.TimeGrouper("D")):
        items.lat=np.array(df['latitude'])
        items.lon=np.array(df['longitude'])
        conf=np.array(df['confidence'])
        firemask=np.zeros(conf.shape)
        conf_fire=np.zeros(conf.shape)
        firemask[conf=='l']=7
        conf_fire[conf=='l']=30.
        firemask[conf=='n']=8
        conf_fire[conf=='n']=60.
        firemask[conf=='h']=9
        conf_fire[conf=='h']=90.
        items.fire=firemask.astype(int)
        items.lat_fire=items.lat
        items.lon_fire=items.lon
        items.brig_fire=np.array(df['bright_ti4'])
        items.sat_fire='Suomi NPP'
        items.conf_fire=conf_fire
        items.t31_fire=np.array(df['bright_ti5'])
        items.frp_fire=np.array(df['frp'])
        items.scan_fire=np.array(df['scan'])
        items.track_fire=np.array(df['track'])
        items.scan_angle_fire=np.ones(items.scan_fire.shape)*np.nan
        items.lat_nofire=np.array([])
        items.lon_nofire=np.array([])
        items.scan_angle_nofire=np.array([])
        items.scan_nofire=np.array([])
        items.track_nofire=np.array([])
        items.instrument=df['instrument'][0]
        items.time_start_geo_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
        items.time_num=time_iso2num(items.time_start_geo_iso)
        items.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
        items.acq_time='%02d%02d' % (dt.hour,dt.minute)
        items.time_start_fire_iso=items.time_start_geo_iso
        items.time_end_geo_iso=items.time_start_geo_iso
        items.time_end_fire_iso=items.time_start_geo_iso
        items.file_fire=items.file_geo
        tt=df['datetime'][0].timetuple()
        id='VNPH_A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        items.name='A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        ret.update({id: items})

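# Illustration of the keys produced above: each daily group is stored under an id of
# the form 'VNPH_AYYYYDDD_HHMM' built from the first detection of that day, e.g. a
# group starting 2018-09-23 00:00 UTC would be keyed 'VNPH_A2018266_0000'.
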
def read_goes_files(files):
    """
    Read the files for GOES products - geolocation and fire data already included (OS)

    :param files: pair with geolocation and fire file names for GOES products
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on WINDOWS10.
    Lauren Hearn (lauren@robotlauren.com), 2018-10-16
    """
    h5g=h5py.File(files.geo,'r')
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ncf=nc.Dataset(files.fire,'r')
    ret.fire=np.array(ncf.variables['fire mask'][:])
    ret.lat_fire=np.array(ncf.variables['FP_latitude'][:])
    ret.lon_fire=np.array(ncf.variables['FP_longitude'][:])
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])
    sf=np.array(ncf.variables['FP_sample'][:])
    # Satellite information
    N=2500 # Number of columns (maximum sample number)
    h=35786. # Altitude of the satellite in km
    p=2. # Nadir pixel resolution in km
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p) # pixel_dim returns (theta,scan,track)
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])

def read_data(files,file_metadata,bounds):
    """
    Read all the geolocation (03) and fire (14) files and if necessary, the reflectance (09) files

    MODIS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/archive/mod14_v5_user_guide.pdf
    MOD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
    MYD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
    YYYYDDD = year and Julian day (001-366) of data acquisition
    HHMM = hour and minute of data acquisition (approximate beginning time)
    yyyyddd = year and Julian day of data processing
    hhmmss = hour, minute, and second of data processing

    VIIRS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/vnp14_user_guide_v1.3.pdf
    VNP14IMG.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
    VNP14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
    YYYYDDD = year and Julian day (001-366) of data acquisition
    HHMM = hour and minute of data acquisition (approximate beginning time)
    yyyyddd = year and Julian day of data processing
    hhmmss = hour, minute, and second of data processing

    :param files: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
    :param file_metadata: dictionary with file names as key and granules metadata as values
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return data: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print "read_data files=%s" % files
    if files=='VIIRS375':
        data.update(read_viirs375_files('.',bounds))
        print "read_data f=%s" % f
        if 'geo' in f.keys():
            f0=os.path.basename(f.geo)
            print 'ERROR: read_data cannot read files=%s, no geo file' % f
        if 'fire' in f.keys():
            f1=os.path.basename(f.fire)
            print 'ERROR: read_data cannot read files=%s, no fire file' % f
        if 'ref' in f.keys():
            f2=os.path.basename(f.ref)
        print 'prefix %s' % prefix
            print 'ERROR: the prefix of %s %s must coincide' % (f0,f1)
        id = prefix + '_' + key
        if prefix=="MOD" or prefix=="MYD":
            item=read_modis_files(f,bounds)
            item.instrument="MODIS"
            item=read_viirs_files(f,bounds)
            item.instrument="VIIRS"
            item=read_goes_files(f)
            item.instrument="GOES"
            print 'ERROR: the prefix of %s %s must be MOD, MYD, or VNP' % (f0,f1)
        if f2 in file_metadata.keys():
        if (f0 in file_metadata.keys()) and (f1 in file_metadata.keys()) and boo:
            # connect the file back to metadata
            item.time_start_geo_iso=file_metadata[f0]["time_start"]
            item.time_num=time_iso2num(item.time_start_geo_iso)
            dt=datetime.datetime.strptime(item.time_start_geo_iso[0:18],'%Y-%m-%dT%H:%M:%S')
            item.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
            item.acq_time='%02d%02d' % (dt.hour,dt.minute)
            item.time_start_fire_iso=file_metadata[f1]["time_start"]
            item.time_end_geo_iso=file_metadata[f0]["time_end"]
            item.time_end_fire_iso=file_metadata[f1]["time_end"]
            if 'ref' in f.keys():
                item.time_start_ref_iso=file_metadata[f2]["time_start"]
                item.time_end_ref_iso=file_metadata[f2]["time_end"]
            data.update({id:item})
            print 'WARNING: file %s or %s not found in downloaded metadata, ignoring both' % (f0, f1)

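# Worked example of the naming convention documented above (hypothetical granule):
# for f0='MOD03.A2018260.2130.006.2018261011317.hdf' and
#     f1='MOD14.A2018260.2130.006.2018261023808.hdf'
# the shared prefix is 'MOD' and the acquisition key (the AYYYYDDD.HHMM part matched
# in group_files) is presumably 'A2018260.2130', so the entry created above would be
# stored under id='MOD_A2018260.2130'.
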
def download(granules):
    """
    Download files as listed in the granules metadata

    :param granules: list of granule metadata dictionaries (as returned by the API search)
    :return file_metadata: dictionary with file names as key and granules metadata as values

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    for granule in granules:
        #print json.dumps(granule,indent=4, separators=(',', ': '))
        url = granule['links'][0]['href']
        filename=os.path.basename(urlparse.urlsplit(url).path)
        file_metadata[filename]=granule

        # to store as object in memory (maybe not completely downloaded until accessed?)
        # with requests.Session() as s:
        #     data.append(s.get(url))

        # download - a minimal code without various error checking and corrective actions
        # see wrfxpy/src/ingest/downloader.py
        if os.path.isfile(filename):
            print 'file %s already downloaded' % filename
            chunk_size = 1024*1024
            print 'downloading %s as %s' % (url,filename)
            r = requests.get(url, stream=True)
            if r.status_code == 200:
                content_size = int(r.headers['Content-Length'])
                print 'downloading %s as %s size %sB' % (url, filename, content_size)
                with open(filename, 'wb') as f:
                    for chunk in r.iter_content(chunk_size):
                        print('downloaded %sB of %sB' % (s, content_size))
                print 'cannot connect to %s' % url
                print 'web request status code %s' % r.status_code
                print 'Make sure you have file ~/.netrc permission 600 with the content'
                print 'machine urs.earthdata.nasa.gov\nlogin yourusername\npassword yourpassword'
        except Exception as e:
            print 'download failed with error %s' % e

def download_GOES16(time):
    """
    Download the GOES16 data through rclone application

    :param time: tuple with the start and end times
    :return bucket: dictionary of all the data downloaded and all the GOES16 data downloaded in the same directory level

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-12
    """
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
        argT='%d/%03d/%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour)
        args=['rclone','copyto','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'.','-L']
        print 'running: '+' '.join(args)
            print 'goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT+' downloaded.'
            args=['rclone','ls','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'-L']
            out=check_output(args)
            bucket.update({argT: [o.split(' ')[2] for o in out.split('\n')[:-1]]})
        except Exception as e:
            print 'download failed with error %s' % e

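# Illustration of the bucket keys built above: argT follows the AWS prefix layout
# year/day-of-year/hour, e.g. 21:00 UTC on 2018-12-04 gives argT='2018/338/21' and the
# rclone source 'goes16aws:noaa-goes16/ABI-L2-MCMIPC/2018/338/21'.
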
def retrieve_af_data(bbox,time,burned=False):
    """
    Retrieve the data in a bounding box coordinates and time interval and save it in a Matlab structure inside the out.mat Matlab file

    :param bbox: bounding box as (lonmin,lonmax,latmin,latmax)
    :param time: time interval (init_time,final_time)
    :return data: dictionary with all the data and out.mat Matlab file with a Matlab structure of the dictionary

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    lonmin,lonmax,latmin,latmax = bbox
    bbox = [(lonmin,latmax),(lonmin,latmin),(lonmax,latmin),(lonmax,latmax),(lonmin,latmax)]

    granules=get_meta(bbox,time,maxg,burned=burned)
    #print 'metadata found:\n' + json.dumps(granules,indent=4, separators=(',', ': '))

    # Eliminating the NRT data (always repeated)
    nrt_elimination(granules)

    for k,g in granules.items():
        print 'Downloading %s files' % k
        file_metadata.update(download(g))

    print "download complete"

    # Group all files downloaded
    #print "group all files:"

    # Generate data dictionary
    data.update(read_data(files.MOD,file_metadata,bounds))
    data.update(read_data(files.MYD,file_metadata,bounds))
    data.update(read_data(files.VNP,file_metadata,bounds))
    #data.update(read_data('VIIRS375','',bounds))

def nrt_elimination(granules):
    """
    Remove the NRT (near-real-time) granules, which always duplicate the archived data

    :param granules: dictionary of product granules to clean up
    :return: the granules dictionary is updated in place

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-11-30
    """
    nlist=[g for g in granules['MOD14'] if g['data_center']=='LPDAAC_ECS']
    granules['MOD14']=nlist
    nlist=[g for g in granules['MYD14'] if g['data_center']=='LPDAAC_ECS']
    granules['MYD14']=nlist

def read_fire_mesh(filename):
    """
    Read necessary variables in the fire mesh of the wrfout file filename

    :param filename: wrfout file
    :return fxlon: lon coordinates in the fire mesh
    :return fxlat: lat coordinates in the fire mesh
    :return bbox: bounding box
    :return time_esmf: simulation times in ESMF format

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print 'opening ' + filename
    d = nc.Dataset(filename)
    m,n = d.variables['XLONG'][0,:,:].shape
    fm,fn = d.variables['FXLONG'][0,:,:].shape
    fm=fm-fm/(m+1) # dimensions corrected for extra strip
    fxlon = d.variables['FXLONG'][0,:fm,:fn] # masking extra strip
    fxlat = d.variables['FXLAT'][0,:fm,:fn]
    tign_g = d.variables['TIGN_G'][0,:fm,:fn]
    time_esmf = ''.join(d.variables['Times'][:][0]) # date string as YYYY-MM-DD_hh:mm:ss
    bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
    print 'min max longitude latitude %s' % bbox
    print 'time (ESMF) %s' % time_esmf
    plot_3D(fxlon,fxlat,tign_g)
    return fxlon,fxlat,bbox,time_esmf

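# Worked example of the strip correction above (assuming the usual WRF-SFIRE layout,
# where the fire grid stores (m+1)*sr rows for an atmospheric grid of m rows and
# refinement ratio sr): with m=100 and sr=10, FXLONG has fm=1010 rows and
# fm - fm/(m+1) = 1010 - 10 = 1000 = m*sr rows of real fire-mesh data.
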
def data2json(data,keys,dkeys,N):
    """
    Create a json dictionary from data dictionary

    :param data: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    for i,k in enumerate(keys):
        if isinstance(data[list(data)[0]][dkeys[i]],(list, tuple, np.ndarray)):
            dd=[np.reshape(data[d][dkeys[i]],np.prod(data[d][dkeys[i]].shape)) for d in list(data)]
            ret.update({k: np.concatenate(dd)})
            dd=[[data[d[1]][dkeys[i]]]*N[d[0]] for d in enumerate(list(data))]
            ret.update({k: np.concatenate(dd)})

def sdata2json(sdata,keys,dkeys,N):
    """
    Create a json dictionary from sorted array of data dictionaries

    :param sdata: sorted array of data dictionaries with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-12-03
    """
    ret=Dict({'granules': [d[0] for d in sdata]})
    for i,k in enumerate(keys):
        dd = [d[1][dkeys[i]] if dkeys[i] in d[1] else None for d in sdata]
        if np.any([isinstance(d,(list, tuple, np.ndarray)) for d in dd]):
            out = [d if d is not None else np.array([]) for d in dd]
            out = [[d[1][1][dkeys[i]]]*N[d[0]] if dkeys[i] in d[1][1] else [] for d in enumerate(sdata)]
        ret.update({k: out})

def write_csv(d,bounds):
    """
    Write fire detections from data dictionary d to a CSV file

    :param d: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return: fire_detections.csv file with all the detections

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    df=pd.DataFrame(data=d)
    df=df[(df['longitude']>bounds[0]) & (df['longitude']<bounds[1]) & (df['latitude']>bounds[2]) & (df['latitude']<bounds[3])]
    df.to_csv('fire_detections.csv', encoding='utf-8', index=False)

def plot_3D(xx,yy,zz):
    """
    Plot surface of (xx,yy,zz) data

    :param zz: values at the (x,y) points
    :return: a plot show of the 3D data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-09-21
    """
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt
    from matplotlib import cm
    ax = fig.gca(projection='3d')
    surf = ax.plot_surface(xx,yy,zz,cmap=cm.coolwarm)

def time_iso2num(time_iso):
    """
    Transform an iso time string to a time integer number of seconds since December 31 1969 at 17:00:00

    :param time_iso: string iso date
    :return s: integer number of seconds since December 31 1969 at 17:00:00

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:18],'%Y-%m-%dT%H:%M:%S')
    # seconds since December 31 1969 at 17:00:00
    s=time.mktime(time_datetime.timetuple())

def time_iso2datetime(time_iso):
    """
    Transform an iso time string to a datetime element

    :param time_iso: string iso date
    :return time_datetime: datetime element

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:18],'%Y-%m-%dT%H:%M:%S')

def time_datetime2iso(time_datetime):
    """
    Transform a datetime element to iso time string

    :param time_datetime: datetime element
    :return time_iso: string iso date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    time_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (time_datetime.year,time_datetime.month,
                                                 time_datetime.day,time_datetime.hour,
                                                 time_datetime.minute,time_datetime.second)

def time_num2iso(time_num):
    """
    Transform a time integer number of seconds since December 31 1969 at 17:00:00 to an iso time string

    :param time_num: integer number of seconds since December 31 1969 at 17:00:00
    :return date: time string in ISO date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    dt=datetime.datetime.fromtimestamp(time_num)
    # seconds since December 31 1969 at 17:00:00
    date='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)

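# Note on the conversions above: time.mktime and datetime.fromtimestamp both work in
# the local timezone, so time_iso2num and time_num2iso are mutually consistent on the
# same machine; "December 31 1969 at 17:00:00" is the Unix epoch (1970-01-01 00:00:00
# UTC) as seen from UTC-7 (US Mountain Standard Time).
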
def pixel_dim(sample,N,h,p,a=None):
    """
    Computes pixel dimensions (along-scan and track pixel sizes)

    :param sample: array of integers with the column number (sample variable in files)
    :param N: scalar, total number of pixels in each row of the image swath
    :param h: scalar, altitude of the satellite in km
    :param p: scalar, pixel nadir resolution in km
    :param a: array of floats of the size of p with the angles where the resolution change
    :return theta: scan angle in radians
    :return scan: along-scan pixel size in km
    :return track: along-track pixel size in km

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    Re=6378 # approximation of the radius of the Earth in km
    s=np.arctan(p/h) # trigonometry (radians per sample)
    if isinstance(p,(list, tuple, np.ndarray)):
        Ns=np.array([int((a[k]-a[k-1])/s[k-1]) for k in range(1,len(a)-1)])
        Ns=np.append(Ns,int(M-Ns.sum()))
        theta=s[0]*(sample-M)
        scan=Re*s[0]*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s[0]*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
        for k in range(1,len(Ns)):
            p=sample<=M-Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M-Ns[0:k].sum()))-(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
            p=sample>=M+Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M+Ns[0:k].sum()))+(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
        scan=Re*s*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
    return (theta,scan,track)

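# Sanity check of the formulas above (assuming r is the Earth-center-to-satellite
# distance, r = Re + h, defined on a line not shown in this listing): at nadir,
# theta = 0, so scan = Re*s*(r/Re - 1) = s*h and track = r*s*(1 - Re/r) = s*h;
# with s = arctan(p/h) ≈ p/h this recovers the nadir pixel size p
# (1 km for MODIS, 2 km for GOES).
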
def copyto(partial_path,kml):
    """
    Copy information in partial_path to kml

    :param partial_path: path to a partial KML file
    :param kml: KML object where to write to
    :return: information from partial_path into kml

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    with open(partial_path,'r') as part:

def json2kml(d,kml_path,bounds,prods,opt='granule'):
    """
    Creates a KML file kml_path from a dictionary d

    :param d: dictionary with all the fire detection information to create the KML file
    :param kml_path: path in where the KML file is going to be written
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    if not isinstance(d['latitude'][0],(list, tuple, np.ndarray)):
        ind=[i[0] for i in sorted(enumerate(d.acq_date), key=lambda x:x[1])]
        L=[len(list(grp)) for k, grp in groupby(d['acq_date'][ind])]
        ll=[sum(L[0:k+1]) for k in range(len(L))]
            sort=[d[v][i] for i in ind]
            d[v]=[sort[ll[k]:ll[k+1]] for k in range(len(ll)-1)]

    frp_style={-1:'modis_frp_no_data',40:'modis_frp_gt_400'}
    for i in range(0,40):
        frp_style[i]='modis_frp_%s_to_%s' % (i*10, i*10+10)

    with open(kml_path,'w') as kml:
        copyto('kmls/partial1.kml',kml)

        r = 6378 # Earth radius in km
        km_lat = 180/(np.pi*r) # 1km in degrees latitude

        kml.write('<Folder>\n')
        kml.write('<name>%s</name>\n' % prods[prod])
        copyto('kmls/partial2.kml',kml)

        for t in range(len(d['latitude'])):
            lats=np.array(d['latitude'][t]).astype(float)
            lons=np.array(d['longitude'][t]).astype(float)
            ll=np.logical_and(np.logical_and(np.logical_and(lons>bounds[0],lons<bounds[1]),lats>bounds[2]),lats<bounds[3])
            acq_date=np.array(d['acq_date'][t])[ll]
            acq_time=np.array(d['acq_time'][t])[ll]
            satellite=np.array(d['satellite'][t])[ll]
            satellite=np.array(['Not available']*NN)
            instrument=np.array(d['instrument'][t])[ll]
            instrument=np.array(['Not available']*NN)
            confidence=np.array(d['confidence'][t])[ll].astype(float)
            confidence=np.array(np.zeros(NN)).astype(float)
            frps=np.array(d['frp'][t])[ll].astype(float)
            frps=np.array(np.zeros(NN)).astype(float)
            angles=np.array(d['scan_angle'][t])[ll].astype(float)
            angles=np.array(['Not available']*NN)
            scans=np.array(d['scan'][t])[ll].astype(float)
            scans=np.ones(NN).astype(float)
            tracks=np.array(d['track'][t])[ll].astype(float)
            tracks=np.ones(NN).astype(float)

            kml.write('<Folder>\n')
                kml.write('<name>%s</name>\n' % acq_date[0])
            elif opt=='granule':
                kml.write('<name>%s</name>\n' % d['granules'][t])
            kml.write('<name>Pixels</name>\n')

                timestamp=acq_date[p] + 'T' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + 'Z'
                timedescr=acq_date[p] + ' ' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ' UTC'

                kml.write('<Placemark>\n<name>Ground detection square</name>\n')
                kml.write('<description>\nlongitude: %s\n' % lon
                    + 'latitude: %s\n' % lat
                    + 'time: %s\n' % timedescr
                    + 'satellite: %s\n' % satellite[p]
                    + 'instrument: %s\n' % instrument[p]
                    + 'scan angle: %s\n' % angle
                    + 'along-scan: %s\n' % scan
                    + 'along-track: %s\n' % track
                    + '</description>\n')
                kml.write('<Placemark>\n<name>Fire detection square</name>\n')
                kml.write('<description>\nlongitude: %s\n' % lon
                    + 'latitude: %s\n' % lat
                    + 'time: %s\n' % timedescr
                    + 'satellite: %s\n' % satellite[p]
                    + 'instrument: %s\n' % instrument[p]
                    + 'confidence: %s\n' % conf
                    + 'scan angle: %s\n' % angle
                    + 'along-scan: %s\n' % scan
                    + 'along-track: %s\n' % track
                    + '</description>\n')
                kml.write('<TimeStamp><when>%s</when></TimeStamp>\n' % timestamp)
                    kml.write('<styleUrl> modis_conf_low </styleUrl>\n')
                    kml.write('<styleUrl> modis_conf_med </styleUrl>\n')
                    kml.write('<styleUrl> modis_conf_high </styleUrl>\n')
                    frpx = min(40,np.ceil(frp/10.)-1)
                    kml.write('<styleUrl> %s </styleUrl>\n' % frp_style[frpx] )
                    kml.write('<styleUrl> no_fire </styleUrl>\n')
                kml.write('<Polygon>\n<outerBoundaryIs>\n<LinearRing>\n<coordinates>\n')
                km_lon=km_lat/np.cos(lat*np.pi/180) # 1 km in longitude
                sq_track_size_km=track
                sq2_lat=km_lat * sq_track_size_km/2
                sq_scan_size_km=scan
                sq2_lon=km_lon * sq_scan_size_km/2
                kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
                kml.write('%s,%s,0\n' % (lon - sq2_lon, lat + sq2_lat))
                kml.write('%s,%s,0\n' % (lon + sq2_lon, lat + sq2_lat))
                kml.write('%s,%s,0\n' % (lon + sq2_lon, lat - sq2_lat))
                kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
                kml.write('</coordinates>\n</LinearRing>\n</outerBoundaryIs>\n</Polygon>\n</Placemark>\n')
            kml.write('</Folder>\n')
        kml.write('</Folder>\n')
        kml.write('</Document>\n</kml>\n')
    print 'Created file %s' % kml_path
    print 'No detections to be saved in %s' % kml_path

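# Illustration of the FRP style lookup above: a fire radiative power value of 85 gives
# frpx = min(40, ceil(85/10.)-1) = 8, i.e. style 'modis_frp_80_to_90'; values above 400
# saturate at frp_style[40] = 'modis_frp_gt_400'.
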
def burned_algorithm(data):
    """
    Computes mask of burned scar pixels

    :param data: data dictionary with all the necessary bands M7, M8, M10 and M11
    :return C: mask of burned scar pixels

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2019-01-03
    """
    M=(M8.astype(float)-RthSub)/(M11.astype(float)+eps)
    C1=np.logical_and(M>0,M<Rth)
    C2=np.logical_and(M8>M08LB,M8<M08UB)
    C5=np.logical_and(M10>M10LB,M10<M10UB)
    # All the conditions at the same time
    C=np.logical_and(np.logical_and(np.logical_and(np.logical_and(C1,C2),C3),C4),C5)

if __name__ == "__main__":
    bbox=[-132.86966,-102.0868788,44.002495,66.281204]
    time = ("2012-09-11T00:00:00Z", "2012-09-12T00:00:00Z")
    data=retrieve_af_data(bbox,time)
    # Save the data dictionary into a matlab structure file out.mat
    sio.savemat('out.mat', mdict=data)