import warnings
warnings.filterwarnings("ignore")
import numpy as np
import datetime, time, glob, re, os
import requests, urlparse
import h5py
import pandas as pd
import netCDF4 as nc
from pyhdf.SD import SD, SDC
from utils import Dict
import scipy.io as sio
import matplotlib.colors as colors
from itertools import groupby
from subprocess import check_output, call

def search_api(sname,bbox,time,maxg=50,platform="",version=""):
    """
    API search of the different satellite granules

    :param sname: short name
    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param platform: string with the platform
    :param version: string with the version
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    from cmr import GranuleQuery
    search = api.parameters(
    search = api.parameters(
    search = api.parameters(
    search = api.parameters(
    print "%s gets %s hits in this range" % (sname,sh)
    print "The number of hits %s is larger than the limit %s." % (sh,maxg)
    print "Use a reduced bounding box or a reduced time interval."
    granules = api.get(sh)

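# A minimal usage sketch of search_api (polygon and dates below are illustrative,
# not from a real run):
# grans = search_api("MOD14",
#                    [(-113.,42.),(-113.,41.),(-112.,41.),(-112.,42.),(-113.,42.)],
#                    ("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z"),
#                    maxg=10,platform="Terra")
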
def search_archive(url,prod,time,grans):
    """
    Archive search of the different satellite granules

    :param url: base url of the archive domain
    :param prod: string of product with version, for instance: '5000/VNP09'
    :param time: time interval (init_time,final_time)
    :param grans: granules of the geolocation metadata
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-01-03
    """
    ids=['.'.join(k['producer_granule_id'].split('.')[1:3]) for k in grans] # satellite ids in bounding box
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    fold=np.unique(['%d/%03d' % (date.timetuple().tm_year,date.timetuple().tm_yday) for date in dates])
    urls=[url+'/'+prod+'/'+f for f in fold]
    js=requests.get(u+'.json').json()
    arg=np.argwhere(np.array(ids)=='.'.join(j['name'].split('.')[1:3]))
    g.links=[{'href':u+'/'+g.name}]
    g.time_start=grans[ar]["time_start"]
    g.time_end=grans[ar]["time_end"]
    g.dataset_id='Archive download: unknown dataset id'
    g.producer_granule_id=j['name']
    except Exception as e:
        "warning: some JSON request failed"
    print "%s gets %s hits in this range" % (prod.split('/')[-1],len(granules))

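# Usage sketch: pair a LAADS archive listing with geolocation granules already found
# through the CMR API; vnp03 below stands for an earlier search_api("VNP03MODLL",...) result:
# vnp09 = search_archive("https://ladsweb.modaps.eosdis.nasa.gov/archive/allData",
#                        "5000/VNP09",
#                        ("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z"),vnp03)
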
def get_meta(bbox,time,maxg=50,burned=False,high=False):
    """
    Get all the meta data from the API for all the necessary products

    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :return granules: dictionary with the metadata of all the products

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    #MOD14: MODIS Terra fire data
    granules.MOD14=search_api("MOD14",bbox,time,maxg,"Terra")
    #MOD03: MODIS Terra geolocation data
    granules.MOD03=search_api("MOD03",bbox,time,maxg,"Terra")
    #MOD09: MODIS Terra atmospherically corrected surface reflectance
    #granules.MOD09=search_api("MOD09",bbox,time,maxg,"Terra")
    #MYD14: MODIS Aqua fire data
    granules.MYD14=search_api("MYD14",bbox,time,maxg,"Aqua")
    #MYD03: MODIS Aqua geolocation data
    granules.MYD03=search_api("MYD03",bbox,time,maxg,"Aqua")
    #MYD09: MODIS Aqua atmospherically corrected surface reflectance
    #granules.MYD09=search_api("MYD09",bbox,time,maxg,"Terra")
    #VNP14: VIIRS fire data, res 750m
    granules.VNP14=search_api("VNP14",bbox,time,maxg)
    #VNP03MODLL: VIIRS geolocation data, res 750m
    granules.VNP03=search_api("VNP03MODLL",bbox,time,maxg)
    #VNP14HR: VIIRS fire data, res 375m
    #granules.VNP14HR=search_api("VNP14IMGTDL_NRT",bbox,time,maxg) # no results in the API
    url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
    prod="5000/NPP_IMFTS_L1"
    granules.VNP03HR=search_archive(url,prod,time,granules.VNP03)
    prod="5000/VNP14IMG" # product
    granules.VNP14HR=search_archive(url,prod,time,granules.VNP03HR)
    #VNP09: VIIRS atmospherically corrected surface reflectance
    url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
    prod="5000/VNP09" # product
    granules.VNP09=search_archive(url,prod,time,granules.VNP03)

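# Usage sketch (values illustrative): metadata for every product over one day and a
# small box given as a closed polygon of (lon,lat) pairs:
# poly = [(-113.,42.),(-113.,41.),(-112.,41.),(-112.,42.),(-113.,42.)]
# granules = get_meta(poly,("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z"),maxg=50)
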
def group_files(path,reg):
    """
    Group the geolocation (03) and fire (14) files of a specific product in a path

    :param path: path to the geolocation (03) and fire (14) files
    :param reg: string with the first 3 characters specifying the product (MOD, MYD or VNP)
    :return files: list of pairs with geolocation (03) and fire (14) file names in the path of the specific product

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=[Dict({'geo':k}) for k in glob.glob(path+'/'+reg+'03*')]
    filesf=glob.glob(path+'/'+reg+'14*')
    filesr=glob.glob(path+'/'+reg+'09*')
    for k,g in enumerate(files):
        mmf=re.split("/",g.geo)
        mm=mmf[-1].split('.')
        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
    for k,g in enumerate(files):
        mmf=re.split("/",g.geo)
        mm=mmf[-1].split('.')
        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:

def group_all(path):
    """
    Combine all the geolocation (03) and fire (14) files in a path

    :param path: path to the geolocation (03) and fire (14) files
    :return files: dictionary of products with a list of pairs with geolocation (03) and fire (14) file names in the path

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    modf=group_files(path,'MOD')
    mydf=group_files(path,'MYD')
    vif=group_files(path,'VNP')

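# Usage sketch: group every granule pair downloaded into the current directory,
# one list per product prefix (MOD, MYD, VNP):
# files = group_all(".")
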
def read_modis_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for MODIS products (MOD or MYD)

    :param files: pair with geolocation (03) and fire (14) file names for MODIS products (MOD or MYD)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    # Satellite information
    N=1354 # Number of columns (maximum sample number)
    h=705. # Altitude of the satellite in km
    p=1. # Nadir pixel resolution in km
    # Reading MODIS files
    hdfg=SD(files.geo,SDC.READ)
    hdff=SD(files.fire,SDC.READ)
    # Creating all the objects
    lat_obj=hdfg.select('Latitude')
    lon_obj=hdfg.select('Longitude')
    fire_obj=hdff.select('fire mask')
    lat_fire_obj=hdff.select('FP_latitude')
    lon_fire_obj=hdff.select('FP_longitude')
    brig_fire_obj=hdff.select('FP_T21')
    sample_fire_obj=hdff.select('FP_sample')
    conf_fire_obj=hdff.select('FP_confidence')
    t31_fire_obj=hdff.select('FP_T31')
    frp_fire_obj=hdff.select('FP_power')
    # Geolocation and mask information
    ret.lat=lat_obj.get()
    ret.lon=lon_obj.get()
    ret.fire=fire_obj.get()
    # Fire detected information
    flats=lat_fire_obj.get()
    flons=lon_fire_obj.get()
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]), flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=brig_fire_obj.get()[fll]
    ret.brig_fire=np.array([])
    ret.sat_fire=hdff.Satellite
    ret.conf_fire=conf_fire_obj.get()[fll]
    ret.conf_fire=np.array([])
    ret.t31_fire=t31_fire_obj.get()[fll]
    ret.t31_fire=np.array([])
    ret.frp_fire=frp_fire_obj.get()[fll]
    ret.frp_fire=np.array([])
    sf=sample_fire_obj.get()[fll]
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    fire=np.ravel(ret.fire)
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p)

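# Usage sketch (file names illustrative): read one Terra pair clipped to a box given
# as (lonmin,lonmax,latmin,latmax):
# pair = Dict({'geo':'MOD03.A2018260.1754.061.2018261015800.hdf',
#              'fire':'MOD14.A2018260.1754.006.2018261015800.hdf'})
# ret = read_modis_files(pair,(-113.,-112.,41.,42.))
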
def read_viirs_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for VIIRS products (VNP)

    :param files: pair with geolocation (03) and fire (14) file names for VIIRS products (VNP)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    # Satellite information
    N=3200 # Number of columns (maximum sample number)
    h=828. # Altitude of the satellite in km
    alpha=np.array([0,31.59,44.68,56.06])/180*np.pi
    #p=(0.75+0.75/2+0.75/3)/3 # Nadir pixel resolution in km (mean in 3 different sections)
    p=np.array([0.75,0.75/2,0.75/3])
    # Reading VIIRS files
    h5g=h5py.File(files.geo,'r')
    ncf=nc.Dataset(files.fire,'r')
    # Geolocation and mask information
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ret.fire=np.array(ncf.variables['fire mask'][:])
    # Fire detected information
    flats=np.array(ncf.variables['FP_latitude'][:])
    flons=np.array(ncf.variables['FP_longitude'][:])
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]),flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])[fll]
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])[fll]
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])[fll]
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])[fll]
    sf=np.array(ncf.variables['FP_sample'][:])[fll]
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p,alpha)
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    fire=np.ravel(ret.fire)
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p,alpha)
    # Reflectance data for burned scar algorithm
    if 'ref' in files.keys():
        # Read reflectance data
        hdf=SD(files.ref,SDC.READ)
        M7=hdf.select('750m Surface Reflectance Band M7') # 0.86 um
        M8=hdf.select('750m Surface Reflectance Band M8') # 1.24 um
        M10=hdf.select('750m Surface Reflectance Band M10') # 1.61 um
        M11=hdf.select('750m Surface Reflectance Band M11') # 2.25 um
        bands.M7=M7.get()*1e-4
        bands.M8=M8.get()*1e-4
        bands.M10=M10.get()*1e-4
        bands.M11=M11.get()*1e-4
        # Burned scar mask using the burned scar granule algorithm
        ret.burned=burned_algorithm(bands)
        # Close reflectance file

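# Usage sketch (file names illustrative): a VIIRS 750m pair plus the optional
# reflectance file that enables the burned scar mask:
# pair = Dict({'geo':'VNP03MODLL.A2018260.1730.001.h5',
#              'fire':'VNP14.A2018260.1730.001.nc',
#              'ref':'VNP09.A2018260.1730.001.hdf'})
# ret = read_viirs_files(pair,(-113.,-112.,41.,42.))
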
def read_viirs375_files(path,bounds):
    """
    Read the geolocation and fire information from VIIRS CSV files (fire_archive_*.csv and/or fire_nrt_*.csv)

    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-10-23
    """
    # Opening files if they exist
    f1=glob.glob(path+'/fire_archive_*.csv')
    f2=glob.glob(path+'/fire_nrt_*.csv')
    df1=pd.read_csv(f1[0])
    df2=pd.read_csv(f2[0])
    dfs=pd.concat([df1,df2],sort=True,ignore_index=True)
    dfs=pd.read_csv(f2[0])

    # In the case something exists, read all the information from the CSV files
    dfs=dfs[(dfs['longitude']>bounds[0]) & (dfs['longitude']<bounds[1]) & (dfs['latitude']>bounds[2]) & (dfs['latitude']<bounds[3])]
    date=np.array(dfs['acq_date'])
    time=np.array(dfs['acq_time'])
    dfs['time']=np.array(['%s_%04d' % (date[k],time[k]) for k in range(len(date))])
    dfs['time']=pd.to_datetime(dfs['time'], format='%Y-%m-%d_%H%M')
    dfs['datetime']=dfs['time']
    dfs=dfs.set_index('time')
    for group_name, df in dfs.groupby(pd.TimeGrouper("D")):
        items.lat=np.array(df['latitude'])
        items.lon=np.array(df['longitude'])
        conf=np.array(df['confidence'])
        firemask=np.zeros(conf.shape)
        conf_fire=np.zeros(conf.shape)
        firemask[conf=='l']=7
        conf_fire[conf=='l']=30.
        firemask[conf=='n']=8
        conf_fire[conf=='n']=60.
        firemask[conf=='h']=9
        conf_fire[conf=='h']=90.
        items.fire=firemask.astype(int)
        items.lat_fire=items.lat
        items.lon_fire=items.lon
        items.brig_fire=np.array(df['bright_ti4'])
        items.sat_fire='Suomi NPP'
        items.conf_fire=conf_fire
        items.t31_fire=np.array(df['bright_ti5'])
        items.frp_fire=np.array(df['frp'])
        items.scan_fire=np.array(df['scan'])
        items.track_fire=np.array(df['track'])
        items.scan_angle_fire=np.ones(items.scan_fire.shape)*np.nan
        items.lat_nofire=np.array([])
        items.lon_nofire=np.array([])
        items.scan_angle_nofire=np.array([])
        items.scan_nofire=np.array([])
        items.track_nofire=np.array([])
        items.instrument=df['instrument'][0]
        items.time_start_geo_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
        items.time_num=time_iso2num(items.time_start_geo_iso)
        items.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
        items.acq_time='%02d%02d' % (dt.hour,dt.minute)
        items.time_start_fire_iso=items.time_start_geo_iso
        items.time_end_geo_iso=items.time_start_geo_iso
        items.time_end_fire_iso=items.time_start_geo_iso
        items.file_fire=items.file_geo
        tt=df['datetime'][0].timetuple()
        id='VNPH_A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        items.name='A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        ret.update({id: items})

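# Usage sketch: read FIRMS 375m detections (fire_archive_*.csv and/or fire_nrt_*.csv)
# sitting in the current directory, grouped by acquisition day:
# v375 = read_viirs375_files('.',(-113.,-112.,41.,42.))
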
def read_goes_files(files):
    """
    Read the files for GOES products - geolocation and fire data already included (OS)

    :param files: pair with geolocation and fire file names for GOES products
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on WINDOWS10.
    Lauren Hearn (lauren@robotlauren.com), 2018-10-16
    """
    h5g=h5py.File(files.geo,'r')
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ncf=nc.Dataset(files.fire,'r')
    ret.fire=np.array(ncf.variables['fire mask'][:])
    ret.lat_fire=np.array(ncf.variables['FP_latitude'][:])
    ret.lon_fire=np.array(ncf.variables['FP_longitude'][:])
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])
    sf=np.array(ncf.variables['FP_sample'][:])
    # Satellite information
    N=2500 # Number of columns (maximum sample number)
    h=35786. # Altitude of the satellite in km
    p=2. # Nadir pixel resolution in km
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])

def read_data(files,file_metadata,bounds):
    """
    Read all the geolocation (03) and fire (14) files and if necessary, the reflectance (09) files

    MODIS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/archive/mod14_v5_user_guide.pdf
        MOD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
        MYD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing
    VIIRS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/vnp14_user_guide_v1.3.pdf
        VNP14IMG.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
        VNP14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing

    :param files: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
    :param file_metadata: dictionary with file names as key and granules metadata as values
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return data: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print "read_data files=%s" % files
    if files=='VIIRS375':
        data.update(read_viirs375_files('.',bounds))
    print "read_data f=%s" % f
    if 'geo' in f.keys():
        f0=os.path.basename(f.geo)
    print 'ERROR: read_data cannot read files=%s, not geo file' % f
    if 'fire' in f.keys():
        f1=os.path.basename(f.fire)
    print 'ERROR: read_data cannot read files=%s, not fire file' % f
    if 'ref' in f.keys():
        f2=os.path.basename(f.ref)
    print 'prefix %s' % prefix
    print 'ERROR: the prefix of %s %s must coincide' % (f0,f1)
    id = prefix + '_' + key
    if prefix=="MOD" or prefix=="MYD":
        item=read_modis_files(f,bounds)
        item.instrument="MODIS"
    except Exception as e:
        print 'WARNING: reading the files from MODIS failed with error %s' % e
    item=read_viirs_files(f,bounds)
    item.instrument="VIIRS"
    except Exception as e:
        print 'WARNING: reading the files from VIIRS failed with error %s' % e
    item=read_goes_files(f)
    item.instrument="GOES"
    except Exception as e:
        print 'WARNING: reading the files from GOES failed with error %s' % e
    print 'ERROR: the prefix of %s %s must be MOD, MYD, or VNP' % (f0,f1)
    if f2 in file_metadata.keys():
    if (f0 in file_metadata.keys()) and (f1 in file_metadata.keys()) and boo:
        # connect the file back to metadata
        item.time_start_geo_iso=file_metadata[f0]["time_start"]
        item.time_num=time_iso2num(item.time_start_geo_iso)
        dt=datetime.datetime.strptime(item.time_start_geo_iso[0:18],'%Y-%m-%dT%H:%M:%S')
        item.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
        item.acq_time='%02d%02d' % (dt.hour,dt.minute)
        item.time_start_fire_iso=file_metadata[f1]["time_start"]
        item.time_end_geo_iso=file_metadata[f0]["time_end"]
        item.time_end_fire_iso=file_metadata[f1]["time_end"]
        if 'ref' in f.keys():
            item.time_start_ref_iso=file_metadata[f2]["time_start"]
            item.time_end_ref_iso=file_metadata[f2]["time_end"]
        data.update({id:item})
    print 'WARNING: file %s or %s not found in downloaded metadata, ignoring both' % (f0, f1)

def get_url(url, filename, max_retries=3, appkey=None):
    """
    General get URL recursive function

    :param url: URL to be downloaded
    :param filename: local file name
    :param appkey: optional, if download key is necessary
    :return s: status of the request

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2020-05-28
    """
    chunk_size = 1024*1024
    print 'downloading %s as %s' % (url,filename)
    r = requests.get(url, stream=True, headers={"Authorization": "Bearer %s" % appkey})
    r = requests.get(url, stream=True)
    if r.status_code == 200:
        content_size = int(r.headers['Content-Length'])
        print 'downloading %s as %s size %sB' % (url, filename, content_size)
        with open(filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size):
        print('downloaded %sB of %sB' % (s, content_size))
    get_url(url,filename,max_retries-1,appkey)

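# Usage sketch (URL and token illustrative): fetch one granule with an Earthdata bearer token:
# get_url('https://ladsweb.modaps.eosdis.nasa.gov/archive/allData/5000/VNP09/2018/260/VNP09.A2018260.1730.001.hdf',
#         'VNP09.A2018260.1730.001.hdf',appkey='YOUR_TOKEN')
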
def download(granules, appkey=None):
    """
    Download files as listed in the granules metadata

    :param granules: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
    :return file_metadata: dictionary with file names as key and granules metadata as values

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    for gn,granule in enumerate(granules):
        #print json.dumps(granule,indent=4, separators=(',', ': '))
        url = granule['links'][0]['href']
        filename=os.path.basename(urlparse.urlsplit(url).path)
        file_metadata[filename]=granule

        # download - minimal code without error checking and corrective actions
        # see wrfxpy/src/ingest/downloader.py
        if os.path.isfile(filename):
            print 'file %s already downloaded' % filename
        s = get_url(url,filename,appkey=appkey)
        print 'cannot connect to %s' % url
        print 'web request status code %s' % r.status_code
        print 'Make sure you have file ~/.netrc permission 600 with the content'
        print 'machine urs.earthdata.nasa.gov\nlogin yourusername\npassword yourpassword'
        print 'something happened when trying to download %s with status code %s' % (url,r.status_code)
        except Exception as e:
            print 'download failed with error %s' % e

def download_GOES16(time):
    """
    Download the GOES16 data through rclone application

    :param time: tuple with the start and end times
    :return bucket: dictionary of all the data downloaded and all the GOES16 data downloaded in the same directory level

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-12
    """
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    argT='%d/%03d/%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour)
    args=['rclone','copyto','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'.','-L']
    print 'running: '+' '.join(args)
    print 'goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT+' downloaded.'
    args=['rclone','ls','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'-L']
    out=check_output(args)
    bucket.update({argT: [o.split(' ')[2] for o in out.split('\n')[:-1]]})
    except Exception as e:
        print 'download failed with error %s' % e

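# Usage sketch: mirror the ABI-L2-MCMIPC buckets for a 6 hour window into the current
# directory (assumes an rclone remote named 'goes16aws' is configured):
# bucket = download_GOES16(("2018-10-12T00:00:00Z","2018-10-12T06:00:00Z"))
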
def retrieve_af_data(bbox,time,burned=False,high=False,appkey=None):
    """
    Retrieve the data in bounding box coordinates and a time interval and save it in a Matlab structure inside the out.mat Matlab file

    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :return data: dictionary with all the data and out.mat Matlab file with a Matlab structure of the dictionary

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    lonmin,lonmax,latmin,latmax = bbox
    bbox = [(lonmin,latmax),(lonmin,latmin),(lonmax,latmin),(lonmax,latmax),(lonmin,latmax)]
    granules=get_meta(bbox,time,maxg,burned=burned,high=high)
    #print 'metadata found:\n' + json.dumps(granules,indent=4, separators=(',', ': '))

    # Eliminating the NRT data (always repeated)
    nrt_elimination(granules)

    for k,g in granules.items():
        print 'Downloading %s files' % k
        file_metadata.update(download(g,appkey=appkey))
    print "download complete"

    # Group all files downloaded
    #print "group all files:"

    # Generate data dictionary
    data.update(read_data(files.MOD,file_metadata,bounds))
    data.update(read_data(files.MYD,file_metadata,bounds))
    data.update(read_data(files.VNP,file_metadata,bounds))
    #data.update(read_data('VIIRS375','',bounds))

def files2metadata(files):
    """
    Get necessary metadata information from granules

    :param files: dictionary with MOD, MYD, and VNP keys and arrays of two files (geo and fire)
    :return data: dictionary with all the data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2020-05-07
    """
    print "processing files2metadata"
    file_metadata=Dict([])
    for key in files.keys():
        file_metadata[key]=Dict([])
        if key in ['MOD','MYD']:
            f0 = os.path.basename(f.geo)
            f1 = os.path.basename(f.fire)
            file_metadata[key][f0] = Dict([])
            file_metadata[key][f1] = Dict([])
            hdfg = SD(f.geo,SDC.READ)
            hdff = SD(f.fire,SDC.READ)
            meta = hdfg.attributes()['CoreMetadata.0']
            date = meta.split('RANGEBEGINNINGDATE')[1].split('VALUE')[1].split('=')[1].split('"')[1]
            time = meta.split('RANGEBEGINNINGTIME')[1].split('VALUE')[1].split('=')[1].split('"')[1]
            file_metadata[key][f0]["time_start"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[5:7],date[8:10],time[:2],time[3:5],time[6:8]]))
            date = meta.split('RANGEENDINGDATE')[1].split('VALUE')[1].split('=')[1].split('"')[1]
            time = meta.split('RANGEENDINGTIME')[1].split('VALUE')[1].split('=')[1].split('"')[1]
            file_metadata[key][f0]["time_end"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[5:7],date[8:10],time[:2],time[3:5],time[6:8]]))
            meta = hdff.attributes()['CoreMetadata.0']
            date = meta.split('RANGEBEGINNINGDATE')[1].split('VALUE')[1].split('=')[1].split('"')[1]
            time = meta.split('RANGEBEGINNINGTIME')[1].split('VALUE')[1].split('=')[1].split('"')[1]
            file_metadata[key][f1]["time_start"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[5:7],date[8:10],time[:2],time[3:5],time[6:8]]))
            date = meta.split('RANGEENDINGDATE')[1].split('VALUE')[1].split('=')[1].split('"')[1]
            time = meta.split('RANGEENDINGTIME')[1].split('VALUE')[1].split('=')[1].split('"')[1]
            file_metadata[key][f1]["time_end"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[5:7],date[8:10],time[:2],time[3:5],time[6:8]]))
            f0 = os.path.basename(f.geo)
            f1 = os.path.basename(f.fire)
            file_metadata[key][f0] = Dict([])
            file_metadata[key][f1] = Dict([])
            h5g = h5py.File(f.geo,'r')
            ncf = nc.Dataset(f.fire,'r')
            date = h5g['HDFEOS'].attrs['BeginningDate']
            time = h5g['HDFEOS'].attrs['BeginningTime']
            file_metadata[key][f0]["time_start"]='%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[4:6],date[6:8],time[:2],time[2:4],time[4:6]]))
            date = h5g['HDFEOS'].attrs['EndingDate']
            time = h5g['HDFEOS'].attrs['EndingTime']
            file_metadata[key][f0]["time_end"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[4:6],date[6:8],time[:2],time[2:4],time[4:6]]))
            file_metadata[key][f1]["time_start"] = ncf.getncattr('StartTime')[:19].replace(' ','T')+'Z'
            file_metadata[key][f1]["time_end"] = ncf.getncattr('EndTime')[:19].replace(' ','T')+'Z'

def process_data(path,bounds):
    """
    Process the data from a path within bounding box coordinates

    :param path: path where all the granules are downloaded
    :param bbox: polygon with the search bounding box
    :return data: dictionary with all the data and out.mat Matlab file with a Matlab structure of the dictionary

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2020-05-07
    """
    files = group_all(path)
    file_metadata = files2metadata(files)
    data.update(read_data(files.MOD,file_metadata.MOD,bounds))
    data.update(read_data(files.MYD,file_metadata.MYD,bounds))
    data.update(read_data(files.VNP,file_metadata.VNP,bounds))

def nrt_elimination(granules):
    """
    Cleaning all the NRT data which is repeated

    :param granules: Dictionary of granules products to clean up
    :return: It will update the granules dictionary

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-11-30
    """
    if 'MOD14' in granules:
        nlist=[g for g in granules['MOD14'] if g['data_center']=='LPDAAC_ECS']
        granules['MOD14']=nlist
    if 'MYD14' in granules:
        nlist=[g for g in granules['MYD14'] if g['data_center']=='LPDAAC_ECS']
        granules['MYD14']=nlist

def read_fire_mesh(filename):
    """
    Read necessary variables in the fire mesh of the wrfout file filename

    :param filename: wrfout file
    :return fxlon: lon coordinates in the fire mesh
    :return fxlat: lat coordinates in the fire mesh
    :return bbox: bounding box
    :return time_esmf: simulation times in ESMF format

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print 'opening ' + filename
    d = nc.Dataset(filename)
    m,n = d.variables['XLONG'][0,:,:].shape
    fm,fn = d.variables['FXLONG'][0,:,:].shape
    fm=fm-fm/(m+1) # dimensions corrected for extra strip
    fxlon = np.array(d.variables['FXLONG'][0,:fm,:fn]) # masking extra strip
    fxlat = np.array(d.variables['FXLAT'][0,:fm,:fn])
    time_esmf = ''.join(d.variables['Times'][:][0]) # date string as YYYY-MM-DD_hh:mm:ss
    bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
    print 'min max longitude latitude %s' % bbox
    print 'time (ESMF) %s' % time_esmf
    tign_g = np.array(d.variables['TIGN_G'][0,:fm,:fn])
    plot_3D(fxlon,fxlat,tign_g)
    return fxlon,fxlat,bbox,time_esmf

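# Usage sketch (file name illustrative):
# fxlon,fxlat,bbox,time_esmf = read_fire_mesh('wrfout_d03_2018-09-17_00:00:00')
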
def data2json(data,keys,dkeys,N):
    """
    Create a json dictionary from data dictionary

    :param data: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    for i,k in enumerate(keys):
        if isinstance(data[list(data)[0]][dkeys[i]],(list, tuple, np.ndarray)):
            dd=[np.ravel(data[d][dkeys[i]]) for d in list(data)]
            ret.update({k: np.concatenate(dd)})
            dd=[[data[d[1]][dkeys[i]]]*N[d[0]] for d in enumerate(list(data))]
            ret.update({k: np.concatenate(dd)})

def sdata2json(sdata,keys,dkeys,N):
    """
    Create a json dictionary from sorted array of data dictionaries

    :param sdata: sorted array of data dictionaries with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-12-03
    """
    ret=Dict({'granules': [d[0] for d in sdata]})
    for i,k in enumerate(keys):
        dd = [d[1][dkeys[i]] if dkeys[i] in d[1] else None for d in sdata]
        if np.any([isinstance(d,(list, tuple, np.ndarray)) for d in dd]):
            out = [d if d is not None else np.array([]) for d in dd]
            out = [[d[1][1][dkeys[i]]]*N[d[0]] if dkeys[i] in d[1][1] else [] for d in enumerate(sdata)]
        ret.update({k: out})

def write_csv(d,bounds):
    """
    Write fire detections from data dictionary d to a CSV file

    :param d: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return: fire_detections.csv file with all the detections

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    df=pd.DataFrame(data=d)
    df=df[(df['longitude']>bounds[0]) & (df['longitude']<bounds[1]) & (df['latitude']>bounds[2]) & (df['latitude']<bounds[3])]
    df.to_csv('fire_detections.csv', encoding='utf-8', index=False)

def plot_3D(xx,yy,zz):
    """
    Plot surface of (xx,yy,zz) data

    :param zz: values at the (x,y) points
    :return: a plot show of the 3D data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-09-21
    """
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt
    from matplotlib import cm
    ax = fig.gca(projection='3d')
    surf = ax.plot_surface(xx,yy,zz,cmap=cm.coolwarm)

def time_iso2num(time_iso):
    """
    Transform an iso time string to a time integer number of seconds since December 31 1969 at 17:00:00

    :param time_iso: string iso date
    :return s: integer number of seconds since December 31 1969 at 17:00:00

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
    # seconds since December 31 1969 at 17:00:00
    s=time.mktime(time_datetime.timetuple())

def time_iso2datetime(time_iso):
    """
    Transform an iso time string to a datetime element

    :param time_iso: string iso date
    :return time_datetime: datetime element

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
    return time_datetime

def time_datetime2iso(time_datetime):
    """
    Transform a datetime element to iso time string

    :param time_datetime: datetime element
    :return time_iso: string iso date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    time_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (time_datetime.year,time_datetime.month,
        time_datetime.day,time_datetime.hour,
        time_datetime.minute,time_datetime.second)

def time_num2iso(time_num):
    """
    Transform a time integer number of seconds since December 31 1969 at 17:00:00 to an iso time string

    :param time_num: integer number of seconds since December 31 1969 at 17:00:00
    :return date: time string in ISO date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    dt=datetime.datetime.fromtimestamp(time_num)
    # seconds since December 31 1969 at 17:00:00
    date='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)

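# Round-trip sketch for the time helpers above:
# t = time_iso2num('2018-09-17T00:00:00Z')   # seconds since the epoch described above
# time_num2iso(t)                            # back to '2018-09-17T00:00:00Z'
# time_datetime2iso(time_iso2datetime('2018-09-17T00:00:00Z'))
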
def pixel_dim(sample,N,h,p,a=None):
    """
    Computes pixel dimensions (along-scan and track pixel sizes)

    :param sample: array of integers with the column number (sample variable in files)
    :param N: scalar, total number of pixels in each row of the image swath
    :param h: scalar, altitude of the satellite in km
    :param p: scalar, pixel nadir resolution in km
    :param a: array of floats of the size of p with the angles where the resolution change
    :return theta: scan angle in radians
    :return scan: along-scan pixel size in km
    :return track: along-track pixel size in km

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    Re=6378 # approximation of the radius of the Earth in km
    s=np.arctan(p/h) # trigonometry (rad/sample)
    if isinstance(p,(list, tuple, np.ndarray)):
        Ns=np.array([int((a[k]-a[k-1])/s[k-1]) for k in range(1,len(a)-1)])
        Ns=np.append(Ns,int(M-Ns.sum()))
        theta=s[0]*(sample-M)
        scan=Re*s[0]*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s[0]*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
        for k in range(1,len(Ns)):
            p=sample<=M-Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M-Ns[0:k].sum()))-(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
            p=sample>=M+Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M+Ns[0:k].sum()))+(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
    scan=Re*s*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
    track=r*s*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
    return (theta,scan,track)

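# Usage sketch: sizes for three MODIS fire pixels given by their sample (column) index;
# pixels near the swath edge (sample 0 or 1353) come out several times larger than the
# roughly 1 km nadir pixel, as expected for a cross-track scanner:
# theta,scan,track = pixel_dim(np.array([0,677,1353]),1354,705.,1.)
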
def copyto(partial_path,kml):
    """
    Copy information in partial_path to kml

    :param partial_path: path to a partial KML file
    :param kml: KML object where to write to
    :return: information from partial_path into kml

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    with open(partial_path,'r') as part:

def json2kml(d,kml_path,bounds,prods,opt='granule',minconf=80.):
    """
    Creates a KML file kml_path from a dictionary d

    :param d: dictionary with all the fire detection information to create the KML file
    :param kml_path: path in where the KML file is going to be written
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    if not isinstance(d['latitude'][0],(list, tuple, np.ndarray)):
        ind=[i[0] for i in sorted(enumerate(d.acq_date), key=lambda x:x[1])]
        L=[len(list(grp)) for k, grp in groupby(d['acq_date'][ind])]
        ll=[sum(L[0:k+1]) for k in range(len(L))]
        sort=[d[v][i] for i in ind]
        d[v]=[sort[ll[k]:ll[k+1]] for k in range(len(ll)-1)]

    frp_style={-1:'modis_frp_no_data',40:'modis_frp_gt_400'}
    for i in range(0,40):
        frp_style[i]='modis_frp_%s_to_%s' % (i*10, i*10+10)

    with open(kml_path,'w') as kml:
        copyto('kmls/partial1.kml',kml)

        # write bounding box
        kml.write('<Folder>\n'
            +'<name>Bounding Box</name>\n'
            +'<name>Bounds</name>\n'
            +'<description>Bounding box of the fire detections</description>\n'
            +'<LabelStyle><color>00000000</color><scale>0.000000</scale></LabelStyle>\n'
            +'<LineStyle><color>ff0000e6</color><width>4.000000</width></LineStyle>\n'
            +'<PolyStyle><color>00f0f0f0</color><outline>1</outline></PolyStyle>\n'
            +'<MultiGeometry>\n'
            +'<extrude>0</extrude><altitudeMode>clampToGround</altitudeMode>\n'
            +'<outerBoundaryIs><LinearRing><coordinates>\n'
            +'%s,%s,0\n' % (bounds[0],bounds[2])
            +'%s,%s,0\n' % (bounds[1],bounds[2])
            +'%s,%s,0\n' % (bounds[1],bounds[3])
            +'%s,%s,0\n' % (bounds[0],bounds[3])
            +'%s,%s,0\n' % (bounds[0],bounds[2])
            +'</coordinates></LinearRing></outerBoundaryIs>\n'
            +'</MultiGeometry>\n')

        # set some constants
        r = 6378 # Earth radius
        km_lat = 180/(np.pi*r) # 1km in degrees latitude
        ND = len(d['latitude'])

        kml.write('<Folder>\n')
        kml.write('<name>%s</name>\n' % prods[prod])
        copyto('kmls/partial2.kml',kml)

        col = np.flip(np.divide([(230, 25, 75, 150), (245, 130, 48, 150), (255, 255, 25, 150),
            (210, 245, 60, 150), (60, 180, 75, 150), (70, 240, 240, 150),
            (0, 0, 128, 150), (145, 30, 180, 150), (240, 50, 230, 150),
            (128, 128, 128, 150)],255.),0)
        cm = colors.LinearSegmentedColormap.from_list('BuRd',col,ND)
        cols = ['%02x%02x%02x%02x' % tuple(255*np.flip(c)) for c in cm(range(cm.N))]
        t_range = range(ND-1,-1,-1)

        lats=np.array(d['latitude'][t]).astype(float)
        lons=np.array(d['longitude'][t]).astype(float)
        ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
        acq_date=np.array(d['acq_date'][t])[ll]
        acq_time=np.array(d['acq_time'][t])[ll]
        satellite=np.array(d['satellite'][t])[ll]
        satellite=np.array(['Not available']*NN)
        instrument=np.array(d['instrument'][t])[ll]
        instrument=np.array(['Not available']*NN)
        confidence=np.array(d['confidence'][t])[ll].astype(float)
        confidence=np.array(np.zeros(NN)).astype(float)
        frps=np.array(d['frp'][t])[ll].astype(float)
        frps=np.array(np.zeros(NN)).astype(float)
        angles=np.array(d['scan_angle'][t])[ll].astype(float)
        angles=np.array(['Not available']*NN)
        scans=np.array(d['scan'][t])[ll].astype(float)
        scans=np.ones(NN).astype(float)
        tracks=np.array(d['track'][t])[ll].astype(float)
        tracks=np.ones(NN).astype(float)

        kml.write('<Folder>\n')
        kml.write('<name>%s</name>\n' % acq_date[0])
        elif opt=='granule':
            kml.write('<name>%s</name>\n' % d['granules'][t])
        kml.write('<name>Pixels</name>\n')

        timestamp=acq_date[p] + 'T' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ':00Z'
        timedescr=acq_date[p] + ' ' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ':00 UTC'

        kml.write('<Placemark>\n<name>Ground detection square</name>\n')
        kml.write('<description>\nlongitude: %s\n' % lon
            + 'latitude: %s\n' % lat
            + 'time: %s\n' % timedescr
            + 'satellite: %s\n' % satellite[p]
            + 'instrument: %s\n' % instrument[p]
            + 'scan angle: %s\n' % angle
            + 'along-scan: %s\n' % scan
            + 'along-track: %s\n' % track
            + '</description>\n')
        kml.write('<Placemark>\n<name>Fire detection square</name>\n')
        kml.write('<description>\nlongitude: %s\n' % lon
            + 'latitude: %s\n' % lat
            + 'time: %s\n' % timedescr
            + 'satellite: %s\n' % satellite[p]
            + 'instrument: %s\n' % instrument[p]
            + 'confidence: %s\n' % conf
            + 'scan angle: %s\n' % angle
            + 'along-scan: %s\n' % scan
            + 'along-track: %s\n' % track
            + '</description>\n')
        kml.write('<TimeStamp><when>%s</when></TimeStamp>\n' % timestamp)
        kml.write('<Data name="FRP"><value>%s</value></Data>\n' % frp)
        kml.write('<styleUrl> modis_conf_low </styleUrl>\n')
        kml.write('<styleUrl> modis_conf_med </styleUrl>\n')
        kml.write('<styleUrl> modis_conf_high </styleUrl>\n')
        kml.write('<Style>\n'+'<PolyStyle>\n'
            +'<color>%s</color>\n' % cols[t]
            +'<outline>0</outline>\n'+'</PolyStyle>\n')
        frpx = min(40,np.ceil(frp/10.)-1)
        kml.write('<styleUrl> %s </styleUrl>\n' % frp_style[frpx] )
        kml.write('<styleUrl> no_fire </styleUrl>\n')
        kml.write('<Style>\n'+'<PolyStyle>\n'
            +'<color>7000ffff</color>\n'
            +'<outline>0</outline>\n'+'</PolyStyle>\n')
        kml.write('<Style>\n'+'<PolyStyle>\n'
            +'<color>7000a5ff</color>\n'
            +'<outline>0</outline>\n'+'</PolyStyle>\n')
        kml.write('<Style>\n'+'<PolyStyle>\n'
            +'<color>700000ff</color>\n'
            +'<outline>0</outline>\n'+'</PolyStyle>\n')
        kml.write('<Polygon>\n<outerBoundaryIs>\n<LinearRing>\n<coordinates>\n')
        km_lon=km_lat/np.cos(lat*np.pi/180) # 1 km in longitude
        sq_track_size_km=track
        sq2_lat=km_lat * sq_track_size_km/2
        sq_scan_size_km=scan
        sq2_lon=km_lon * sq_scan_size_km/2
        kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
        kml.write('%s,%s,0\n' % (lon - sq2_lon, lat + sq2_lat))
        kml.write('%s,%s,0\n' % (lon + sq2_lon, lat + sq2_lat))
        kml.write('%s,%s,0\n' % (lon + sq2_lon, lat - sq2_lat))
        kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
        kml.write('</coordinates>\n</LinearRing>\n</outerBoundaryIs>\n</Polygon>\n</Placemark>\n')
        kml.write('</Folder>\n')
        kml.write('</Folder>\n')
        kml.write('</Document>\n</kml>\n')
    print 'Created file %s' % kml_path
    print 'No detections to be saved as %s' % kml_path

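# Usage sketch (arguments illustrative): dump the detections in dictionary d to KML,
# one folder per granule, using prods to map product keys to display names:
# json2kml(d,'fire_detections.kml',(-113.,-112.,41.,42.),{'AF':'Active Fires'},opt='granule')
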
def burned_algorithm(data):
    """
    Computes mask of burned scar pixels

    :param data: data dictionary with all the necessary bands M7, M8, M10 and M11
    :return C: Mask of burned scar pixels

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2019-01-03
    """
    M=(M8.astype(float)-RthSub)/(M11.astype(float)+eps)
    C1=np.logical_and(M>0,M<Rth)
    C2=np.logical_and(M8>M08LB,M8<M08UB)
    C5=np.logical_and(M10>M10LB,M10<M10UB)
    # All the conditions at the same time
    C=np.logical_and(np.logical_and(np.logical_and(np.logical_and(C1,C2),C3),C4),C5)

if __name__ == "__main__":
    bbox=[-132.86966,-102.0868788,44.002495,66.281204]
    time = ("2012-09-11T00:00:00Z", "2012-09-12T00:00:00Z")
    data=retrieve_af_data(bbox,time)
    # Save the data dictionary into a matlab structure file out.mat
    sio.savemat('out.mat', mdict=data)