import warnings
warnings.filterwarnings("ignore")
import numpy as np
import glob
import re
import os
import datetime
import time
import requests
import urlparse
import h5py
import netCDF4 as nc
import pandas as pd
from pyhdf.SD import SD, SDC
from utils import Dict
import scipy.io as sio
import matplotlib.colors as colors
from itertools import groupby
from subprocess import check_output, call

def search_api(sname,bbox,time,maxg=50,platform="",version=""):
    """
    API search of the different satellite granules

    :param sname: short name
    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param platform: string with the platform
    :param version: string with the version
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    from cmr import GranuleQuery
    api = GranuleQuery()
    if not version:
        if not platform:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time)
        else:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time,
                platform=platform)
    else:
        if not platform:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time,
                version=version)
        else:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time,
                platform=platform,
                version=version)
    sh=search.hits()
    print "%s gets %s hits in this range" % (sname,sh)
    if sh>maxg:
        print "The number of hits %s is larger than the limit %s." % (sh,maxg)
        print "Use a reduced bounding box or a reduced time interval."
        granules=[]
    else:
        granules=api.get(sh)
    return granules

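# Example (illustrative sketch, not part of the original module): search the CMR API
# for MODIS Terra fire granules over a small polygon and a one-day window; the bounding
# box and dates below are made-up values.
#
#   bbox=[(-113.9,43.4),(-113.9,41.7),(-111.2,41.7),(-111.2,43.4),(-113.9,43.4)]
#   time=("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z")
#   grans=search_api("MOD14",bbox,time,maxg=50,platform="Terra")
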
def search_archive(url,prod,time,grans):
    """
    Archive search of the different satellite granules

    :param url: base url of the archive domain
    :param prod: string of product with version, for instance: '5000/VNP09'
    :param time: time interval (init_time,final_time)
    :param grans: granules of the geolocation metadata
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-01-03
    """
    granules=[]
    ids=['.'.join(k['producer_granule_id'].split('.')[1:3]) for k in grans] # satellite ids in bounding box
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    fold=np.unique(['%d/%03d' % (date.timetuple().tm_year,date.timetuple().tm_yday) for date in dates])
    urls=[url+'/'+prod+'/'+f for f in fold]
    for u in urls:
        try:
            js=requests.get(u+'.json').json()
            for j in js:
                arg=np.argwhere(np.array(ids)=='.'.join(j['name'].split('.')[1:3]))
                if arg.size:
                    ar=arg[0][0]
                    g=Dict(j)
                    g.links=[{'href':u+'/'+g.name}]
                    g.time_start=grans[ar]["time_start"]
                    g.time_end=grans[ar]["time_end"]
                    g.dataset_id='Archive download: unknown dataset id'
                    g.producer_granule_id=j['name']
                    granules.append(g)
        except Exception as e:
            print "warning: some JSON request failed"
    print "%s gets %s hits in this range" % (prod.split('/')[-1],len(granules))
    return granules

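# Example (illustrative sketch): find archived 375m VIIRS fire granules matching a set
# of VNP03 geolocation granules previously found with search_api(); the time window
# below is made up.
#
#   url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData"
#   time=("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z")
#   vnp14hr=search_archive(url,"5000/VNP14IMG",time,vnp03_granules)
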
def get_meta(bbox,time,maxg=50,burned=False,high=False):
    """
    Get all the meta data from the API for all the necessary products

    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param burned: if True, also search the VNP09 reflectance product in the archive
    :param high: if True, also search the 375m VIIRS products in the archive
    :return granules: dictionary with the metadata of all the products

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    granules=Dict([])
    #MOD14: MODIS Terra fire data
    granules.MOD14=search_api("MOD14",bbox,time,maxg,"Terra")
    #MOD03: MODIS Terra geolocation data
    granules.MOD03=search_api("MOD03",bbox,time,maxg,"Terra","6")
    #MOD09: MODIS Atmospherically corrected surface reflectance
    #granules.MOD09=search_api("MOD09",bbox,time,maxg,"Terra","6")
    #MYD14: MODIS Aqua fire data
    granules.MYD14=search_api("MYD14",bbox,time,maxg,"Aqua")
    #MYD03: MODIS Aqua geolocation data
    granules.MYD03=search_api("MYD03",bbox,time,maxg,"Aqua","6")
    #MYD09: MODIS Atmospherically corrected surface reflectance
    #granules.MYD09=search_api("MYD09",bbox,time,maxg,"Terra","6")
    #VNP14: VIIRS fire data, res 750m
    granules.VNP14=search_api("VNP14",bbox,time,maxg)
    #VNP03MODLL: VIIRS geolocation data, res 750m
    granules.VNP03=search_api("VNP03MODLL",bbox,time,maxg)
    #VNP14HR: VIIRS fire data, res 375m
    #granules.VNP14HR=search_api("VNP14IMGTDL_NRT",bbox,time,maxg) # no results in API
    if high:
        url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
        prod="5000/NPP_IMFTS_L1" # product
        granules.VNP03HR=search_archive(url,prod,time,granules.VNP03)
        prod="5000/VNP14IMG" # product
        granules.VNP14HR=search_archive(url,prod,time,granules.VNP03HR)
    if burned:
        #VNP09: VIIRS Atmospherically corrected surface reflectance
        url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
        prod="5000/VNP09" # product
        granules.VNP09=search_archive(url,prod,time,granules.VNP03)
    return granules

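# Example (illustrative sketch): collect metadata for every product over a polygon and
# a one-day window; high=True adds the 375m VIIRS archive search and burned=True adds
# the VNP09 reflectance search. Values below are made up.
#
#   bbox=[(-113.9,43.4),(-113.9,41.7),(-111.2,41.7),(-111.2,43.4),(-113.9,43.4)]
#   time=("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z")
#   granules=get_meta(bbox,time,maxg=100,burned=False,high=True)
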
def group_files(path,reg):
    """
    Group the geolocation (03) and fire (14) files of a specific product in a path

    :param path: path to the geolocation (03) and fire (14) files
    :param reg: string with the first 3 characters specifying the product (MOD, MYD or VNP)
    :return files: list of pairs with geolocation (03) and fire (14) file names in the path of the specific product

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=[Dict({'geo':k}) for k in glob.glob(path+'/'+reg+'03*')]
    filesf=glob.glob(path+'/'+reg+'14*')
    filesr=glob.glob(path+'/'+reg+'09*')
    if len(filesf):
        for f in filesf:
            m=re.split("/",f)[-1].split('.')
            if len(m)>1:
                # pair the fire file with the geolocation file with the same acquisition date and time
                for k,g in enumerate(files):
                    mmf=re.split("/",g.geo)
                    mm=mmf[-1].split('.')
                    if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
                        files[k].fire=f
    if len(filesr):
        for f in filesr:
            m=re.split("/",f)[-1].split('.')
            if len(m)>1:
                # pair the reflectance file with the geolocation file with the same acquisition date and time
                for k,g in enumerate(files):
                    mmf=re.split("/",g.geo)
                    mm=mmf[-1].split('.')
                    if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
                        files[k].ref=f
    return files

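# Example (illustrative sketch): pair the downloaded VIIRS geolocation and fire granules
# in the current directory; each element is a Dict with keys 'geo', 'fire' and, when a
# reflectance granule matches, 'ref'.
#
#   pairs=group_files('.','VNP')
#   # pairs[0].geo  -> './VNP03MODLL.A2018260.2030...'  (illustrative name)
#   # pairs[0].fire -> './VNP14.A2018260.2030...'
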
def group_all(path):
    """
    Combine all the geolocation (03) and fire (14) files in a path

    :param path: path to the geolocation (03) and fire (14) files
    :return files: dictionary of products with a list of pairs with geolocation (03) and fire (14) file names in the path

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=Dict([])
    # MOD files
    modf=group_files(path,'MOD')
    # MYD files
    mydf=group_files(path,'MYD')
    # VIIRS files
    vif=group_files(path,'VNP')
    files.MOD=modf
    files.MYD=mydf
    files.VNP=vif
    return files

def read_modis_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for MODIS products (MOD or MYD)

    :param files: pair with geolocation (03) and fire (14) file names for MODIS products (MOD or MYD)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=1354 # Number of columns (maximum number of samples)
    h=705. # Altitude of the satellite in km
    p=1. # Nadir pixel resolution in km
    # Reading MODIS files
    hdfg=SD(files.geo,SDC.READ)
    hdff=SD(files.fire,SDC.READ)
    # Creating all the objects
    lat_obj=hdfg.select('Latitude')
    lon_obj=hdfg.select('Longitude')
    fire_obj=hdff.select('fire mask')
    lat_fire_obj=hdff.select('FP_latitude')
    lon_fire_obj=hdff.select('FP_longitude')
    brig_fire_obj=hdff.select('FP_T21')
    sample_fire_obj=hdff.select('FP_sample')
    conf_fire_obj=hdff.select('FP_confidence')
    t31_fire_obj=hdff.select('FP_T31')
    frp_fire_obj=hdff.select('FP_power')
    # Geolocation and mask information
    ret.lat=lat_obj.get()
    ret.lon=lon_obj.get()
    ret.fire=fire_obj.get()
    # Fire detected information
    try:
        flats=lat_fire_obj.get()
    except:
        flats=np.array([])
    try:
        flons=lon_fire_obj.get()
    except:
        flons=np.array([])
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]), flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    try:
        ret.brig_fire=brig_fire_obj.get()[fll]
    except:
        ret.brig_fire=np.array([])
    ret.sat_fire=hdff.Satellite
    try:
        ret.conf_fire=conf_fire_obj.get()[fll]
    except:
        ret.conf_fire=np.array([])
    try:
        ret.t31_fire=t31_fire_obj.get()[fll]
    except:
        ret.t31_fire=np.array([])
    try:
        ret.frp_fire=frp_fire_obj.get()[fll]
    except:
        ret.frp_fire=np.array([])
    try:
        sf=sample_fire_obj.get()[fll]
    except:
        sf=np.array([])
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    # No fire information
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.ravel(ret.fire)
    fire=fire[ll]
    # fire mask values 3 and 5 are the non-fire (water and land) pixels
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p)
    # Close files
    hdfg.end()
    hdff.end()
    return ret

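# Example (illustrative sketch): read one MODIS Aqua geo/fire pair clipped to a
# lon/lat box; the returned Dict contains, among others, ret.lat, ret.lon, ret.fire,
# ret.lat_fire/ret.lon_fire and the per-detection scan/track pixel sizes.
#
#   pair=Dict({'geo':'MYD03.A2018260.2030.006.hdf','fire':'MYD14.A2018260.2030.006.hdf'})  # made-up names
#   bounds=(-113.9,-111.2,41.7,43.4)
#   ret=read_modis_files(pair,bounds)
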
def read_viirs_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for VIIRS products (VNP)

    :param files: pair with geolocation (03) and fire (14) file names for VIIRS products (VNP)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=3200 # Number of columns (maximum number of samples)
    h=828. # Altitude of the satellite in km
    alpha=np.array([0,31.59,44.68,56.06])/180*np.pi
    #p=(0.75+0.75/2+0.75/3)/3 # Nadir pixel resolution in km (mean in 3 different sections)
    p=np.array([0.75,0.75/2,0.75/3])
    # Reading VIIRS files
    h5g=h5py.File(files.geo,'r')
    ncf=nc.Dataset(files.fire,'r')
    # Geolocation and mask information
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ret.fire=np.array(ncf.variables['fire mask'][:])
    # Fire detected information
    flats=np.array(ncf.variables['FP_latitude'][:])
    flons=np.array(ncf.variables['FP_longitude'][:])
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]), flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])[fll]
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])[fll]
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])[fll]
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])[fll]
    sf=np.array(ncf.variables['FP_sample'][:])[fll]
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p,alpha)
    # No fire information
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.ravel(ret.fire)
    fire=fire[ll]
    # fire mask values 3 and 5 are the non-fire (water and land) pixels
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p,alpha)
    # Reflectance data for burned scar algorithm
    if 'ref' in files.keys():
        # Read reflectance data
        hdf=SD(files.ref,SDC.READ)
        # Bands used by the burned scar algorithm
        M7=hdf.select('750m Surface Reflectance Band M7') # 0.86 um
        M8=hdf.select('750m Surface Reflectance Band M8') # 1.24 um
        M10=hdf.select('750m Surface Reflectance Band M10') # 1.61 um
        M11=hdf.select('750m Surface Reflectance Band M11') # 2.25 um
        bands=Dict([])
        bands.M7=M7.get()*1e-4
        bands.M8=M8.get()*1e-4
        bands.M10=M10.get()*1e-4
        bands.M11=M11.get()*1e-4
        # Burned scar mask using the burned scar granule algorithm
        ret.burned=burned_algorithm(bands)
        # Close reflectance file
        hdf.end()
    # Close files
    h5g.close()
    ncf.close()
    return ret

def read_viirs375_files(path,bounds):
    """
    Read the geolocation and fire information from VIIRS CSV files (fire_archive_*.csv and/or fire_nrt_*.csv)

    :param path: path to the CSV files
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-10-23
    """
    ret=Dict({})
    # Opening files if they exist
    f1=glob.glob(path+'/fire_archive_*.csv')
    f2=glob.glob(path+'/fire_nrt_*.csv')
    if len(f1):
        df1=pd.read_csv(f1[0])
        if len(f2):
            df2=pd.read_csv(f2[0])
            dfs=pd.concat([df1,df2],sort=True,ignore_index=True)
        else:
            dfs=df1
    else:
        if len(f2):
            dfs=pd.read_csv(f2[0])
        else:
            return ret

    # In the case something exists, read all the information from the CSV files
    dfs=dfs[(dfs['longitude']>bounds[0]) & (dfs['longitude']<bounds[1]) & (dfs['latitude']>bounds[2]) & (dfs['latitude']<bounds[3])]
    date=np.array(dfs['acq_date'])
    time=np.array(dfs['acq_time'])
    dfs['time']=np.array(['%s_%04d' % (date[k],time[k]) for k in range(len(date))])
    dfs['time']=pd.to_datetime(dfs['time'], format='%Y-%m-%d_%H%M')
    dfs['datetime']=dfs['time']
    dfs=dfs.set_index('time')
    for group_name, df in dfs.groupby(pd.TimeGrouper("D")):
        items=Dict([])
        items.lat=np.array(df['latitude'])
        items.lon=np.array(df['longitude'])
        conf=np.array(df['confidence'])
        firemask=np.zeros(conf.shape)
        conf_fire=np.zeros(conf.shape)
        firemask[conf=='l']=7
        conf_fire[conf=='l']=30.
        firemask[conf=='n']=8
        conf_fire[conf=='n']=60.
        firemask[conf=='h']=9
        conf_fire[conf=='h']=90.
        items.fire=firemask.astype(int)
        items.lat_fire=items.lat
        items.lon_fire=items.lon
        items.brig_fire=np.array(df['bright_ti4'])
        items.sat_fire='Suomi NPP'
        items.conf_fire=conf_fire
        items.t31_fire=np.array(df['bright_ti5'])
        items.frp_fire=np.array(df['frp'])
        items.scan_fire=np.array(df['scan'])
        items.track_fire=np.array(df['track'])
        items.scan_angle_fire=np.ones(items.scan_fire.shape)*np.nan
        items.lat_nofire=np.array([])
        items.lon_nofire=np.array([])
        items.scan_angle_nofire=np.array([])
        items.scan_nofire=np.array([])
        items.track_nofire=np.array([])
        items.instrument=df['instrument'][0]
        dt=df['datetime'][0]
        items.time_start_geo_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
        items.time_num=time_iso2num(items.time_start_geo_iso)
        items.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
        items.acq_time='%02d%02d' % (dt.hour,dt.minute)
        items.time_start_fire_iso=items.time_start_geo_iso
        items.time_end_geo_iso=items.time_start_geo_iso
        items.time_end_fire_iso=items.time_start_geo_iso
        items.file_geo=f1+f2 # CSV files used (assumed)
        items.file_fire=items.file_geo
        tt=df['datetime'][0].timetuple()
        id='VNPH_A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        items.name='A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        ret.update({id: items})
    return ret

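# Example (illustrative sketch): read FIRMS 375m VIIRS detections downloaded as CSV in
# the current directory; the result maps ids like 'VNPH_A2018260_2030' to Dicts with
# the same fields as the granule readers above.
#
#   bounds=(-113.9,-111.2,41.7,43.4)
#   dets=read_viirs375_files('.',bounds)
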
def read_goes_files(files):
    """
    Read the files for GOES products - geolocation and fire data already included (OS)

    :param files: pair with geolocation and fire file names for GOES products
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on WINDOWS10.
    Lauren Hearn (lauren@robotlauren.com), 2018-10-16
    """
    ret=Dict([])
    h5g=h5py.File(files.geo,'r')
    # Geolocation and mask information
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ncf=nc.Dataset(files.fire,'r')
    ret.fire=np.array(ncf.variables['fire mask'][:])
    ret.lat_fire=np.array(ncf.variables['FP_latitude'][:])
    ret.lon_fire=np.array(ncf.variables['FP_longitude'][:])
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])
    sf=np.array(ncf.variables['FP_sample'][:])
    # Satellite information
    N=2500 # Number of columns (maximum number of samples)
    h=35786. # Altitude of the satellite in km
    p=2. # Nadir pixel resolution in km
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])
    return ret

def read_data(files,file_metadata,bounds):
    """
    Read all the geolocation (03) and fire (14) files and if necessary, the reflectance (09) files

    MODIS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/archive/mod14_v5_user_guide.pdf
        MOD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
        MYD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
    where:
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        vvv = version number
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing

    VIIRS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/vnp14_user_guide_v1.3.pdf
        VNP14IMG.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
        VNP14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
    where:
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        vvv = version number
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing

    :param files: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
    :param file_metadata: dictionary with file names as key and granules metadata as values
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return data: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print "read_data files=%s" % files
    data=Dict([])
    if files=='VIIRS375':
        data.update(read_viirs375_files('.',bounds))
    else:
        for f in files:
            print "read_data f=%s" % f
            if 'geo' in f.keys():
                f0=os.path.basename(f.geo)
            else:
                print 'ERROR: read_data cannot read files=%s, not geo file' % f
                continue
            if 'fire' in f.keys():
                f1=os.path.basename(f.fire)
            else:
                print 'ERROR: read_data cannot read files=%s, not fire file' % f
                continue
            if 'ref' in f.keys():
                f2=os.path.basename(f.ref)
            prefix=f0[:3]
            print 'prefix %s' % prefix
            if prefix != f1[:3]:
                print 'ERROR: the prefix of %s %s must coincide' % (f0,f1)
                continue
            # key from the acquisition date and time in the file name, for instance A2018260.2030
            key='.'.join(f0.split('.')[1:3])
            id = prefix + '_' + key
            if prefix=="MOD" or prefix=="MYD":
                try:
                    item=read_modis_files(f,bounds)
                    item.instrument="MODIS"
                except Exception as e:
                    print 'WARNING: reading the files from MODIS failed with error %s' % e
                    continue
            elif prefix=="VNP":
                try:
                    item=read_viirs_files(f,bounds)
                    item.instrument="VIIRS"
                except Exception as e:
                    print 'WARNING: reading the files from VIIRS failed with error %s' % e
                    continue
            elif prefix=="OR_": # GOES ABI file name prefix (assumed)
                try:
                    item=read_goes_files(f)
                    item.instrument="GOES"
                except Exception as e:
                    print 'WARNING: reading the files from GOES failed with error %s' % e
                    continue
            else:
                print 'ERROR: the prefix of %s %s must be MOD, MYD, or VNP' % (f0,f1)
                continue
            boo=True
            if 'ref' in f.keys():
                boo=False
                if f2 in file_metadata.keys():
                    boo=True
            if (f0 in file_metadata.keys()) and (f1 in file_metadata.keys()) and boo:
                # connect the file back to metadata
                item.time_start_geo_iso=file_metadata[f0]["time_start"]
                item.time_num=time_iso2num(item.time_start_geo_iso)
                dt=datetime.datetime.strptime(item.time_start_geo_iso[0:19],'%Y-%m-%dT%H:%M:%S')
                item.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
                item.acq_time='%02d%02d' % (dt.hour,dt.minute)
                item.time_start_fire_iso=file_metadata[f1]["time_start"]
                item.time_end_geo_iso=file_metadata[f0]["time_end"]
                item.time_end_fire_iso=file_metadata[f1]["time_end"]
                item.file_geo=f0
                item.file_fire=f1
                if 'ref' in f.keys():
                    item.file_ref=f2
                    item.time_start_ref_iso=file_metadata[f2]["time_start"]
                    item.time_end_ref_iso=file_metadata[f2]["time_end"]
                data.update({id:item})
            else:
                print 'WARNING: file %s or %s not found in downloaded metadata, ignoring both' % (f0, f1)
    return data

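# Example (illustrative sketch): read every paired granule found in the working
# directory into a single dictionary keyed by ids such as 'MOD_A2018260.2030'.
#
#   files=group_all('.')
#   data=Dict({})
#   data.update(read_data(files.MOD,file_metadata,bounds))
#   data.update(read_data(files.VNP,file_metadata,bounds))
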
def download(granules, appkey=None):
    """
    Download files as listed in the granules metadata

    :param granules: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
    :param appkey: optional Earthdata bearer token used for authorization
    :return file_metadata: dictionary with file names as key and granules metadata as values

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    file_metadata = Dict([])
    for gn,granule in enumerate(granules):
        #print json.dumps(granule,indent=4, separators=(',', ': '))
        url = granule['links'][0]['href']
        filename=os.path.basename(urlparse.urlsplit(url).path)
        file_metadata[filename]=granule

        # to store as object in memory (maybe not completely downloaded until accessed?)
        # with requests.Session() as s:
        #    data.append(s.get(url))

        # download - a minimal code without various error checking and corrective actions
        # see wrfxpy/src/ingest/downloader.py
        if os.path.isfile(filename):
            print 'file %s already downloaded' % filename
            continue
        try:
            chunk_size = 1024*1024
            s = 0
            print 'downloading %s as %s' % (url,filename)
            if appkey:
                r = requests.get(url, stream=True, headers={"Authorization": "Bearer %s" % appkey})
            else:
                r = requests.get(url, stream=True)
            if r.status_code == 200:
                content_size = int(r.headers['Content-Length'])
                print 'downloading %s as %s size %sB' % (url, filename, content_size)
                with open(filename, 'wb') as f:
                    for chunk in r.iter_content(chunk_size):
                        f.write(chunk)
                        s = s + len(chunk)
                        print('downloaded %sB of %sB' % (s, content_size))
            else:
                print 'cannot connect to %s' % url
                print 'web request status code %s' % r.status_code
                print 'Make sure you have file ~/.netrc permission 600 with the content'
                print 'machine urs.earthdata.nasa.gov\nlogin yourusername\npassword yourpassword'
                print 'something happened when trying to download %s with status code %s' % (url,r.status_code)
        except Exception as e:
            print 'download failed with error %s' % e
    return file_metadata

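# Example (illustrative sketch): download all granules of one product into the working
# directory, optionally authenticating with an Earthdata bearer token read from the
# environment (the variable name is illustrative).
#
#   appkey=os.environ.get('EARTHDATA_TOKEN')
#   file_metadata=download(granules.MOD14, appkey=appkey)
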
def download_GOES16(time):
    """
    Download the GOES16 data through rclone application

    :param time: tuple with the start and end times
    :return bucket: dictionary of all the data downloaded and all the GOES16 data downloaded in the same directory level

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-12
    """
    bucket=Dict([])
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    for date in dates:
        tt=date.timetuple()
        argT='%d/%03d/%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour)
        try:
            args=['rclone','copyto','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'.','-L']
            print 'running: '+' '.join(args)
            res=call(args)
            print 'goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT+' downloaded.'
            args=['rclone','ls','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'-L']
            out=check_output(args)
            bucket.update({argT: [o.split(' ')[2] for o in out.split('\n')[:-1]]})
        except Exception as e:
            print 'download failed with error %s' % e
    return bucket

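# Example (illustrative sketch): mirror two hours of GOES16 ABI-L2-MCMIPC data; rclone
# must be installed and configured with a remote called goes16aws pointing to the
# public noaa-goes16 bucket.
#
#   time=("2018-10-12T17:00:00Z","2018-10-12T19:00:00Z")
#   bucket=download_GOES16(time)
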
def retrieve_af_data(bbox,time,burned=False,high=False,appkey=None):
    """
    Retrieve the data in a bounding box coordinates and time interval and save it in a Matlab structure inside the out.mat Matlab file

    :param bbox: bounding box as (lonmin,lonmax,latmin,latmax)
    :param time: time interval (init_time,final_time)
    :return data: dictionary with all the data and out.mat Matlab file with a Matlab structure of the dictionary

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    # Define settings
    lonmin,lonmax,latmin,latmax = bbox
    bounds=bbox
    bbox = [(lonmin,latmax),(lonmin,latmin),(lonmax,latmin),(lonmax,latmax),(lonmin,latmax)]
    maxg = 50 # granule limit per product (value assumed; matches the search_api default)

    # Get metadata of all the granules
    granules=get_meta(bbox,time,maxg,burned=burned,high=high)
    #print 'medatada found:\n' + json.dumps(granules,indent=4, separators=(',', ': '))

    # Eliminating the NRT data (repeated always)
    nrt_elimination(granules)

    # Download all the granules
    file_metadata = Dict([])
    for k,g in granules.items():
        print 'Downloading %s files' % k
        file_metadata.update(download(g,appkey=appkey))

    print "download complete"

    # Group all files downloaded
    files=group_all(".")
    #print "group all files:"
    #print files

    # Generate data dictionary
    data=Dict({})
    data.update(read_data(files.MOD,file_metadata,bounds))
    data.update(read_data(files.MYD,file_metadata,bounds))
    data.update(read_data(files.VNP,file_metadata,bounds))
    #data.update(read_data('VIIRS375','',bounds))

    return data

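# Example (illustrative sketch): one-call retrieval for a box given as
# [lonmin,lonmax,latmin,latmax] and an ISO time interval; granules are downloaded into
# the working directory and read into a single data dictionary.
#
#   bbox=[-113.9,-111.2,41.7,43.4]
#   time=("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z")
#   data=retrieve_af_data(bbox,time)
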
def files2metadata(files):
    """
    Get necessary metadata information from granules

    :param files: dictionary with MOD, MYD, and VNP keys and arrays of two files (geo and fire)
    :return file_metadata: dictionary with file names as keys and granule time metadata as values

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2020-05-07
    """
    print "processing files2metadata"
    file_metadata=Dict([])
    for key in files.keys():
        file_metadata[key]=Dict([])
        if key in ['MOD','MYD']:
            for f in files[key]:
                f0 = os.path.basename(f.geo)
                f1 = os.path.basename(f.fire)
                file_metadata[key][f0] = Dict([])
                file_metadata[key][f1] = Dict([])
                hdfg = SD(f.geo,SDC.READ)
                hdff = SD(f.fire,SDC.READ)
                meta = hdfg.attributes()['CoreMetadata.0']
                date = meta.split('RANGEBEGINNINGDATE')[1].split('VALUE')[1].split('=')[1].split('"')[1]
                time = meta.split('RANGEBEGINNINGTIME')[1].split('VALUE')[1].split('=')[1].split('"')[1]
                file_metadata[key][f0]["time_start"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[5:7],date[8:10],time[:2],time[3:5],time[6:8]]))
                date = meta.split('RANGEENDINGDATE')[1].split('VALUE')[1].split('=')[1].split('"')[1]
                time = meta.split('RANGEENDINGTIME')[1].split('VALUE')[1].split('=')[1].split('"')[1]
                file_metadata[key][f0]["time_end"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[5:7],date[8:10],time[:2],time[3:5],time[6:8]]))
                meta = hdff.attributes()['CoreMetadata.0']
                date = meta.split('RANGEBEGINNINGDATE')[1].split('VALUE')[1].split('=')[1].split('"')[1]
                time = meta.split('RANGEBEGINNINGTIME')[1].split('VALUE')[1].split('=')[1].split('"')[1]
                file_metadata[key][f1]["time_start"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[5:7],date[8:10],time[:2],time[3:5],time[6:8]]))
                date = meta.split('RANGEENDINGDATE')[1].split('VALUE')[1].split('=')[1].split('"')[1]
                time = meta.split('RANGEENDINGTIME')[1].split('VALUE')[1].split('=')[1].split('"')[1]
                file_metadata[key][f1]["time_end"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[5:7],date[8:10],time[:2],time[3:5],time[6:8]]))
                hdfg.end()
                hdff.end()
        else:
            # VNP granules: times come from the HDF5 geolocation attributes and the netCDF fire attributes
            print "files %s" % files[key]
            for f in files[key]:
                f0 = os.path.basename(f.geo)
                f1 = os.path.basename(f.fire)
                file_metadata[key][f0] = Dict([])
                file_metadata[key][f1] = Dict([])
                h5g = h5py.File(f.geo,'r')
                ncf = nc.Dataset(f.fire,'r')
                date = h5g['HDFEOS'].attrs['BeginningDate']
                time = h5g['HDFEOS'].attrs['BeginningTime']
                file_metadata[key][f0]["time_start"]='%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[4:6],date[6:8],time[:2],time[2:4],time[4:6]]))
                date = h5g['HDFEOS'].attrs['EndingDate']
                time = h5g['HDFEOS'].attrs['EndingTime']
                file_metadata[key][f0]["time_end"] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[4:6],date[6:8],time[:2],time[2:4],time[4:6]]))
                file_metadata[key][f1]["time_start"] = ncf.getncattr('StartTime')[:19].replace(' ','T')+'Z'
                file_metadata[key][f1]["time_end"] = ncf.getncattr('EndTime')[:19].replace(' ','T')+'Z'
                h5g.close()
                ncf.close()
    return file_metadata

def process_data(path,bounds):
    """
    Process the data from a path in a bounding box coordinates

    :param path: path where all the granules are downloaded
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return data: dictionary with all the data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2020-05-07
    """
    files = group_all(path)
    file_metadata = files2metadata(files)
    data=Dict({})
    data.update(read_data(files.MOD,file_metadata.MOD,bounds))
    data.update(read_data(files.MYD,file_metadata.MYD,bounds))
    data.update(read_data(files.VNP,file_metadata.VNP,bounds))
    return data

def nrt_elimination(granules):
    """
    Cleaning all the NRT data which is repeated

    :param granules: Dictionary of granules products to clean up
    :return: It will update the granules dictionary

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-11-30
    """
    if 'MOD14' in granules:
        nlist=[g for g in granules['MOD14'] if g['data_center']=='LPDAAC_ECS']
        granules['MOD14']=nlist
    if 'MYD14' in granules:
        nlist=[g for g in granules['MYD14'] if g['data_center']=='LPDAAC_ECS']
        granules['MYD14']=nlist

def read_fire_mesh(filename):
    """
    Read necessary variables in the fire mesh of the wrfout file filename

    :param filename: wrfout file
    :return fxlon: lon coordinates in the fire mesh
    :return fxlat: lat coordinates in the fire mesh
    :return bbox: bounding box
    :return time_esmf: simulation times in ESMF format

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print 'opening ' + filename
    d = nc.Dataset(filename)
    m,n = d.variables['XLONG'][0,:,:].shape
    fm,fn = d.variables['FXLONG'][0,:,:].shape
    fm=fm-fm/(m+1) # dimensions corrected for extra strip
    fn=fn-fn/(n+1)
    fxlon = np.array(d.variables['FXLONG'][0,:fm,:fn]) # masking extra strip
    fxlat = np.array(d.variables['FXLAT'][0,:fm,:fn])
    time_esmf = ''.join(d.variables['Times'][:][0]) # date string as YYYY-MM-DD_hh:mm:ss
    bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
    print 'min max longitude latitude %s' % bbox
    print 'time (ESMF) %s' % time_esmf

    plot = False
    if plot:
        tign_g = np.array(d.variables['TIGN_G'][0,:fm,:fn])
        plot_3D(fxlon,fxlat,tign_g)

    d.close()
    return fxlon,fxlat,bbox,time_esmf

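# Example (illustrative sketch): read the fire mesh of a WRF-SFIRE output file and use
# its bounding box to clip satellite detections; the file name below is made up.
#
#   fxlon,fxlat,bbox,time_esmf=read_fire_mesh('wrfout_d01_2018-09-17_00:00:00')
#   data=process_data('.',bbox)
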
def data2json(data,keys,dkeys,N):
    """
    Create a json dictionary from data dictionary

    :param data: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict({})
    for i,k in enumerate(keys):
        if isinstance(data[list(data)[0]][dkeys[i]],(list, tuple, np.ndarray)):
            dd=[np.ravel(data[d][dkeys[i]]) for d in list(data)]
            ret.update({k: np.concatenate(dd)})
        else:
            dd=[[data[d[1]][dkeys[i]]]*N[d[0]] for d in enumerate(list(data))]
            ret.update({k: np.concatenate(dd)})
    return ret

def sdata2json(sdata,keys,dkeys,N):
    """
    Create a json dictionary from sorted array of data dictionaries

    :param sdata: sorted array of data dictionaries with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-12-03
    """
    ret=Dict({'granules': [d[0] for d in sdata]})
    for i,k in enumerate(keys):
        dd = [d[1][dkeys[i]] if dkeys[i] in d[1] else None for d in sdata]
        if np.any([isinstance(d,(list, tuple, np.ndarray)) for d in dd]):
            out = [d if d is not None else np.array([]) for d in dd]
            ret.update({k: out})
        else:
            out = [[d[1][1][dkeys[i]]]*N[d[0]] if dkeys[i] in d[1][1] else [] for d in enumerate(sdata)]
            ret.update({k: out})
    return ret

def write_csv(d,bounds):
    """
    Write fire detections from data dictionary d to a CSV file

    :param d: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return: fire_detections.csv file with all the detections

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    df=pd.DataFrame(data=d)
    df=df[(df['longitude']>bounds[0]) & (df['longitude']<bounds[1]) & (df['latitude']>bounds[2]) & (df['latitude']<bounds[3])]
    df.to_csv('fire_detections.csv', encoding='utf-8', index=False)

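# Example (illustrative sketch): dump the detections of the json dictionary built with
# data2json() to fire_detections.csv, clipped to a lon/lat box.
#
#   write_csv(detections_json,(-113.9,-111.2,41.7,43.4))
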
def plot_3D(xx,yy,zz):
    """
    Plot surface of (xx,yy,zz) data

    :param xx: x coordinates of the points
    :param yy: y coordinates of the points
    :param zz: values at the (x,y) points
    :return: a plot show of the 3D data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-09-21
    """
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt
    from matplotlib import cm
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    surf = ax.plot_surface(xx,yy,zz,cmap=cm.coolwarm)
    plt.show()

def time_iso2num(time_iso):
    """
    Transform an iso time string to a time integer number of seconds since December 31 1969 at 17:00:00

    :param time_iso: string iso date
    :return s: integer number of seconds since December 31 1969 at 17:00:00

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
    # seconds since December 31 1969 at 17:00:00
    s=time.mktime(time_datetime.timetuple())
    return s

def time_iso2datetime(time_iso):
    """
    Transform an iso time string to a datetime element

    :param time_iso: string iso date
    :return time_datetime: datetime element

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
    return time_datetime

def time_datetime2iso(time_datetime):
    """
    Transform a datetime element to iso time string

    :param time_datetime: datetime element
    :return time_iso: string iso date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    time_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (time_datetime.year,time_datetime.month,
        time_datetime.day,time_datetime.hour,
        time_datetime.minute,time_datetime.second)
    return time_iso

def time_num2iso(time_num):
    """
    Transform a time integer number of seconds since December 31 1969 at 17:00:00 to an iso time string

    :param time_num: integer number of seconds since December 31 1969 at 17:00:00
    :return date: time string in ISO date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    dt=datetime.datetime.fromtimestamp(time_num)
    # seconds since December 31 1969 at 17:00:00
    date='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
    return date

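# Example (illustrative sketch): the four helpers above roundtrip between ISO strings,
# datetime objects and the local-epoch second count used internally.
#
#   s=time_iso2num("2018-09-17T20:30:00Z")
#   time_num2iso(s)                     # -> "2018-09-17T20:30:00Z"
#   time_datetime2iso(time_iso2datetime("2018-09-17T20:30:00Z"))
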
def pixel_dim(sample,N,h,p,a=None):
    """
    Computes pixel dimensions (along-scan and track pixel sizes)

    :param sample: array of integers with the column number (sample variable in files)
    :param N: scalar, total number of pixels in each row of the image swath
    :param h: scalar, altitude of the satellite in km
    :param p: scalar or array of floats, pixel nadir resolution in km (one value per resolution section)
    :param a: array of floats of the size of p with the angles where the resolution changes
    :return theta: scan angle in radians
    :return scan: along-scan pixel size in km
    :return track: along-track pixel size in km

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    Re=6378 # approximation of the radius of the Earth in km
    r=Re+h # distance from the Earth center to the satellite in km
    M=(N-1)*0.5 # central (nadir) sample
    s=np.arctan(p/h) # trigonometry (rad/sample)
    if isinstance(p,(list, tuple, np.ndarray)):
        Ns=np.array([int((a[k]-a[k-1])/s[k-1]) for k in range(1,len(a)-1)])
        Ns=np.append(Ns,int(M-Ns.sum()))
        theta=s[0]*(sample-M)
        scan=Re*s[0]*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s[0]*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
        for k in range(1,len(Ns)):
            p=sample<=M-Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M-Ns[0:k].sum()))-(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
            p=sample>=M+Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M+Ns[0:k].sum()))+(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
    else:
        theta=s*(sample-M)
        scan=Re*s*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
    return (theta,scan,track)

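# Example (illustrative sketch): at nadir (sample = M) theta = 0, so the along-scan size
# reduces to Re*s*(1/sqrt((Re/r)**2)-1) = h*arctan(p/h) ~ p and the along-track size to
# r*s*(1-Re/r) ~ p, i.e. both recover the nadir resolution; away from nadir both grow.
#
#   theta,scan,track=pixel_dim(np.array([0,677,1353]),1354,705.,1.)  # MODIS constants
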
def copyto(partial_path,kml):
    """
    Copy information in partial_path to kml

    :param partial_path: path to a partial KML file
    :param kml: KML object where to write to
    :return: information from partial_path into kml

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    with open(partial_path,'r') as part:
        for line in part:
            kml.write(line)

def json2kml(d,kml_path,bounds,prods,opt='granule',minconf=80.):
    """
    Creates a KML file kml_path from a dictionary d

    :param d: dictionary with all the fire detection information to create the KML file
    :param kml_path: path in where the KML file is going to be written
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :param prods: dictionary of product keys and the folder names to use for them
    :param opt: folder grouping option ('granule' or 'date')
    :param minconf: minimum confidence level for the fire detections

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    if d:
        if not isinstance(d['latitude'][0],(list, tuple, np.ndarray)):
            # group the flat detection arrays by acquisition date
            ind=[i[0] for i in sorted(enumerate(d.acq_date), key=lambda x:x[1])]
            L=[len(list(grp)) for k, grp in groupby(d['acq_date'][ind])]
            L.insert(0,0)
            ll=[sum(L[0:k+1]) for k in range(len(L))]
            for v in list(d):
                sort=[d[v][i] for i in ind]
                d[v]=[sort[ll[k]:ll[k+1]] for k in range(len(ll)-1)]

        # styles for the FRP coloring
        frp_style={-1:'modis_frp_no_data',40:'modis_frp_gt_400'}
        for i in range(0,40):
            frp_style[i]='modis_frp_%s_to_%s' % (i*10, i*10+10)

        with open(kml_path,'w') as kml:
            copyto('kmls/partial1.kml',kml)

            # set some constants
            r = 6378 # Earth radius
            km_lat = 180/(np.pi*r) # 1km in degrees latitude
            ND = len(d['latitude'])

            # product keys ('FRP', 'TF', 'AF', 'NF') below are assumed from the style names
            for prod in prods:
                kml.write('<Folder>\n')
                kml.write('<name>%s</name>\n' % prods[prod])
                if prod == 'FRP':
                    copyto('kmls/partial2.kml',kml)
                if prod == 'TF':
                    col = np.flip(np.divide([(230, 25, 75, 150), (245, 130, 48, 150), (255, 255, 25, 150),
                        (210, 245, 60, 150), (60, 180, 75, 150), (70, 240, 240, 150),
                        (0, 0, 128, 150), (145, 30, 180, 150), (240, 50, 230, 150),
                        (128, 128, 128, 150)],255.),0)
                    cm = colors.LinearSegmentedColormap.from_list('BuRd',col,ND)
                    cols = ['%02x%02x%02x%02x' % tuple(255*np.flip(c)) for c in cm(range(cm.N))]
                t_range = range(ND-1,-1,-1)
                for t in t_range:
                    lats=np.array(d['latitude'][t]).astype(float)
                    lons=np.array(d['longitude'][t]).astype(float)
                    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
                    latitude=lats[ll]
                    longitude=lons[ll]
                    NN=len(latitude)
                    if NN > 0:
                        acq_date=np.array(d['acq_date'][t])[ll]
                        acq_time=np.array(d['acq_time'][t])[ll]
                        try:
                            satellite=np.array(d['satellite'][t])[ll]
                        except:
                            satellite=np.array(['Not available']*NN)
                        try:
                            instrument=np.array(d['instrument'][t])[ll]
                        except:
                            instrument=np.array(['Not available']*NN)
                        try:
                            confidence=np.array(d['confidence'][t])[ll].astype(float)
                        except:
                            confidence=np.array(np.zeros(NN)).astype(float)
                        try:
                            frps=np.array(d['frp'][t])[ll].astype(float)
                        except:
                            frps=np.array(np.zeros(NN)).astype(float)
                        try:
                            angles=np.array(d['scan_angle'][t])[ll].astype(float)
                        except:
                            angles=np.array(['Not available']*NN)
                        try:
                            scans=np.array(d['scan'][t])[ll].astype(float)
                        except:
                            scans=np.ones(NN).astype(float)
                        try:
                            tracks=np.array(d['track'][t])[ll].astype(float)
                        except:
                            tracks=np.ones(NN).astype(float)

                        kml.write('<Folder>\n')
                        if opt=='date':
                            kml.write('<name>%s</name>\n' % acq_date[0])
                        elif opt=='granule':
                            kml.write('<name>%s</name>\n' % d['granules'][t])
                        else:
                            kml.write('<name>Pixels</name>\n')

                        for p in range(NN):
                            lat=latitude[p]
                            lon=longitude[p]
                            conf=confidence[p]
                            frp=frps[p]
                            angle=angles[p]
                            scan=scans[p]
                            track=tracks[p]
                            timestamp=acq_date[p] + 'T' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + 'Z'
                            timedescr=acq_date[p] + ' ' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ' UTC'

                            if prod == 'NF':
                                kml.write('<Placemark>\n<name>Ground detection square</name>\n')
                                kml.write('<description>\nlongitude: %s\n' % lon
                                    + 'latitude: %s\n' % lat
                                    + 'time: %s\n' % timedescr
                                    + 'satellite: %s\n' % satellite[p]
                                    + 'instrument: %s\n' % instrument[p]
                                    + 'scan angle: %s\n' % angle
                                    + 'along-scan: %s\n' % scan
                                    + 'along-track: %s\n' % track
                                    + '</description>\n')
                            else:
                                kml.write('<Placemark>\n<name>Fire detection square</name>\n')
                                kml.write('<description>\nlongitude: %s\n' % lon
                                    + 'latitude: %s\n' % lat
                                    + 'time: %s\n' % timedescr
                                    + 'satellite: %s\n' % satellite[p]
                                    + 'instrument: %s\n' % instrument[p]
                                    + 'confidence: %s\n' % conf
                                    + 'FRP: %s\n' % frp
                                    + 'scan angle: %s\n' % angle
                                    + 'along-scan: %s\n' % scan
                                    + 'along-track: %s\n' % track
                                    + '</description>\n')
                            kml.write('<TimeStamp><when>%s</when></TimeStamp>\n' % timestamp)
                            if prod == 'AF':
                                # confidence thresholds assumed
                                if conf < 30.:
                                    kml.write('<styleUrl> modis_conf_low </styleUrl>\n')
                                elif conf < 80.:
                                    kml.write('<styleUrl> modis_conf_med </styleUrl>\n')
                                else:
                                    kml.write('<styleUrl> modis_conf_high </styleUrl>\n')
                            elif prod == 'TF':
                                kml.write('<Style>\n'+'<PolyStyle>\n'
                                    +'<color>%s</color>\n' % cols[t]
                                    +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                    +'</Style>\n')
                            elif prod == 'FRP':
                                frpx = min(40,np.ceil(frp/10.)-1)
                                kml.write('<styleUrl> %s </styleUrl>\n' % frp_style[frpx] )
                            elif prod == 'NF':
                                kml.write('<styleUrl> no_fire </styleUrl>\n')
                            else:
                                # colored squares by confidence (thresholds assumed)
                                if conf < 30.:
                                    kml.write('<Style>\n'+'<PolyStyle>\n'
                                        +'<color>7000ffff</color>\n'
                                        +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                        +'</Style>\n')
                                elif conf < 80.:
                                    kml.write('<Style>\n'+'<PolyStyle>\n'
                                        +'<color>7000a5ff</color>\n'
                                        +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                        +'</Style>\n')
                                else:
                                    kml.write('<Style>\n'+'<PolyStyle>\n'
                                        +'<color>700000ff</color>\n'
                                        +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                        +'</Style>\n')

                            kml.write('<Polygon>\n<outerBoundaryIs>\n<LinearRing>\n<coordinates>\n')
                            km_lon=km_lat/np.cos(lat*np.pi/180) # 1 km in longitude
                            sq_track_size_km=track
                            sq2_lat=km_lat * sq_track_size_km/2
                            sq_scan_size_km=scan
                            sq2_lon=km_lon * sq_scan_size_km/2
                            kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
                            kml.write('%s,%s,0\n' % (lon - sq2_lon, lat + sq2_lat))
                            kml.write('%s,%s,0\n' % (lon + sq2_lon, lat + sq2_lat))
                            kml.write('%s,%s,0\n' % (lon + sq2_lon, lat - sq2_lat))
                            kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
                            kml.write('</coordinates>\n</LinearRing>\n</outerBoundaryIs>\n</Polygon>\n</Placemark>\n')
                        kml.write('</Folder>\n')
                kml.write('</Folder>\n')
            kml.write('</Document>\n</kml>\n')
        print 'Created file %s' % kml_path
    else:
        print 'No detections to be saved as %s' % kml_path

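# Example (illustrative sketch): write the detections into a KML with one folder per
# product; the product keys below mirror the style branches above and are assumptions,
# not fixed by this module.
#
#   prods={'AF':'Active Fires','FRP':'Fire Radiative Power','TF':'Temporal Fire coloring'}
#   json2kml(detections_json,'fire_detections.kml',(-113.9,-111.2,41.7,43.4),prods)
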
def burned_algorithm(data):
    """
    Computes mask of burned scar pixels

    :param data: data dictionary with all the necessary bands M7, M8, M10 and M11
    :return C: Mask of burned scar pixels

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2019-01-03
    """
    # Bands used by the burned scar algorithm
    M7=data.M7
    M8=data.M8
    M10=data.M10
    M11=data.M11
    # Threshold constants of the conditions below (values assumed, not fixed by this listing)
    RthSub=0.05
    Rth=0.81
    M07UB=0.19
    M08LB=0.00
    M08UB=0.28
    M10LB=0.07
    M10UB=1.00
    M11LB=0.05
    eps=1e-30
    # Conditions of the algorithm
    M=(M8.astype(float)-RthSub)/(M11.astype(float)+eps)
    C1=np.logical_and(M>0,M<Rth)
    C2=np.logical_and(M8>M08LB,M8<M08UB)
    # conditions C3 and C4 on bands M11 and M7 (assumed form)
    C3=M11>M11LB
    C4=M7<M07UB
    C5=np.logical_and(M10>M10LB,M10<M10UB)
    # All the conditions at the same time
    C=np.logical_and(np.logical_and(np.logical_and(np.logical_and(C1,C2),C3),C4),C5)
    return C

if __name__ == "__main__":
    bbox=[-132.86966,-102.0868788,44.002495,66.281204]
    time = ("2012-09-11T00:00:00Z", "2012-09-12T00:00:00Z")
    data=retrieve_af_data(bbox,time)
    # Save the data dictionary into a matlab structure file out.mat
    sio.savemat('out.mat', mdict=data)