import warnings
warnings.filterwarnings("ignore")
import numpy as np
import json
import requests
import urlparse
import os
import sys
import re
import glob
import netCDF4 as nc
from pyhdf.SD import SD, SDC
from utils import *
import scipy.io as sio
import h5py
import datetime
import time
import matplotlib.colors as colors
from itertools import groupby
from subprocess import check_output, call

def search_api(sname,bbox,time,maxg=50,platform="",version=""):
    """
    API search of the different satellite granules

    :param sname: short name
    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param platform: string with the platform
    :param version: string with the version
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    from cmr import GranuleQuery
    api = GranuleQuery()
    if not version:
        if not platform:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time
            )
        else:
            search = api.parameters(
                short_name=sname,
                platform=platform,
                downloadable=True,
                polygon=bbox,
                temporal=time
            )
    else:
        if not platform:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time,
                version=version
            )
        else:
            search = api.parameters(
                short_name=sname,
                platform=platform,
                downloadable=True,
                polygon=bbox,
                temporal=time,
                version=version
            )
    sh=search.hits()
    print "%s gets %s hits in this range" % (sname,sh)
    if sh>maxg:
        print "The number of hits %s is larger than the limit %s." % (sh,maxg)
        print "Use a reduced bounding box or a reduced time interval."
        granules = []
    else:
        granules = api.get(sh)
    return granules

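# Usage sketch for search_api (illustrative only; the polygon, dates and limit
# below are made-up values, and the call assumes the python-cmr package and CMR access):
#   bbox = [(-113.9,42.1),(-113.9,41.5),(-113.4,41.5),(-113.4,42.1),(-113.9,42.1)]
#   time = ("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z")
#   grans = search_api("MOD14",bbox,time,maxg=10,platform="Terra")
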
def search_archive(url,prod,time,grans):
    """
    Archive search of the different satellite granules

    :param url: base url of the archive domain
    :param prod: string of product with version, for instance: '5000/VNP09'
    :param time: time interval (init_time,final_time)
    :param grans: granules of the geolocation metadata
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-01-03
    """
    ids=['.'.join(k['producer_granule_id'].split('.')[1:3]) for k in grans] # satellite ids in bounding box
    granules=[]
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    fold=np.unique(['%d/%03d' % (date.timetuple().tm_year,date.timetuple().tm_yday) for date in dates])
    urls=[url+'/'+prod+'/'+f for f in fold]
    for u in urls:
        try:
            js=requests.get(u+'.json').json()
            for j in js:
                arg=np.argwhere(np.array(ids)=='.'.join(j['name'].split('.')[1:3]))
                if arg.size:
                    ar=arg[0][0]
                    g=Dict(j)
                    g.links=[{'href':u+'/'+g.name}]
                    g.time_start=grans[ar]["time_start"]
                    g.time_end=grans[ar]["time_end"]
                    g.dataset_id='Archive download: unknown dataset id'
                    g.producer_granule_id=j['name']
                    granules.append(g)
        except Exception as e:
            print 'warning: JSON request to %s failed with error %s' % (u,e)
    print "%s gets %s hits in this range" % (prod.split('/')[-1],len(granules))
    return granules

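# Usage sketch for search_archive (illustrative only; it reuses the geolocation
# granules returned by a previous search_api call to match the archive file names):
#   url = "https://ladsweb.modaps.eosdis.nasa.gov/archive/allData"
#   vnp09 = search_archive(url,'5000/VNP09',time,vnp03_granules)
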
def get_meta(bbox,time,maxg=50,burned=False,high=False):
    """
    Get all the meta data from the API for all the necessary products

    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param burned: if True, also search the VNP09 reflectance data for the burned scar algorithm
    :param high: if True, also search the 375m VIIRS products from the archive
    :return granules: dictionary with the metadata of all the products

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    granules=Dict([])
    # MOD14: MODIS Terra fire data
    granules.MOD14=search_api("MOD14",bbox,time,maxg,"Terra")
    # MOD03: MODIS Terra geolocation data
    granules.MOD03=search_api("MOD03",bbox,time,maxg,"Terra","6")
    # MOD09: MODIS Terra atmospherically corrected surface reflectance
    #granules.MOD09=search_api("MOD09",bbox,time,maxg,"Terra","6")
    # MYD14: MODIS Aqua fire data
    granules.MYD14=search_api("MYD14",bbox,time,maxg,"Aqua")
    # MYD03: MODIS Aqua geolocation data
    granules.MYD03=search_api("MYD03",bbox,time,maxg,"Aqua","6")
    # MYD09: MODIS Aqua atmospherically corrected surface reflectance
    #granules.MYD09=search_api("MYD09",bbox,time,maxg,"Terra","6")
    # VNP14: VIIRS fire data, res 750m
    granules.VNP14=search_api("VNP14",bbox,time,maxg)
    # VNP03MODLL: VIIRS geolocation data, res 750m
    granules.VNP03=search_api("VNP03MODLL",bbox,time,maxg)
    # VNP14HR: VIIRS fire data, res 375m
    #granules.VNP14HR=search_api("VNP14IMGTDL_NRT",bbox,time,maxg) # no results in the API
    if high:
        url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
        prod="5000/NPP_IMFTS_L1" # product
        granules.VNP03HR=search_archive(url,prod,time,granules.VNP03)
        prod="5000/VNP14IMG" # product
        granules.VNP14HR=search_archive(url,prod,time,granules.VNP03HR)
    if burned:
        # VNP09: VIIRS atmospherically corrected surface reflectance
        url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
        prod="5000/VNP09" # product
        granules.VNP09=search_archive(url,prod,time,granules.VNP03)
    return granules

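# Usage sketch for get_meta (illustrative only; bbox is the closed polygon built
# in retrieve_af_data and time an ISO interval):
#   granules = get_meta(bbox,time,maxg=100,burned=False,high=False)
#   print len(granules.MOD14), len(granules.VNP14)
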
def group_files(path,reg):
    """
    Group the geolocation (03) and fire (14) files of a specific product in a path

    :param path: path to the geolocation (03) and fire (14) files
    :param reg: string with the first 3 characters specifying the product (MOD, MYD or VNP)
    :return files: list of pairs with geolocation (03) and fire (14) file names in the path of the specific product

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=[Dict({'geo':k}) for k in glob.glob(path+'/'+reg+'03*')]
    filesf=glob.glob(path+'/'+reg+'14*')
    filesr=glob.glob(path+'/'+reg+'09*')
    if len(filesf)>0:
        for f in filesf:
            mf=re.split("/",f)
            if mf is not None:
                m=mf[-1].split('.')
                if m is not None:
                    for k,g in enumerate(files):
                        mmf=re.split("/",g.geo)
                        mm=mmf[-1].split('.')
                        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
                            files[k].fire=f
    if len(filesr)>0:
        for f in filesr:
            mf=re.split("/",f)
            if mf is not None:
                m=mf[-1].split('.')
                if m is not None:
                    for k,g in enumerate(files):
                        mmf=re.split("/",g.geo)
                        mm=mmf[-1].split('.')
                        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
                            files[k].ref=f
    return files

def group_all(path):
    """
    Combine all the geolocation (03) and fire (14) files in a path

    :param path: path to the geolocation (03) and fire (14) files
    :return files: dictionary of products with a list of pairs with geolocation (03) and fire (14) file names in the path

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=Dict({})
    # MOD files
    modf=group_files(path,'MOD')
    # MYD files
    mydf=group_files(path,'MYD')
    # VIIRS files
    vif=group_files(path,'VNP')
    files.MOD=modf
    files.MYD=mydf
    files.VNP=vif
    return files

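# Usage sketch for group_files/group_all (illustrative only; assumes the granules
# were already downloaded into the current directory):
#   modis_pairs = group_files('.','MOD')
#   all_pairs = group_all('.')   # Dict with keys MOD, MYD and VNP
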
def read_modis_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for MODIS products (MOD or MYD)

    :param files: pair with geolocation (03) and fire (14) file names for MODIS products (MOD or MYD)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=1354 # number of columns (maximum sample number)
    h=705. # altitude of the satellite in km
    p=1. # nadir pixel resolution in km
    # Reading MODIS files
    hdfg=SD(files.geo,SDC.READ)
    hdff=SD(files.fire,SDC.READ)
    # Creating all the objects
    lat_obj=hdfg.select('Latitude')
    lon_obj=hdfg.select('Longitude')
    fire_obj=hdff.select('fire mask')
    lat_fire_obj=hdff.select('FP_latitude')
    lon_fire_obj=hdff.select('FP_longitude')
    brig_fire_obj=hdff.select('FP_T21')
    sample_fire_obj=hdff.select('FP_sample')
    conf_fire_obj=hdff.select('FP_confidence')
    t31_fire_obj=hdff.select('FP_T31')
    frp_fire_obj=hdff.select('FP_power')
    # Geolocation and mask information
    ret.lat=lat_obj.get()
    ret.lon=lon_obj.get()
    ret.fire=fire_obj.get()
    # Fire detected information
    try:
        flats=lat_fire_obj.get()
    except:
        flats=np.array([])
    try:
        flons=lon_fire_obj.get()
    except:
        flons=np.array([])
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]), flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    try:
        ret.brig_fire=brig_fire_obj.get()[fll]
    except:
        ret.brig_fire=np.array([])
    ret.sat_fire=hdff.Satellite
    try:
        ret.conf_fire=conf_fire_obj.get()[fll]
    except:
        ret.conf_fire=np.array([])
    try:
        ret.t31_fire=t31_fire_obj.get()[fll]
    except:
        ret.t31_fire=np.array([])
    try:
        ret.frp_fire=frp_fire_obj.get()[fll]
    except:
        ret.frp_fire=np.array([])
    try:
        sf=sample_fire_obj.get()[fll]
    except:
        sf=np.array([])
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    # No fire data
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.ravel(ret.fire)
    fire=fire[ll]
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p)
    # Close files
    hdfg.end()
    hdff.end()
    return ret

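# Usage sketch for read_modis_files (illustrative only; the file names below are
# made up, the pair would come from group_files and the bounds from read_fire_mesh):
#   pair = Dict({'geo':'MOD03.A2018260.1030.006.2018260191549.hdf',
#                'fire':'MOD14.A2018260.1030.006.2018260192513.hdf'})
#   ret = read_modis_files(pair,(-113.9,-113.4,41.5,42.1))
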
def read_viirs_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for VIIRS products (VNP)

    :param files: pair with geolocation (03) and fire (14) file names for VIIRS products (VNP)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=3200 # number of columns (maximum sample number)
    h=828. # altitude of the satellite in km
    alpha=np.array([0,31.59,44.68,56.06])/180*np.pi
    #p=(0.75+0.75/2+0.75/3)/3 # nadir pixel resolution in km (mean of the 3 different sections)
    p=np.array([0.75,0.75/2,0.75/3])
    # Reading VIIRS files
    h5g=h5py.File(files.geo,'r')
    ncf=nc.Dataset(files.fire,'r')
    # Geolocation and mask information
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ret.fire=np.array(ncf.variables['fire mask'][:])
    # Fire detected information
    flats=np.array(ncf.variables['FP_latitude'][:])
    flons=np.array(ncf.variables['FP_longitude'][:])
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]), flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])[fll]
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])[fll]
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])[fll]
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])[fll]
    sf=np.array(ncf.variables['FP_sample'][:])[fll]
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p,alpha)
    # No fire data
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.ravel(ret.fire)
    fire=fire[ll]
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p,alpha)
    # Reflectance data for burned scar algorithm
    if 'ref' in files.keys():
        # Read reflectance data
        hdf=SD(files.ref,SDC.READ)
        # Bands data
        M7=hdf.select('750m Surface Reflectance Band M7') # 0.86 um
        M8=hdf.select('750m Surface Reflectance Band M8') # 1.24 um
        M10=hdf.select('750m Surface Reflectance Band M10') # 1.61 um
        M11=hdf.select('750m Surface Reflectance Band M11') # 2.25 um
        bands=Dict({})
        bands.M7=M7.get()*1e-4
        bands.M8=M8.get()*1e-4
        bands.M10=M10.get()*1e-4
        bands.M11=M11.get()*1e-4
        # Burned scar mask using the burned scar granule algorithm
        ret.burned=burned_algorithm(bands)
        # Close reflectance file
        hdf.end()
    # Close files
    h5g.close()
    ncf.close()
    return ret

def read_viirs375_files(path,bounds):
    """
    Read the geolocation and fire information from VIIRS CSV files (fire_archive_*.csv and/or fire_nrt_*.csv)

    :param path: path to the VIIRS CSV files
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-10-23
    """
    import pandas as pd
    # Opening files if they exist
    f1=glob.glob(path+'/fire_archive_*.csv')
    f2=glob.glob(path+'/fire_nrt_*.csv')
    if len(f1)==1:
        df1=pd.read_csv(f1[0])
        if len(f2)==1:
            df2=pd.read_csv(f2[0])
            dfs=pd.concat([df1,df2],sort=True,ignore_index=True)
        else:
            dfs=df1
    else:
        if len(f2)==1:
            dfs=pd.read_csv(f2[0])
        else:
            return {}

    ret=Dict({})
    # In the case something exists, read all the information from the CSV files
    dfs=dfs[(dfs['longitude']>bounds[0]) & (dfs['longitude']<bounds[1]) & (dfs['latitude']>bounds[2]) & (dfs['latitude']<bounds[3])]
    date=np.array(dfs['acq_date'])
    time=np.array(dfs['acq_time'])
    dfs['time']=np.array(['%s_%04d' % (date[k],time[k]) for k in range(len(date))])
    dfs['time']=pd.to_datetime(dfs['time'], format='%Y-%m-%d_%H%M')
    dfs['datetime']=dfs['time']
    dfs=dfs.set_index('time')
    for group_name, df in dfs.groupby(pd.TimeGrouper("D")):
        items=Dict([])
        items.lat=np.array(df['latitude'])
        items.lon=np.array(df['longitude'])
        conf=np.array(df['confidence'])
        firemask=np.zeros(conf.shape)
        conf_fire=np.zeros(conf.shape)
        firemask[conf=='l']=7
        conf_fire[conf=='l']=30.
        firemask[conf=='n']=8
        conf_fire[conf=='n']=60.
        firemask[conf=='h']=9
        conf_fire[conf=='h']=90.
        items.fire=firemask.astype(int)
        items.lat_fire=items.lat
        items.lon_fire=items.lon
        items.brig_fire=np.array(df['bright_ti4'])
        items.sat_fire='Suomi NPP'
        items.conf_fire=conf_fire
        items.t31_fire=np.array(df['bright_ti5'])
        items.frp_fire=np.array(df['frp'])
        items.scan_fire=np.array(df['scan'])
        items.track_fire=np.array(df['track'])
        items.scan_angle_fire=np.ones(items.scan_fire.shape)*np.nan
        items.lat_nofire=np.array([])
        items.lon_nofire=np.array([])
        items.scan_angle_nofire=np.array([])
        items.scan_nofire=np.array([])
        items.track_nofire=np.array([])
        items.instrument=df['instrument'][0]
        dt=df['datetime'][0]
        items.time_start_geo_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
        items.time_num=time_iso2num(items.time_start_geo_iso)
        items.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
        items.acq_time='%02d%02d' % (dt.hour,dt.minute)
        items.time_start_fire_iso=items.time_start_geo_iso
        items.time_end_geo_iso=items.time_start_geo_iso
        items.time_end_fire_iso=items.time_start_geo_iso
        items.file_geo=f1+f2
        items.file_fire=items.file_geo
        tt=df['datetime'][0].timetuple()
        id='VNPH_A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        items.prefix='VNPH'
        items.name='A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        ret.update({id: items})
    return ret

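# Usage sketch for read_viirs375_files (illustrative only; expects FIRMS CSV files
# fire_archive_*.csv and/or fire_nrt_*.csv in the given path):
#   dets375 = read_viirs375_files('.',(-113.9,-113.4,41.5,42.1))
#   print dets375.keys()   # one entry per day, e.g. 'VNPH_A2018260_1030'
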
def read_goes_files(files):
    """
    Read the files for GOES products - geolocation and fire data already included (OS)

    :param files: pair with geolocation and fire file names for GOES products
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on WINDOWS10.
    Lauren Hearn (lauren@robotlauren.com), 2018-10-16
    """
    h5g=h5py.File(files.geo,'r')
    ret=Dict([])
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ncf=nc.Dataset(files.fire,'r')
    ret.fire=np.array(ncf.variables['fire mask'][:])
    ret.lat_fire=np.array(ncf.variables['FP_latitude'][:])
    ret.lon_fire=np.array(ncf.variables['FP_longitude'][:])
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])
    sf=np.array(ncf.variables['FP_sample'][:])
    # Satellite information
    N=2500 # number of columns (maximum sample number)
    h=35786. # altitude of the satellite in km
    p=2. # nadir pixel resolution in km
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p) # pixel_dim returns (scan angle, along-scan, along-track)
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])
    return ret

def read_data(files,file_metadata,bounds):
    """
    Read all the geolocation (03) and fire (14) files and if necessary, the reflectance (09) files

    MODIS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/archive/mod14_v5_user_guide.pdf
    MOD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
    MYD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
    Where:
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        vvv = version number
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing

    VIIRS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/vnp14_user_guide_v1.3.pdf
    VNP14IMG.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
    VNP14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
    Where:
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        vvv = version number
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing

    :param files: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
    :param file_metadata: dictionary with file names as key and granules metadata as values
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return data: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print "read_data files=%s" % files
    data=Dict([])
    if files=='VIIRS375':
        data.update(read_viirs375_files('.',bounds))
    else:
        for f in files:
            print "read_data f=%s" % f
            if 'geo' in f.keys():
                f0=os.path.basename(f.geo)
            else:
                print 'ERROR: read_data cannot read files=%s, no geo file' % f
                continue
            if 'fire' in f.keys():
                f1=os.path.basename(f.fire)
            else:
                print 'ERROR: read_data cannot read files=%s, no fire file' % f
                continue
            boo=True
            if 'ref' in f.keys():
                f2=os.path.basename(f.ref)
                boo=False
            prefix = f0[:3]
            print 'prefix %s' % prefix
            if prefix != f1[:3]:
                print 'ERROR: the prefix of %s %s must coincide' % (f0,f1)
                continue
            m=f.geo.split('/')
            mm=m[-1].split('.')
            key=mm[1]+'_'+mm[2]
            id = prefix + '_' + key
            print "id " + id
            if prefix=="MOD" or prefix=="MYD":
                try:
                    item=read_modis_files(f,bounds)
                    item.instrument="MODIS"
                except Exception as e:
                    print 'WARNING: reading the files from MODIS failed with error %s' % e
                    continue
            elif prefix=="VNP":
                try:
                    item=read_viirs_files(f,bounds)
                    item.instrument="VIIRS"
                except Exception as e:
                    print 'WARNING: reading the files from VIIRS failed with error %s' % e
                    continue
            elif prefix=="OR":
                try:
                    item=read_goes_files(f)
                    item.instrument="GOES"
                except Exception as e:
                    print 'WARNING: reading the files from GOES failed with error %s' % e
                    continue
            else:
                print 'ERROR: the prefix of %s %s must be MOD, MYD, VNP or OR' % (f0,f1)
                continue
            if not boo:
                if f2 in file_metadata.keys():
                    boo=True
            if (f0 in file_metadata.keys()) and (f1 in file_metadata.keys()) and boo:
                # connect the file back to metadata
                item.time_start_geo_iso=file_metadata[f0]["time_start"]
                item.time_num=time_iso2num(item.time_start_geo_iso)
                dt=datetime.datetime.strptime(item.time_start_geo_iso[0:18],'%Y-%m-%dT%H:%M:%S')
                item.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
                item.acq_time='%02d%02d' % (dt.hour,dt.minute)
                item.time_start_fire_iso=file_metadata[f1]["time_start"]
                item.time_end_geo_iso=file_metadata[f0]["time_end"]
                item.time_end_fire_iso=file_metadata[f1]["time_end"]
                item.file_geo=f0
                item.file_fire=f1
                if 'ref' in f.keys():
                    item.file_ref=f2
                    item.time_start_ref_iso=file_metadata[f2]["time_start"]
                    item.time_end_ref_iso=file_metadata[f2]["time_end"]
                item.prefix=prefix
                item.name=key
                data.update({id:item})
            else:
                print 'WARNING: file %s or %s not found in downloaded metadata, ignoring both' % (f0, f1)
                continue

    return data

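# Usage sketch for read_data (illustrative only; files comes from group_all and
# file_metadata from download, bounds from read_fire_mesh):
#   data = read_data(files.MOD,file_metadata,bounds)
#   data.update(read_data(files.VNP,file_metadata,bounds))
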
def download(granules):
    """
    Download files as listed in the granules metadata

    :param granules: list of granules with the metadata from the API or archive search
    :return file_metadata: dictionary with file names as key and granules metadata as values

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    file_metadata = {}
    for granule in granules:
        #print json.dumps(granule,indent=4, separators=(',', ': '))
        url = granule['links'][0]['href']
        filename=os.path.basename(urlparse.urlsplit(url).path)
        file_metadata[filename]=granule

        # to store as object in memory (maybe not completely downloaded until accessed?)
        # with requests.Session() as s:
        #    data.append(s.get(url))

        # download - a minimal code without various error checking and corrective actions
        # see wrfxpy/src/ingest/downloader.py
        if os.path.isfile(filename):
            print 'file %s already downloaded' % filename
            continue
        try:
            chunk_size = 1024*1024
            s = 0
            print 'downloading %s as %s' % (url,filename)
            r = requests.get(url, stream=True)
            if r.status_code == 200:
                content_size = int(r.headers['Content-Length'])
                print 'downloading %s as %s size %sB' % (url, filename, content_size)
                with open(filename, 'wb') as f:
                    for chunk in r.iter_content(chunk_size):
                        f.write(chunk)
                        s = s + len(chunk)
                        print('downloaded %sB of %sB' % (s, content_size))
            else:
                print 'cannot connect to %s' % url
                print 'web request status code %s' % r.status_code
                print 'Make sure you have file ~/.netrc permission 600 with the content'
                print 'machine urs.earthdata.nasa.gov\nlogin yourusername\npassword yourpassword'
                sys.exit(1)
        except Exception as e:
            print 'download failed with error %s' % e
    return file_metadata

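# Usage sketch for download (illustrative only; requires Earthdata credentials in
# ~/.netrc as printed above, and granules as returned by get_meta):
#   file_metadata = {}
#   for key,grans in granules.items():
#       file_metadata.update(download(grans))
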
def download_GOES16(time):
    """
    Download the GOES16 data through the rclone application

    :param time: tuple with the start and end times
    :return bucket: dictionary of all the data downloaded and all the GOES16 data downloaded in the same directory level

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-12
    """
    bucket=Dict({})
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    for date in dates:
        tt=date.timetuple()
        argT='%d/%03d/%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour)
        try:
            args=['rclone','copyto','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'.','-L']
            print 'running: '+' '.join(args)
            res=call(args)
            print 'goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT+' downloaded.'
            args=['rclone','ls','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'-L']
            out=check_output(args)
            bucket.update({argT : [o.split(' ')[2] for o in out.split('\n')[:-1]]})
        except Exception as e:
            print 'download failed with error %s' % e
    return bucket

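# Usage sketch for download_GOES16 (illustrative only; assumes rclone is installed
# and a remote named goes16aws pointing to the noaa-goes16 bucket is configured):
#   bucket = download_GOES16(("2018-10-12T00:00:00Z","2018-10-12T06:00:00Z"))
#   print bucket.keys()   # one key per hour, e.g. '2018/285/03'
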
def retrieve_af_data(bbox,time,burned=False,high=False):
    """
    Retrieve the data in a bounding box coordinates and time interval and save it in a Matlab structure inside the out.mat Matlab file

    :param bbox: bounding box as (lonmin,lonmax,latmin,latmax)
    :param time: time interval (init_time,final_time)
    :param burned: if True, also retrieve the VNP09 reflectance data for the burned scar algorithm
    :param high: if True, also retrieve the 375m VIIRS products from the archive
    :return data: dictionary with all the data and out.mat Matlab file with a Matlab structure of the dictionary

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    # Define settings
    lonmin,lonmax,latmin,latmax = bbox
    bounds=bbox
    bbox = [(lonmin,latmax),(lonmin,latmin),(lonmax,latmin),(lonmax,latmax),(lonmin,latmax)]
    maxg = 1000

    print "bbox"
    print bbox
    print "time:"
    print time
    print "maxg:"
    print maxg

    # Get data
    granules=get_meta(bbox,time,maxg,burned=burned,high=high)
    #print 'metadata found:\n' + json.dumps(granules,indent=4, separators=(',', ': '))

    # Eliminating the NRT data (always repeated)
    nrt_elimination(granules)

    file_metadata = {}
    for k,g in granules.items():
        print 'Downloading %s files' % k
        sys.stdout.flush()
        file_metadata.update(download(g))
        #print "download g:"
        #print g

    print "download complete"

    # Group all files downloaded
    files=group_all(".")
    #print "group all files:"
    #print files

    # Generate data dictionary
    data=Dict({})
    data.update(read_data(files.MOD,file_metadata,bounds))
    data.update(read_data(files.MYD,file_metadata,bounds))
    data.update(read_data(files.VNP,file_metadata,bounds))
    #data.update(read_data('VIIRS375','',bounds))

    return data

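# Usage sketch for retrieve_af_data (illustrative box and dates; same pattern as
# the __main__ block at the bottom of this file):
#   data = retrieve_af_data((-113.9,-113.4,41.5,42.1),
#                           ("2018-09-17T00:00:00Z","2018-09-18T00:00:00Z"))
#   sio.savemat('out.mat', mdict=data)
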
def nrt_elimination(granules):
    """
    Remove the NRT (near real time) data, which is always repeated in the standard products

    :param granules: dictionary of granule products to clean up
    :return: the granules dictionary is updated in place

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-11-30
    """
    if 'MOD14' in granules:
        nlist=[g for g in granules['MOD14'] if g['data_center']=='LPDAAC_ECS']
        granules['MOD14']=nlist
    if 'MYD14' in granules:
        nlist=[g for g in granules['MYD14'] if g['data_center']=='LPDAAC_ECS']
        granules['MYD14']=nlist

def read_fire_mesh(filename):
    """
    Read necessary variables in the fire mesh of the wrfout file filename

    :param filename: wrfout file
    :return fxlon: lon coordinates in the fire mesh
    :return fxlat: lat coordinates in the fire mesh
    :return bbox: bounding box
    :return time_esmf: simulation times in ESMF format

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print 'opening ' + filename
    d = nc.Dataset(filename)
    m,n = d.variables['XLONG'][0,:,:].shape
    fm,fn = d.variables['FXLONG'][0,:,:].shape
    fm=fm-fm/(m+1) # dimensions corrected for extra strip
    fn=fn-fn/(n+1)
    fxlon = np.array(d.variables['FXLONG'][0,:fm,:fn]) # masking extra strip
    fxlat = np.array(d.variables['FXLAT'][0,:fm,:fn])
    time_esmf = ''.join(d.variables['Times'][:][0]) # date string as YYYY-MM-DD_hh:mm:ss
    bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
    print 'min max longitude latitude %s' % bbox
    print 'time (ESMF) %s' % time_esmf

    plot = False
    if plot:
        tign_g = np.array(d.variables['TIGN_G'][0,:fm,:fn])
        plot_3D(fxlon,fxlat,tign_g)

    d.close()

    return fxlon,fxlat,bbox,time_esmf

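# Usage sketch for read_fire_mesh (illustrative only; the wrfout file name is
# hypothetical, and the returned bbox and an ISO time interval feed retrieve_af_data):
#   fxlon,fxlat,bbox,time_esmf = read_fire_mesh('wrfout_d03_2018-09-17_00:00:00')
#   data = retrieve_af_data(bbox,time)
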
def data2json(data,keys,dkeys,N):
    """
    Create a json dictionary from data dictionary

    :param data: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict({})
    for i,k in enumerate(keys):
        if isinstance(data[list(data)[0]][dkeys[i]],(list, tuple, np.ndarray)):
            dd=[np.ravel(data[d][dkeys[i]]) for d in list(data)]
            ret.update({k : np.concatenate(dd)})
        else:
            dd=[[data[d[1]][dkeys[i]]]*N[d[0]] for d in enumerate(list(data))]
            ret.update({k : np.concatenate(dd)})

    return ret

def sdata2json(sdata,keys,dkeys,N):
    """
    Create a json dictionary from sorted array of data dictionaries

    :param sdata: sorted array of data dictionaries with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-12-03
    """
    ret=Dict({'granules': [d[0] for d in sdata]})
    for i,k in enumerate(keys):
        dd = [d[1][dkeys[i]] if dkeys[i] in d[1] else None for d in sdata]
        if dd:
            if np.any([isinstance(d,(list, tuple, np.ndarray)) for d in dd]):
                out = [d if d is not None else np.array([]) for d in dd]
                ret.update({k : out})
            else:
                out = [[d[1][1][dkeys[i]]]*N[d[0]] if dkeys[i] in d[1][1] else [] for d in enumerate(sdata)]
                ret.update({k : out})

    return ret

def write_csv(d,bounds):
    """
    Write fire detections from data dictionary d to a CSV file

    :param d: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return: fire_detections.csv file with all the detections

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    import pandas as pd
    df=pd.DataFrame(data=d)
    df=df[(df['longitude']>bounds[0]) & (df['longitude']<bounds[1]) & (df['latitude']>bounds[2]) & (df['latitude']<bounds[3])]
    df.to_csv('fire_detections.csv', encoding='utf-8', index=False)

def plot_3D(xx,yy,zz):
    """
    Plot surface of (xx,yy,zz) data

    :param xx: x arrays
    :param yy: y arrays
    :param zz: values at the (x,y) points
    :return: a plot show of the 3D data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-09-21
    """
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt
    from matplotlib import cm
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    surf = ax.plot_surface(xx,yy,zz,cmap=cm.coolwarm)
    plt.show()

def time_iso2num(time_iso):
    """
    Transform an iso time string to a time integer number of seconds since December 31 1969 at 17:00:00

    :param time_iso: string iso date
    :return s: integer number of seconds since December 31 1969 at 17:00:00

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
    # seconds since December 31 1969 at 17:00:00
    s=time.mktime(time_datetime.timetuple())
    return s

def time_iso2datetime(time_iso):
    """
    Transform an iso time string to a datetime element

    :param time_iso: string iso date
    :return time_datetime: datetime element

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
    return time_datetime

def time_datetime2iso(time_datetime):
    """
    Transform a datetime element to iso time string

    :param time_datetime: datetime element
    :return time_iso: string iso date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    time_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (time_datetime.year,time_datetime.month,
                                                 time_datetime.day,time_datetime.hour,
                                                 time_datetime.minute,time_datetime.second)
    return time_iso

def time_num2iso(time_num):
    """
    Transform a time integer number of seconds since December 31 1969 at 17:00:00 to an iso time string

    :param time_num: integer number of seconds since December 31 1969 at 17:00:00
    :return date: time string in ISO date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    dt=datetime.datetime.fromtimestamp(time_num)
    # seconds since December 31 1969 at 17:00:00
    date='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
    return date

def pixel_dim(sample,N,h,p,a=None):
    """
    Computes pixel dimensions (along-scan and track pixel sizes)

    :param sample: array of integers with the column number (sample variable in files)
    :param N: scalar, total number of pixels in each row of the image swath
    :param h: scalar, altitude of the satellite in km
    :param p: scalar or array, pixel nadir resolution in km
    :param a: array of floats of the size of p with the angles where the resolution changes
    :return theta: scan angle in radians
    :return scan: along-scan pixel size in km
    :return track: along-track pixel size in km

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    Re=6378 # approximation of the radius of the Earth in km
    r=Re+h
    M=(N-1)*0.5
    s=np.arctan(p/h) # trigonometry (rad/sample)
    if isinstance(p,(list, tuple, np.ndarray)):
        Ns=np.array([int((a[k]-a[k-1])/s[k-1]) for k in range(1,len(a)-1)])
        Ns=np.append(Ns,int(M-Ns.sum()))
        theta=s[0]*(sample-M)
        scan=Re*s[0]*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s[0]*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
        for k in range(1,len(Ns)):
            p=sample<=M-Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M-Ns[0:k].sum()))-(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
            p=sample>=M+Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M+Ns[0:k].sum()))+(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
    else:
        theta=s*(sample-M)
        scan=Re*s*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
    return (theta,scan,track)

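# Usage sketch for pixel_dim (illustrative only): pixel size near nadir for the
# MODIS geometry used above (sample 676 out of N=1354, h=705 km, p=1 km at nadir):
#   theta,scan,track = pixel_dim(np.array([676]),1354,705.,1.)
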
def copyto(partial_path,kml):
    """
    Copy information in partial_path to kml

    :param partial_path: path to a partial KML file
    :param kml: KML object where to write to
    :return: information from partial_path into kml

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    with open(partial_path,'r') as part:
        for line in part:
            kml.write(line)

def json2kml(d,kml_path,bounds,prods,opt='granule',minconf=80.):
    """
    Creates a KML file kml_path from a dictionary d

    :param d: dictionary with all the fire detection information to create the KML file
    :param kml_path: path in where the KML file is going to be written
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :param prods: dictionary with the product folders to write into the KML file
    :param opt: grouping option ('granule', 'dates' or 'pixels')
    :param minconf: minimum confidence level to include a detection
    :return: a KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    if 'latitude' in d:
        if not isinstance(d['latitude'][0],(list, tuple, np.ndarray)):
            if opt=='dates':
                ind=[i[0] for i in sorted(enumerate(d.acq_date), key=lambda x:x[1])]
                L=[len(list(grp)) for k, grp in groupby(d['acq_date'][ind])]
                L.insert(0,0)
                ll=[sum(L[0:k+1]) for k in range(len(L))]
                for v in list(d):
                    sort=[d[v][i] for i in ind]
                    d[v]=[sort[ll[k]:ll[k+1]] for k in range(len(ll)-1)]
            else:
                for v in list(d):
                    d[v]=[d[v]]
                opt='pixels'

        frp_style={-1:'modis_frp_no_data',40:'modis_frp_gt_400'}
        for i in range(0,40):
            frp_style[i]='modis_frp_%s_to_%s' % (i*10, i*10+10)

        with open(kml_path,'w') as kml:

            copyto('kmls/partial1.kml',kml)

            # set some constants
            r = 6378 # Earth radius in km
            km_lat = 180/(np.pi*r) # 1 km in degrees latitude
            ND = len(d['latitude'])

            for prod in prods:

                kml.write('<Folder>\n')
                kml.write('<name>%s</name>\n' % prods[prod])

                if prod == 'FRP':
                    copyto('kmls/partial2.kml',kml)

                if prod == 'TF':
                    col = np.flip(np.divide([(230, 25, 75, 150), (245, 130, 48, 150), (255, 255, 25, 150),
                        (210, 245, 60, 150), (60, 180, 75, 150), (70, 240, 240, 150),
                        (0, 0, 128, 150), (145, 30, 180, 150), (240, 50, 230, 150),
                        (128, 128, 128, 150)],255.),0)
                    cm = colors.LinearSegmentedColormap.from_list('BuRd',col,ND)
                    cols = ['%02x%02x%02x%02x' % tuple(255*np.flip(c)) for c in cm(range(cm.N))]
                    t_range = range(ND-1,-1,-1)
                else:
                    t_range = range(ND)

                for t in t_range:
                    lats=np.array(d['latitude'][t]).astype(float)
                    lons=np.array(d['longitude'][t]).astype(float)
                    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
                    latitude=lats[ll]
                    longitude=lons[ll]
                    NN=len(latitude)
                    if NN > 0:
                        acq_date=np.array(d['acq_date'][t])[ll]
                        acq_time=np.array(d['acq_time'][t])[ll]
                        try:
                            satellite=np.array(d['satellite'][t])[ll]
                        except:
                            satellite=np.array(['Not available']*NN)
                        try:
                            instrument=np.array(d['instrument'][t])[ll]
                        except:
                            instrument=np.array(['Not available']*NN)
                        try:
                            confidence=np.array(d['confidence'][t])[ll].astype(float)
                        except:
                            confidence=np.array(np.zeros(NN)).astype(float)
                        try:
                            frps=np.array(d['frp'][t])[ll].astype(float)
                        except:
                            frps=np.array(np.zeros(NN)).astype(float)
                        try:
                            angles=np.array(d['scan_angle'][t])[ll].astype(float)
                        except:
                            angles=np.array(['Not available']*NN)
                        try:
                            scans=np.array(d['scan'][t])[ll].astype(float)
                        except:
                            scans=np.ones(NN).astype(float)
                        try:
                            tracks=np.array(d['track'][t])[ll].astype(float)
                        except:
                            tracks=np.ones(NN).astype(float)

                        kml.write('<Folder>\n')
                        if opt=='dates':
                            kml.write('<name>%s</name>\n' % acq_date[0])
                        elif opt=='granule':
                            kml.write('<name>%s</name>\n' % d['granules'][t])
                        else:
                            kml.write('<name>Pixels</name>\n')

                        for p in range(NN):
                            lat=latitude[p]
                            lon=longitude[p]
                            conf=confidence[p]
                            if conf >= minconf:
                                frp=frps[p]
                                angle=angles[p]
                                scan=scans[p]
                                track=tracks[p]
                                timestamp=acq_date[p] + 'T' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + 'Z'
                                timedescr=acq_date[p] + ' ' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ' UTC'

                                if prod == 'NF':
                                    kml.write('<Placemark>\n<name>Ground detection square</name>\n')
                                    kml.write('<description>\nlongitude: %s\n' % lon
                                        + 'latitude: %s\n' % lat
                                        + 'time: %s\n' % timedescr
                                        + 'satellite: %s\n' % satellite[p]
                                        + 'instrument: %s\n' % instrument[p]
                                        + 'scan angle: %s\n' % angle
                                        + 'along-scan: %s\n' % scan
                                        + 'along-track: %s\n' % track
                                        + '</description>\n')
                                else:
                                    kml.write('<Placemark>\n<name>Fire detection square</name>\n')
                                    kml.write('<description>\nlongitude: %s\n' % lon
                                        + 'latitude: %s\n' % lat
                                        + 'time: %s\n' % timedescr
                                        + 'satellite: %s\n' % satellite[p]
                                        + 'instrument: %s\n' % instrument[p]
                                        + 'confidence: %s\n' % conf
                                        + 'FRP: %s\n' % frp
                                        + 'scan angle: %s\n' % angle
                                        + 'along-scan: %s\n' % scan
                                        + 'along-track: %s\n' % track
                                        + '</description>\n')
                                kml.write('<TimeStamp><when>%s</when></TimeStamp>\n' % timestamp)
                                if prod == 'AF':
                                    if conf < 30:
                                        kml.write('<styleUrl> modis_conf_low </styleUrl>\n')
                                    elif conf < 80:
                                        kml.write('<styleUrl> modis_conf_med </styleUrl>\n')
                                    else:
                                        kml.write('<styleUrl> modis_conf_high </styleUrl>\n')
                                elif prod == 'TF':
                                    kml.write('<Style>\n'+'<PolyStyle>\n'
                                        +'<color>%s</color>\n' % cols[t]
                                        +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                        +'</Style>\n')
                                elif prod == 'FRP':
                                    frpx = min(40,np.ceil(frp/10.)-1)
                                    kml.write('<styleUrl> %s </styleUrl>\n' % frp_style[frpx] )
                                elif prod == 'NF':
                                    kml.write('<styleUrl> no_fire </styleUrl>\n')
                                elif prod == 'AFN':
                                    if conf < 30:
                                        kml.write('<Style>\n'+'<PolyStyle>\n'
                                            +'<color>7000ffff</color>\n'
                                            +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                            +'</Style>\n')
                                    elif conf < 80:
                                        kml.write('<Style>\n'+'<PolyStyle>\n'
                                            +'<color>7000a5ff</color>\n'
                                            +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                            +'</Style>\n')
                                    else:
                                        kml.write('<Style>\n'+'<PolyStyle>\n'
                                            +'<color>700000ff</color>\n'
                                            +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                            +'</Style>\n')

                                kml.write('<Polygon>\n<outerBoundaryIs>\n<LinearRing>\n<coordinates>\n')

                                km_lon=km_lat/np.cos(lat*np.pi/180) # 1 km in longitude

                                sq_track_size_km=track
                                sq2_lat=km_lat * sq_track_size_km/2
                                sq_scan_size_km=scan
                                sq2_lon=km_lon * sq_scan_size_km/2

                                kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
                                kml.write('%s,%s,0\n' % (lon - sq2_lon, lat + sq2_lat))
                                kml.write('%s,%s,0\n' % (lon + sq2_lon, lat + sq2_lat))
                                kml.write('%s,%s,0\n' % (lon + sq2_lon, lat - sq2_lat))
                                kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))

                                kml.write('</coordinates>\n</LinearRing>\n</outerBoundaryIs>\n</Polygon>\n</Placemark>\n')
                        kml.write('</Folder>\n')

                kml.write('</Folder>\n')

            kml.write('</Document>\n</kml>\n')
        print 'Created file %s' % kml_path
    else:
        print 'No detections to be saved as %s' % kml_path

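# Usage sketch for json2kml (illustrative only; dets comes from data2json or
# sdata2json and the kmls/partial*.kml templates must exist in the working directory):
#   prods = {'AF':'Active Fires','FRP':'Fire Radiative Power','NF':'No Fire'}
#   json2kml(dets,'fire_detections.kml',bounds,prods,opt='granule',minconf=70.)
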
def burned_algorithm(data):
    """
    Computes mask of burned scar pixels

    :param data: data dictionary with all the necessary bands M7, M8, M10 and M11
    :return C: mask of burned scar pixels

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2019-01-03
    """
    # Parameters
    RthSub=0.05
    Rth=0.81
    M07UB=0.19
    M08LB=0.00
    M08UB=0.28
    M10LB=0.07
    M10UB=1.00
    M11LB=0.05
    eps=1e-30
    # Bands
    M7=data.M7
    M8=data.M8
    M10=data.M10
    M11=data.M11
    # Eq 1a
    M=(M8.astype(float)-RthSub)/(M11.astype(float)+eps)
    C1=np.logical_and(M>0,M<Rth)
    # Eq 1b
    C2=np.logical_and(M8>M08LB,M8<M08UB)
    # Eq 1c
    C3=M7<M07UB
    # Eq 1d
    C4=M11>M11LB
    # Eq 1e
    C5=np.logical_and(M10>M10LB,M10<M10UB)
    # All the conditions at the same time
    C=np.logical_and(np.logical_and(np.logical_and(np.logical_and(C1,C2),C3),C4),C5)
    return C

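# Usage sketch for burned_algorithm (illustrative only; bands is the Dict of
# reflectances built in read_viirs_files from a VNP09 granule):
#   burned_mask = burned_algorithm(bands)
#   print burned_mask.sum(), 'burned scar pixels'
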
if __name__ == "__main__":
    bbox=[-132.86966,-102.0868788,44.002495,66.281204]
    time = ("2012-09-11T00:00:00Z", "2012-09-12T00:00:00Z")
    data=retrieve_af_data(bbox,time)
    # Save the data dictionary into a matlab structure file out.mat
    sio.savemat('out.mat', mdict=data)