JPSSData.git / JPSSD.py
1 import warnings
2 warnings.filterwarnings("ignore")
3 import numpy as np
4 import json
5 import requests
6 import urlparse
7 import os
8 import sys
9 import re
10 import glob
11 import netCDF4 as nc
12 from pyhdf.SD import SD, SDC
13 from utils import Dict
14 import scipy.io as sio
15 import h5py
16 import datetime
17 import time
18 import matplotlib.colors as colors
19 from itertools import groupby
20 from subprocess import check_output, call
22 def search_api(sname,bbox,time,maxg=50,platform="",version=""):
23 """
24 API search of the different satellite granules
26 :param sname: short name
27 :param bbox: polygon with the search bounding box
28 :param time: time interval (init_time,final_time)
29 :param maxg: max number of granules to process
30 :param platform: string with the platform
31 :param version: string with the version
32 :return granules: dictionary with the metadata of the search
34 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
35 Angel Farguell (angel.farguell@gmail.com), 2018-09-17
36 """
37 from cmr import GranuleQuery
38 api = GranuleQuery()
39 if not version:
40 if not platform:
41 search = api.parameters(
42 short_name=sname,
43 downloadable=True,
44 polygon=bbox,
45 temporal=time
46 )
47 else:
48 search = api.parameters(
49 short_name=sname,
50 platform=platform,
51 downloadable=True,
52 polygon=bbox,
53 temporal=time
54 )
55 else:
56 if not platform:
57 search = api.parameters(
58 short_name=sname,
59 downloadable=True,
60 polygon=bbox,
61 temporal=time,
62 version=version
63 )
64 else:
65 search = api.parameters(
66 short_name=sname,
67 platform=platform,
68 downloadable=True,
69 polygon=bbox,
70 temporal=time,
71 version=version
72 )
73 sh=search.hits()
74 print "%s gets %s hits in this range" % (sname,sh)
75 if sh>maxg:
76 print "The number of hits %s is larger than the limit %s." % (sh,maxg)
77 print "Use a reduced bounding box or a reduced time interval."
78 granules = []
79 else:
80 granules = api.get(sh)
81 return granules
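# Illustrative sketch (not called anywhere in this module): a search_api query for
# MODIS Terra fire granules; the bounding box polygon and time interval below are
# made-up values chosen only to show the expected argument shapes.
#   bbox = [(-113.,42.),(-113.,39.),(-107.,39.),(-107.,42.),(-113.,42.)]
#   time = ("2018-09-01T00:00:00Z","2018-09-02T00:00:00Z")
#   grans = search_api("MOD14",bbox,time,maxg=50,platform="Terra")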
83 def search_archive(url,prod,time,grans):
84 """
85 Archive search of the different satellite granules
87 :param url: base url of the archive domain
88 :param prod: string of product with version, for instance: '5000/VNP09'
89 :param time: time interval (init_time,final_time)
90 :param grans: granules of the geolocation metadata
91 :return granules: dictionary with the metadata of the search
93 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
94 Angel Farguell (angel.farguell@gmail.com), 2018-01-03
95 """
96 ids=['.'.join(k['producer_granule_id'].split('.')[1:3]) for k in grans] # satellite ids in bounding box
97 granules=[]
98 dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
99 delta=dts[1]-dts[0]
100 nh=int(delta.total_seconds()/3600)
101 dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
102 fold=np.unique(['%d/%03d' % (date.timetuple().tm_year,date.timetuple().tm_yday) for date in dates])
103 urls=[url+'/'+prod+'/'+f for f in fold]
104 for u in urls:
105 try:
106 js=requests.get(u+'.json').json()
107 for j in js:
108 arg=np.argwhere(np.array(ids)=='.'.join(j['name'].split('.')[1:3]))
109 if arg.size:
110 ar=arg[0][0]
111 g=Dict(j)
112 g.links=[{'href':u+'/'+g.name}]
113 g.time_start=grans[ar]["time_start"]
114 g.time_end=grans[ar]["time_end"]
115 g.dataset_id='Archive download: unknown dataset id'
116 g.producer_granule_id=j['name']
117 granules.append(g)
118 except Exception as e:
119 "warning: some JSON request failed"
120 print "%s gets %s hits in this range" % (prod.split('/')[-1],len(granules))
121 return granules
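# Illustrative sketch (assumed values, not executed here): search_archive builds one
# archive folder per hour of the interval, named year/day-of-year. For instance,
#   url  = "https://ladsweb.modaps.eosdis.nasa.gov/archive/allData"
#   prod = "5000/VNP09"
#   time = ("2018-09-01T00:00:00Z","2018-09-01T06:00:00Z")
# queries https://ladsweb.modaps.eosdis.nasa.gov/archive/allData/5000/VNP09/2018/244.json
# and keeps the JSON entries whose AYYYYDDD.HHMM fields match a granule id in grans.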
123 def get_meta(bbox,time,maxg=50,burned=False,high=False):
124 """
125 Get all the metadata from the API for all the necessary products
127 :param bbox: polygon with the search bounding box
128 :param time: time interval (init_time,final_time)
129 :param maxg: max number of granules to process
130 :return granules: dictionary with the metadata of all the products
132 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
133 Angel Farguell (angel.farguell@gmail.com), 2018-09-17
134 """
135 granules=Dict([])
136 #MOD14: MODIS Terra fire data
137 granules.MOD14=search_api("MOD14",bbox,time,maxg,"Terra")
138 #MOD03: MODIS Terra geolocation data
139 granules.MOD03=search_api("MOD03",bbox,time,maxg,"Terra","6")
140 #MOD09: MODIS Atmospherically corrected surface reflectance
141 #granules.MOD09=search_api("MOD09",bbox,time,maxg,"Terra","6")
142 #MYD14: MODIS Aqua fire data
143 granules.MYD14=search_api("MYD14",bbox,time,maxg,"Aqua")
144 #MYD03: MODIS Aqua geolocation data
145 granules.MYD03=search_api("MYD03",bbox,time,maxg,"Aqua","6")
146 #MOD09: MODIS Atmospherically corrected surface reflectance
147 #granules.MYD09=search_api("MYD09",bbox,time,maxg,"Terra","6")
148 #VNP14: VIIRS fire data, res 750m
149 granules.VNP14=search_api("VNP14",bbox,time,maxg)
150 #VNP03MODLL: VIIRS geolocation data, res 750m
151 granules.VNP03=search_api("VNP03MODLL",bbox,time,maxg)
152 #VNP14HR: VIIRS fire data, res 375m
153 #granules.VNP14HR=search_api("VNP14IMGTDL_NRT",bbox,time,maxg) # no results in the API
154 if high:
155 url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
156 prod="5000/NPP_IMFTS_L1"
157 granules.VNP03HR=search_archive(url,prod,time,granules.VNP03)
158 prod="5000/VNP14IMG" # product
159 granules.VNP14HR=search_archive(url,prod,time,granules.VNP03HR)
160 if burned:
161 #VNP09: VIIRS Atmospherically corrected surface reflectance
162 url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
163 prod="5000/VNP09" # product
164 granules.VNP09=search_archive(url,prod,time,granules.VNP03)
165 return granules
167 def group_files(path,reg):
168 """
169 Group the geolocation (03) and fire (14) files of a specific product in a path
171 :param path: path to the geolocation (03) and fire (14) files
172 :param reg: string with the first 3 characters specifying the product (MOD, MYD or VNP)
173 :return files: list of pairs with geolocation (03) and fire (14) file names in the path of the specific product
175 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
176 Angel Farguell (angel.farguell@gmail.com), 2018-09-17
177 """
178 files=[Dict({'geo':k}) for k in glob.glob(path+'/'+reg+'03*')]
179 filesf=glob.glob(path+'/'+reg+'14*')
180 filesr=glob.glob(path+'/'+reg+'09*')
181 if len(filesf)>0:
182 for f in filesf:
183 mf=re.split("/",f)
184 if mf is not None:
185 m=mf[-1].split('.')
186 if m is not None:
187 for k,g in enumerate(files):
188 mmf=re.split("/",g.geo)
189 mm=mmf[-1].split('.')
190 if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
191 files[k].fire=f
192 if len(filesr)>0:
193 for f in filesr:
194 mf=re.split("/",f)
195 if mf is not None:
196 m=mf[-1].split('.')
197 if m is not None:
198 for k,g in enumerate(files):
199 mmf=re.split("/",g.geo)
200 mm=mmf[-1].split('.')
201 if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
202 files[k].ref=f
203 return files
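# Illustrative sketch (assumed file names): group_files pairs files whose names share
# the same AYYYYDDD date and HHMM time fields. For example, in path '.' the pair
#   MOD03.A2018245.2130.006.2018246014135.hdf   (geolocation)
#   MOD14.A2018245.2130.006.2018246022518.hdf   (fire)
# becomes one entry {'geo': ..., 'fire': ...} in the list returned by
#   files = group_files('.','MOD')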
205 def group_all(path):
206 """
207 Combine all the geolocation (03) and fire (14) files in a path
209 :param path: path to the geolocation (03) and fire (14) files
210 :return files: dictionary of products with a list of pairs with geolocation (03) and fire (14) file names in the path
212 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
213 Angel Farguell (angel.farguell@gmail.com), 2018-09-17
214 """
215 files=Dict({})
216 # MOD files
217 modf=group_files(path,'MOD')
218 # MYD files
219 mydf=group_files(path,'MYD')
220 # VIIRS files
221 vif=group_files(path,'VNP')
222 files.MOD=modf
223 files.MYD=mydf
224 files.VNP=vif
225 return files
227 def read_modis_files(files,bounds):
228 """
229 Read the geolocation (03) and fire (14) files for MODIS products (MOD or MYD)
231 :param files: pair with geolocation (03) and fire (14) file names for MODIS products (MOD or MYD)
232 :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
233 :return ret: dictionary with Latitude, Longitude and fire mask arrays read
235 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
236 Angel Farguell (angel.farguell@gmail.com), 2018-09-17
237 """
238 ret=Dict([])
239 # Satellite information
240 N=1354 # Number of columns (maximum sample number)
241 h=705. # Altitude of the satellite in km
242 p=1. # Nadir pixel resolution in km
243 # Reading MODIS files
244 hdfg=SD(files.geo,SDC.READ)
245 hdff=SD(files.fire,SDC.READ)
246 # Creating all the objects
247 lat_obj=hdfg.select('Latitude')
248 lon_obj=hdfg.select('Longitude')
249 fire_obj=hdff.select('fire mask')
250 lat_fire_obj=hdff.select('FP_latitude')
251 lon_fire_obj=hdff.select('FP_longitude')
252 brig_fire_obj=hdff.select('FP_T21')
253 sample_fire_obj=hdff.select('FP_sample')
254 conf_fire_obj=hdff.select('FP_confidence')
255 t31_fire_obj=hdff.select('FP_T31')
256 frp_fire_obj=hdff.select('FP_power')
257 # Geolocation and mask information
258 ret.lat=lat_obj.get()
259 ret.lon=lon_obj.get()
260 ret.fire=fire_obj.get()
261 # Fire detected information
262 try:
263 flats=lat_fire_obj.get()
264 except:
265 flats=np.array([])
266 try:
267 flons=lon_fire_obj.get()
268 except:
269 flons=np.array([])
270 fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]), flats <= bounds[3])
271 ret.lat_fire=flats[fll]
272 ret.lon_fire=flons[fll]
273 try:
274 ret.brig_fire=brig_fire_obj.get()[fll]
275 except:
276 ret.brig_fire=np.array([])
277 ret.sat_fire=hdff.Satellite
278 try:
279 ret.conf_fire=conf_fire_obj.get()[fll]
280 except:
281 ret.conf_fire=np.array([])
282 try:
283 ret.t31_fire=t31_fire_obj.get()[fll]
284 except:
285 ret.t31_fire=np.array([])
286 try:
287 ret.frp_fire=frp_fire_obj.get()[fll]
288 except:
289 ret.frp_fire=np.array([])
290 try:
291 sf=sample_fire_obj.get()[fll]
292 except:
293 sf=np.array([])
294 ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
295 # No fire data
296 lats=np.ravel(ret.lat)
297 lons=np.ravel(ret.lon)
298 ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
299 lats=lats[ll]
300 lons=lons[ll]
301 fire=np.ravel(ret.fire)
302 fire=fire[ll]
303 nf=np.logical_or(fire == 3, fire == 5)
304 ret.lat_nofire=lats[nf]
305 ret.lon_nofire=lons[nf]
306 sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
307 sample=np.ravel(sample)
308 sample=sample[ll]
309 sfn=sample[nf]
310 ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p)
311 # Close files
312 hdfg.end()
313 hdff.end()
314 return ret
316 def read_viirs_files(files,bounds):
317 """
318 Read the geolocation (03) and fire (14) files for VIIRS products (VNP)
320 :param files: pair with geolocation (03) and fire (14) file names for VIIRS products (VNP)
321 :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
322 :return ret: dictionary with Latitude, Longitude and fire mask arrays read
324 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
325 Angel Farguell (angel.farguell@gmail.com), 2018-09-17
326 """
327 ret=Dict([])
328 # Satellite information
329 N=3200 # Number of columns (maximum sample number)
330 h=828. # Altitude of the satellite in km
331 alpha=np.array([0,31.59,44.68,56.06])/180*np.pi
332 #p=(0.75+0.75/2+0.75/3)/3 # Nadir pixel resolution in km (mean in 3 different sections)
333 p=np.array([0.75,0.75/2,0.75/3])
334 # Reading VIIRS files
335 h5g=h5py.File(files.geo,'r')
336 ncf=nc.Dataset(files.fire,'r')
337 # Geolocation and mask information
338 ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
339 ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
340 ret.fire=np.array(ncf.variables['fire mask'][:])
341 # Fire detected information
342 flats=np.array(ncf.variables['FP_latitude'][:])
343 flons=np.array(ncf.variables['FP_longitude'][:])
344 fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]),flats <= bounds[3])
345 ret.lat_fire=flats[fll]
346 ret.lon_fire=flons[fll]
347 ret.brig_fire=np.array(ncf.variables['FP_T13'][:])[fll]
348 ret.sat_fire=ncf.SatelliteInstrument
349 ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])[fll]
350 ret.t31_fire=np.array(ncf.variables['FP_T15'][:])[fll]
351 ret.frp_fire=np.array(ncf.variables['FP_power'][:])[fll]
352 sf=np.array(ncf.variables['FP_sample'][:])[fll]
353 ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p,alpha)
354 # No fire data
355 lats=np.ravel(ret.lat)
356 lons=np.ravel(ret.lon)
357 ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
358 lats=lats[ll]
359 lons=lons[ll]
360 fire=np.ravel(ret.fire)
361 fire=fire[ll]
362 nf=np.logical_or(fire == 3, fire == 5)
363 ret.lat_nofire=lats[nf]
364 ret.lon_nofire=lons[nf]
365 sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
366 sample=np.ravel(sample)
367 sample=sample[ll]
368 sfn=sample[nf]
369 ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p,alpha)
370 # Reflectance data for burned scar algorithm
371 if 'ref' in files.keys():
372 # Read reflectance data
373 hdf=SD(files.ref,SDC.READ)
374 # Bands data
375 M7=hdf.select('750m Surface Reflectance Band M7') # 0.86 um
376 M8=hdf.select('750m Surface Reflectance Band M8') # 1.24 um
377 M10=hdf.select('750m Surface Reflectance Band M10') # 1.61 um
378 M11=hdf.select('750m Surface Reflectance Band M11') # 2.25 um
379 bands=Dict({})
380 bands.M7=M7.get()*1e-4
381 bands.M8=M8.get()*1e-4
382 bands.M10=M10.get()*1e-4
383 bands.M11=M11.get()*1e-4
384 # Burned scar mask using the burned scar granule algorithm
385 ret.burned=burned_algorithm(bands)
386 # Close reflectance file
387 hdf.end()
388 # Close files
389 h5g.close()
390 ncf.close()
391 return ret
393 def read_viirs375_files(path,bounds):
394 """
395 Read the geolocation and fire information from VIIRS CSV files (fire_archive_*.csv and/or fire_nrt_*.csv)
397 :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
398 :return ret: dictionary with Latitude, Longitude and fire mask arrays read
400 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
401 Angel Farguell (angel.farguell@gmail.com), 2018-10-23
402 """
403 import pandas as pd
404 # Opening files if they exist
405 f1=glob.glob(path+'/fire_archive_*.csv')
406 f2=glob.glob(path+'/fire_nrt_*.csv')
407 if len(f1)==1:
408 df1=pd.read_csv(f1[0])
409 if len(f2)==1:
410 df2=pd.read_csv(f2[0])
411 dfs=pd.concat([df1,df2],sort=True,ignore_index=True)
412 else:
413 dfs=df1
414 else:
415 if len(f2)==1:
416 dfs=pd.read_csv(f2[0])
417 else:
418 return {}
420 ret=Dict({})
421 # In the case something exists, read all the information from the CSV files
422 dfs=dfs[(dfs['longitude']>bounds[0]) & (dfs['longitude']<bounds[1]) & (dfs['latitude']>bounds[2]) & (dfs['latitude']<bounds[3])]
423 date=np.array(dfs['acq_date'])
424 time=np.array(dfs['acq_time'])
425 dfs['time']=np.array(['%s_%04d' % (date[k],time[k]) for k in range(len(date))])
426 dfs['time']=pd.to_datetime(dfs['time'], format='%Y-%m-%d_%H%M')
427 dfs['datetime']=dfs['time']
428 dfs=dfs.set_index('time')
429 for group_name, df in dfs.groupby(pd.TimeGrouper("D")):
430 items=Dict([])
431 items.lat=np.array(df['latitude'])
432 items.lon=np.array(df['longitude'])
433 conf=np.array(df['confidence'])
434 firemask=np.zeros(conf.shape)
435 conf_fire=np.zeros(conf.shape)
436 firemask[conf=='l']=7
437 conf_fire[conf=='l']=30.
438 firemask[conf=='n']=8
439 conf_fire[conf=='n']=60.
440 firemask[conf=='h']=9
441 conf_fire[conf=='h']=90.
442 items.fire=firemask.astype(int)
443 items.lat_fire=items.lat
444 items.lon_fire=items.lon
445 items.brig_fire=np.array(df['bright_ti4'])
446 items.sat_fire='Suomi NPP'
447 items.conf_fire=conf_fire
448 items.t31_fire=np.array(df['bright_ti5'])
449 items.frp_fire=np.array(df['frp'])
450 items.scan_fire=np.array(df['scan'])
451 items.track_fire=np.array(df['track'])
452 items.scan_angle_fire=np.ones(items.scan_fire.shape)*np.nan
453 items.lat_nofire=np.array([])
454 items.lon_nofire=np.array([])
455 items.scan_angle_nofire=np.array([])
456 items.scan_nofire=np.array([])
457 items.track_nofire=np.array([])
458 items.instrument=df['instrument'][0]
459 dt=df['datetime'][0]
460 items.time_start_geo_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
461 items.time_num=time_iso2num(items.time_start_geo_iso)
462 items.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
463 items.acq_time='%02d%02d' % (dt.hour,dt.minute)
464 items.time_start_fire_iso=items.time_start_geo_iso
465 items.time_end_geo_iso=items.time_start_geo_iso
466 items.time_end_fire_iso=items.time_start_geo_iso
467 items.file_geo=f1+f2
468 items.file_fire=items.file_geo
469 tt=df['datetime'][0].timetuple()
470 id='VNPH_A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
471 items.prefix='VNPH'
472 items.name='A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
473 ret.update({id: items})
474 return ret
476 def read_goes_files(files):
477 """
478 Read the files for GOES products - geolocation and fire data already included
480 :param files: pair with geolocation and fire file names for GOES products
482 :return ret: dictionary with Latitude, Longitude and fire mask arrays read
484 Developed in Python 2.7.15 :: Anaconda 4.5.10, on WINDOWS10.
485 Lauren Hearn (lauren@robotlauren.com), 2018-10-16
486 """
487 h5g=h5py.File(files.geo,'r')
488 ret=Dict([])
489 ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
490 ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
491 ncf=nc.Dataset(files.fire,'r')
492 ret.fire=np.array(ncf.variables['fire mask'][:])
493 ret.lat_fire=np.array(ncf.variables['FP_latitude'][:])
494 ret.lon_fire=np.array(ncf.variables['FP_longitude'][:])
495 ret.brig_fire=np.array(ncf.variables['FP_T13'][:])
496 sf=np.array(ncf.variables['FP_sample'][:])
497 # Satellite information
498 N=2500 # Number of columns (maximum sample number)
499 h=35786. # Altitude of the satellite in km
500 p=2. # Nadir pixel resolution in km
501 ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p) # pixel_dim returns a 3-tuple
502 ret.sat_fire=ncf.SatelliteInstrument
503 ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])
504 ret.t31_fire=np.array(ncf.variables['FP_T15'][:])
505 ret.frp_fire=np.array(ncf.variables['FP_power'][:])
506 return ret
508 def read_data(files,file_metadata,bounds):
509 """
510 Read all the geolocation (03) and fire (14) files and if necessary, the reflectance (09) files
512 MODIS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/archive/mod14_v5_user_guide.pdf
513 MOD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
514 MYD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
515 Where:
516 YYYYDDD = year and Julian day (001-366) of data acquisition
517 HHMM = hour and minute of data acquisition (approximate beginning time)
518 vvv = version number
519 yyyyddd = year and Julian day of data processing
520 hhmmss = hour, minute, and second of data processing
522 VIIRS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/vnp14_user_guide_v1.3.pdf
523 VNP14IMG.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
524 VNP14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
525 Where:
526 YYYYDDD = year and Julian day (001-366) of data acquisition
527 HHMM = hour and minute of data acquisition (approximate beginning time)
528 vvv = version number
529 yyyyddd = year and Julian day of data processing
530 hhmmss = hour, minute, and second of data processing
532 :param files: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
533 :param file_metadata: dictionary with file names as key and granules metadata as values
534 :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
535 :return data: dictionary with Latitude, Longitude and fire mask arrays read
537 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
538 Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
539 """
540 print "read_data files=%s" % files
541 data=Dict([])
542 if files=='VIIRS375':
543 data.update(read_viirs375_files('.',bounds))
544 else:
545 for f in files:
546 print "read_data f=%s" % f
547 if 'geo' in f.keys():
548 f0=os.path.basename(f.geo)
549 else:
550 print 'ERROR: read_data cannot read files=%s, no geo file' % f
551 continue
552 if 'fire' in f.keys():
553 f1=os.path.basename(f.fire)
554 else:
555 print 'ERROR: read_data cannot read files=%s, no fire file' % f
556 continue
557 boo=True
558 if 'ref' in f.keys():
559 f2=os.path.basename(f.ref)
560 boo=False
561 prefix = f0[:3]
562 print 'prefix %s' % prefix
563 if prefix != f1[:3]:
564 print 'ERROR: the prefix of %s %s must coincide' % (f0,f1)
565 continue
566 m=f.geo.split('/')
567 mm=m[-1].split('.')
568 key=mm[1]+'_'+mm[2]
569 id = prefix + '_' + key
570 print "id " + id
571 if prefix=="MOD" or prefix=="MYD":
572 try:
573 item=read_modis_files(f,bounds)
574 item.instrument="MODIS"
575 except Exception as e:
576 print 'WARNING: reading the files from MODIS failed with error %s' % e
577 continue
578 elif prefix=="VNP":
579 try:
580 item=read_viirs_files(f,bounds)
581 item.instrument="VIIRS"
582 except Exception as e:
583 print 'WARNING: reading the files from VIIRS failed with error %s' % e
584 continue
585 elif prefix=="OR":
586 try:
587 item=read_goes_files(f)
588 item.instrument="GOES"
589 except Exception as e:
590 print 'WARNING: reading the files from GOES failed with error %s' % e
591 continue
592 else:
593 print 'ERROR: the prefix of %s %s must be MOD, MYD, VNP, or OR' % (f0,f1)
594 continue
595 if not boo:
596 if f2 in file_metadata.keys():
597 boo=True
598 if (f0 in file_metadata.keys()) and (f1 in file_metadata.keys()) and boo:
599 # connect the file back to metadata
600 item.time_start_geo_iso=file_metadata[f0]["time_start"]
601 item.time_num=time_iso2num(item.time_start_geo_iso)
602 dt=datetime.datetime.strptime(item.time_start_geo_iso[0:18],'%Y-%m-%dT%H:%M:%S')
603 item.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
604 item.acq_time='%02d%02d' % (dt.hour,dt.minute)
605 item.time_start_fire_iso=file_metadata[f1]["time_start"]
606 item.time_end_geo_iso=file_metadata[f0]["time_end"]
607 item.time_end_fire_iso=file_metadata[f1]["time_end"]
608 item.file_geo=f0
609 item.file_fire=f1
610 if 'ref' in f.keys():
611 item.file_ref=f2
612 item.time_start_ref_iso=file_metadata[f2]["time_start"]
613 item.time_end_ref_iso=file_metadata[f2]["time_end"]
614 item.prefix=prefix
615 item.name=key
616 data.update({id:item})
617 else:
618 print 'WARNING: file %s or %s not found in downloaded metadata, ignoring both' % (f0, f1)
619 continue
621 return data
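# Illustrative sketch (assumed file names): for a pair like
#   f.geo  = './MOD03.A2018245.2130.006.2018246014135.hdf'
#   f.fire = './MOD14.A2018245.2130.006.2018246022518.hdf'
# read_data builds key = 'A2018245_2130' and id = 'MOD_A2018245_2130', and stores the
# granule under data['MOD_A2018245_2130'] using the metadata downloaded for both
# file names (basenames f0 and f1 above).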
624 def download(granules, appkey=None):
625 """
626 Download files as listed in the granules metadata
628 :param granules: list of granules with the metadata returned by the API or archive search
629 :return file_metadata: dictionary with file names as key and granules metadata as values
631 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
632 Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
633 """
634 file_metadata = {}
635 for gn,granule in enumerate(granules):
636 #print json.dumps(granule,indent=4, separators=(',', ': '))
637 url = granule['links'][0]['href']
638 filename=os.path.basename(urlparse.urlsplit(url).path)
639 file_metadata[filename]=granule
641 # to store as object in memory (maybe not completely downloaded until accessed?)
642 # with requests.Session() as s:
643 # data.append(s.get(url))
645 # download - a minimal code without various error checking and corrective actions
646 # see wrfxpy/src/ingest/downloader.py
647 if os.path.isfile(filename):
648 print 'file %s already downloaded' % filename
649 continue
650 try:
651 chunk_size = 1024*1024
652 s = 0
653 print 'downloading %s as %s' % (url,filename)
654 if appkey:
655 r = requests.get(url, stream=True, headers={"Authorization": "Bearer %s" % appkey})
656 else:
657 r = requests.get(url, stream=True)
658 if r.status_code == 200:
659 content_size = int(r.headers['Content-Length'])
660 print 'downloading %s as %s size %sB' % (url, filename, content_size)
661 with open(filename, 'wb') as f:
662 for chunk in r.iter_content(chunk_size):
663 f.write(chunk)
664 s = s + len(chunk)
665 print('downloaded %sB of %sB' % (s, content_size))
666 else:
667 if gn == 0:
668 print 'cannot connect to %s' % url
669 print 'web request status code %s' % r.status_code
670 print 'Make sure you have file ~/.netrc permission 600 with the content'
671 print 'machine urs.earthdata.nasa.gov\nlogin yourusername\npassword yourpassword'
672 sys.exit(1)
673 else:
674 print 'something happened when trying to download %s with status code %s' % (url,r.status_code)
676 except Exception as e:
677 print 'download failed with error %s' % e
678 return file_metadata
680 def download_GOES16(time):
681 """
682 Download the GOES16 data through the rclone application
684 :param time: tuple with the start and end times
685 :return bucket: dictionary keyed by the hourly path (year/doy/hour) with the lists of GOES16 files downloaded into the current directory
687 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
688 Angel Farguell (angel.farguell@gmail.com) 2018-10-12
689 """
690 bucket=Dict({})
691 dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
692 delta=dts[1]-dts[0]
693 nh=int(delta.total_seconds()/3600)
694 dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
695 for date in dates:
696 tt=date.timetuple()
697 argT='%d/%03d/%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour)
698 try:
699 args=['rclone','copyto','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'.','-L']
700 print 'running: '+' '.join(args)
701 res=call(args)
702 print 'goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT+' downloaded.'
703 args=['rclone','ls','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'-L']
704 out=check_output(args)
705 bucket.update({argT : [o.split(' ')[2] for o in out.split('\n')[:-1]]})
706 except Exception as e:
707 print 'download failed with error %s' % e
708 return bucket
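# Note (assumption): the rclone calls above expect a remote named 'goes16aws' pointing
# at the public noaa-goes16 S3 bucket. A minimal sketch of such a remote in rclone.conf
# could look like the following (anonymous S3 access; adjust to your rclone version):
#   [goes16aws]
#   type = s3
#   provider = AWS
#   env_auth = false
#   region = us-east-1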
710 def retrieve_af_data(bbox,time,burned=False,high=False,appkey=None):
711 """
712 Retrieve the satellite data in a bounding box and time interval
714 :param bbox: polygon with the search bounding box
715 :param time: time interval (init_time,final_time)
716 :return data: dictionary with all the data retrieved (the __main__ block below saves it to out.mat as a Matlab structure)
718 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
719 Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
720 """
722 # Define settings
723 lonmin,lonmax,latmin,latmax = bbox
724 bounds=bbox
725 bbox = [(lonmin,latmax),(lonmin,latmin),(lonmax,latmin),(lonmax,latmax),(lonmin,latmax)]
726 maxg = 1000
728 print "bbox"
729 print bbox
730 print "time:"
731 print time
732 print "maxg:"
733 print maxg
735 # Get data
736 granules=get_meta(bbox,time,maxg,burned=burned,high=high)
737 #print 'medatada found:\n' + json.dumps(granules,indent=4, separators=(',', ': '))
739 # Eliminating the NRT data (repeated always)
740 nrt_elimination(granules)
742 file_metadata = {}
743 for k,g in granules.items():
744 print 'Downloading %s files' % k
745 sys.stdout.flush()
746 file_metadata.update(download(g,appkey=appkey))
747 #print "download g:"
748 #print g
750 print "download complete"
752 # Group all files downloaded
753 files=group_all(".")
754 #print "group all files:"
755 #print files
757 # Generate data dictionary
758 data=Dict({})
759 data.update(read_data(files.MOD,file_metadata,bounds))
760 data.update(read_data(files.MYD,file_metadata,bounds))
761 data.update(read_data(files.VNP,file_metadata,bounds))
762 #data.update(read_data('VIIRS375','',bounds))
764 return data
766 def nrt_elimination(granules):
767 """
768 Remove the NRT data, which always duplicates the standard products
770 :param granules: Dictionary of granules products to clean up
771 :return: It will update the granules dictionary
773 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
774 Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-11-30
775 """
777 if 'MOD14' in granules:
778 nlist=[g for g in granules['MOD14'] if g['data_center']=='LPDAAC_ECS']
779 granules['MOD14']=nlist
780 if 'MYD14' in granules:
781 nlist=[g for g in granules['MYD14'] if g['data_center']=='LPDAAC_ECS']
782 granules['MYD14']=nlist
785 def read_fire_mesh(filename):
786 """
787 Read necessary variables in the fire mesh of the wrfout file filename
789 :param filename: wrfout file
790 :return fxlon: lon coordinates in the fire mesh
791 :return fxlat: lat coordinates in the fire mesh
792 :return bbox: bounding box
793 :return time_esmf: simulation times in ESMF format
795 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
796 Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
797 """
798 print 'opening ' + filename
799 d = nc.Dataset(filename)
800 m,n = d.variables['XLONG'][0,:,:].shape
801 fm,fn = d.variables['FXLONG'][0,:,:].shape
802 fm=fm-fm/(m+1) # dimensions corrected for extra strip
803 fn=fn-fn/(n+1)
804 fxlon = np.array(d.variables['FXLONG'][0,:fm,:fn]) # masking extra strip
805 fxlat = np.array(d.variables['FXLAT'][0,:fm,:fn])
806 time_esmf = ''.join(d.variables['Times'][:][0]) # date string as YYYY-MM-DD_hh:mm:ss
807 bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
808 print 'min max longitude latitude %s' % bbox
809 print 'time (ESMF) %s' % time_esmf
811 plot = False
812 if plot:
813 tign_g = np.array(d.variables['TIGN_G'][0,:fm,:fn])
814 plot_3D(fxlon,fxlat,tign_g)
816 d.close()
818 return fxlon,fxlat,bbox,time_esmf
820 def data2json(data,keys,dkeys,N):
821 """
822 Create a json dictionary from data dictionary
824 :param data: dictionary with Latitude, Longitude and fire mask arrays and metadata information
825 :param keys: keys which are going to be included into the json
826 :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
827 :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
828 :return ret: dictionary with all the fire detection information to create the KML file
830 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
831 Angel Farguell (angel.farguell@gmail.com), 2018-09-17
832 """
833 ret=Dict({})
834 for i,k in enumerate(keys):
835 if isinstance(data[list(data)[0]][dkeys[i]],(list, tuple, np.ndarray)):
836 dd=[np.ravel(data[d][dkeys[i]]) for d in list(data)]
837 ret.update({k : np.concatenate(dd)})
838 else:
839 dd=[[data[d[1]][dkeys[i]]]*N[d[0]] for d in enumerate(list(data))]
840 ret.update({k : np.concatenate(dd)})
842 return ret
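# Illustrative sketch (assumed output key names): data2json flattens every granule in
# data into one concatenated array per output key, pairing each output key with the
# corresponding key inside the granule dictionaries built above, e.g.
#   keys  = ['latitude','longitude','brightness','confidence']
#   dkeys = ['lat_fire','lon_fire','brig_fire','conf_fire']
#   N     = [len(np.ravel(data[g]['lat_fire'])) for g in list(data)]
#   d     = data2json(data,keys,dkeys,N)
# Scalar entries (such as 'sat_fire') are repeated N[k] times for granule k.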
844 def sdata2json(sdata,keys,dkeys,N):
845 """
846 Create a json dictionary from sorted array of data dictionaries
848 :param sdata: sorted array of data dictionaries with Latitude, Longitude and fire mask arrays and metadata information
849 :param keys: keys which are going to be included into the json
850 :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
851 :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
852 :return ret: dictionary with all the fire detection information to create the KML file
854 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
855 Angel Farguell (angel.farguell@gmail.com), 2018-12-03
856 """
857 ret=Dict({'granules': [d[0] for d in sdata]})
858 for i,k in enumerate(keys):
859 dd = [d[1][dkeys[i]] if dkeys[i] in d[1] else None for d in sdata]
860 if dd:
861 if np.any([isinstance(d,(list, tuple, np.ndarray)) for d in dd]):
862 out = [d if d is not None else np.array([]) for d in dd]
863 ret.update({k : out})
864 else:
865 out = [[d[1][1][dkeys[i]]]*N[d[0]] if dkeys[i] in d[1][1] else [] for d in enumerate(sdata)]
866 ret.update({k : out})
868 return ret
871 def write_csv(d,bounds):
872 """
873 Write fire detections from data dictionary d to a CSV file
875 :param d: dictionary with Latitude, Longitude and fire mask arrays and metadata information
876 :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
877 :return: fire_detections.csv file with all the detections
879 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
880 Angel Farguell (angel.farguell@gmail.com), 2018-09-17
881 """
882 import pandas as pd
883 df=pd.DataFrame(data=d)
884 df=df[(df['longitude']>bounds[0]) & (df['longitude']<bounds[1]) & (df['latitude']>bounds[2]) & (df['latitude']<bounds[3])]
885 df.to_csv('fire_detections.csv', encoding='utf-8', index=False)
887 def plot_3D(xx,yy,zz):
888 """
889 Plot surface of (xx,yy,zz) data
891 :param xx: x arrays
892 :param yy: y arrays
893 :param zz: values at the (x,y) points
894 :return: a plot show of the 3D data
896 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
897 Angel Farguell (angel.farguell@gmail.com) 2018-09-21
898 """
899 from mpl_toolkits.mplot3d import Axes3D
900 import matplotlib.pyplot as plt
901 from matplotlib import cm
902 fig = plt.figure()
903 ax = fig.gca(projection='3d')
904 surf = ax.plot_surface(xx,yy,zz,cmap=cm.coolwarm)
905 plt.show()
907 def time_iso2num(time_iso):
908 """
909 Transform an ISO time string into an integer number of seconds since December 31 1969 at 17:00:00
911 :param time_iso: string iso date
912 :return s: integer number of seconds since December 31 1969 at 17:00:00
914 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
915 Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
916 """
917 time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
918 # seconds since December 31 1969 at 17:00:00
919 s=time.mktime(time_datetime.timetuple())
920 return s
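# Illustrative sketch: time_iso2num uses time.mktime, so the result depends on the local
# timezone of the machine (the 'December 31 1969 at 17:00:00' reference above is the
# Unix epoch expressed in UTC-7). For example,
#   time_iso2num('2018-09-01T00:00:00Z')
# returns that timestamp interpreted in local time as seconds since the epoch.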
922 def time_iso2datetime(time_iso):
923 """
924 Transform an iso time string to a datetime element
926 :param time_iso: string iso date
927 :return time_datetime: datetime element
929 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
930 Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
931 """
932 time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
933 return time_datetime
935 def time_datetime2iso(time_datetime):
936 """
937 Transform a datetime element to iso time string
939 :param time_datetime: datetime element
940 :return time_iso: string iso date
942 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
943 Angel Farguell (angel.farguell@gmail.com) 2018-10-01
944 """
945 time_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (time_datetime.year,time_datetime.month,
946 time_datetime.day,time_datetime.hour,
947 time_datetime.minute,time_datetime.second)
948 return time_iso
950 def time_num2iso(time_num):
951 """
952 Transform a time integer number of seconds since December 31 1969 at 17:00:00 to an iso time string
954 :param time_num: integer number of seconds since December 31 1969 at 17:00:00
955 :return date: time string in ISO date
957 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
958 Angel Farguell (angel.farguell@gmail.com) 2018-10-01
959 """
960 dt=datetime.datetime.fromtimestamp(time_num)
961 # seconds since December 31 1969 at 17:00:00
962 date='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
963 return date
965 def pixel_dim(sample,N,h,p,a=None):
966 """
967 Computes pixel dimensions (along-scan and track pixel sizes)
969 :param sample: array of integers with the column number (sample variable in files)
970 :param N: scalar, total number of pixels in each row of the image swath
971 :param h: scalar, altitude of the satellite in km
972 :param p: scalar or array, pixel nadir resolution in km
973 :param a: array of floats of the size of p with the angles where the resolution changes
974 :return theta: scan angle in radians
975 :return scan: along-scan pixel size in km
976 :return track: along-track pixel size in km
978 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
979 Angel Farguell (angel.farguell@gmail.com) 2018-10-01
980 """
981 Re=6378 # approximation of the radius of the Earth in km
982 r=Re+h
983 M=(N-1)*0.5
984 s=np.arctan(p/h) # angular sample size (radians per sample)
985 if isinstance(p,(list, tuple, np.ndarray)):
986 Ns=np.array([int((a[k]-a[k-1])/s[k-1]) for k in range(1,len(a)-1)])
987 Ns=np.append(Ns,int(M-Ns.sum()))
988 theta=s[0]*(sample-M)
989 scan=Re*s[0]*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
990 track=r*s[0]*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
991 for k in range(1,len(Ns)):
992 p=sample<=M-Ns[0:k].sum()
993 theta[p]=s[k]*(sample[p]-(M-Ns[0:k].sum()))-(s[0:k]*Ns[0:k]).sum()
994 scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
995 track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
996 p=sample>=M+Ns[0:k].sum()
997 theta[p]=s[k]*(sample[p]-(M+Ns[0:k].sum()))+(s[0:k]*Ns[0:k]).sum()
998 scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
999 track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
1000 else:
1001 theta=s*(sample-M)
1002 scan=Re*s*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
1003 track=r*s*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
1004 return (theta,scan,track)
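# Note on the geometry (restating the formulas used above): with M the center sample,
# s the angular sample size and r = Re + h, the scan angle is theta = s*(sample - M) and
#   scan  = Re*s*(cos(theta)/sqrt((Re/r)^2 - sin(theta)^2) - 1)
#   track = r*s*(cos(theta) - sqrt((Re/r)^2 - sin(theta)^2))
# so at nadir (sample = M, theta = 0) both sizes reduce to s*h, which is approximately
# the nadir resolution p since s = arctan(p/h).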
1006 def copyto(partial_path,kml):
1007 """
1008 Copy information in partial_path to kml
1010 :param partial_path: path to a partial KML file
1011 :param kml: KML object where to write to
1012 :return: information from partial_path into kml
1014 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
1015 Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
1016 """
1017 with open(partial_path,'r') as part:
1018 for line in part:
1019 kml.write(line)
1021 def json2kml(d,kml_path,bounds,prods,opt='granule',minconf=80.):
1022 """
1023 Creates a KML file kml_path from a dictionary d
1025 :param d: dictionary with all the fire detection information to create the KML file
1026 :param kml_path: path in where the KML file is going to be written
1027 :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
1028 :return: a KML file
1030 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
1031 Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
1032 """
1034 if 'latitude' in d:
1035 if not isinstance(d['latitude'][0],(list, tuple, np.ndarray)):
1036 if opt=='dates':
1037 ind=[i[0] for i in sorted(enumerate(d.acq_date), key=lambda x:x[1])]
1038 L=[len(list(grp)) for k, grp in groupby(d['acq_date'][ind])]
1039 L.insert(0,0)
1040 ll=[sum(L[0:k+1]) for k in range(len(L))]
1041 for v in list(d):
1042 sort=[d[v][i] for i in ind]
1043 d[v]=[sort[ll[k]:ll[k+1]] for k in range(len(ll)-1)]
1044 else:
1045 for v in list(d):
1046 d[v]=[d[v]]
1047 opt='pixels'
1049 frp_style={-1:'modis_frp_no_data',40:'modis_frp_gt_400'}
1050 for i in range(0,40):
1051 frp_style[i]='modis_frp_%s_to_%s' % (i*10, i*10+10)
1053 with open(kml_path,'w') as kml:
1055 copyto('kmls/partial1.kml',kml)
1057 # set some constants
1058 r = 6378 # Earth radius
1059 km_lat = 180/(np.pi*r) # 1km in degrees latitude
1060 ND = len(d['latitude'])
1062 for prod in prods:
1064 kml.write('<Folder>\n')
1065 kml.write('<name>%s</name>\n' % prods[prod])
1067 if prod == 'FRP':
1068 copyto('kmls/partial2.kml',kml)
1070 if prod == 'TF':
1071 col = np.flip(np.divide([(230, 25, 75, 150), (245, 130, 48, 150), (255, 255, 25, 150),
1072 (210, 245, 60, 150), (60, 180, 75, 150), (70, 240, 240, 150),
1073 (0, 0, 128, 150), (145, 30, 180, 150), (240, 50, 230, 150),
1074 (128, 128, 128, 150)],255.),0)
1075 cm = colors.LinearSegmentedColormap.from_list('BuRd',col,ND)
1076 cols = ['%02x%02x%02x%02x' % tuple(255*np.flip(c)) for c in cm(range(cm.N))]
1077 t_range = range(ND-1,-1,-1)
1078 else:
1079 t_range = range(ND)
1081 for t in t_range:
1082 lats=np.array(d['latitude'][t]).astype(float)
1083 lons=np.array(d['longitude'][t]).astype(float)
1084 ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
1085 latitude=lats[ll]
1086 longitude=lons[ll]
1087 NN=len(latitude)
1088 if NN > 0:
1089 acq_date=np.array(d['acq_date'][t])[ll]
1090 acq_time=np.array(d['acq_time'][t])[ll]
1091 try:
1092 satellite=np.array(d['satellite'][t])[ll]
1093 except:
1094 satellite=np.array(['Not available']*NN)
1095 try:
1096 instrument=np.array(d['instrument'][t])[ll]
1097 except:
1098 instrument=np.array(['Not available']*NN)
1099 try:
1100 confidence=np.array(d['confidence'][t])[ll].astype(float)
1101 except:
1102 confidence=np.array(np.zeros(NN)).astype(float)
1103 try:
1104 frps=np.array(d['frp'][t])[ll].astype(float)
1105 except:
1106 frps=np.array(np.zeros(NN)).astype(float)
1107 try:
1108 angles=np.array(d['scan_angle'][t])[ll].astype(float)
1109 except:
1110 angles=np.array(['Not available']*NN)
1111 try:
1112 scans=np.array(d['scan'][t])[ll].astype(float)
1113 except:
1114 scans=np.ones(NN).astype(float)
1115 try:
1116 tracks=np.array(d['track'][t])[ll].astype(float)
1117 except:
1118 tracks=np.ones(NN).astype(float)
1120 kml.write('<Folder>\n')
1121 if opt=='dates':
1122 kml.write('<name>%s</name>\n' % acq_date[0])
1123 elif opt=='granule':
1124 kml.write('<name>%s</name>\n' % d['granules'][t])
1125 else:
1126 kml.write('<name>Pixels</name>\n')
1128 for p in range(NN):
1129 lat=latitude[p]
1130 lon=longitude[p]
1131 conf=confidence[p]
1132 if conf >= minconf:
1133 frp=frps[p]
1134 angle=angles[p]
1135 scan=scans[p]
1136 track=tracks[p]
1137 timestamp=acq_date[p] + 'T' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + 'Z'
1138 timedescr=acq_date[p] + ' ' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ' UTC'
1140 if prod == 'NF':
1141 kml.write('<Placemark>\n<name>Ground detection square</name>\n')
1142 kml.write('<description>\nlongitude: %s\n' % lon
1143 + 'latitude: %s\n' % lat
1144 + 'time: %s\n' % timedescr
1145 + 'satellite: %s\n' % satellite[p]
1146 + 'instrument: %s\n' % instrument[p]
1147 + 'scan angle: %s\n' % angle
1148 + 'along-scan: %s\n' % scan
1149 + 'along-track: %s\n' % track
1150 + '</description>\n')
1151 else:
1152 kml.write('<Placemark>\n<name>Fire detection square</name>\n')
1153 kml.write('<description>\nlongitude: %s\n' % lon
1154 + 'latitude: %s\n' % lat
1155 + 'time: %s\n' % timedescr
1156 + 'satellite: %s\n' % satellite[p]
1157 + 'instrument: %s\n' % instrument[p]
1158 + 'confidence: %s\n' % conf
1159 + 'FRP: %s\n' % frp
1160 + 'scan angle: %s\n' % angle
1161 + 'along-scan: %s\n' % scan
1162 + 'along-track: %s\n' % track
1163 + '</description>\n')
1164 kml.write('<TimeStamp><when>%s</when></TimeStamp>\n' % timestamp)
1165 if prod == 'AF':
1166 if conf < 30:
1167 kml.write('<styleUrl> modis_conf_low </styleUrl>\n')
1168 elif conf < 80:
1169 kml.write('<styleUrl> modis_conf_med </styleUrl>\n')
1170 else:
1171 kml.write('<styleUrl> modis_conf_high </styleUrl>\n')
1172 elif prod == 'TF':
1173 kml.write('<Style>\n'+'<PolyStyle>\n'
1174 +'<color>%s</color>\n' % cols[t]
1175 +'<outline>0</outline>\n'+'</PolyStyle>\n'
1176 +'</Style>\n')
1177 elif prod == 'FRP':
1178 frpx = min(40,np.ceil(frp/10.)-1)
1179 kml.write('<styleUrl> %s </styleUrl>\n' % frp_style[frpx] )
1180 elif prod == 'NF':
1181 kml.write('<styleUrl> no_fire </styleUrl>\n')
1182 elif prod == 'AFN':
1183 if conf < 80:
1184 kml.write('<Style>\n'+'<PolyStyle>\n'
1185 +'<color>7000ffff</color>\n'
1186 +'<outline>0</outline>\n'+'</PolyStyle>\n'
1187 +'</Style>\n')
1188 elif conf < 90:
1189 kml.write('<Style>\n'+'<PolyStyle>\n'
1190 +'<color>7000a5ff</color>\n'
1191 +'<outline>0</outline>\n'+'</PolyStyle>\n'
1192 +'</Style>\n')
1193 else:
1194 kml.write('<Style>\n'+'<PolyStyle>\n'
1195 +'<color>700000ff</color>\n'
1196 +'<outline>0</outline>\n'+'</PolyStyle>\n'
1197 +'</Style>\n')
1199 kml.write('<Polygon>\n<outerBoundaryIs>\n<LinearRing>\n<coordinates>\n')
1201 km_lon=km_lat/np.cos(lat*np.pi/180) # 1 km in longitude
1203 sq_track_size_km=track
1204 sq2_lat=km_lat * sq_track_size_km/2
1205 sq_scan_size_km=scan
1206 sq2_lon=km_lon * sq_scan_size_km/2
1208 kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
1209 kml.write('%s,%s,0\n' % (lon - sq2_lon, lat + sq2_lat))
1210 kml.write('%s,%s,0\n' % (lon + sq2_lon, lat + sq2_lat))
1211 kml.write('%s,%s,0\n' % (lon + sq2_lon, lat - sq2_lat))
1212 kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
1214 kml.write('</coordinates>\n</LinearRing>\n</outerBoundaryIs>\n</Polygon>\n</Placemark>\n')
1215 kml.write('</Folder>\n')
1217 kml.write('</Folder>\n')
1219 kml.write('</Document>\n</kml>\n')
1221 print 'Created file %s' % kml_path
1222 else:
1223 print 'No detections to be saved as %s' % kml_path
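# Illustrative sketch (assumed display names): json2kml expects prods to map the product
# codes used above ('AF', 'FRP', 'TF', 'NF', 'AFN') to KML folder names, for instance
#   prods = {'AF': 'Active Fires', 'FRP': 'Fire Radiative Power', 'NF': 'No Fire'}
#   json2kml(d,'fire_detections.kml',bounds,prods,opt='granule',minconf=80.)
# opt selects how placemarks are grouped ('granule', 'dates', or 'pixels').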
1225 def burned_algorithm(data):
1226 """
1227 Computes mask of burned scar pixels
1229 :param data: data dictionary with all the necessary bands M7, M8, M10 and M11
1230 :return C: Mask of burned scar pixels
1232 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
1233 Angel Farguell (angel.farguell@gmail.com) 2019-01-03
1234 """
1235 # Parameters
1236 RthSub=0.05
1237 Rth=0.81
1238 M07UB=0.19
1239 M08LB=0.00
1240 M08UB=0.28
1241 M10LB=0.07
1242 M10UB=1.00
1243 M11LB=0.05
1244 eps=1e-30
1245 # Bands
1246 M7=data.M7
1247 M8=data.M8
1248 M10=data.M10
1249 M11=data.M11
1250 # Eq 1a
1251 M=(M8.astype(float)-RthSub)/(M11.astype(float)+eps)
1252 C1=np.logical_and(M>0,M<Rth)
1253 # Eq 1b
1254 C2=np.logical_and(M8>M08LB,M8<M08UB)
1255 # Eq 1c
1256 C3=M7<M07UB
1257 # Eq 1d
1258 C4=M11>M11LB
1259 # Eq 1e
1260 C5=np.logical_and(M10>M10LB,M10<M10UB)
1261 # All the conditions at the same time
1262 C=np.logical_and(np.logical_and(np.logical_and(np.logical_and(C1,C2),C3),C4),C5)
1263 return C
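# Illustrative sketch: burned_algorithm expects a Dict with the reflectance bands M7,
# M8, M10 and M11 as arrays scaled to [0,1], as built in read_viirs_files above, e.g.
#   bands = Dict({'M7': M7, 'M8': M8, 'M10': M10, 'M11': M11})
#   burned_mask = burned_algorithm(bands)
# The result is a boolean array, True where all five threshold conditions hold.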
1265 if __name__ == "__main__":
1266 bbox=[-132.86966,-102.0868788,44.002495,66.281204]
1267 time = ("2012-09-11T00:00:00Z", "2012-09-12T00:00:00Z")
1268 data=retrieve_af_data(bbox,time)
1269 # Save the data dictionary into a matlab structure file out.mat
1270 sio.savemat('out.mat', mdict=data)