import warnings
warnings.filterwarnings("ignore")
import numpy as np
import json
import requests
import urlparse
import os
import sys
import re
import glob
import netCDF4 as nc
from cmr import CollectionQuery, GranuleQuery
from pyhdf.SD import SD, SDC
from utils import *
import scipy.io as sio
import h5py
import datetime
import time
import pandas as pd
import matplotlib.colors as colors
from itertools import groupby
from subprocess import check_output, call

def search_api(sname,bbox,time,maxg=50,platform="",version=""):
    """
    API search of the different satellite granules

    :param sname: short name
    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param platform: string with the platform
    :param version: string with the version
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    api = GranuleQuery()
    if not version:
        if not platform:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time
            )
        else:
            search = api.parameters(
                short_name=sname,
                platform=platform,
                downloadable=True,
                polygon=bbox,
                temporal=time
            )
    else:
        if not platform:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time,
                version=version
            )
        else:
            search = api.parameters(
                short_name=sname,
                platform=platform,
                downloadable=True,
                polygon=bbox,
                temporal=time,
                version=version
            )
    sh=search.hits()
    print "%s gets %s hits in this range" % (sname,sh)
    if sh>maxg:
        print "The number of hits %s is larger than the limit %s." % (sh,maxg)
        print "Use a reduced bounding box or a reduced time interval."
        granules = []
    else:
        granules = api.get(sh)
    return granules
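
# Example usage (a minimal sketch with a hypothetical bounding box and time
# interval, mirroring the calls made in get_meta below):
#   bbox = [(-122.0,40.0),(-122.0,39.0),(-121.0,39.0),(-121.0,40.0),(-122.0,40.0)]
#   time = ("2018-09-01T00:00:00Z","2018-09-02T00:00:00Z")
#   grans = search_api("MOD14",bbox,time,maxg=50,platform="Terra")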

def search_archive(url,prod,time,grans):
    """
    Archive search of the different satellite granules

    :param url: base url of the archive domain
    :param prod: string of product with version, for instance: '5000/VNP09'
    :param time: time interval (init_time,final_time)
    :param grans: granules of the geolocation metadata
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-01-03
    """
    ids=['.'.join(k['producer_granule_id'].split('.')[1:3]) for k in grans] # satellite ids in bounding box
    granules=[]
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    fold=np.unique(['%d/%03d' % (date.timetuple().tm_year,date.timetuple().tm_yday) for date in dates])
    urls=[url+'/'+prod+'/'+f for f in fold]
    for u in urls:
        try:
            js=requests.get(u+'.json').json()
            for j in js:
                arg=np.argwhere(np.array(ids)=='.'.join(j['name'].split('.')[1:3]))
                if arg.size!=0:
                    ar=arg[0][0]
                    g=Dict(j)
                    g.links=[{'href':u+'/'+g.name}]
                    g.time_start=grans[ar]["time_start"]
                    g.time_end=grans[ar]["time_end"]
                    granules.append(g)
        except Exception as e:
            print "warning: JSON request to %s failed with error %s" % (u,e)
    print "%s gets %s hits in this range" % (prod.split('/')[-1],len(granules))
    return granules
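
# Example usage (a minimal sketch; the URL and product mirror the values used
# in get_meta below, the time interval is hypothetical and granules.VNP03 comes
# from a previous search_api("VNP03MODLL",...) call):
#   url = "https://ladsweb.modaps.eosdis.nasa.gov/archive/allData"
#   time = ("2018-09-01T00:00:00Z","2018-09-02T00:00:00Z")
#   vnp09 = search_archive(url,"5000/VNP09",time,granules.VNP03)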

def get_meta(bbox,time,maxg=50,burned=False):
    """
    Get all the metadata from the API for all the necessary products

    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param burned: if True, also search the VNP09 reflectance archive
    :return granules: dictionary with the metadata of all the products

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    granules=Dict([])
    # MOD14: MODIS Terra fire data
    granules.MOD14=search_api("MOD14",bbox,time,maxg,"Terra")
    # MOD03: MODIS Terra geolocation data
    granules.MOD03=search_api("MOD03",bbox,time,maxg,"Terra","6")
    # MOD09: MODIS Terra atmospherically corrected surface reflectance
    #granules.MOD09=search_api("MOD09",bbox,time,maxg,"Terra","6")
    # MYD14: MODIS Aqua fire data
    granules.MYD14=search_api("MYD14",bbox,time,maxg,"Aqua")
    # MYD03: MODIS Aqua geolocation data
    granules.MYD03=search_api("MYD03",bbox,time,maxg,"Aqua","6")
    # MYD09: MODIS Aqua atmospherically corrected surface reflectance
    #granules.MYD09=search_api("MYD09",bbox,time,maxg,"Aqua","6")
    # VNP14: VIIRS fire data, res 750m
    granules.VNP14=search_api("VNP14",bbox,time,maxg)
    # VNP03MODLL: VIIRS geolocation data, res 750m
    granules.VNP03=search_api("VNP03MODLL",bbox,time,maxg)
    # VNP14hi: VIIRS fire data, res 375m
    #granules.VNP14hi=search_api("VNP14IMGTDL_NRT",bbox,time,maxg)
    if burned:
        # VNP09: VIIRS atmospherically corrected surface reflectance
        url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
        prod="5000/VNP09" # product
        granules.VNP09=search_archive(url,prod,time,granules.VNP03)
    return granules

def group_files(path,reg):
    """
    Group the geolocation (03) and fire (14) files of a specific product in a path

    :param path: path to the geolocation (03) and fire (14) files
    :param reg: string with the first 3 characters specifying the product (MOD, MYD or VNP)
    :return files: list of pairs with geolocation (03) and fire (14) file names in the path of the specific product

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=[Dict({'geo':k}) for k in glob.glob(path+'/'+reg+'03*')]
    filesf=glob.glob(path+'/'+reg+'14*')
    filesr=glob.glob(path+'/'+reg+'09*')
    if len(filesf)>0:
        for f in filesf:
            mf=re.split("/",f)
            if mf is not None:
                m=mf[-1].split('.')
                if m is not None:
                    for k,g in enumerate(files):
                        mmf=re.split("/",g.geo)
                        mm=mmf[-1].split('.')
                        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
                            files[k].fire=f
    if len(filesr)>0:
        for f in filesr:
            mf=re.split("/",f)
            if mf is not None:
                m=mf[-1].split('.')
                if m is not None:
                    for k,g in enumerate(files):
                        mmf=re.split("/",g.geo)
                        mm=mmf[-1].split('.')
                        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
                            files[k].ref=f
    return files

def group_all(path):
    """
    Combine all the geolocation (03) and fire (14) files in a path

    :param path: path to the geolocation (03) and fire (14) files
    :return files: dictionary of products with a list of pairs with geolocation (03) and fire (14) file names in the path

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=Dict({})
    # MOD files
    modf=group_files(path,'MOD')
    # MYD files
    mydf=group_files(path,'MYD')
    # VIIRS files
    vif=group_files(path,'VNP')
    files.MOD=modf
    files.MYD=mydf
    files.VNP=vif
    return files
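
# Example (a minimal sketch of the returned structure, with hypothetical file names):
#   files = group_all(".")
#   # files.MOD -> [{'geo':'./MOD03.A2018244.2115...hdf','fire':'./MOD14.A2018244.2115...hdf'}, ...]
#   # files.MYD and files.VNP are the analogous lists for Aqua and VIIRS granules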

def read_modis_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for MODIS products (MOD or MYD)

    :param files: pair with geolocation (03) and fire (14) file names for MODIS products (MOD or MYD)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=1354 # number of columns (maximum sample number)
    h=705. # altitude of the satellite in km
    p=1. # nadir pixel resolution in km
    # Reading MODIS files
    hdfg=SD(files.geo,SDC.READ)
    hdff=SD(files.fire,SDC.READ)
    # Creating all the objects
    lat_obj=hdfg.select('Latitude')
    lon_obj=hdfg.select('Longitude')
    fire_obj=hdff.select('fire mask')
    lat_fire_obj=hdff.select('FP_latitude')
    lon_fire_obj=hdff.select('FP_longitude')
    brig_fire_obj=hdff.select('FP_T21')
    sample_fire_obj=hdff.select('FP_sample')
    conf_fire_obj=hdff.select('FP_confidence')
    t31_fire_obj=hdff.select('FP_T31')
    frp_fire_obj=hdff.select('FP_power')
    # Geolocation and mask information
    ret.lat=lat_obj.get()
    ret.lon=lon_obj.get()
    ret.fire=fire_obj.get()
    # Fire detected information
    try:
        flats=lat_fire_obj.get()
    except:
        flats=np.array([])
    try:
        flons=lon_fire_obj.get()
    except:
        flons=np.array([])
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]), flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    try:
        ret.brig_fire=brig_fire_obj.get()[fll]
    except:
        ret.brig_fire=np.array([])
    ret.sat_fire=hdff.Satellite
    try:
        ret.conf_fire=conf_fire_obj.get()[fll]
    except:
        ret.conf_fire=np.array([])
    try:
        ret.t31_fire=t31_fire_obj.get()[fll]
    except:
        ret.t31_fire=np.array([])
    try:
        ret.frp_fire=frp_fire_obj.get()[fll]
    except:
        ret.frp_fire=np.array([])
    try:
        sf=sample_fire_obj.get()[fll]
    except:
        sf=np.array([])
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    # No fire data
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.ravel(ret.fire)
    fire=fire[ll]
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p)
    # Close files
    hdfg.end()
    hdff.end()
    return ret
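
# Example usage (a minimal sketch with hypothetical file names; the geo/fire
# pair is normally produced by group_files/group_all above):
#   f = Dict({'geo':'MOD03.A2018244.2115.006.2018245072538.hdf',
#             'fire':'MOD14.A2018244.2115.006.2018245115402.hdf'})
#   bounds = (-122.0,-121.0,39.0,40.0)
#   ret = read_modis_files(f,bounds)
#   # ret.lat_fire/ret.lon_fire hold detections inside bounds, ret.fire the full mask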

def read_viirs_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for VIIRS products (VNP)

    :param files: pair with geolocation (03) and fire (14) file names for VIIRS products (VNP)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=3200 # number of columns (maximum sample number)
    h=828. # altitude of the satellite in km
    alpha=np.array([0,31.59,44.68,56.06])/180*np.pi
    #p=(0.75+0.75/2+0.75/3)/3 # nadir pixel resolution in km (mean of 3 different sections)
    p=np.array([0.75,0.75/2,0.75/3])
    # Reading VIIRS files
    h5g=h5py.File(files.geo,'r')
    ncf=nc.Dataset(files.fire,'r')
    # Geolocation and mask information
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ret.fire=np.array(ncf.variables['fire mask'][:])
    # Fire detected information
    flats=np.array(ncf.variables['FP_latitude'][:])
    flons=np.array(ncf.variables['FP_longitude'][:])
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]), flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])[fll]
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])[fll]
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])[fll]
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])[fll]
    sf=np.array(ncf.variables['FP_sample'][:])[fll]
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p,alpha)
    # No fire data
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.ravel(ret.fire)
    fire=fire[ll]
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p,alpha)
    # Reflectance data for burned scar algorithm
    if 'ref' in files.keys():
        # Read reflectance data
        hdf=SD(files.ref,SDC.READ)
        # Bands data
        M7=hdf.select('750m Surface Reflectance Band M7') # 0.86 µm
        M8=hdf.select('750m Surface Reflectance Band M8') # 1.24 µm
        M10=hdf.select('750m Surface Reflectance Band M10') # 1.61 µm
        M11=hdf.select('750m Surface Reflectance Band M11') # 2.25 µm
        bands=Dict({})
        bands.M7=M7.get()*1e-4
        bands.M8=M8.get()*1e-4
        bands.M10=M10.get()*1e-4
        bands.M11=M11.get()*1e-4
        # Burned scar mask using the burned scar granule algorithm
        ret.burned=burned_algorithm(bands)
        # Close reflectance file
        hdf.end()
    # Close files
    h5g.close()
    ncf.close()
    return ret

def read_viirs375_files(path,bounds):
    """
    Read the geolocation and fire information from VIIRS CSV files (fire_archive_*.csv and/or fire_nrt_*.csv)

    :param path: path to the VIIRS CSV files
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-10-23
    """
    # Opening files if they exist
    f1=glob.glob(path+'/fire_archive_*.csv')
    f2=glob.glob(path+'/fire_nrt_*.csv')
    if len(f1)==1:
        df1=pd.read_csv(f1[0])
        if len(f2)==1:
            df2=pd.read_csv(f2[0])
            dfs=pd.concat([df1,df2],sort=True,ignore_index=True)
        else:
            dfs=df1
    else:
        if len(f2)==1:
            dfs=pd.read_csv(f2[0])
        else:
            return {}

    ret=Dict({})
    # In the case something exists, read all the information from the CSV files
    dfs=dfs[(dfs['longitude']>bounds[0]) & (dfs['longitude']<bounds[1]) & (dfs['latitude']>bounds[2]) & (dfs['latitude']<bounds[3])]
    date=np.array(dfs['acq_date'])
    time=np.array(dfs['acq_time'])
    dfs['time']=np.array(['%s_%04d' % (date[k],time[k]) for k in range(len(date))])
    dfs['time']=pd.to_datetime(dfs['time'], format='%Y-%m-%d_%H%M')
    dfs['datetime']=dfs['time']
    dfs=dfs.set_index('time')
    for group_name, df in dfs.groupby(pd.TimeGrouper("D")):
        items=Dict([])
        items.lat=np.array(df['latitude'])
        items.lon=np.array(df['longitude'])
        conf=np.array(df['confidence'])
        firemask=np.zeros(conf.shape)
        conf_fire=np.zeros(conf.shape)
        firemask[conf=='l']=7
        conf_fire[conf=='l']=30.
        firemask[conf=='n']=8
        conf_fire[conf=='n']=60.
        firemask[conf=='h']=9
        conf_fire[conf=='h']=90.
        items.fire=firemask.astype(int)
        items.lat_fire=items.lat
        items.lon_fire=items.lon
        items.brig_fire=np.array(df['bright_ti4'])
        items.sat_fire='Suomi NPP'
        items.conf_fire=conf_fire
        items.t31_fire=np.array(df['bright_ti5'])
        items.frp_fire=np.array(df['frp'])
        items.scan_fire=np.array(df['scan'])
        items.track_fire=np.array(df['track'])
        items.scan_angle_fire=np.ones(items.scan_fire.shape)*np.nan
        items.lat_nofire=np.array([])
        items.lon_nofire=np.array([])
        items.scan_angle_nofire=np.array([])
        items.scan_nofire=np.array([])
        items.track_nofire=np.array([])
        items.instrument=df['instrument'][0]
        dt=df['datetime'][0]
        items.time_start_geo_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
        items.time_num=time_iso2num(items.time_start_geo_iso)
        items.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
        items.acq_time='%02d%02d' % (dt.hour,dt.minute)
        items.time_start_fire_iso=items.time_start_geo_iso
        items.time_end_geo_iso=items.time_start_geo_iso
        items.time_end_fire_iso=items.time_start_geo_iso
        items.file_geo=f1+f2
        items.file_fire=items.file_geo
        tt=df['datetime'][0].timetuple()
        id='VNPH_A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        items.prefix='VNPH'
        items.name='A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        ret.update({id: items})
    return ret

def read_goes_files(files):
    """
    Read the files for GOES products - geolocation and fire data already included (OS)

    :param files: pair with geolocation and fire file names for GOES products
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on WINDOWS10.
    Lauren Hearn (lauren@robotlauren.com), 2018-10-16
    """
    h5g=h5py.File(files.geo,'r')
    ret=Dict([])
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ncf=nc.Dataset(files.fire,'r')
    ret.fire=np.array(ncf.variables['fire mask'][:])
    ret.lat_fire=np.array(ncf.variables['FP_latitude'][:])
    ret.lon_fire=np.array(ncf.variables['FP_longitude'][:])
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])
    sf=np.array(ncf.variables['FP_sample'][:])
    # Satellite information
    N=2500 # number of columns (maximum sample number)
    h=35786. # altitude of the satellite in km
    p=2. # nadir pixel resolution in km
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])
    return ret

def read_data(files,file_metadata,bounds):
    """
    Read all the geolocation (03) and fire (14) files and, if necessary, the reflectance (09) files

    MODIS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/archive/mod14_v5_user_guide.pdf
        MOD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
        MYD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
    Where:
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        vvv = version number
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing

    VIIRS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/vnp14_user_guide_v1.3.pdf
        VNP14IMG.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
        VNP14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
    Where:
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        vvv = version number
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing

    :param files: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
    :param file_metadata: dictionary with file names as key and granules metadata as values
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return data: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print "read_data files=%s" % files
    data=Dict([])
    if files=='VIIRS375':
        data.update(read_viirs375_files('.',bounds))
    else:
        for f in files:
            print "read_data f=%s" % f
            if 'geo' in f.keys():
                f0=os.path.basename(f.geo)
            else:
                print 'ERROR: read_data cannot read files=%s, no geo file' % f
                continue
            if 'fire' in f.keys():
                f1=os.path.basename(f.fire)
            else:
                print 'ERROR: read_data cannot read files=%s, no fire file' % f
                continue
            boo=True
            if 'ref' in f.keys():
                f2=os.path.basename(f.ref)
                boo=False
            prefix = f0[:3]
            print 'prefix %s' % prefix
            if prefix != f1[:3]:
                print 'ERROR: the prefix of %s %s must coincide' % (f0,f1)
                continue
            m=f.geo.split('/')
            mm=m[-1].split('.')
            key=mm[1]+'_'+mm[2]
            id = prefix + '_' + key
            print "id " + id
            if prefix=="MOD" or prefix=="MYD":
                try:
                    item=read_modis_files(f,bounds)
                    item.instrument="MODIS"
                except Exception as e:
                    print 'WARNING: reading the files from MODIS failed with error %s' % e
                    continue
            elif prefix=="VNP":
                try:
                    item=read_viirs_files(f,bounds)
                    item.instrument="VIIRS"
                except Exception as e:
                    print 'WARNING: reading the files from VIIRS failed with error %s' % e
                    continue
            elif prefix=="OR":
                try:
                    item=read_goes_files(f)
                    item.instrument="GOES"
                except Exception as e:
                    print 'WARNING: reading the files from GOES failed with error %s' % e
                    continue
            else:
                print 'ERROR: the prefix of %s %s must be MOD, MYD, VNP or OR' % (f0,f1)
                continue
            if not boo:
                if f2 in file_metadata.keys():
                    boo=True
            if (f0 in file_metadata.keys()) and (f1 in file_metadata.keys()) and boo:
                # connect the file back to metadata
                item.time_start_geo_iso=file_metadata[f0]["time_start"]
                item.time_num=time_iso2num(item.time_start_geo_iso)
                dt=datetime.datetime.strptime(item.time_start_geo_iso[0:19],'%Y-%m-%dT%H:%M:%S')
                item.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
                item.acq_time='%02d%02d' % (dt.hour,dt.minute)
                item.time_start_fire_iso=file_metadata[f1]["time_start"]
                item.time_end_geo_iso=file_metadata[f0]["time_end"]
                item.time_end_fire_iso=file_metadata[f1]["time_end"]
                item.file_geo=f0
                item.file_fire=f1
                if 'ref' in f.keys():
                    item.file_ref=f2
                    item.time_start_ref_iso=file_metadata[f2]["time_start"]
                    item.time_end_ref_iso=file_metadata[f2]["time_end"]
                item.prefix=prefix
                item.name=key
                data.update({id:item})
            else:
                print 'WARNING: file %s or %s not found in downloaded metadata, ignoring both' % (f0, f1)
                continue

    return data

def download(granules):
    """
    Download files as listed in the granules metadata

    :param granules: list of granule metadata returned by the API search
    :return file_metadata: dictionary with file names as key and granules metadata as values

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    file_metadata = {}
    for granule in granules:
        #print json.dumps(granule,indent=4, separators=(',', ': '))
        url = granule['links'][0]['href']
        filename=os.path.basename(urlparse.urlsplit(url).path)
        file_metadata[filename]=granule

        # to store as object in memory (maybe not completely downloaded until accessed?)
        # with requests.Session() as s:
        #    data.append(s.get(url))

        # download - a minimal code without various error checking and corrective actions
        # see wrfxpy/src/ingest/downloader.py
        if os.path.isfile(filename):
            print 'file %s already downloaded' % filename
            continue
        try:
            chunk_size = 1024*1024
            s = 0
            print 'downloading %s as %s' % (url,filename)
            r = requests.get(url, stream=True)
            if r.status_code == 200:
                content_size = int(r.headers['Content-Length'])
                print 'downloading %s as %s size %sB' % (url, filename, content_size)
                with open(filename, 'wb') as f:
                    for chunk in r.iter_content(chunk_size):
                        f.write(chunk)
                        s = s + len(chunk)
                        print('downloaded %sB of %sB' % (s, content_size))
            else:
                print 'cannot connect to %s' % url
                print 'web request status code %s' % r.status_code
                print 'Make sure you have file ~/.netrc permission 600 with the content'
                print 'machine urs.earthdata.nasa.gov\nlogin yourusername\npassword yourpassword'
                sys.exit(1)
        except Exception as e:
            print 'download failed with error %s' % e
    return file_metadata

def download_GOES16(time):
    """
    Download the GOES16 data through the rclone application

    :param time: tuple with the start and end times
    :return bucket: dictionary of all the data downloaded and all the GOES16 data downloaded in the same directory level

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-12
    """
    bucket=Dict({})
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    for date in dates:
        tt=date.timetuple()
        argT='%d/%03d/%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour)
        try:
            args=['rclone','copyto','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'.','-L']
            print 'running: '+' '.join(args)
            res=call(args)
            print 'goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT+' downloaded.'
            args=['rclone','ls','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'-L']
            out=check_output(args)
            bucket.update({argT : [o.split(' ')[2] for o in out.split('\n')[:-1]]})
        except Exception as e:
            print 'download failed with error %s' % e
    return bucket

def retrieve_af_data(bbox,time,burned=False):
    """
    Retrieve the satellite data in a bounding box and time interval, later saved in a Matlab structure inside the out.mat Matlab file

    :param bbox: bounding box tuple (lonmin,lonmax,latmin,latmax)
    :param time: time interval (init_time,final_time)
    :param burned: if True, also retrieve the VNP09 reflectance data
    :return data: dictionary with all the data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """

    # Define settings
    lonmin,lonmax,latmin,latmax = bbox
    bounds=bbox
    bbox = [(lonmin,latmax),(lonmin,latmin),(lonmax,latmin),(lonmax,latmax),(lonmin,latmax)]
    maxg = 1000

    print "bbox"
    print bbox
    print "time:"
    print time
    print "maxg:"
    print maxg

    # Get data
    granules=get_meta(bbox,time,maxg,burned=burned)
    #print 'metadata found:\n' + json.dumps(granules,indent=4, separators=(',', ': '))

    # Eliminating the NRT data (always repeated)
    nrt_elimination(granules)

    file_metadata = {}
    for k,g in granules.items():
        print 'Downloading %s files' % k
        sys.stdout.flush()
        file_metadata.update(download(g))
        #print "download g:"
        #print g

    print "download complete"

    # Group all files downloaded
    files=group_all(".")
    #print "group all files:"
    #print files

    # Generate data dictionary
    data=Dict({})
    data.update(read_data(files.MOD,file_metadata,bounds))
    data.update(read_data(files.MYD,file_metadata,bounds))
    data.update(read_data(files.VNP,file_metadata,bounds))
    #data.update(read_data('VIIRS375','',bounds))

    return data

def nrt_elimination(granules):
    """
    Remove the NRT data, which is always repeated

    :param granules: dictionary of granule products to clean up
    :return: the granules dictionary is updated in place

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-11-30
    """

    if 'MOD14' in granules:
        nlist=[g for g in granules['MOD14'] if g['data_center']=='LPDAAC_ECS']
        granules['MOD14']=nlist
    if 'MYD14' in granules:
        nlist=[g for g in granules['MYD14'] if g['data_center']=='LPDAAC_ECS']
        granules['MYD14']=nlist

def read_fire_mesh(filename):
    """
    Read necessary variables in the fire mesh of the wrfout file filename

    :param filename: wrfout file
    :return fxlon: lon coordinates in the fire mesh
    :return fxlat: lat coordinates in the fire mesh
    :return bbox: bounding box
    :return time_esmf: simulation times in ESMF format

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print 'opening ' + filename
    d = nc.Dataset(filename)
    m,n = d.variables['XLONG'][0,:,:].shape
    fm,fn = d.variables['FXLONG'][0,:,:].shape
    fm=fm-fm/(m+1) # dimensions corrected for extra strip
    fn=fn-fn/(n+1)
    fxlon = d.variables['FXLONG'][0,:fm,:fn] # masking extra strip
    fxlat = d.variables['FXLAT'][0,:fm,:fn]
    tign_g = d.variables['TIGN_G'][0,:fm,:fn]
    time_esmf = ''.join(d.variables['Times'][:][0]) # date string as YYYY-MM-DD_hh:mm:ss
    d.close()
    bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
    print 'min max longitude latitude %s' % bbox
    print 'time (ESMF) %s' % time_esmf

    plot = False
    if plot:
        plot_3D(fxlon,fxlat,tign_g)

    return fxlon,fxlat,bbox,time_esmf

def data2json(data,keys,dkeys,N):
    """
    Create a json dictionary from data dictionary

    :param data: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict({})
    for i,k in enumerate(keys):
        if isinstance(data[list(data)[0]][dkeys[i]],(list, tuple, np.ndarray)):
            dd=[np.ravel(data[d][dkeys[i]]) for d in list(data)]
            ret.update({k : np.concatenate(dd)})
        else:
            dd=[[data[d[1]][dkeys[i]]]*N[d[0]] for d in enumerate(list(data))]
            ret.update({k : np.concatenate(dd)})

    return ret

def sdata2json(sdata,keys,dkeys,N):
    """
    Create a json dictionary from a sorted array of data dictionaries

    :param sdata: sorted array of data dictionaries with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-12-03
    """
    ret=Dict({'granules': [d[0] for d in sdata]})
    for i,k in enumerate(keys):
        dd = [d[1][dkeys[i]] if dkeys[i] in d[1] else None for d in sdata]
        if dd:
            if np.any([isinstance(d,(list, tuple, np.ndarray)) for d in dd]):
                out = [d if d is not None else np.array([]) for d in dd]
                ret.update({k : out})
            else:
                out = [[d[1][1][dkeys[i]]]*N[d[0]] if dkeys[i] in d[1][1] else [] for d in enumerate(sdata)]
                ret.update({k : out})

    return ret

def write_csv(d,bounds):
    """
    Write fire detections from data dictionary d to a CSV file

    :param d: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return: fire_detections.csv file with all the detections

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    df=pd.DataFrame(data=d)
    df=df[(df['longitude']>bounds[0]) & (df['longitude']<bounds[1]) & (df['latitude']>bounds[2]) & (df['latitude']<bounds[3])]
    df.to_csv('fire_detections.csv', encoding='utf-8', index=False)

def plot_3D(xx,yy,zz):
    """
    Plot surface of (xx,yy,zz) data

    :param xx: x arrays
    :param yy: y arrays
    :param zz: values at the (x,y) points
    :return: a plot show of the 3D data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-09-21
    """
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt
    from matplotlib import cm
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    surf = ax.plot_surface(xx,yy,zz,cmap=cm.coolwarm)
    plt.show()

def time_iso2num(time_iso):
    """
    Transform an ISO time string to an integer number of seconds since December 31 1969 at 17:00:00

    :param time_iso: string ISO date
    :return s: integer number of seconds since December 31 1969 at 17:00:00

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
    # seconds since December 31 1969 at 17:00:00
    s=time.mktime(time_datetime.timetuple())
    return s

def time_iso2datetime(time_iso):
    """
    Transform an ISO time string to a datetime element

    :param time_iso: string ISO date
    :return time_datetime: datetime element

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:19],'%Y-%m-%dT%H:%M:%S')
    return time_datetime

def time_datetime2iso(time_datetime):
    """
    Transform a datetime element to an ISO time string

    :param time_datetime: datetime element
    :return time_iso: string ISO date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    time_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (time_datetime.year,time_datetime.month,
                                                 time_datetime.day,time_datetime.hour,
                                                 time_datetime.minute,time_datetime.second)
    return time_iso

def time_num2iso(time_num):
    """
    Transform an integer number of seconds since December 31 1969 at 17:00:00 to an ISO time string

    :param time_num: integer number of seconds since December 31 1969 at 17:00:00
    :return date: time string in ISO date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    dt=datetime.datetime.fromtimestamp(time_num)
    # seconds since December 31 1969 at 17:00:00
    date='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
    return date
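
# Example round trip (a minimal sketch; the epoch comment above reflects the
# local-time behavior of time.mktime on the machine where this was developed):
#   t = time_iso2num('2018-09-11T00:00:00Z')   # seconds as a float
#   time_num2iso(t)                            # -> '2018-09-11T00:00:00Z'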

def pixel_dim(sample,N,h,p,a=None):
    """
    Computes pixel dimensions (along-scan and along-track pixel sizes)

    :param sample: array of integers with the column number (sample variable in files)
    :param N: scalar, total number of pixels in each row of the image swath
    :param h: scalar, altitude of the satellite in km
    :param p: scalar, pixel nadir resolution in km
    :param a: array of floats of the size of p with the angles where the resolution changes
    :return theta: scan angle in radians
    :return scan: along-scan pixel size in km
    :return track: along-track pixel size in km

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    Re=6378 # approximation of the radius of the Earth in km
    r=Re+h
    M=(N-1)*0.5
    s=np.arctan(p/h) # trigonometry (rad/sample)
    if isinstance(p,(list, tuple, np.ndarray)):
        Ns=np.array([int((a[k]-a[k-1])/s[k-1]) for k in range(1,len(a)-1)])
        Ns=np.append(Ns,int(M-Ns.sum()))
        theta=s[0]*(sample-M)
        scan=Re*s[0]*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s[0]*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
        for k in range(1,len(Ns)):
            p=sample<=M-Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M-Ns[0:k].sum()))-(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
            p=sample>=M+Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M+Ns[0:k].sum()))+(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
    else:
        theta=s*(sample-M)
        scan=Re*s*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
    return (theta,scan,track)
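
# Example (a minimal sketch): at nadir (sample == M) theta is ~0 and both pixel
# sizes reduce to approximately the nadir resolution p, since
# scan = Re*s*(r/Re - 1) = s*h ~ p and track = r*s*(1 - Re/r) = s*h ~ p.
# Using the MODIS constants from read_modis_files:
#   theta,scan,track = pixel_dim(np.array([676]),1354,705.,1.)
#   # theta ~ 0 rad, scan ~ 1 km, track ~ 1 km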

def copyto(partial_path,kml):
    """
    Copy information in partial_path to kml

    :param partial_path: path to a partial KML file
    :param kml: KML object where to write to
    :return: information from partial_path into kml

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    with open(partial_path,'r') as part:
        for line in part:
            kml.write(line)

def json2kml(d,kml_path,bounds,prods,opt='granule'):
    """
    Creates a KML file kml_path from a dictionary d

    :param d: dictionary with all the fire detection information to create the KML file
    :param kml_path: path where the KML file is going to be written
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :param prods: dictionary of products to include, mapping product keys to KML folder names
    :param opt: grouping option for the placemarks
    :return: a KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """

    if 'latitude' in d:
        if not isinstance(d['latitude'][0],(list, tuple, np.ndarray)):
            if opt=='dates':
                ind=[i[0] for i in sorted(enumerate(d.acq_date), key=lambda x:x[1])]
                L=[len(list(grp)) for k, grp in groupby(d['acq_date'][ind])]
                L.insert(0,0)
                ll=[sum(L[0:k+1]) for k in range(len(L))]
                for v in list(d):
                    sort=[d[v][i] for i in ind]
                    d[v]=[sort[ll[k]:ll[k+1]] for k in range(len(ll)-1)]
            else:
                for v in list(d):
                    d[v]=[d[v]]
                opt='pixels'

        frp_style={-1:'modis_frp_no_data',40:'modis_frp_gt_400'}
        for i in range(0,40):
            frp_style[i]='modis_frp_%s_to_%s' % (i*10, i*10+10)

        with open(kml_path,'w') as kml:

            copyto('kmls/partial1.kml',kml)

            # set some constants
            r = 6378 # Earth radius
            km_lat = 180/(np.pi*r) # 1km in degrees latitude
            ND = len(d['latitude'])

            for prod in prods:

                kml.write('<Folder>\n')
                kml.write('<name>%s</name>\n' % prods[prod])

                if prod == 'FRP':
                    copyto('kmls/partial2.kml',kml)

                if prod == 'TF':
                    col = np.flip(np.divide([(230, 25, 75, 150), (245, 130, 48, 150), (255, 255, 25, 150),
                        (210, 245, 60, 150), (60, 180, 75, 150), (70, 240, 240, 150),
                        (0, 0, 128, 150), (145, 30, 180, 150), (240, 50, 230, 150),
                        (128, 128, 128, 150)],255.),0)
                    cm = colors.LinearSegmentedColormap.from_list('BuRd',col,ND)
                    cols = ['%02x%02x%02x%02x' % tuple(255*np.flip(c)) for c in cm(range(cm.N))]
                    t_range = range(ND-1,-1,-1)
                else:
                    t_range = range(ND)

                for t in t_range:
                    lats=np.array(d['latitude'][t]).astype(float)
                    lons=np.array(d['longitude'][t]).astype(float)
                    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
                    latitude=lats[ll]
                    longitude=lons[ll]
                    NN=len(latitude)
                    if NN > 0:
                        acq_date=np.array(d['acq_date'][t])[ll]
                        acq_time=np.array(d['acq_time'][t])[ll]
                        try:
                            satellite=np.array(d['satellite'][t])[ll]
                        except:
                            satellite=np.array(['Not available']*NN)
                        try:
                            instrument=np.array(d['instrument'][t])[ll]
                        except:
                            instrument=np.array(['Not available']*NN)
                        try:
                            confidence=np.array(d['confidence'][t])[ll].astype(float)
                        except:
                            confidence=np.array(np.zeros(NN)).astype(float)
                        try:
                            frps=np.array(d['frp'][t])[ll].astype(float)
                        except:
                            frps=np.array(np.zeros(NN)).astype(float)
                        try:
                            angles=np.array(d['scan_angle'][t])[ll].astype(float)
                        except:
                            angles=np.array(['Not available']*NN)
                        try:
                            scans=np.array(d['scan'][t])[ll].astype(float)
                        except:
                            scans=np.ones(NN).astype(float)
                        try:
                            tracks=np.array(d['track'][t])[ll].astype(float)
                        except:
                            tracks=np.ones(NN).astype(float)

                        kml.write('<Folder>\n')
                        if opt=='date':
                            kml.write('<name>%s</name>\n' % acq_date[0])
                        elif opt=='granule':
                            kml.write('<name>%s</name>\n' % d['granules'][t])
                        else:
                            kml.write('<name>Pixels</name>\n')

                        for p in range(NN):
                            lat=latitude[p]
                            lon=longitude[p]
                            conf=confidence[p]
                            frp=frps[p]
                            angle=angles[p]
                            scan=scans[p]
                            track=tracks[p]
                            timestamp=acq_date[p] + 'T' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + 'Z'
                            timedescr=acq_date[p] + ' ' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ' UTC'

                            if prod == 'NF':
                                kml.write('<Placemark>\n<name>Ground detection square</name>\n')
                                kml.write('<description>\nlongitude: %s\n' % lon
                                    + 'latitude: %s\n' % lat
                                    + 'time: %s\n' % timedescr
                                    + 'satellite: %s\n' % satellite[p]
                                    + 'instrument: %s\n' % instrument[p]
                                    + 'scan angle: %s\n' % angle
                                    + 'along-scan: %s\n' % scan
                                    + 'along-track: %s\n' % track
                                    + '</description>\n')
                            else:
                                kml.write('<Placemark>\n<name>Fire detection square</name>\n')
                                kml.write('<description>\nlongitude: %s\n' % lon
                                    + 'latitude: %s\n' % lat
                                    + 'time: %s\n' % timedescr
                                    + 'satellite: %s\n' % satellite[p]
                                    + 'instrument: %s\n' % instrument[p]
                                    + 'confidence: %s\n' % conf
                                    + 'FRP: %s\n' % frp
                                    + 'scan angle: %s\n' % angle
                                    + 'along-scan: %s\n' % scan
                                    + 'along-track: %s\n' % track
                                    + '</description>\n')
                            kml.write('<TimeStamp><when>%s</when></TimeStamp>\n' % timestamp)
                            if prod == 'AF':
                                if conf < 30:
                                    kml.write('<styleUrl> modis_conf_low </styleUrl>\n')
                                elif conf < 80:
                                    kml.write('<styleUrl> modis_conf_med </styleUrl>\n')
                                else:
                                    kml.write('<styleUrl> modis_conf_high </styleUrl>\n')
                            elif prod == 'TF':
                                kml.write('<Style>\n'+'<PolyStyle>\n'
                                    +'<color>%s</color>\n' % cols[t]
                                    +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                    +'</Style>\n')
                            elif prod == 'FRP':
                                frpx = min(40,np.ceil(frp/10.)-1)
                                kml.write('<styleUrl> %s </styleUrl>\n' % frp_style[frpx] )
                            elif prod == 'NF':
                                kml.write('<styleUrl> no_fire </styleUrl>\n')
                            elif prod == 'AFN':
                                if conf < 30:
                                    kml.write('<Style>\n'+'<PolyStyle>\n'
                                        +'<color>7000ffff</color>\n'
                                        +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                        +'</Style>\n')
                                elif conf < 80:
                                    kml.write('<Style>\n'+'<PolyStyle>\n'
                                        +'<color>7000a5ff</color>\n'
                                        +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                        +'</Style>\n')
                                else:
                                    kml.write('<Style>\n'+'<PolyStyle>\n'
                                        +'<color>700000ff</color>\n'
                                        +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                        +'</Style>\n')

                            kml.write('<Polygon>\n<outerBoundaryIs>\n<LinearRing>\n<coordinates>\n')

                            km_lon=km_lat/np.cos(lat*np.pi/180) # 1 km in longitude

                            sq_track_size_km=track
                            sq2_lat=km_lat * sq_track_size_km/2
                            sq_scan_size_km=scan
                            sq2_lon=km_lon * sq_scan_size_km/2

                            kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
                            kml.write('%s,%s,0\n' % (lon - sq2_lon, lat + sq2_lat))
                            kml.write('%s,%s,0\n' % (lon + sq2_lon, lat + sq2_lat))
                            kml.write('%s,%s,0\n' % (lon + sq2_lon, lat - sq2_lat))
                            kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))

                            kml.write('</coordinates>\n</LinearRing>\n</outerBoundaryIs>\n</Polygon>\n</Placemark>\n')
                        kml.write('</Folder>\n')

                kml.write('</Folder>\n')

            kml.write('</Document>\n</kml>\n')

        print 'Created file %s' % kml_path
    else:
        print 'No detections to be saved as %s' % kml_path
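
# Example usage (a minimal sketch; the product keys follow the branches above,
# the folder names and output file name are hypothetical, and ret is a
# dictionary produced by data2json or sdata2json):
#   prods = {'AF':'Active Fires','FRP':'Fire Radiative Power','NF':'No Fire'}
#   json2kml(ret,'fire_detections.kml',(-122.0,-121.0,39.0,40.0),prods)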

def burned_algorithm(data):
    """
    Computes mask of burned scar pixels

    :param data: data dictionary with all the necessary bands M7, M8, M10 and M11
    :return C: mask of burned scar pixels

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2019-01-03
    """
    # Parameters
    RthSub=0.05
    Rth=0.81
    M07UB=0.19
    M08LB=0.00
    M08UB=0.28
    M10LB=0.07
    M10UB=1.00
    M11LB=0.05
    eps=1e-30
    # Bands
    M7=data.M7
    M8=data.M8
    M10=data.M10
    M11=data.M11
    # Eq 1a
    M=(M8.astype(float)-RthSub)/(M11.astype(float)+eps)
    C1=np.logical_and(M>0,M<Rth)
    # Eq 1b
    C2=np.logical_and(M8>M08LB,M8<M08UB)
    # Eq 1c
    C3=M7<M07UB
    # Eq 1d
    C4=M11>M11LB
    # Eq 1e
    C5=np.logical_and(M10>M10LB,M10<M10UB)
    # All the conditions at the same time
    C=np.logical_and(np.logical_and(np.logical_and(np.logical_and(C1,C2),C3),C4),C5)
    return C
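
# Example usage (a minimal sketch with synthetic reflectances; in practice the
# bands dictionary is built from a VNP09 granule in read_viirs_files above):
#   bands = Dict({'M7':np.array([[0.10]]),'M8':np.array([[0.15]]),
#                 'M10':np.array([[0.20]]),'M11':np.array([[0.30]])})
#   burned_algorithm(bands)   # -> array([[ True]]) for this pixel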

if __name__ == "__main__":
    bbox=[-132.86966,-102.0868788,44.002495,66.281204]
    time = ("2012-09-11T00:00:00Z", "2012-09-12T00:00:00Z")
    data=retrieve_af_data(bbox,time)
    # Save the data dictionary into a matlab structure file out.mat
    sio.savemat('out.mat', mdict=data)