import warnings
warnings.filterwarnings("ignore")
import numpy as np
import json
import requests
import urlparse
import os
import sys
import re
import glob
import netCDF4 as nc
from cmr import CollectionQuery, GranuleQuery
from pyhdf.SD import SD, SDC
from utils import *
import scipy.io as sio
import h5py
import datetime
import time
import pandas as pd
from itertools import groupby
from subprocess import check_output, call

def search_api(sname,bbox,time,maxg=50,platform="",version=""):
    """
    API search of the different satellite granules

    :param sname: short name
    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param platform: string with the platform
    :param version: string with the version
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    api = GranuleQuery()
    if not version:
        if not platform:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time
            )
        else:
            search = api.parameters(
                short_name=sname,
                platform=platform,
                downloadable=True,
                polygon=bbox,
                temporal=time
            )
    else:
        if not platform:
            search = api.parameters(
                short_name=sname,
                downloadable=True,
                polygon=bbox,
                temporal=time,
                version=version
            )
        else:
            search = api.parameters(
                short_name=sname,
                platform=platform,
                downloadable=True,
                polygon=bbox,
                temporal=time,
                version=version
            )
    sh=search.hits()
    print "%s gets %s hits in this range" % (sname,sh)
    if sh>maxg:
        print "The number of hits %s is larger than the limit %s." % (sh,maxg)
        print "Use a reduced bounding box or a reduced time interval."
        granules = []
    else:
        granules = api.get(sh)
    return granules
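
# Minimal usage sketch for search_api (hypothetical bounding box and dates,
# commented out so nothing runs on import): query MODIS Terra fire granules
# over a small region and print how many were found.
#
#   bbox = [(-113.9,46.1),(-113.9,45.2),(-112.8,45.2),(-112.8,46.1),(-113.9,46.1)]
#   time = ("2018-09-01T00:00:00Z","2018-09-02T00:00:00Z")
#   grans = search_api("MOD14",bbox,time,maxg=50,platform="Terra")
#   print "found %s MOD14 granules" % len(grans)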

def search_archive(url,prod,time,grans):
    """
    Archive search of the different satellite granules

    :param url: base url of the archive domain
    :param prod: string of product with version, for instance: '5000/VNP09'
    :param time: time interval (init_time,final_time)
    :param grans: granules of the geolocation metadata
    :return granules: dictionary with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-01-03
    """
    ids=['.'.join(k['producer_granule_id'].split('.')[1:3]) for k in grans] # satellite ids in bounding box
    granules=[]
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    fold=np.unique(['%d/%03d' % (date.timetuple().tm_year,date.timetuple().tm_yday) for date in dates])
    urls=[url+'/'+prod+'/'+f for f in fold]
    for u in urls:
        try:
            js=requests.get(u+'.json').json()
            for j in js:
                arg=np.argwhere(np.array(ids)=='.'.join(j['name'].split('.')[1:3]))
                if arg.size!=0:
                    ar=arg[0][0]
                    g=Dict(j)
                    g.links=[{'href':u+'/'+g.name}]
                    g.time_start=grans[ar]["time_start"]
                    g.time_end=grans[ar]["time_end"]
                    granules.append(g)
        except Exception as e:
            print "warning: some JSON request failed"
    print "%s gets %s hits in this range" % (prod.split('/')[-1],len(granules))
    return granules

def get_meta(bbox,time,maxg=50,burned=False):
    """
    Get all the meta data from the API for all the necessary products

    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :return granules: dictionary with the metadata of all the products

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    granules=Dict([])
    #MOD14: MODIS Terra fire data
    granules.MOD14=search_api("MOD14",bbox,time,maxg,"Terra")
    #MOD03: MODIS Terra geolocation data
    granules.MOD03=search_api("MOD03",bbox,time,maxg,"Terra","6")
    #MOD09: MODIS Terra atmospherically corrected surface reflectance
    #granules.MOD09=search_api("MOD09",bbox,time,maxg,"Terra","6")
    #MYD14: MODIS Aqua fire data
    granules.MYD14=search_api("MYD14",bbox,time,maxg,"Aqua")
    #MYD03: MODIS Aqua geolocation data
    granules.MYD03=search_api("MYD03",bbox,time,maxg,"Aqua","6")
    #MYD09: MODIS Aqua atmospherically corrected surface reflectance
    #granules.MYD09=search_api("MYD09",bbox,time,maxg,"Terra","6")
    #VNP14: VIIRS fire data, res 750m
    granules.VNP14=search_api("VNP14",bbox,time,maxg)
    #VNP03MODLL: VIIRS geolocation data, res 750m
    granules.VNP03=search_api("VNP03MODLL",bbox,time,maxg)
    #VNP14hi: VIIRS fire data, res 375m
    #granules.VNP14hi=search_api("VNP14IMGTDL_NRT",bbox,time,maxg)
    if burned:
        #VNP09: VIIRS atmospherically corrected surface reflectance
        url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
        prod="5000/VNP09" # product
        granules.VNP09=search_archive(url,prod,time,granules.VNP03)
    return granules
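
# Illustrative sketch of get_meta (hypothetical bbox and dates, commented out):
# the result is a Dict keyed by product (MOD14, MOD03, MYD14, MYD03, VNP14,
# VNP03, and VNP09 when burned=True), each holding a list of granule metadata.
#
#   bbox = [(-113.9,46.1),(-113.9,45.2),(-112.8,45.2),(-112.8,46.1),(-113.9,46.1)]
#   time = ("2018-09-01T00:00:00Z","2018-09-02T00:00:00Z")
#   granules = get_meta(bbox,time,maxg=100)
#   for prod in granules:
#       print "%s: %s granules" % (prod,len(granules[prod]))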

def group_files(path,reg):
    """
    Group the geolocation (03) and fire (14) files of a specific product in a path

    :param path: path to the geolocation (03) and fire (14) files
    :param reg: string with the first 3 characters specifying the product (MOD, MYD or VNP)
    :return files: list of pairs with geolocation (03) and fire (14) file names in the path of the specific product

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=[Dict({'geo':k}) for k in glob.glob(path+'/'+reg+'03*')]
    filesf=glob.glob(path+'/'+reg+'14*')
    filesr=glob.glob(path+'/'+reg+'09*')
    if len(filesf)>0:
        for f in filesf:
            mf=re.split("/",f)
            if mf is not None:
                m=mf[-1].split('.')
                if m is not None:
                    for k,g in enumerate(files):
                        mmf=re.split("/",g.geo)
                        mm=mmf[-1].split('.')
                        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
                            files[k].fire=f
    if len(filesr)>0:
        for f in filesr:
            mf=re.split("/",f)
            if mf is not None:
                m=mf[-1].split('.')
                if m is not None:
                    for k,g in enumerate(files):
                        mmf=re.split("/",g.geo)
                        mm=mmf[-1].split('.')
                        if mm[0][1]==m[0][1] and mm[1]+'.'+mm[2]==m[1]+'.'+m[2]:
                            files[k].ref=f
    return files

def group_all(path):
    """
    Combine all the geolocation (03) and fire (14) files in a path

    :param path: path to the geolocation (03) and fire (14) files
    :return files: dictionary of products with a list of pairs with geolocation (03) and fire (14) file names in the path

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=Dict({})
    # MOD files
    modf=group_files(path,'MOD')
    # MYD files
    mydf=group_files(path,'MYD')
    # VIIRS files
    vif=group_files(path,'VNP')
    files.MOD=modf
    files.MYD=mydf
    files.VNP=vif
    return files
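
# Illustrative sketch of the structure group_all returns (hypothetical file
# names, commented out): each product key holds a list of Dicts pairing a geo
# file with its fire file, plus a reflectance file when one is present.
#
#   files = group_all(".")
#   # files.MOD -> [{'geo':'./MOD03.A2018244.2055...hdf','fire':'./MOD14.A2018244.2055...hdf'}, ...]
#   # files.VNP -> [{'geo':'./VNP03MODLL.A2018244...h5','fire':'./VNP14.A2018244...nc'}, ...]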

def read_modis_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for MODIS products (MOD or MYD)

    :param files: pair with geolocation (03) and fire (14) file names for MODIS products (MOD or MYD)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=1354 # Number of columns (maximum sample number)
    h=705. # Altitude of the satellite in km
    p=1. # Nadir pixel resolution in km
    # Reading MODIS files
    hdfg=SD(files.geo,SDC.READ)
    hdff=SD(files.fire,SDC.READ)
    # Creating all the objects
    lat_obj=hdfg.select('Latitude')
    lon_obj=hdfg.select('Longitude')
    fire_obj=hdff.select('fire mask')
    lat_fire_obj=hdff.select('FP_latitude')
    lon_fire_obj=hdff.select('FP_longitude')
    brig_fire_obj=hdff.select('FP_T21')
    sample_fire_obj=hdff.select('FP_sample')
    conf_fire_obj=hdff.select('FP_confidence')
    t31_fire_obj=hdff.select('FP_T31')
    frp_fire_obj=hdff.select('FP_power')
    # Geolocation and mask information
    ret.lat=lat_obj.get()
    ret.lon=lon_obj.get()
    ret.fire=fire_obj.get()
    # Fire detected information
    try:
        flats=lat_fire_obj.get()
    except:
        flats=np.array([])
    try:
        flons=lon_fire_obj.get()
    except:
        flons=np.array([])
    fll=np.logical_and(np.logical_and(np.logical_and(flons>bounds[0],flons<bounds[1]),flats>bounds[2]),flats<bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    try:
        ret.brig_fire=brig_fire_obj.get()[fll]
    except:
        ret.brig_fire=np.array([])
    ret.sat_fire=hdff.Satellite
    try:
        ret.conf_fire=conf_fire_obj.get()[fll]
    except:
        ret.conf_fire=np.array([])
    try:
        ret.t31_fire=t31_fire_obj.get()[fll]
    except:
        ret.t31_fire=np.array([])
    try:
        ret.frp_fire=frp_fire_obj.get()[fll]
    except:
        ret.frp_fire=np.array([])
    try:
        sf=sample_fire_obj.get()[fll]
    except:
        sf=np.array([])
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    # No fire data
    lats=np.reshape(ret.lat,np.prod(ret.lat.shape))
    lons=np.reshape(ret.lon,np.prod(ret.lon.shape))
    ll=np.logical_and(np.logical_and(np.logical_and(lons>bounds[0],lons<bounds[1]),lats>bounds[2]),lats<bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.reshape(ret.fire,np.prod(ret.fire.shape))
    fire=fire[ll]
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.reshape(sample,np.prod(sample.shape))
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p)
    # Close files
    hdfg.end()
    hdff.end()
    return ret
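
# Sketch of reading one MODIS geo/fire pair (hypothetical file names, commented
# out). The returned Dict carries the full lat/lon/fire-mask grids plus
# per-detection arrays (lat_fire, lon_fire, brig_fire, conf_fire, frp_fire,
# and the pixel scan/track sizes from pixel_dim).
#
#   pair = Dict({'geo':'MOD03.A2018244.2055.006.X.hdf','fire':'MOD14.A2018244.2055.006.X.hdf'})
#   bounds = (-113.9,-112.8,45.2,46.1)
#   item = read_modis_files(pair,bounds)
#   print "%s fire pixels inside bounds" % len(item.lat_fire)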

def read_viirs_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for VIIRS products (VNP)

    :param files: pair with geolocation (03) and fire (14) file names for VIIRS products (VNP)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=3200 # Number of columns (maximum sample number)
    h=828. # Altitude of the satellite in km
    alpha=np.array([0,31.59,44.68,56.06])/180*np.pi
    #p=(0.75+0.75/2+0.75/3)/3 # Nadir pixel resolution in km (mean in 3 different sections)
    p=np.array([0.75,0.75/2,0.75/3])
    # Reading VIIRS files
    h5g=h5py.File(files.geo,'r')
    ncf=nc.Dataset(files.fire,'r')
    # Geolocation and mask information
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ret.fire=np.array(ncf.variables['fire mask'][:])
    # Fire detected information
    flats=np.array(ncf.variables['FP_latitude'][:])
    flons=np.array(ncf.variables['FP_longitude'][:])
    fll=np.logical_and(np.logical_and(np.logical_and(flons>bounds[0],flons<bounds[1]),flats>bounds[2]),flats<bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])[fll]
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])[fll]
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])[fll]
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])[fll]
    sf=np.array(ncf.variables['FP_sample'][:])[fll]
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p,alpha)
    # No fire data
    lats=np.reshape(ret.lat,np.prod(ret.lat.shape))
    lons=np.reshape(ret.lon,np.prod(ret.lon.shape))
    ll=np.logical_and(np.logical_and(np.logical_and(lons>bounds[0],lons<bounds[1]),lats>bounds[2]),lats<bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.reshape(ret.fire,np.prod(ret.fire.shape))
    fire=fire[ll]
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.reshape(sample,np.prod(sample.shape))
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p,alpha)
    # Reflectance data for burned scar algorithm
    if 'ref' in files.keys():
        # Read reflectance data
        hdf=SD(files.ref,SDC.READ)
        # Bands data
        M7=hdf.select('750m Surface Reflectance Band M7') # 0.86 um
        M8=hdf.select('750m Surface Reflectance Band M8') # 1.24 um
        M10=hdf.select('750m Surface Reflectance Band M10') # 1.61 um
        M11=hdf.select('750m Surface Reflectance Band M11') # 2.25 um
        bands=Dict({})
        bands.M7=M7.get()*1e-4
        bands.M8=M8.get()*1e-4
        bands.M10=M10.get()*1e-4
        bands.M11=M11.get()*1e-4
        # Burned scar mask using the burned scar granule algorithm
        ret.burned=burned_algorithm(bands)
        # Close reflectance file
        hdf.end()
    # Close files
    h5g.close()
    ncf.close()
    return ret

def read_viirs375_files(path,bounds):
    """
    Read the geolocation and fire information from VIIRS CSV files (fire_archive_*.csv and/or fire_nrt_*.csv)

    :param path: path to the CSV files
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-10-23
    """
    # Opening files if they exist
    f1=glob.glob(path+'/fire_archive_*.csv')
    f2=glob.glob(path+'/fire_nrt_*.csv')
    if len(f1)==1:
        df1=pd.read_csv(f1[0])
        if len(f2)==1:
            df2=pd.read_csv(f2[0])
            dfs=pd.concat([df1,df2],sort=True,ignore_index=True)
        else:
            dfs=df1
    else:
        if len(f2)==1:
            dfs=pd.read_csv(f2[0])
        else:
            return {}

    ret=Dict({})
    # In the case something exists, read all the information from the CSV files
    dfs=dfs[(dfs['longitude']>bounds[0]) & (dfs['longitude']<bounds[1]) & (dfs['latitude']>bounds[2]) & (dfs['latitude']<bounds[3])]
    date=np.array(dfs['acq_date'])
    time=np.array(dfs['acq_time'])
    dfs['time']=np.array(['%s_%04d' % (date[k],time[k]) for k in range(len(date))])
    dfs['time']=pd.to_datetime(dfs['time'], format='%Y-%m-%d_%H%M')
    dfs['datetime']=dfs['time']
    dfs=dfs.set_index('time')
    for group_name, df in dfs.groupby(pd.TimeGrouper("D")):
        items=Dict([])
        items.lat=np.array(df['latitude'])
        items.lon=np.array(df['longitude'])
        conf=np.array(df['confidence'])
        firemask=np.zeros(conf.shape)
        conf_fire=np.zeros(conf.shape)
        firemask[conf=='l']=7
        conf_fire[conf=='l']=30.
        firemask[conf=='n']=8
        conf_fire[conf=='n']=60.
        firemask[conf=='h']=9
        conf_fire[conf=='h']=90.
        items.fire=firemask.astype(int)
        items.lat_fire=items.lat
        items.lon_fire=items.lon
        items.brig_fire=np.array(df['bright_ti4'])
        items.sat_fire='Suomi NPP'
        items.conf_fire=conf_fire
        items.t31_fire=np.array(df['bright_ti5'])
        items.frp_fire=np.array(df['frp'])
        items.scan_fire=np.array(df['scan'])
        items.track_fire=np.array(df['track'])
        items.scan_angle_fire=np.ones(items.scan_fire.shape)*np.nan
        items.lat_nofire=np.array([])
        items.lon_nofire=np.array([])
        items.scan_angle_nofire=np.array([])
        items.scan_nofire=np.array([])
        items.track_nofire=np.array([])
        items.instrument=df['instrument'][0]
        dt=df['datetime'][0]
        items.time_start_geo_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
        items.time_num=time_iso2num(items.time_start_geo_iso)
        items.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
        items.acq_time='%02d%02d' % (dt.hour,dt.minute)
        items.time_start_fire_iso=items.time_start_geo_iso
        items.time_end_geo_iso=items.time_start_geo_iso
        items.time_end_fire_iso=items.time_start_geo_iso
        items.file_geo=f1+f2
        items.file_fire=items.file_geo
        tt=df['datetime'][0].timetuple()
        id='VNPH_A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        items.prefix='VNPH'
        items.name='A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        ret.update({id: items})
    return ret
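
# Sketch of reading VIIRS 375m detections from FIRMS-style CSV files (commented
# out; assumes fire_archive_*.csv and/or fire_nrt_*.csv sit in the given path).
# The result is a Dict keyed by a per-day granule id.
#
#   bounds = (-113.9,-112.8,45.2,46.1)
#   dets = read_viirs375_files('.',bounds)
#   for g in dets:
#       print g, len(dets[g].lat_fire)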

def read_goes_files(files):
    """
    Read the files for GOES products - geolocation and fire data already included (OS)

    :param files: pair with geolocation and fire file names for GOES products
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on WINDOWS10.
    Lauren Hearn (lauren@robotlauren.com), 2018-10-16
    """
    h5g=h5py.File(files.geo,'r')
    ret=Dict([])
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ncf=nc.Dataset(files.fire,'r')
    ret.fire=np.array(ncf.variables['fire mask'][:])
    ret.lat_fire=np.array(ncf.variables['FP_latitude'][:])
    ret.lon_fire=np.array(ncf.variables['FP_longitude'][:])
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])
    sf=np.array(ncf.variables['FP_sample'][:])
    # Satellite information
    N=2500 # Number of columns (maximum sample number)
    h=35786. # Altitude of the satellite in km
    p=2. # Nadir pixel resolution in km
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])
    return ret

def read_data(files,file_metadata,bounds):
    """
    Read all the geolocation (03) and fire (14) files and, if necessary, the reflectance (09) files

    MODIS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/archive/mod14_v5_user_guide.pdf
    MOD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
    MYD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
    Where:
    YYYYDDD = year and Julian day (001-366) of data acquisition
    HHMM = hour and minute of data acquisition (approximate beginning time)
    vvv = version number
    yyyyddd = year and Julian day of data processing
    hhmmss = hour, minute, and second of data processing

    VIIRS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/vnp14_user_guide_v1.3.pdf
    VNP14IMG.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
    VNP14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
    Where:
    YYYYDDD = year and Julian day (001-366) of data acquisition
    HHMM = hour and minute of data acquisition (approximate beginning time)
    vvv = version number
    yyyyddd = year and Julian day of data processing
    hhmmss = hour, minute, and second of data processing

    :param files: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
    :param file_metadata: dictionary with file names as key and granules metadata as values
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return data: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print "read_data files=%s" % files
    data=Dict([])
    if files=='VIIRS375':
        data.update(read_viirs375_files('.',bounds))
    else:
        for f in files:
            print "read_data f=%s" % f
            if 'geo' in f.keys():
                f0=os.path.basename(f.geo)
            else:
                print 'ERROR: read_data cannot read files=%s, no geo file' % f
                sys.exit(1)
            if 'fire' in f.keys():
                f1=os.path.basename(f.fire)
            else:
                print 'ERROR: read_data cannot read files=%s, no fire file' % f
                sys.exit(1)
            boo=True
            if 'ref' in f.keys():
                f2=os.path.basename(f.ref)
                boo=False
            prefix = f0[:3]
            print 'prefix %s' % prefix
            if prefix != f1[:3]:
                print 'ERROR: the prefix of %s %s must coincide' % (f0,f1)
                continue
            m=f.geo.split('/')
            mm=m[-1].split('.')
            key=mm[1]+'_'+mm[2]
            id = prefix + '_' + key
            print "id " + id
            if prefix=="MOD" or prefix=="MYD":
                item=read_modis_files(f,bounds)
                item.instrument="MODIS"
            elif prefix=="VNP":
                item=read_viirs_files(f,bounds)
                item.instrument="VIIRS"
            elif prefix=="OR":
                item=read_goes_files(f)
                item.instrument="GOES"
            else:
                print 'ERROR: the prefix of %s %s must be MOD, MYD, VNP or OR' % (f0,f1)
                continue
            if not boo:
                if f2 in file_metadata.keys():
                    boo=True
            if (f0 in file_metadata.keys()) and (f1 in file_metadata.keys()) and boo:
                # connect the file back to metadata
                item.time_start_geo_iso=file_metadata[f0]["time_start"]
                item.time_num=time_iso2num(item.time_start_geo_iso)
                dt=datetime.datetime.strptime(item.time_start_geo_iso[0:18],'%Y-%m-%dT%H:%M:%S')
                item.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
                item.acq_time='%02d%02d' % (dt.hour,dt.minute)
                item.time_start_fire_iso=file_metadata[f1]["time_start"]
                item.time_end_geo_iso=file_metadata[f0]["time_end"]
                item.time_end_fire_iso=file_metadata[f1]["time_end"]
                item.file_geo=f0
                item.file_fire=f1
                if 'ref' in f.keys():
                    item.file_ref=f2
                    item.time_start_ref_iso=file_metadata[f2]["time_start"]
                    item.time_end_ref_iso=file_metadata[f2]["time_end"]
                item.prefix=prefix
                item.name=key
                data.update({id:item})
            else:
                print 'WARNING: file %s or %s not found in downloaded metadata, ignoring both' % (f0, f1)
                continue

    return data
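
# Sketch tying the pieces together for one product family (hypothetical and
# commented out): file_metadata comes from download(), files from group_all(),
# and the result keys look like 'MOD_A2018244_2055'.
#
#   granules = get_meta(bbox,time,maxg=100)
#   file_metadata = download(granules.MOD14)
#   file_metadata.update(download(granules.MOD03))
#   files = group_all(".")
#   data = read_data(files.MOD,file_metadata,bounds)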

def download(granules):
    """
    Download files as listed in the granules metadata

    :param granules: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path
    :return file_metadata: dictionary with file names as key and granules metadata as values

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    file_metadata = {}
    for granule in granules:
        #print json.dumps(granule,indent=4, separators=(',', ': '))
        url = granule['links'][0]['href']
        filename=os.path.basename(urlparse.urlsplit(url).path)
        file_metadata[filename]=granule

        # to store as object in memory (maybe not completely downloaded until accessed?)
        # with requests.Session() as s:
        #    data.append(s.get(url))

        # download - a minimal code without various error checking and corrective actions
        # see wrfxpy/src/ingest/downloader.py
        if os.path.isfile(filename):
            print 'file %s already downloaded' % filename
            continue
        try:
            chunk_size = 1024*1024
            s = 0
            print 'downloading %s as %s' % (url,filename)
            r = requests.get(url, stream=True)
            if r.status_code == 200:
                content_size = int(r.headers['Content-Length'])
                print 'downloading %s as %s size %sB' % (url, filename, content_size)
                with open(filename, 'wb') as f:
                    for chunk in r.iter_content(chunk_size):
                        f.write(chunk)
                        s = s + len(chunk)
                        print('downloaded %sB of %sB' % (s, content_size))
            else:
                print 'cannot connect to %s' % url
                print 'web request status code %s' % r.status_code
                print 'Make sure you have file ~/.netrc permission 600 with the content'
                print 'machine urs.earthdata.nasa.gov\nlogin yourusername\npassword yourpassword'
                sys.exit(1)
        except Exception as e:
            print 'download failed with error %s' % e
    return file_metadata
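
# Sketch of downloading the granules returned by a search (commented out;
# requires Earthdata credentials in ~/.netrc as the error message above notes).
#
#   grans = search_api("VNP14",bbox,time,maxg=50)
#   file_metadata = download(grans)
#   # file_metadata maps each downloaded file name to its granule metadata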

def download_GOES16(time):
    """
    Download the GOES16 data through rclone application

    :param time: tuple with the start and end times
    :return bucket: dictionary of all the data downloaded and all the GOES16 data downloaded in the same directory level

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-12
    """
    bucket=Dict({})
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    for date in dates:
        tt=date.timetuple()
        argT='%d/%03d/%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour)
        try:
            args=['rclone','copyto','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'.','-L']
            print 'running: '+' '.join(args)
            res=call(args)
            print 'goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT+' downloaded.'
            args=['rclone','ls','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'-L']
            out=check_output(args)
            bucket.update({argT : [o.split(' ')[2] for o in out.split('\n')[:-1]]})
        except Exception as e:
            print 'download failed with error %s' % e
    return bucket
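
# Sketch of pulling a few hours of GOES16 ABI-L2-MCMIPC files with rclone
# (commented out; assumes an rclone remote named goes16aws is configured as in
# the commands above).
#
#   time = ("2018-10-11T00:00:00Z","2018-10-11T06:00:00Z")
#   bucket = download_GOES16(time)
#   # bucket maps 'YYYY/DDD/HH' prefixes to the file names copied locally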

def retrieve_af_data(bbox,time,burned=False):
    """
    Retrieve the data in a bounding box coordinates and time interval and save it in a Matlab structure inside the out.mat Matlab file

    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :return data: dictionary with all the data and out.mat Matlab file with a Matlab structure of the dictionary

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """

    # Define settings
    lonmin,lonmax,latmin,latmax = bbox
    bounds=bbox
    bbox = [(lonmin,latmax),(lonmin,latmin),(lonmax,latmin),(lonmax,latmax),(lonmin,latmax)]
    maxg = 150

    print "bbox"
    print bbox
    print "time:"
    print time
    print "maxg:"
    print maxg

    # Get data
    granules=get_meta(bbox,time,maxg,burned=burned)
    #print 'metadata found:\n' + json.dumps(granules,indent=4, separators=(',', ': '))

    # Eliminating the NRT data (repeated always)
    nrt_elimination(granules)

    file_metadata = {}
    for k,g in granules.items():
        print 'Downloading %s files' % k
        sys.stdout.flush()
        file_metadata.update(download(g))
        #print "download g:"
        #print g

    print "download complete"

    # Group all files downloaded
    files=group_all(".")
    #print "group all files:"
    #print files

    # Generate data dictionary
    data=Dict({})
    data.update(read_data(files.MOD,file_metadata,bounds))
    data.update(read_data(files.MYD,file_metadata,bounds))
    data.update(read_data(files.VNP,file_metadata,bounds))
    #data.update(read_data('VIIRS375','',bounds))

    return data

def nrt_elimination(granules):
    """
    Cleaning all the NRT data which is repeated

    :param granules: Dictionary of granules products to clean up
    :return: It will update the granules dictionary

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-11-30
    """

    nlist=[g for g in granules['MOD14'] if g['data_center']=='LPDAAC_ECS']
    granules['MOD14']=nlist
    nlist=[g for g in granules['MYD14'] if g['data_center']=='LPDAAC_ECS']
    granules['MYD14']=nlist

def read_fire_mesh(filename):
    """
    Read necessary variables in the fire mesh of the wrfout file filename

    :param filename: wrfout file
    :return fxlon: lon coordinates in the fire mesh
    :return fxlat: lat coordinates in the fire mesh
    :return bbox: bounding box
    :return time_esmf: simulation times in ESMF format

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print 'opening ' + filename
    d = nc.Dataset(filename)
    m,n = d.variables['XLONG'][0,:,:].shape
    fm,fn = d.variables['FXLONG'][0,:,:].shape
    fm=fm-fm/(m+1) # dimensions corrected for extra strip
    fn=fn-fn/(n+1)
    fxlon = d.variables['FXLONG'][0,:fm,:fn] # masking extra strip
    fxlat = d.variables['FXLAT'][0,:fm,:fn]
    tign_g = d.variables['TIGN_G'][0,:fm,:fn]
    time_esmf = ''.join(d.variables['Times'][:][0]) # date string as YYYY-MM-DD_hh:mm:ss
    d.close()
    bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
    print 'min max longitude latitude %s' % bbox
    print 'time (ESMF) %s' % time_esmf

    plot = False
    if plot:
        plot_3D(fxlon,fxlat,tign_g)

    return fxlon,fxlat,bbox,time_esmf

def data2json(data,keys,dkeys,N):
    """
    Create a json dictionary from data dictionary

    :param data: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict({})
    for i,k in enumerate(keys):
        if isinstance(data[list(data)[0]][dkeys[i]],(list, tuple, np.ndarray)):
            dd=[np.reshape(data[d][dkeys[i]],np.prod(data[d][dkeys[i]].shape)) for d in list(data)]
            ret.update({k : np.concatenate(dd)})
        else:
            dd=[[data[d[1]][dkeys[i]]]*N[d[0]] for d in enumerate(list(data))]
            ret.update({k : np.concatenate(dd)})

    return ret

def sdata2json(sdata,keys,dkeys,N):
    """
    Create a json dictionary from sorted array of data dictionaries

    :param sdata: sorted array of data dictionaries with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-12-03
    """
    ret=Dict({'granules': [d[0] for d in sdata]})
    for i,k in enumerate(keys):
        dd = [d[1][dkeys[i]] if dkeys[i] in d[1] else None for d in sdata]
        if dd:
            if np.any([isinstance(d,(list, tuple, np.ndarray)) for d in dd]):
                out = [d if d is not None else np.array([]) for d in dd]
                ret.update({k : out})
            else:
                out = [[d[1][1][dkeys[i]]]*N[d[0]] if dkeys[i] in d[1][1] else [] for d in enumerate(sdata)]
                ret.update({k : out})

    return ret

def write_csv(d,bounds):
    """
    Write fire detections from data dictionary d to a CSV file

    :param d: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return: fire_detections.csv file with all the detections

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    df=pd.DataFrame(data=d)
    df=df[(df['longitude']>bounds[0]) & (df['longitude']<bounds[1]) & (df['latitude']>bounds[2]) & (df['latitude']<bounds[3])]
    df.to_csv('fire_detections.csv', encoding='utf-8', index=False)

def plot_3D(xx,yy,zz):
    """
    Plot surface of (xx,yy,zz) data

    :param xx: x arrays
    :param yy: y arrays
    :param zz: values at the (x,y) points
    :return: a plot show of the 3D data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-09-21
    """
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt
    from matplotlib import cm
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    surf = ax.plot_surface(xx,yy,zz,cmap=cm.coolwarm)
    plt.show()

def time_iso2num(time_iso):
    """
    Transform an iso time string to a time integer number of seconds since December 31 1969 at 17:00:00

    :param time_iso: string iso date
    :return s: integer number of seconds since December 31 1969 at 17:00:00

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:18],'%Y-%m-%dT%H:%M:%S')
    # seconds since December 31 1969 at 17:00:00
    s=time.mktime(time_datetime.timetuple())
    return s

def time_iso2datetime(time_iso):
    """
    Transform an iso time string to a datetime element

    :param time_iso: string iso date
    :return time_datetime: datetime element

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    time_datetime=datetime.datetime.strptime(time_iso[0:18],'%Y-%m-%dT%H:%M:%S')
    return time_datetime

def time_datetime2iso(time_datetime):
    """
    Transform a datetime element to iso time string

    :param time_datetime: datetime element
    :return time_iso: string iso date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    time_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (time_datetime.year,time_datetime.month,
                                                 time_datetime.day,time_datetime.hour,
                                                 time_datetime.minute,time_datetime.second)
    return time_iso

def time_num2iso(time_num):
    """
    Transform a time integer number of seconds since December 31 1969 at 17:00:00 to an iso time string

    :param time_num: integer number of seconds since December 31 1969 at 17:00:00
    :return date: time string in ISO date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    dt=datetime.datetime.fromtimestamp(time_num)
    # seconds since December 31 1969 at 17:00:00
    date='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
    return date
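
# Round-trip sketch for the time helpers above (commented out; the numeric
# value is local-time based, as noted in the docstrings).
#
#   t_iso = '2018-09-17T00:00:00Z'
#   t_num = time_iso2num(t_iso)                    # seconds as a float
#   print time_num2iso(t_num)                      # back to the same ISO string
#   print time_datetime2iso(time_iso2datetime(t_iso))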

def pixel_dim(sample,N,h,p,a=None):
    """
    Computes pixel dimensions (along-scan and along-track pixel sizes)

    :param sample: array of integers with the column number (sample variable in files)
    :param N: scalar, total number of pixels in each row of the image swath
    :param h: scalar, altitude of the satellite in km
    :param p: scalar, pixel nadir resolution in km
    :param a: array of floats of the size of p with the angles where the resolution changes
    :return theta: scan angle in radians
    :return scan: along-scan pixel size in km
    :return track: along-track pixel size in km

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    Re=6378 # approximation of the radius of the Earth in km
    r=Re+h
    M=(N-1)*0.5
    s=np.arctan(p/h) # trigonometry (rad/sample)
    if isinstance(p,(list, tuple, np.ndarray)):
        Ns=np.array([int((a[k]-a[k-1])/s[k-1]) for k in range(1,len(a)-1)])
        Ns=np.append(Ns,int(M-Ns.sum()))
        theta=s[0]*(sample-M)
        scan=Re*s[0]*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s[0]*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
        for k in range(1,len(Ns)):
            p=sample<=M-Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M-Ns[0:k].sum()))-(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
            p=sample>=M+Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M+Ns[0:k].sum()))+(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
    else:
        theta=s*(sample-M)
        scan=Re*s*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
    return (theta,scan,track)
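
# Worked example for pixel_dim with MODIS-like constants (commented out; the
# sample values are illustrative): near nadir (sample ~ (N-1)/2) the scan angle
# is ~0 rad and the along-scan and along-track sizes are close to the nadir
# resolution p, while at the swath edges both grow considerably.
#
#   sample = np.array([0,676,1353])   # edge, near nadir, edge of a 1354-sample row
#   theta,scan,track = pixel_dim(sample,1354,705.,1.)
#   print theta, scan, track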

def copyto(partial_path,kml):
    """
    Copy information in partial_path to kml

    :param partial_path: path to a partial KML file
    :param kml: KML object where to write to
    :return: information from partial_path into kml

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    with open(partial_path,'r') as part:
        for line in part:
            kml.write(line)

def json2kml(d,kml_path,bounds,prods,opt='granule'):
    """
    Creates a KML file kml_path from a dictionary d

    :param d: dictionary with all the fire detection information to create the KML file
    :param kml_path: path in where the KML file is going to be written
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :param prods: dictionary mapping product codes ('AF', 'FRP', 'NF') to the folder names to use in the KML
    :param opt: how to group the placemarks: 'granule' (default), 'dates', or 'pixels'
    :return: a KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """

    if 'latitude' in d:
        if not isinstance(d['latitude'][0],(list, tuple, np.ndarray)):
            if opt=='dates':
                ind=[i[0] for i in sorted(enumerate(d.acq_date), key=lambda x:x[1])]
                L=[len(list(grp)) for k, grp in groupby(d['acq_date'][ind])]
                L.insert(0,0)
                ll=[sum(L[0:k+1]) for k in range(len(L))]
                for v in list(d):
                    sort=[d[v][i] for i in ind]
                    d[v]=[sort[ll[k]:ll[k+1]] for k in range(len(ll)-1)]
            else:
                for v in list(d):
                    d[v]=[d[v]]
                opt='pixels'

        frp_style={-1:'modis_frp_no_data',40:'modis_frp_gt_400'}
        for i in range(0,40):
            frp_style[i]='modis_frp_%s_to_%s' % (i*10, i*10+10)

        with open(kml_path,'w') as kml:

            copyto('kmls/partial1.kml',kml)

            r = 6378 # Earth radius
            km_lat = 180/(np.pi*r) # 1km in degrees latitude

            for prod in prods:

                kml.write('<Folder>\n')
                kml.write('<name>%s</name>\n' % prods[prod])

                if prod == 'FRP':
                    copyto('kmls/partial2.kml',kml)

                for t in range(len(d['latitude'])):
                    lats=np.array(d['latitude'][t]).astype(float)
                    lons=np.array(d['longitude'][t]).astype(float)
                    ll=np.logical_and(np.logical_and(np.logical_and(lons>bounds[0],lons<bounds[1]),lats>bounds[2]),lats<bounds[3])
                    latitude=lats[ll]
                    longitude=lons[ll]
                    NN=len(latitude)
                    if NN > 0:
                        acq_date=np.array(d['acq_date'][t])[ll]
                        acq_time=np.array(d['acq_time'][t])[ll]
                        try:
                            satellite=np.array(d['satellite'][t])[ll]
                        except:
                            satellite=np.array(['Not available']*NN)
                        try:
                            instrument=np.array(d['instrument'][t])[ll]
                        except:
                            instrument=np.array(['Not available']*NN)
                        try:
                            confidence=np.array(d['confidence'][t])[ll].astype(float)
                        except:
                            confidence=np.array(np.zeros(NN)).astype(float)
                        try:
                            frps=np.array(d['frp'][t])[ll].astype(float)
                        except:
                            frps=np.array(np.zeros(NN)).astype(float)
                        try:
                            angles=np.array(d['scan_angle'][t])[ll].astype(float)
                        except:
                            angles=np.array(['Not available']*NN)
                        try:
                            scans=np.array(d['scan'][t])[ll].astype(float)
                        except:
                            scans=np.ones(NN).astype(float)
                        try:
                            tracks=np.array(d['track'][t])[ll].astype(float)
                        except:
                            tracks=np.ones(NN).astype(float)

                        kml.write('<Folder>\n')
                        if opt=='dates':
                            kml.write('<name>%s</name>\n' % acq_date[0])
                        elif opt=='granule':
                            kml.write('<name>%s</name>\n' % d['granules'][t])
                        else:
                            kml.write('<name>Pixels</name>\n')

                        for p in range(NN):
                            lat=latitude[p]
                            lon=longitude[p]
                            conf=confidence[p]
                            frp=frps[p]
                            angle=angles[p]
                            scan=scans[p]
                            track=tracks[p]
                            timestamp=acq_date[p] + 'T' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + 'Z'
                            timedescr=acq_date[p] + ' ' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ' UTC'

                            if prod == 'NF':
                                kml.write('<Placemark>\n<name>Ground detection square</name>\n')
                                kml.write('<description>\nlongitude: %s\n' % lon
                                    + 'latitude: %s\n' % lat
                                    + 'time: %s\n' % timedescr
                                    + 'satellite: %s\n' % satellite[p]
                                    + 'instrument: %s\n' % instrument[p]
                                    + 'scan angle: %s\n' % angle
                                    + 'along-scan: %s\n' % scan
                                    + 'along-track: %s\n' % track
                                    + '</description>\n')
                            else:
                                kml.write('<Placemark>\n<name>Fire detection square</name>\n')
                                kml.write('<description>\nlongitude: %s\n' % lon
                                    + 'latitude: %s\n' % lat
                                    + 'time: %s\n' % timedescr
                                    + 'satellite: %s\n' % satellite[p]
                                    + 'instrument: %s\n' % instrument[p]
                                    + 'confidence: %s\n' % conf
                                    + 'FRP: %s\n' % frp
                                    + 'scan angle: %s\n' % angle
                                    + 'along-scan: %s\n' % scan
                                    + 'along-track: %s\n' % track
                                    + '</description>\n')
                            kml.write('<TimeStamp><when>%s</when></TimeStamp>\n' % timestamp)
                            if prod == 'AF':
                                if conf < 30:
                                    kml.write('<styleUrl> modis_conf_low </styleUrl>\n')
                                elif conf < 80:
                                    kml.write('<styleUrl> modis_conf_med </styleUrl>\n')
                                else:
                                    kml.write('<styleUrl> modis_conf_high </styleUrl>\n')
                            elif prod == 'FRP':
                                frpx = min(40,np.ceil(frp/10.)-1)
                                kml.write('<styleUrl> %s </styleUrl>\n' % frp_style[frpx] )
                            elif prod == 'NF':
                                kml.write('<styleUrl> no_fire </styleUrl>\n')

                            kml.write('<Polygon>\n<outerBoundaryIs>\n<LinearRing>\n<coordinates>\n')

                            km_lon=km_lat/np.cos(lat*np.pi/180) # 1 km in longitude

                            sq_track_size_km=track
                            sq2_lat=km_lat * sq_track_size_km/2
                            sq_scan_size_km=scan
                            sq2_lon=km_lon * sq_scan_size_km/2

                            kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
                            kml.write('%s,%s,0\n' % (lon - sq2_lon, lat + sq2_lat))
                            kml.write('%s,%s,0\n' % (lon + sq2_lon, lat + sq2_lat))
                            kml.write('%s,%s,0\n' % (lon + sq2_lon, lat - sq2_lat))
                            kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))

                            kml.write('</coordinates>\n</LinearRing>\n</outerBoundaryIs>\n</Polygon>\n</Placemark>\n')
                        kml.write('</Folder>\n')

                kml.write('</Folder>\n')

            kml.write('</Document>\n</kml>\n')

        print 'Created file %s' % kml_path
    else:
        print 'No detections to be saved in %s' % kml_path
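
# Sketch of writing a KML from a detection dictionary (commented out; assumes
# d was built with data2json or sdata2json, that kmls/partial1.kml and
# kmls/partial2.kml exist as referenced above, and that the folder labels in
# prods are illustrative choices).
#
#   prods = {'AF':'Active Fires','FRP':'Fire Radiative Power','NF':'No Fire'}
#   json2kml(d,'fire_detections.kml',bounds,prods,opt='granule')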

def burned_algorithm(data):
    """
    Computes mask of burned scar pixels

    :param data: data dictionary with all the necessary bands M7, M8, M10 and M11
    :return C: Mask of burned scar pixels

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2019-01-03
    """
    # Parameters
    RthSub=0.05
    Rth=0.81
    M07UB=0.19
    M08LB=0.00
    M08UB=0.28
    M10LB=0.07
    M10UB=1.00
    M11LB=0.05
    eps=1e-30
    # Bands
    M7=data.M7
    M8=data.M8
    M10=data.M10
    M11=data.M11
    # Eq 1a
    M=(M8.astype(float)-RthSub)/(M11.astype(float)+eps)
    C1=np.logical_and(M>0,M<Rth)
    # Eq 1b
    C2=np.logical_and(M8>M08LB,M8<M08UB)
    # Eq 1c
    C3=M7<M07UB
    # Eq 1d
    C4=M11>M11LB
    # Eq 1e
    C5=np.logical_and(M10>M10LB,M10<M10UB)
    # All the conditions at the same time
    C=np.logical_and(np.logical_and(np.logical_and(np.logical_and(C1,C2),C3),C4),C5)
    return C
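
# Minimal sketch of burned_algorithm on synthetic reflectance bands (commented
# out; the random values only exercise the thresholds, they are not physical).
#
#   bands = Dict({})
#   bands.M7 = np.random.rand(10,10)*0.2
#   bands.M8 = np.random.rand(10,10)*0.2
#   bands.M10 = 0.07 + np.random.rand(10,10)*0.2
#   bands.M11 = 0.05 + np.random.rand(10,10)*0.2
#   mask = burned_algorithm(bands)
#   print '%d pixels flagged as burned scar' % mask.sum()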

if __name__ == "__main__":
    bbox=[-132.86966,-102.0868788,44.002495,66.281204]
    time = ("2012-09-11T00:00:00Z", "2012-09-12T00:00:00Z")
    data=retrieve_af_data(bbox,time)
    # Save the data dictionary into a matlab structure file out.mat
    sio.savemat('out.mat', mdict=data)