4 # Driver python code to estimate fire arrival time using L2 Active Fire Satellite Data
7 # If a 'data' satellite granules file and/or a 'result' preprocessed data file exists, no input is necessary.
8 # Otherwise: python process.py coord start_time days
9 # coord - path to a simulation wrfout file (containing FXLON and FXLAT coordinates)
10 # OR coordinates bounding box, floats separated by commas: min_lon,max_lon,min_lat,max_lat
11 # start_time - date string with format: YYYYMMDDHHMMSS
12 # days - length of the simulation in decimal days
15 # 1) Data acquisition: Methods from JPSSD.py, infrared_perimeters.py, and forecast.py files.
16 # *) Find granules overlapping fire domain and time interval.
17 # *) Download Active Satellite Data.
18 # *) Read and process Active Satellite Data files.
19 # *) Process ignitions.
20 # *) Read and process infrared perimeters files.
21 # *) Read and process forecast files.
22 # *) Save observed data information in 'data' file.
23 # 2) Visualization of input data: Methods from interpolation.py, JPSSD.py, and plot_pixels.py files.
24 # *) Write KML file 'fire_detections.kml' with fire detection pixels (satellite, ignitions and perimeters).
25 # *) Write KMZ file 'googlearth.kmz' with saved images and KML file of observed data (set plot_observed = True).
26 # 3) Pre-process data for SVM:
27 # a) Method process_detections from setup.py file and method preprocess_result_svm from svm.py file (if cloud = False):
28 # *) Sort all the granules from all the sources in time order.
29 # *) Construct upper and lower bounds using a mask to prevent clear ground after fire.
30 # *) Save results in 'result' and 'result.mat' files.
31 # *) Preprocess bounds as an input of Support Vector Machine method.
32 # b) Method preprocess_data_svm from svm.py file (if cloud = True):
33 # *) Define save ground and fire detections as 3D space-time cloud of points.
34 # *) Remove density of detections to be able to run SVM.
35 # *) Save results in 'result' file.
36 # 4) Fire arrival time estimation using SVM: Method SVM3 from svm.py file.
37 # *) Run Support Vector Machine method.
38 # *) Save results in 'svm' and 'svm.mat' files.
39 # 5) Visualization of the SVM results:
40 # *) Google Earth: Methods from contline.py and contour2kml.py files.
41 # *) Construct a smooth contour line representation of the fire arrival time.
42 # *) Write the contour lines in a KML file called 'perimeters_svm.kml'.
43 # *) Matlab: Script plot_svm.m using svm.mat as an input.
44 # *) 3D scatter plot of save ground and fire detections (green and red respectively).
45 # *) 3D mesh plot of the fire arrival time.
48 # - 'data': binary file containing satellite granules information.
49 # - 'result.mat': matlab file containing upper and lower bounds (U and L).
50 # - 'result': binary file containing upper and lower bounds (U and L) or data points for SVM.
51 # - 'svm.mat': matlab file containing the solution to the Support Vector Machine execution.
52 # Contains estimation of the fire arrival time in the fire mesh in tign_g_interp variable.
53 # - 'svm': binary file containing the solution to the Support Vector Machine execution explained above.
54 # - 'fire_detections.kml': KML file with fire detection pixels (satellite, ignitions and perimeters).
55 # - 'googlearth.kmz': KMZ file with saved images and KML file of observed data.
56 # - 'perimeters_svm.kml': KML file with perimeters from estimation of the fire arrival time using SVM.
58 # Developed in Python 2.7.15 :: Anaconda, Inc.
59 # Angel Farguell (angel.farguell@gmail.com), 2019-04-29
60 #---------------------------------------------------------------------------------------------------------------------
# --- project and scientific imports --------------------------------------
# NOTE(review): this extract is missing several import lines (e.g. the ones
# binding sys, os, re, datetime as dt, numpy as np and saveload as sl,
# all of which are used below) — confirm against the full file.
from JPSSD import read_fire_mesh, retrieve_af_data, sdata2json, json2kml, time_iso2num
from interpolation import sort_dates
from setup import process_detections
from infrared_perimeters import process_ignitions, process_infrared_perimeters
from forecast import process_forecast_wrfout, process_forecast_slides_wrfout
from svm import preprocess_result_svm, preprocess_data_svm, SVM3
from mpl_toolkits.basemap import Basemap
from plot_pixels import basemap_scatter_mercator, create_kml
from contline import get_contour_verts
from contour2kml import contour2kml
from utils import Dict, load_cfg
from scipy.io import loadmat, savemat
from scipy import interpolate
# Show the loaded configuration (cfg presumably comes from load_cfg(),
# defined in lines missing from this extract — confirm in the full file).
print 'configuration: ', cfg

# File names of the intermediate and final products of the driver.
# NOTE(review): original lines 88-89 (probably svm_file and cloud_file,
# both referenced later) are missing from this extract.
satellite_file = 'data'                # binary file with satellite granules
fire_file = 'fire_detections.kml'      # KML with fire detection pixels
gearth_file = 'googlearth.kmz'         # KMZ with PNG overlays for Google Earth
bounds_file = 'result.mat'             # MATLAB file with upper/lower bounds
contour_file = 'perimeters_svm.kml'    # KML with fire arrival time contours
def exist(path):
    """Return True when *path* exists and is readable by the current user.

    NOTE(review): the `def` header (original line 92) was severed by the
    extraction; it is restored here — the name is certain from the call
    sites below (exist(satellite_file), ...) and the parameter name from
    the body.
    """
    return (os.path.exists(path) and os.access(path,os.R_OK))
# Existence flags for every input/output file, so each processing stage
# can be skipped when its product is already on disk.
# NOTE(review): cloud_file, perim_path and forecast_path are defined in
# configuration lines missing from this extract — confirm in the full file.
satellite_exists = exist(satellite_file)
fire_exists = exist(fire_file)
gearth_exists = exist(gearth_file)
bounds_exists = exist(bounds_file)
cloud_exists = exist(cloud_file)
perim_exists = exist(perim_path)
forecast_exists = exist(forecast_path)
# Print usage (and presumably abort) when no command-line arguments were
# given and no previously saved product ('result', 'data' or cloud file)
# exists to resume from.
if len(sys.argv) != 4 and (not bounds_exists) and (not satellite_exists) and (not cloud_exists):
    print 'Error: python %s wrfout start_time days' % sys.argv[0]
    print ' * wrfout - string, wrfout file of WRF-SFIRE simulation'
    print ' OR coordinates bounding box - floats separated by comas:'
    print ' min_lon,max_lon,min_lat,max_lat'
    print ' * start_time - string, YYYYMMDDHHMMSS where:'
    # NOTE(review): original lines 109-114 (detailed start_time field help)
    # and the exit statement that presumably follows are missing from this
    # extract.
    print ' * days - float, number of days of simulation (can be less than a day)'
    print 'OR link an existent file %s or %s' % (satellite_file,bounds_file)
# Fast path 1: a 'result.mat' bounds file already exists (pixel-bound mode);
# reload it instead of reprocessing the satellite data.
# NOTE(review): the boolean `cloud` is defined in configuration lines
# missing from this extract.
if bounds_exists and not cloud:
    print '>> File %s already created! Skipping all satellite processing <<' % bounds_file
    print 'Loading from %s...' % bounds_file
    result = loadmat(bounds_file)
    # Taking necessary variables from result dictionary
    scale = result['time_scale_num'][0]                # numeric time scale
    time_num_granules = result['time_num_granules'][0] # numeric time of each granule
    time_num_interval = result['time_num'][0]          # numeric simulation interval
    lon = np.array(result['fxlon']).astype(float)
    lat = np.array(result['fxlat']).astype(float)
    # original (pre-coarsening) fire mesh, when it was stored
    if 'ofxlon' in result.keys():
        fxlon = result['ofxlon']
        fxlat = result['ofxlat']
# Fast path 2: a cloud-of-points 'result' file already exists (cloud mode).
elif cloud_exists and cloud:
    print '>> File %s already created! Skipping all satellite processing <<' % cloud_file
    print 'Loading from %s...' % cloud_file
    lon,lat,X,y,c,time_num_interval,time_num_granules,scale,fxlon,fxlat = sl.load(cloud_file)
# NOTE(review): the branch header for this fragment (presumably
# `elif satellite_exists:`, original lines 139-140) is missing from this
# extract; the indentation below is reconstructed.
    print '>> File %s already created! Skipping satellite retrieval <<' % satellite_file
    print 'Loading from %s...' % satellite_file
    data,fxlon,fxlat,time_num = sl.load(satellite_file)
    # bounding box of the fire mesh: [min_lon,max_lon,min_lat,max_lat]
    bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
# NOTE(review): the enclosing `else:` (original line 145, the
# no-saved-data path) is missing from this extract; indentation below is
# reconstructed.
    argument = sys.argv[1].split(',')
    # More than one comma-separated value: a bounding box was given, so
    # build a regular lon/lat mesh over it.
    if len(argument) > 1:
        print '>> Creating the fire mesh <<'
        # NOTE(review): dlon/dlat (mesh steps) are defined in original
        # lines 149-150, missing from this extract.
        bounds = map(float,argument)
        fxlon,fxlat = np.meshgrid(np.arange(bounds[0],bounds[1],dlon),
                                  np.arange(bounds[2],bounds[3],dlat))
        bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
        print 'min max longitude latitude %s' % bbox
    # NOTE(review): this fragment is the alternative branch (its `else:`,
    # original line 156, is missing): a single argument is a wrfout path,
    # so the mesh is read from the simulation file.
        print '>> Reading the fire mesh <<'
        fxlon,fxlat,bbox,time_esmf = read_fire_mesh(argument[0])
# NOTE(review): fragment inside the no-saved-data branch; several guard
# lines (original 171-172, 176-177, 184-186) are missing from this
# extract, so the ignition/perimeter processing below is probably
# conditional in the full file.
    print '>> Retrieving satellite data <<'
    # converting times to ISO
    dti = dt.datetime.strptime(sys.argv[2],'%Y%m%d%H%M%S')
    time_start_iso = '%d-%02d-%02dT%02d:%02d:%02dZ' % (dti.year,dti.month,dti.day,dti.hour,dti.minute,dti.second)
    dtf = dti+dt.timedelta(days=float(sys.argv[3]))
    time_final_iso = '%d-%02d-%02dT%02d:%02d:%02dZ' % (dtf.year,dtf.month,dtf.day,dtf.hour,dtf.minute,dtf.second)
    time_iso = (time_start_iso,time_final_iso)
    # download and read all granules intersecting bbox and the time window
    # NOTE(review): appkey and igns are defined in configuration lines
    # missing from this extract.
    data = retrieve_af_data(bbox,time_iso,appkey=appkey)
    print '>> Creating ignitions <<'
    data.update(process_ignitions(igns,bbox,time=time_iso))
    print '>> Getting perimeters from %s <<' % perim_path
    data.update(process_infrared_perimeters(perim_path,bbox,time=time_iso))
    # add forecast slides as extra detections (pixel-bound mode only)
    if forecast_exists and not cloud:
        print '>> Getting forecast from %s <<' % forecast_path
        data.update(process_forecast_slides_wrfout(forecast_path,bbox,time=time_iso))
    print '>> Saving satellite data file (data) <<'
    # numeric times of the interval endpoints
    time_num = map(time_iso2num,time_iso)
    sl.save((data,fxlon,fxlat,time_num),satellite_file)
    print 'data file saved correctly!'
    # NOTE(review): error branch — its guard (probably `if not data:` /
    # `else:`, original lines 192-193) and the exit call that presumably
    # follows are missing from this extract.
    print 'ERROR: No data obtained...'
# Generate the KML/KMZ visualizations when they are missing (the KMZ only
# when plot_observed is enabled; plot_observed comes from configuration
# lines missing from this extract).
if (not fire_exists) or (not gearth_exists and plot_observed):
    print '>> Generating KML of fire and ground detections <<'
    # sort the granules by dates
    sdata=sort_dates(data)
    # NOTE(review): the guard for the next line (probably `if fire_exists:`,
    # original line 203) is missing from this extract.
    print '>> File %s already created! <<' % fire_file
    # NOTE(review): an `else:` (original line 205) is missing before the
    # KML-writing code below.
    # writing fire detections file
    print 'writting KML with fire detections'
    # JSON field names expected by json2kml ...
    keys = ['latitude','longitude','brightness','scan','track','acq_date','acq_time','satellite','instrument','confidence','bright_t31','frp','scan_angle']
    # ... and the corresponding keys in the granule dictionaries
    dkeys = ['lat_fire','lon_fire','brig_fire','scan_fire','track_fire','acq_date','acq_time','sat_fire','instrument','conf_fire','t31_fire','frp_fire','scan_angle_fire']
    # product labels for the KML folders
    prods = {'AF':'Active Fires','FRP':'Fire Radiative Power','TF':'Temporal Fire coloring','AFN':'Active Fires New'}
    # filter out perimeter, ignition, and forecast information (too many pixels)
    regex = re.compile(r'^((?!(PER_A|IGN_A|FOR_A)).)*$')
    nsdata = [d for d in sdata if regex.match(d[0])]
    # compute number of elements for each granule
    N = [len(d[1]['lat_fire']) if 'lat_fire' in d[1] else 0 for d in nsdata]
    # transform dictionary notation to json notation
    # NOTE(review): the local name `json` shadows the stdlib json module.
    json = sdata2json(nsdata,keys,dkeys,N)
    # write KML file from json notation
    json2kml(json,fire_file,bbox,prods,minconf=minconf)
    # NOTE(review): the opening of this if/elif chain (probably
    # `if gearth_exists:`, original lines 220-221) is missing from this
    # extract.
    print '>> File %s already created! <<' % gearth_file
elif not plot_observed:
    print '>> Creation of %s skipped (set plot_observed = True) <<' % gearth_file
# NOTE(review): an `else:` (original line 225) is missing before the KMZ
# generation code below; the kmld list is initialized in missing lines
# 230-231.
    print '>> Generating KMZ with png overlays for Google Earth <<'
    # creating KMZ overlay of each information
    # create the Basemap to plot into
    bmap = Basemap(projection='merc',llcrnrlat=bbox[2], urcrnrlat=bbox[3], llcrnrlon=bbox[0], urcrnrlon=bbox[1])
    # for each observed information
    for idx, g in enumerate(sdata):
        # one PNG overlay per granule, named after the granule key
        pngfile = g[0]+'.png'
        # create timestamp for KML
        timestamp = g[1].acq_date + 'T' + g[1].acq_time[0:2] + ':' + g[1].acq_time[2:4] + 'Z'
        if not exist(pngfile):
            # plot a scatter basemap
            raster_png_data,corner_coords = basemap_scatter_mercator(g[1],bbox,bmap,only_fire)
            # image corner coordinates derived from the rendered raster
            bounds = (corner_coords[0][0],corner_coords[1][0],corner_coords[0][1],corner_coords[2][1])
            with open(pngfile, 'w') as f:
                f.write(raster_png_data)
            print '> File %s saved.' % g[0]
            # NOTE(review): an `else:` (original line 247) is missing here —
            # this print belongs to the already-created branch.
            print '> File %s already created.' % g[0]
        # append dictionary information for the KML creation
        # NOTE(review): 'bounds' is set to the full bbox while the
        # corner-derived `bounds` computed above is unused — confirm this
        # is intended in the full file.
        kmld.append(Dict({'name': g[0], 'png_file': pngfile, 'bounds': bbox, 'time': timestamp}))
    create_kml(kmld,'./doc.kml')
    # create KMZ with all the PNGs included
    os.system('zip -r %s doc.kml *_A*_*.png' % gearth_file)
    print 'Created file %s' % gearth_file
    # eliminate images and KML after creation of KMZ
    os.system('rm doc.kml *_A*_*.png')
print '>> Processing satellite data <<'
# NOTE(review): original lines 262-264 are missing — the code below is
# presumably inside an `if cloud:` branch (cloud-of-points preprocessing).
    # coarsen the fire mesh so its largest dimension stays within maxsize
    # (maxsize comes from configuration lines missing from this extract)
    coarsening = np.int(1+np.max(fxlon.shape)/maxsize)
    lon = fxlon[::coarsening,::coarsening]
    lat = fxlat[::coarsening,::coarsening]
    # sort the granules by date and collect their numeric times
    sdata = sort_dates(data)
    time_num_granules = [ dd[1]['time_num'] for dd in sdata ]
    time_num_interval = time_num
    # numeric time scale: margin before the start and after the end
    scale = [time_num[0]-0.5*(time_num[1]-time_num[0]),time_num[1]+2*(time_num[1]-time_num[0])]
    # build the 3D space-time cloud of points for the SVM
    X,y,c = preprocess_data_svm(data,scale,minconf=minconf)
    # NOTE(review): the guard for the forecast block (probably
    # `if forecast_exists:`, original lines 273-274) is missing.
    print '>> Getting forecast from %s <<' % forecast_path
    Xf,yf,cf = process_forecast_wrfout(forecast_path,scale,time_num_granules)
    # append the forecast points to the detection cloud
    X = np.concatenate((X,Xf))
    y = np.concatenate((y,yf))
    c = np.concatenate((c,cf))
    sl.save((lon,lat,X,y,c,time_num_interval,time_num_granules,scale,fxlon,fxlat),'result')
    # NOTE(review): this fragment is the pixel-bound branch; the two
    # process_detections calls below are alternatives (with and without
    # maxsize) whose if/else headers (original lines 281-288) are missing
    # from this extract.
    result = process_detections(data,fxlon,fxlat,time_num,bbox,maxsize)
    result = process_detections(data,fxlon,fxlat,time_num,bbox)
    # Taking necessary variables from result dictionary
    scale = result['time_scale_num']
    time_num_granules = result['time_num_granules']
    time_num_interval = result['time_num']
    lon = np.array(result['fxlon']).astype(float)
    lat = np.array(result['fxlat']).astype(float)
    # U/L: upper and lower bounds of the fire arrival time; T: times
    # array from the result file (exact semantics defined in setup.py)
    U = np.array(result['U']).astype(float)
    L = np.array(result['L']).astype(float)
    T = np.array(result['T']).astype(float)
    # confidence arrays, when present in the result file
    if 'C' in result.keys():
        conf = np.array(result['C'])
        if 'Cg' in result.keys():
            conf = (np.array(result['Cg']),conf)
        # NOTE(review): an `else:` (original line 306) is missing before the
        # fallback below (constant ground confidence of 10).
        conf = (10*np.ones(L.shape),conf)
print '>> Preprocessing the data <<'
    # NOTE(review): guard lines (original 313, 315-316) are missing — this
    # bounds preprocessing presumably runs only in pixel-bound mode.
    X,y,c = preprocess_result_svm(lon,lat,U,L,T,scale,time_num_granules,C=conf)

print '>> Running Support Vector Machine <<'
    # NOTE(review): original lines 318-319 and 322-326 are missing; the two
    # kgam assignments below are most likely alternative branches of an
    # if/else (e.g. dynamic vs. static penalization) — confirm in the full
    # file.
    C = np.power(c,3)/100.
    kgam = np.sqrt(len(y))/12.
    kgam = np.sqrt(len(y))/2.
tscale = 24*3600 # scale from seconds to days
it = (np.array(time_num_interval)-scale[0])/tscale # time interval in days
# run the SVM; per the usage below, F[0]/F[1] are the output lon/lat grids
# and F[2] the fire arrival time in scaled units
F = SVM3(X,y,C=C,kgam=kgam,fire_grid=(lon,lat),it=it,**svm_settings)
print '>> Saving the results <<'
# Fire arrival time in seconds from the beginning of the simulation
tign_g = np.array(F[2])*float(tscale)+scale[0]-time_num_interval[0]
# Creating the dictionary with the results
svm = {'dxlon': np.array(lon), 'dxlat': np.array(lat),
       'X': np.array(X), 'y': np.array(y), 'c': np.array(c),
       'fxlon': np.array(F[0]), 'fxlat': np.array(F[1]),
       'Z': np.array(F[2]), 'tign_g': np.array(tign_g),
       'C': C, 'kgam': kgam, 'tscale': tscale,
       'time_num_granules': time_num_granules,
       'time_scale_num': scale, 'time_num': time_num_interval}
# NOTE(review): original line 345 (probably a guard such as `if notcloud`)
# is missing before the bounds update below; U and L only exist in the
# pixel-bound path.
svm.update({'U': np.array(U)/tscale, 'L': np.array(L)/tscale})
# Interpolation of tign_g
# NOTE(review): original lines 349-350 (probably `if fire_interp:` and a
# `try:`) and the timing lines 356-357 (t_interp_1/t_interp_2) are missing
# from this extract.
    print '>> Interpolating the results in the fire mesh'
    # interpolate the fire arrival time from the SVM output grid back onto
    # the original fire mesh (fxlon,fxlat)
    points = np.c_[np.ravel(F[0]),np.ravel(F[1])]
    values = np.ravel(tign_g)
    tign_g_interp = interpolate.griddata(points,values,(fxlon,fxlat))
    # replace NaNs (points outside the convex hull of the data) with the
    # maximum arrival time
    with np.errstate(invalid='ignore'):
        tign_g_interp[np.isnan(tign_g_interp)] = np.nanmax(tign_g_interp)
    print 'elapsed time: %ss.' % str(abs(t_interp_2-t_interp_1))
    svm.update({'fxlon_interp': np.array(fxlon),
                'fxlat_interp': np.array(fxlat),
                'tign_g_interp': np.array(tign_g_interp)})
    # NOTE(review): the two warnings below belong to a missing except/else
    # branch (original line 364).
    print 'Warning: longitudes and latitudes from the original grid are not defined...'
    print '%s file is not compatible with fire_interp=True! Run again the experiment from the begining.' % bounds_file

# Save resulting file
# NOTE(review): svm_file is defined in configuration lines missing from
# this extract.
savemat(svm_file, mdict=svm)
print 'The results are saved in svm.mat file'
print '>> Computing contour lines of the fire arrival time <<'
print 'Computing the contours...'
# NOTE(review): original line 375 (probably `try:`) is missing; the warning
# prints at the end belong to its missing `except` (original line 384).
    # Granules numeric times
    Z = np.array(F[2])*tscale+scale[0]
    # Creating contour lines
    contour_data = get_contour_verts(F[0], F[1], Z, time_num_granules, contour_dt_hours=6, contour_dt_init=6, contour_dt_final=6)
    print 'Creating the KML file...'
    # Creating the KML file
    contour2kml(contour_data,contour_file)
    print 'The resulting contour lines are saved in perimeters_svm.kml file'
print 'Warning: contour creation problem'
print 'Run: python contlinesvm.py'
# Total wall-clock time of the whole driver.
# NOTE(review): t_init/t_final come from timing lines missing from this
# extract — confirm they wrap the whole script in the full file.
print 'Elapsed time for all the process: %ss.' % str(abs(t_final-t_init))