4 # Driver python code to estimate fire arrival time using Active Fire Satellite Data
7 # If a 'data' satellite granules file and/or 'result' bounds file already exists, no input is necessary.
8 # Otherwise: python process.py coord start_time days
9 # coord - path to a simulation wrfout file (containing FXLON and FXLAT coordinates)
10 # OR coordinates bounding box, floats separated by comas: min_lon,max_lon,min_lat,max_lat
11 # start_time - date string with format: YYYYMMDDHHMMSS
12 # days - length of the simulation in decimal days
15 # 1) Data acquisition: Methods from JPSSD.py and infrared_perimeters.py file.
16 # *) Find granules overlapping fire domain and time interval.
17 # *) Download Active Satellite Data.
18 # *) Read and process Active Satellite Data files.
19 # *) Process ignitions.
20 # *) Read and process infrared perimeters files.
21 # *) Save observed data information in 'data' file.
22 # 2) Visualization of input data: Methods from interpolation.py, JPSSD.py, and plot_pixels.py files.
23 # *) Write KML file 'fire_detections.kml' with fire detection pixels (satellite, ignitions and perimeters).
24 # *) Write KMZ file 'googlearth.kmz' with saved images and KML file of observed data (set plot_observed = True).
25 # 3) Pre-process data for SVM:
26 # a) Method process_detections from setup.py file and method preprocess_result_svm from svm.py file (if cloud = False):
27 # *) Sort all the granules from all the sources in time order.
28 # *) Construct upper and lower bounds using a mask to prevent clear ground after fire.
29 # *) Save results in 'result' and 'result.mat' files.
30 # *) Preprocess bounds as an input of Support Vector Machine method.
31 # b) Method preprocess_data_svm from svm.py file (if cloud = True):
32 # *) Define save ground and fire detections as 3D space-time cloud of points.
33 # *) Remove density of detections to be able to run SVM.
34 # *) Save results in 'result' and 'result.mat' files.
35 # 4) Fire arrival time estimation using SVM: Method SVM3 from svm.py file.
36 # *) Run Support Vector Machine method.
37 # *) Save results in 'svm' and 'svm.mat' files.
38 # 5) Visualization of the SVM results: Methods from contline.py and contour2kml.py files.
39 # *) Construct a smooth contour line representation of the fire arrival time.
40 # *) Write the contour lines in a KML file called 'perimeters_svm.kml'.
43 # - 'data': binary file containing satellite granules information.
44 # - 'result.mat': matlab file containing upper and lower bounds (U and L).
45 # - 'result': binary file containing upper and lower bounds (U and L) or data points for SVM.
46 # - 'svm.mat': matlab file containing the solution to the Support Vector Machine execution.
47 # Contains estimation of the fire arrival time in the fire mesh in tign_g_interp variable.
48 # - 'svm': binary file containing the solution to the Support Vector Machine execution explained above.
49 # - 'fire_detections.kml': KML file with fire detection pixels (satellite, ignitions and perimeters).
50 # - 'googlearth.kmz': KMZ file with saved images and KML file of observed data.
51 # - 'perimeters_svm.kml': KML file with perimeters from estimation of the fire arrival time using SVM.
53 # Developed in Python 2.7.15 :: Anaconda, Inc.
54 # Angel Farguell (angel.farguell@gmail.com), 2019-04-29
55 #---------------------------------------------------------------------------------------------------------------------
56 from JPSSD
import read_fire_mesh
, retrieve_af_data
, sdata2json
, json2kml
, time_iso2num
57 from interpolation
import sort_dates
58 from setup
import process_detections
59 from infrared_perimeters
import process_ignitions
, process_infrared_perimeters
60 from forecast
import process_forecast_wrfout
61 from svm
import preprocess_result_svm
, preprocess_data_svm
, SVM3
62 from mpl_toolkits
.basemap
import Basemap
63 from plot_pixels
import basemap_scatter_mercator
, create_kml
64 from contline
import get_contour_verts
65 from contour2kml
import contour2kml
67 from utils
import Dict
68 from scipy
.io
import loadmat
, savemat
69 from scipy
import interpolate
75 # plot observed information (googlearth.kmz with png files)
77 # if plot_observed = True: only fire detections?
79 # dynamic penalization term
81 # if dyn_pen = False: 5-fold cross validation for C and gamma?
83 # interpolate the results into fire mesh (if apply to spinup case)
85 # using cloud of temporal-spatial nodes or bounds matrices
87 # minimum confidence level for the satellite pixels to be considered
90 # if ignitions are known: ([lons],[lats],[dates]) where lons and lats in degrees and dates in ESMF format
91 # examples: igns = ([100],[45],['2015-05-15T20:09:00']) or igns = ([100,105],[45,39],['2015-05-15T20:09:00','2015-05-15T23:09:00'])
93 # if infrared perimeters: path to KML files
94 # examples: perim_path = './pioneer/perim'
96 # if forecast wrfout: path to netcdf wrfout forecast file
97 # example: forecast_path = './patch/wrfout_patch'
# Names of the cache/output files produced and reused by this driver.
satellite_file = 'data'                  # pickled satellite granules
fire_file = 'fire_detections.kml'        # KML with fire detection pixels
gearth_file = 'googlearth.kmz'           # KMZ with PNG overlays for Google Earth
bounds_file = 'result.mat'               # MATLAB file with upper/lower bounds
contour_file = 'perimeters_svm.kml'      # KML with fire arrival time contours
# Body of the helper `exist(path)` — its `def` line (original line ~107) is
# missing from this extract, so the code is kept byte-identical here.
# Returns True only when `path` is an existing regular file readable by the
# current user (os.path.isfile AND os.access with R_OK).
108 return (os
.path
.isfile(path
) and os
.access(path
,os
.R_OK
))
# Record which cached products are already present and readable; the stages
# below are skipped or short-circuited based on these flags.
satellite_exists = exist(satellite_file)
fire_exists = exist(fire_file)
gearth_exists = exist(gearth_file)
bounds_exists = exist(bounds_file)
# Argument validation: when neither the cached bounds file nor the cached
# satellite data file exists, exactly three CLI arguments are required
# (wrfout path or bbox, start_time, days); otherwise print usage (Python 2
# print statements) and the alternative of linking an existing cache file.
115 if len(sys
.argv
) != 4 and (not bounds_exists
) and (not satellite_exists
):
116 print 'Error: python %s wrfout start_time days' % sys
.argv
[0]
117 print ' * wrfout - string, wrfout file of WRF-SFIRE simulation'
118 print ' OR coordinates bounding box - floats separated by comas:'
119 print ' min_lon,max_lon,min_lat,max_lat'
120 print ' * start_time - string, YYYYMMDDHHMMSS where:'
# NOTE(review): original lines 121-126 (field-by-field breakdown of the
# start_time format, and likely a sys.exit) are missing from this extract.
127 print ' * days - float, number of days of simulation (can be less than a day)'
128 print 'OR link an existent file %s or %s' % (satellite_file
,bounds_file
)
# Fast path: if 'result.mat' already exists (and the point-cloud mode `cloud`
# is off — `cloud` is set in a configuration section not shown here), skip all
# satellite processing and reload the bounds from the MATLAB file.
134 if bounds_exists
and not cloud
:
135 print '>> File %s already created! Skipping all satellite processing <<' % bounds_file
136 print 'Loading from %s...' % bounds_file
137 result
= loadmat(bounds_file
)
# Extract the variables the later stages need from the loadmat dictionary;
# loadmat wraps scalars/vectors in arrays, hence the [0] indexing.
138 # Taking necessary variables from result dictionary
139 scale
= result
['time_scale_num'][0]
140 time_num_granules
= result
['time_num_granules'][0]
141 time_num_interval
= result
['time_num'][0]
142 lon
= np
.array(result
['fxlon']).astype(float)
143 lat
= np
.array(result
['fxlat']).astype(float)
# Original (uncoarsened) fire-mesh coordinates, saved only when present —
# used later for interpolation back onto the fire mesh.
144 if 'ofxlon' in result
.keys():
145 fxlon
= result
['ofxlon']
146 fxlat
= result
['ofxlat']
# Reload cached granules from the binary 'data' file via the saveload helper
# (`sl`, imported in a section not shown). NOTE(review): the branch header
# guarding this block (original lines 147-149, likely `elif satellite_exists:`)
# is missing from this extract.
150 print '>> File %s already created! Skipping satellite retrieval <<' % satellite_file
151 print 'Loading from %s...' % satellite_file
152 data
,fxlon
,fxlat
,time_num
= sl
.load(satellite_file
)
# Rebuild the domain bounding box [min_lon, max_lon, min_lat, max_lat]
# from the loaded fire-mesh coordinates.
153 bbox
= [fxlon
.min(),fxlon
.max(),fxlat
.min(),fxlat
.max()]
# Build the fire mesh from the first CLI argument: either a comma-separated
# bounding box (min_lon,max_lon,min_lat,max_lat) or a wrfout file path.
155 argument
= sys
.argv
[1].split(',')
# More than one comma-separated token => a numeric bounding box was given.
156 if len(argument
) > 1:
157 print '>> Creating the fire mesh <<'
# NOTE(review): original lines 158-159 (likely the dlon/dlat grid-step
# definitions used below) are missing from this extract.
160 bounds
= map(float,argument
)
# Regular lon/lat mesh over the bounding box with steps dlon/dlat.
161 fxlon
,fxlat
= np
.meshgrid(np
.arange(bounds
[0],bounds
[1],dlon
),
162 np
.arange(bounds
[2],bounds
[3],dlat
))
163 bbox
= [fxlon
.min(),fxlon
.max(),fxlat
.min(),fxlat
.max()]
164 print 'min max longitude latitude %s' % bbox
# Otherwise the argument is a wrfout path: read FXLON/FXLAT from it.
166 print '>> Reading the fire mesh <<'
168 fxlon
,fxlat
,bbox
,time_esmf
= read_fire_mesh(argument
[0])
171 print '>> Retrieving satellite data <<'
173 # convert the CLI start/end times to ISO-8601 strings (done just below)
# Convert the command-line time window to ISO-8601 strings.
# sys.argv[2] is the start time as YYYYMMDDHHMMSS; sys.argv[3] is the
# simulation length in decimal days.
dti = dt.datetime.strptime(sys.argv[2], '%Y%m%d%H%M%S')
# strftime replaces the original hand-rolled '%d-%02d-%02dT...' formatting;
# output is identical for 4-digit years.
time_start_iso = dti.strftime('%Y-%m-%dT%H:%M:%SZ')
dtf = dti + dt.timedelta(days=float(sys.argv[3]))
time_final_iso = dtf.strftime('%Y-%m-%dT%H:%M:%SZ')
# (start, end) pair consumed by the retrieval/processing calls below.
time_iso = (time_start_iso, time_final_iso)
# Download/ingest Active Fire granules for the bounding box and time window.
179 data
= retrieve_af_data(bbox
,time_iso
)
# Merge in known ignitions, infrared perimeters, and a forecast wrfout.
# NOTE(review): the guards for these calls (original lines 180/182/184,
# presumably `if igns:` etc.) are missing from this extract; `igns`,
# `perim_path`, and `forecast_path` come from the configuration section.
181 data
.update(process_ignitions(igns
,bbox
,time
=time_iso
))
183 data
.update(process_infrared_perimeters(perim_path
,bbox
,time
=time_iso
))
185 data
.update(process_forecast_wrfout(forecast_path
,bbox
,time
=time_iso
))
189 print '>> Saving satellite data file (data) <<'
# Numeric times for the interval endpoints, then persist everything so a
# re-run can skip the retrieval stage (see satellite_exists branch above).
191 time_num
= map(time_iso2num
,time_iso
)
192 sl
.save((data
,fxlon
,fxlat
,time_num
),satellite_file
)
193 print 'data file saved correctly!'
# NOTE(review): the guard separating success from this error message
# (original lines 194-195) is missing from this extract.
196 print 'ERROR: No data obtained...'
# Visualization of the input data: (re)generate the fire-detections KML and,
# if requested via plot_observed, prepare for the Google Earth KMZ below.
200 if (not fire_exists
) or (not gearth_exists
and plot_observed
):
201 print '>> Generating KML of fire and ground detections <<'
203 # sort the granules by dates
204 sdata
=sort_dates(data
)
206 print '>> File %s already created! <<' % fire_file
208 # writing fire detections file
209 print 'writting KML with fire detections'
# Output (keys) vs. internal granule-dictionary (dkeys) field names, matched
# positionally by sdata2json.
210 keys
= ['latitude','longitude','brightness','scan','track','acq_date','acq_time','satellite','instrument','confidence','bright_t31','frp','scan_angle']
211 dkeys
= ['lat_fire','lon_fire','brig_fire','scan_fire','track_fire','acq_date','acq_time','sat_fire','instrument','conf_fire','t31_fire','frp_fire','scan_angle_fire']
212 prods
= {'AF':'Active Fires','FRP':'Fire Radiative Power','TF':'Temporal Fire coloring','AFN':'Active Fires New'}
# Keep only granules whose name does NOT contain PER_A/IGN_A/FOR_A
# (perimeter, ignition, forecast pseudo-granules have too many pixels).
213 # filter out perimeter, ignition, and forecast information (too many pixels)
214 regex
= re
.compile(r
'^((?!(PER_A|IGN_A|FOR_A)).)*$')
215 nsdata
= [d
for d
in sdata
if regex
.match(d
[0])]
216 # compute number of fire pixels for each granule (0 if none)
217 N
= [len(d
[1]['lat_fire']) if 'lat_fire' in d
[1] else 0 for d
in nsdata
]
# NOTE(review): the local name `json` shadows the stdlib module name
# (not imported in the visible code, so harmless here).
218 # transform dictionary notation to json notation
219 json
= sdata2json(nsdata
,keys
,dkeys
,N
)
# Write the KML, dropping pixels below the minimum confidence `minconf`.
220 # write KML file from json notation
221 json2kml(json
,fire_file
,bbox
,prods
,minconf
=minconf
)
# Google Earth KMZ generation: one PNG overlay per granule plus a doc.kml,
# zipped into googlearth.kmz. Skipped when already built or plot_observed off.
224 print '>> File %s already created! <<' % gearth_file
225 elif not plot_observed
:
226 print '>> Creation of %s skipped (set plot_observed = True) <<' % gearth_file
228 print '>> Generating KMZ with png overlays for Google Earth <<'
229 # creating KMZ overlay of each information
# Mercator Basemap covering the domain bounding box, reused for every plot.
230 # create the Basemap to plot into
231 bmap
= Basemap(projection
='merc',llcrnrlat
=bbox
[2], urcrnrlat
=bbox
[3], llcrnrlon
=bbox
[0], urcrnrlon
=bbox
[1])
# NOTE(review): original lines 232-233 (likely `kmld = []` initialization)
# are missing from this extract.
234 # for each observed information
235 for idx
, g
in enumerate(sdata
):
237 pngfile
= g
[0]+'.png'
# KML timestamp built from granule acquisition date and HHMM time string.
238 # create timestamp for KML
239 timestamp
= g
[1].acq_date
+ 'T' + g
[1].acq_time
[0:2] + ':' + g
[1].acq_time
[2:4] + 'Z'
240 if not exist(pngfile
):
241 # plot a scatter basemap
242 raster_png_data
,corner_coords
= basemap_scatter_mercator(g
[1],bbox
,bmap
,only_fire
)
244 bounds
= (corner_coords
[0][0],corner_coords
[1][0],corner_coords
[0][1],corner_coords
[2][1])
246 with
open(pngfile
, 'w') as f
:
247 f
.write(raster_png_data
)
248 print '> File %s saved.' % g
[0]
250 print '> File %s already created.' % g
[0]
# NOTE(review): `bounds` computed above is never used in the visible code —
# the KML entry below uses the full-domain `bbox` instead; confirm whether
# the per-image corner coordinates were intended here.
251 # append dictionary information for the KML creation
252 kmld
.append(Dict({'name': g
[0], 'png_file': pngfile
, 'bounds': bbox
, 'time': timestamp
}))
254 create_kml(kmld
,'./doc.kml')
# Zip doc.kml and all granule PNGs into the KMZ, then clean up the parts.
255 # create KMZ with all the PNGs included
256 os
.system('zip -r %s doc.kml *_A*_*.png' % gearth_file
)
257 print 'Created file %s' % gearth_file
258 # eliminate images and KML after creation of KMZ
259 os
.system('rm doc.kml *_A*_*.png')
# Pre-processing for SVM in point-cloud mode (cloud = True): build the
# space-time detection cloud directly instead of the U/L bound matrices.
263 print '>> Processing satellite data <<'
# NOTE(review): original lines 264-266 (the branch header, likely
# `if cloud:`) are missing from this extract.
# Coarsen the fire mesh so its largest dimension stays under `maxsize`.
267 coarsening
=np
.int(1+np
.max(fxlon
.shape
)/maxsize
)
268 lon
= fxlon
[::coarsening
,::coarsening
]
269 lat
= fxlat
[::coarsening
,::coarsening
]
270 sdata
= sort_dates(data
)
271 time_num_granules
= [ dd
[1]['time_num'] for dd
in sdata
]
272 time_num_interval
= time_num
# Time scaling window: half a window before the start, two windows after
# the end of the simulation interval.
273 scale
= [time_num
[0]-0.5*(time_num
[1]-time_num
[0]),time_num
[1]+2*(time_num
[1]-time_num
[0])]
274 X
,y
,c
= preprocess_data_svm(data
,scale
,minconf
=minconf
)
# NOTE(review): `scale` appears twice in the saved tuple (positions 7 and 9)
# — confirm against the corresponding sl.load unpacking that this is intended.
275 sl
.save((lon
,lat
,X
,y
,c
,time_num_interval
,scale
,time_num_granules
,scale
,fxlon
,fxlat
),'result')
# Pre-processing for SVM in bounds mode (cloud = False): build upper/lower
# bound matrices U and L with process_detections. Two call forms appear;
# NOTE(review): the conditional selecting between them (original line 283)
# is missing from this extract — presumably maxsize set vs. unset.
282 result
= process_detections(data
,fxlon
,fxlat
,time_num
,bbox
,maxsize
)
284 result
= process_detections(data
,fxlon
,fxlat
,time_num
,bbox
)
285 # Taking necessary variables from result dictionary
286 scale
= result
['time_scale_num']
287 time_num_granules
= result
['time_num_granules']
288 time_num_interval
= result
['time_num']
289 lon
= np
.array(result
['fxlon']).astype(float)
290 lat
= np
.array(result
['fxlat']).astype(float)
# Upper bound (U), lower bound (L) and T arrays as floats.
293 U
= np
.array(result
['U']).astype(float)
294 L
= np
.array(result
['L']).astype(float)
295 T
= np
.array(result
['T']).astype(float)
# Optional confidence information: C (and Cg paired with it when present);
# otherwise a constant-10 confidence array shaped like L is used.
297 if 'C' in result
.keys():
298 conf
= np
.array(result
['C'])
299 if 'Cg' in result
.keys():
300 conf
= (np
.array(result
['Cg']),conf
)
302 conf
= (10*np
.ones(L
.shape
),conf
)
# Turn the U/L/T bound matrices into the SVM training set: X (space-time
# points), y (labels), c (confidences/weights).
307 print '>> Preprocessing the data <<'
309 X
,y
,c
= preprocess_result_svm(lon
,lat
,U
,L
,T
,scale
,time_num_granules
,C
=conf
)
# Fire arrival time estimation: run the SVM3 solver on the preprocessed data.
312 print '>> Running Support Vector Machine <<'
# NOTE(review): the branch headers around the two kgam assignments (original
# lines 313-315 and 317-318, likely the dyn_pen / C handling) are missing
# from this extract; both visible assignments are identical.
316 kgam
= np
.sqrt(len(y
))/4.
319 kgam
= np
.sqrt(len(y
))/4.
# F = (fxlon_grid, fxlat_grid, Z fire-arrival surface) on the (lon,lat) grid;
# `C` and `search` come from the configuration section not shown here.
322 F
= SVM3(X
,y
,C
=C
,kgam
=kgam
,search
=search
,fire_grid
=(lon
,lat
))
# Assemble the result dictionary that will be written to svm.mat.
325 print '>> Saving the results <<'
327 tscale
= 24*3600 # seconds per day (converts day-valued times to seconds)
328 # Fire arrival time in seconds from the beginning of the simulation:
# unscale Z (F[2]) from days to seconds and shift to the simulation origin.
329 tign_g
= np
.array(F
[2])*float(tscale
)+scale
[0]-time_num_interval
[0]
330 # Creating the dictionary with the results
331 svm
= {'dxlon': lon
, 'dxlat': lat
, 'X': X
, 'y': y
, 'c': c
,
332 'fxlon': F
[0], 'fxlat': F
[1], 'Z': F
[2],
333 'tign_g': tign_g
, 'C': C
, 'kgam': kgam
,
334 'tscale': tscale
, 'time_num_granules': time_num_granules
,
335 'time_scale_num': scale
, 'time_num': time_num_interval
}
# Bounds U/L (converted from seconds to days) added when available;
# NOTE(review): the guard (original line 336, likely `if not cloud:`)
# is missing from this extract.
337 svm
.update({'U': U
/tscale
, 'L': L
/tscale
})
339 # Interpolation of tign_g onto the original (uncoarsened) fire mesh
342 print '>> Interpolating the results in the fire mesh'
# Scattered SVM grid points and values, interpolated with griddata onto
# (fxlon, fxlat). NOTE(review): t_interp_1/t_interp_2 are assigned on lines
# missing from this extract (original 343/347, presumably time() calls), as
# is the guard checking that fxlon/fxlat are defined.
344 points
= np
.c_
[np
.ravel(F
[0]),np
.ravel(F
[1])]
345 values
= np
.ravel(tign_g
)
346 tign_g_interp
= interpolate
.griddata(points
,values
,(fxlon
,fxlat
))
348 print 'elapsed time: %ss.' % str(abs(t_interp_2
-t_interp_1
))
349 svm
.update({'fxlon_interp': fxlon
, 'fxlat_interp': fxlat
,
350 'tign_g_interp': tign_g_interp
})
# Fallback messages when the original-grid coordinates are unavailable.
352 print 'Warning: longitudes and latitudes from the original grid are not defined...'
353 print '%s file is not compatible with fire_interp=True! Run again the experiment from the begining.' % bounds_file
# Persist everything to svm.mat (svm_file is defined in a part of the file
# not shown in this extract).
355 # Save resulting file
356 savemat(svm_file
, mdict
=svm
)
357 print 'The results are saved in svm.mat file'
# Final visualization: smooth contour lines of the fire arrival time,
# written to perimeters_svm.kml. NOTE(review): the try/except wrapping this
# stage (original lines 362/371) is missing from this extract.
360 print '>> Computing contour lines of the fire arrival time <<'
361 print 'Computing the contours...'
363 # Granules numeric times
# Rescale Z back to absolute numeric time (days scale origin at scale[0]).
364 Z
= np
.transpose(F
[2])*tscale
+scale
[0]
365 # Creating contour lines every 6 hours (and 6h before/after the interval)
366 contour_data
= get_contour_verts(F
[0], F
[1], Z
, time_num_granules
, contour_dt_hours
=6, contour_dt_init
=6, contour_dt_final
=6)
367 print 'Creating the KML file...'
368 # Creating the KML file
369 contour2kml(contour_data
,contour_file
)
370 print 'The resulting contour lines are saved in perimeters_svm.kml file'
# On failure, the contours can be regenerated standalone with contlinesvm.py.
372 print 'Warning: contour creation problem'
373 print 'Run: python contlinesvm.py'
# Total wall-clock time; t_init/t_final are set on lines missing from this
# extract (presumably time() at script start/end).
378 print 'Elapsed time for all the process: %ss.' % str(abs(t_final
-t_init
))