2 warnings
.filterwarnings("ignore")
5 from scipy
import spatial
6 import matplotlib
.pyplot
as plt
9 from interpolation
import sort_dates
, nearest_scipy
, neighbor_indices_ball
, neighbor_indices_pixel
, neighbor_indices_ellipse
10 import os
, sys
, time
, itertools
# NOTE(review): this block is a garbled, line-fragmented extraction of a
# Python 2 function (print statements, %-formatting). Statements are broken
# across physical lines and, judging by the embedded original line numbers,
# many source lines are missing (e.g. the allocations of U, L, T, C, Cg,
# the mint/maxt assignments, GG, t_init, and most if/elif/else headers).
# The code is therefore preserved byte-for-byte; only comments are added.
# Reconstructing the missing lines would be guesswork — recover the full
# original file before attempting any functional change.
#
# Reassembled signature:
#   process_detections(data, fxlon, fxlat, time_num,
#                      bounds=None, maxsize=500, confl=0.)
12 def process_detections(data
,fxlon
,fxlat
,time_num
,bounds
=None,maxsize
=500,confl
=0.):
# --- function docstring text (the triple-quote delimiters were lost) ---
14 Process detections to obtain upper and lower bounds
16 :param data: data obtained from JPSSD
17 :param fxlon: longitude coordinates of the fire mesh (from wrfout)
18 :param fxlat: latitude coordinates of the fire mesh (from wrfout)
19 :param time_num: numerical value of the starting and ending time
20 :param bounds: optional, spatial bounds to consider (lon_min,lon_max,lat_min,lat_max)
21 :param maxsize: optional, maxsize of the mesh in both directions
22 :param confl: optional, minimum confidence level for the pixels
23 :return result: upper and lower bounds with some parameters
25 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
26 Angel Farguell (angel.farguell@gmail.com), 2019-04-01
# --- algorithm tuning knobs (each documented by its trailing comment) ---
29 # process satellite settings
30 ut
=1 # Upper bound technique, ut=1: Center of the pixel -- ut=2: Ellipse inscribed in the pixel
31 lt
=1 # Lower bound technique, lt=1: Center of the pixel -- lt=2: Ellipse inscribed in the pixel (very slow)
32 mt
=2 # Mask technique, mt=1: Ball -- mt=2: Pixel -- mt=3: Ellipse
33 dist
=8 # If mt=1 (ball neighbours), radius of the balls is R=sqrt(2*dist^2)
34 mm
=5 # If mt=3 (ellipse neighbours), larger ellipses constant: (x/a)^2+(x/b)^2<=mm
35 confa
=False # Histogram plot of the confidence level distribution
36 confm
=True # Store confidence of each fire and ground detection
37 conf_nofire
=70. # In absence of nofire confidence, value for nofire confidence (satellite data)
38 burn
=False # Using or not the burned scar product
# keep uncoarsened copies of the fire mesh for the result dictionary
40 ofxlon
= np
.copy(fxlon
)
41 ofxlat
= np
.copy(fxlat
)
42 print 'mesh shape %s %s' % fxlon
.shape
# subsample the fire mesh so that neither dimension exceeds ~maxsize
# (np.int is deprecated in modern NumPy — fine under the Python 2 stack
# this was developed on)
43 coarsening
=np
.int(1+np
.max(fxlon
.shape
)/maxsize
)
44 print 'maximum size is %s, coarsening %s' % (maxsize
, coarsening
)
45 fxlon
= fxlon
[0::coarsening
,0::coarsening
]
46 fxlat
= fxlat
[0::coarsening
,0::coarsening
]
47 print 'coarsened %s %s' % fxlon
.shape
# default spatial bounds = extent of the (coarsened) fire mesh; presumably
# this sits under an "if bounds is None:" header that was lost — confirm
50 bounds
=[fxlon
.min(),fxlon
.max(),fxlat
.min(),fxlat
.max()]
# flatten mesh coordinates and build a KD-tree used to snap granule
# pixels onto their nearest fire-mesh node
51 vfxlon
=np
.ravel(fxlon
)
52 vfxlat
=np
.ravel(fxlat
)
53 vfgrid
=np
.column_stack((vfxlon
,vfxlat
))
54 print 'Setting up interpolation'
55 stree
=spatial
.cKDTree(vfgrid
)
# second KD-tree over all (i,j) index pairs of the mesh; used by the
# ball-neighbour mask technique (mt=1) below
56 vfind
=np
.array(list(itertools
.product(np
.array(range(fxlon
.shape
[0])),np
.array(range(fxlon
.shape
[1])))))
57 itree
=spatial
.cKDTree(vfind
)
59 # Sort dictionary by time_num into an array of tuples (key, dictionary of values)
60 print 'Sort the granules by dates'
61 sdata
=sort_dates(data
)
62 tt
=[ dd
[1]['time_num'] for dd
in sdata
] # array of times
# time scale stretched around [mint, maxt]; the mint/maxt assignments are
# among the lines missing from this extraction
67 # Max and min time_num
70 # time_scale_num = time_num
71 time_scale_num
=[mint
-0.5*(maxt
-mint
),maxt
+2*(maxt
-mint
)]
# flat result arrays over the coarsened mesh: U = upper bound (earliest
# confirmed fire time), L = lower bound (latest confirmed no-fire time),
# T = mask time; their allocations (original lines 75/77/79) are missing
73 # Creating the resulting arrays
74 DD
=np
.prod(fxlon
.shape
)
76 U
[:]=time_scale_num
[1]
78 L
[:]=time_scale_num
[0]
80 T
[:]=time_scale_num
[1]
# per-label (7/8/9) accumulators of fire-pixel confidences for the
# optional histogram plots at the end
87 confanalysis
=Dict({'f7': np
.array([]),'f8': np
.array([]), 'f9': np
.array([])})
89 # For granules in order increasing in time
# GG (granule count) is defined on a missing line, presumably len(sdata)
91 for gran
in range(GG
):
93 print 'Loading data of granule %d/%d' % (gran
+1,GG
)
94 print 'Granule name: %s' % sdata
[gran
][0]
95 # Load granule lon, lat, fire arrays and time number
96 slon
=sdata
[gran
][1]['lon']
97 slat
=sdata
[gran
][1]['lat']
98 ti
=sdata
[gran
][1]['time_num']
99 fire
=sdata
[gran
][1]['fire']
100 print 'Interpolation to fire grid'
102 # Interpolate all the granule coordinates in bounds in the wrfout fire mesh
103 # gg: mask in the granule of g-points = pixel coordinates inside the fire mesh
104 # ff: the closed points in fire mesh indexed by g-points
105 (ff
,gg
)=nearest_scipy(slon
,slat
,stree
,bounds
) ## indices to flattened granule array
106 vfire
=np
.ravel(fire
) ## flaten the fire detection array
107 gfire
=vfire
[gg
] # the part withing the fire mesh bounds
# fire-mask categories: >=7 fire, 3/5 clear ground, anything else unknown
108 fi
=gfire
>= 7 # where fire detected - low, nominal or high confidence (all the fire data in the granule)
109 ffi
=ff
[fi
] # indices in the fire mesh where the fire detections are
110 nofi
=np
.logical_or(gfire
== 3, gfire
== 5) # where no fire detected
111 unkn
=np
.logical_not(np
.logical_or(fi
,nofi
)) # where unknown
112 print 'fire detected %s' % fi
.sum()
113 print 'no fire detected %s' % nofi
.sum()
114 print 'unknown %s' % unkn
.sum()
115 if fi
.any(): # at fire points
116 rfire
=gfire
[gfire
>=7]
117 conf
=sdata
[gran
][1]['conf_fire'] # confidence of the fire detections
# accumulate confidences per fire label for the optional histograms
119 confanalysis
.f7
=np
.concatenate((confanalysis
.f7
,conf
[rfire
==7]))
120 confanalysis
.f8
=np
.concatenate((confanalysis
.f8
,conf
[rfire
==8]))
121 confanalysis
.f9
=np
.concatenate((confanalysis
.f9
,conf
[rfire
==9]))
122 flc
=conf
>=confl
# fire large confidence indexes
123 ffa
=U
[ffi
][flc
]>ti
# first fire arrival
126 # taking lon, lat, scan and track of the fire detections which fire large confidence indexes
127 lon
=sdata
[gran
][1]['lon_fire'][flc
][ffa
]
128 lat
=sdata
[gran
][1]['lat_fire'][flc
][ffa
]
129 scan
=sdata
[gran
][1]['scan_fire'][flc
][ffa
]
130 track
=sdata
[gran
][1]['track_fire'][flc
][ffa
]
# --- upper-bound (U) update; the if/elif headers selecting on ut are
# among the missing lines — only the ut=2 body and error print survive ---
134 # indices with high confidence
137 # creating the indices for all the pixel neighbours of the upper bound
138 iu
=neighbor_indices_ellipse(vfxlon
,vfxlat
,lon
,lat
,scan
,track
)
140 print 'ERROR: invalid ut option.'
142 mu
= U
[iu
] > ti
# only upper bounds did not set yet
# NOTE(review): U[iu[mu]] below is a fancy-index assignment (writes
# through to U), whereas U[iu][mu]=ti further down assigns into a copy
# and is silently discarded — presumably these sat on different ut
# branches in the lost if/else structure; confirm against full source
145 C
[iu
[mu
]]=conf
[flc
][ffa
][mu
]
147 print 'ERROR: ut=2 and confm=True not implemented!'
149 print 'U set at %s points' % mu
.sum()
151 U
[iu
[mu
]]=ti
# set U to granule time where fire detected and not detected before
153 U
[iu
][mu
]=ti
# set U to granule time where fire detected and not detected before
# --- mask (T) update: three alternative neighbourhood constructions
# (mt=1 ball, mt=2 pixel, mt=3 ellipse); selecting headers are missing ---
157 # creating the indices for all the pixel neighbours of the upper bound indices
158 kk
=neighbor_indices_ball(itree
,np
.unique(ffi
[flc
]),fxlon
.shape
,dist
)
# convert (i,j) index pairs back to flat column-major offsets
159 im
=np
.array(sorted(np
.unique([x
[0]+x
[1]*fxlon
.shape
[0] for x
in vfind
[kk
]])))
161 # creating the indices for all the pixel neighbours of the upper bound indices
162 im
=neighbor_indices_pixel(vfxlon
,vfxlat
,lon
,lat
,scan
,track
)
164 # creating the indices for all the pixel neighbours of the upper bound indices
165 im
=neighbor_indices_ellipse(vfxlon
,vfxlat
,lon
,lat
,scan
,track
,mm
)
167 print 'ERROR: invalid mt option.'
# apply the mask: lower T to the granule time where not already set
170 ind
= np
.where(im
)[0]
171 mmt
= ind
[ti
< T
[im
]] # only mask did not set yet
172 print 'T set at %s points' % mmt
.shape
173 T
[mmt
]=ti
# update mask T
175 print 'T set at %s points' % im
[T
[im
] > ti
].shape
176 T
[im
[T
[im
] > ti
]]=ti
# update mask T
178 # Set mask from burned scar data
# (bm computed below appears unused in the visible lines — its use was
# presumably on a missing line; verify against full source)
180 if 'burned' in sdata
[gran
][1].keys():
181 # if burned scar exists, set the mask in the burned scar pixels
182 burned
=sdata
[gran
][1]['burned']
183 bm
=ff
[np
.ravel(burned
)[gg
]]
186 if nofi
.any(): # set L at no-fire points and not masked
188 # indices of clear ground
189 jj
=np
.logical_and(nofi
,ti
<T
[ff
])
# --- lower-bound (L) update; lt-selecting headers are missing ---
192 # taking lon, lat, scan and track of the ground detections
193 lon
=sdata
[gran
][1]['lon_nofire']
194 lat
=sdata
[gran
][1]['lat_nofire']
195 scan
=sdata
[gran
][1]['scan_nofire']
196 track
=sdata
[gran
][1]['track_nofire']
197 # creating the indices for all the pixel neighbours of lower bound indices
# NOTE(review): nofi is rebound here from a granule-space boolean mask to
# a mesh-space neighbour mask — intentional in the original but fragile
198 nofi
=neighbor_indices_pixel(vfxlon
,vfxlat
,lon
,lat
,scan
,track
)
199 il
=np
.logical_and(nofi
,ti
<T
)
201 print 'ERROR: invalid lt option.'
# --- ground (no-fire) confidence handling (confm=True path) ---
205 mask_nofi
= gg
[np
.logical_or(vfire
== 3, vfire
== 5)]
207 # get nofire confidence if we have it
208 confg
=sdata
[gran
][1]['conf_nofire'][mask_nofi
]
210 # if not, define confidence from conf_nofire value
211 confg
=conf_nofire
*np
.ones(nofi
.sum())
212 Cg
[il
]=confg
[(ti
<T
[ff
])[nofi
]]
214 print 'ERROR: lt=2 and confm=True not implemented!'
216 L
[il
]=ti
# set L to granule time where fire detected
217 print 'L set at %s points' % jj
.sum()
# t_init is set on a missing line near the top of the loop/function
218 t_final
= time
.time()
219 print 'elapsed time: %ss.' % str(t_final
-t_init
)
# --- sanity statistics on the bounds before fixing inconsistencies ---
221 print "L<U: %s" % (L
<U
).sum()
222 print "L=U: %s" % (L
==U
).sum()
223 print "L>U: %s" % (L
>U
).sum()
224 print "average U-L %s" % ((U
-L
).sum()/np
.prod(U
.shape
))
225 print np
.histogram((U
-L
)/(24*3600))
# where a lower bound exceeds an upper bound, reset L to the scale origin
# (presumably guarded by an "if (L>U).sum():" header that was lost)
228 print "Inconsistency in the data, removing lower bounds..."
229 L
[L
>U
]=time_scale_num
[0]
230 print "L<U: %s" % (L
<U
).sum()
231 print "L=U: %s" % (L
==U
).sum()
232 print "L>U: %s" % (L
>U
).sum()
233 print "average U-L %s" % ((U
-L
).sum()/np
.prod(U
.shape
))
234 print np
.histogram((U
-L
)/(24*3600))
# --- optional confidence histograms, one figure per fire label (confa) ---
236 print 'Confidence analysis'
239 plt
.hist(x
=confanalysis
.f7
,bins
='auto',color
='#ff0000',alpha
=0.7, rwidth
=0.85)
240 plt
.xlabel('Confidence')
241 plt
.ylabel('Frequency')
242 plt
.title('Fire label 7: %d' % len(confanalysis
.f7
))
244 plt
.hist(x
=confanalysis
.f8
,bins
='auto',color
='#00ff00',alpha
=0.7, rwidth
=0.85)
245 plt
.xlabel('Confidence')
246 plt
.ylabel('Frequency')
247 plt
.title('Fire label 8: %d' % len(confanalysis
.f8
))
249 plt
.hist(x
=confanalysis
.f9
,bins
='auto',color
='#0000ff',alpha
=0.7, rwidth
=0.85)
250 plt
.xlabel('Confidence')
251 plt
.ylabel('Frequency')
252 plt
.title('Fire label 9: %d' % len(confanalysis
.f9
))
# --- reshape flat arrays back to the mesh (Fortran order) and shift times
# so that zero corresponds to time_scale_num[0] ---
255 print 'Saving results'
257 U
=np
.reshape(U
-time_scale_num
[0],fxlon
.shape
,'F')
258 L
=np
.reshape(L
-time_scale_num
[0],fxlon
.shape
,'F')
259 T
=np
.reshape(T
-time_scale_num
[0],fxlon
.shape
,'F')
261 print 'U L R are shifted so that zero there is time_scale_num[0] = %s' % time_scale_num
[0]
263 result
= {'U': U
, 'L': L
, 'T': T
,
264 'fxlon': fxlon
, 'fxlat': fxlat
,
265 'time_num': time_num
, 'time_scale_num' : time_scale_num
,
266 'time_num_granules': tt
, 'ofxlon': ofxlon
, 'ofxlat': ofxlat
}
# confidence arrays added to the result, presumably under "if confm:"
267 # (guarding header lost in extraction)
268 C
=np
.reshape(C
,fxlon
.shape
,'F')
269 Cg
=np
.reshape(Cg
,fxlon
.shape
,'F')
270 result
.update({'C': C
, 'Cg': Cg
})
# persist both as MATLAB .mat and via the project's sl pickle helper
272 sio
.savemat('result.mat', mdict
=result
)
273 sl
.save(result
,'result')
275 print 'To visualize, run in Matlab the script plot_results.m'
276 print 'Multigrid using in fire_interpolation the script jpss_mg.m'
# Script entry point: load the pickled satellite data bundle and run the
# detection processing. NOTE(review): the assignment of sat_file (original
# lines 281-283) and the else/exit branch (original lines 289-290) are
# missing from this extraction; code preserved byte-for-byte, comments only.
280 if __name__
== "__main__":
# only proceed when the satellite data file exists and is readable
284 if os
.path
.isfile(sat_file
) and os
.access(sat_file
,os
.R_OK
):
285 print 'Loading the data...'
# sl.load('data') returns the tuple saved by the JPSSD acquisition step
286 data
,fxlon
,fxlat
,time_num
=sl
.load('data')
288 print 'Error: file %s not exist or not readable' % sat_file
291 process_detections(data
,fxlon
,fxlat
,time_num
)