import warnings
warnings.filterwarnings("ignore")
import numpy as np
import scipy.io as sio
from scipy import spatial
import matplotlib.pyplot as plt
import saveload as sl   # local module used to load/save the pickled satellite data
from utils import Dict  # local dictionary class with attribute access
from interpolation import sort_dates, nearest_scipy, neighbor_indices_ball, neighbor_indices_pixel, neighbor_indices_ellipse
import os, sys, time, itertools

def process_detections(data,fxlon,fxlat,time_num,bounds=None,maxsize=500):
    """
    Process detections to obtain upper and lower bounds

    :param data: data obtained from JPSSD
    :param fxlon: longitude coordinates of the fire mesh (from wrfout)
    :param fxlat: latitude coordinates of the fire mesh (from wrfout)
    :param time_num: numerical value of the starting and ending time
    :param bounds: optional bounding box [min_lon, max_lon, min_lat, max_lat]; defaults to the fire mesh bounds
    :param maxsize: maximum fire mesh dimension before coarsening is applied
    :return result: upper and lower bounds with some parameters

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2019-04-01
    """
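    # Expected structure of `data` (inferred from how it is used below): a dictionary of
    # granules, each value containing at least the keys
    #   'lon', 'lat', 'fire', 'time_num',
    #   'conf_fire', 'lon_fire', 'lat_fire', 'scan_fire', 'track_fire',
    #   'lon_nofire', 'lat_nofire', 'scan_nofire', 'track_nofire',
    # and optionally 'conf_nofire' and 'burned'.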
    # process satellite settings
    ut=1             # Upper bound technique, ut=1: center of the pixel -- ut=2: ellipse inscribed in the pixel
    lt=1             # Lower bound technique, lt=1: center of the pixel -- lt=2: ellipse inscribed in the pixel (very slow)
    mt=2             # Mask technique, mt=1: ball -- mt=2: pixel -- mt=3: ellipse
    dist=8           # If mt=1 (ball neighbours), radius of the balls is R=sqrt(2*dist^2)
    mm=5             # If mt=3 (ellipse neighbours), larger-ellipse constant: (x/a)^2+(y/b)^2<=mm
    confl=0.         # Minimum confidence level for the fire pixels
    confa=False      # Histogram plot of the confidence level distribution
    confm=True       # Store confidence of each fire and ground detection
    conf_nofire=70.  # In absence of no-fire confidence, default value for the no-fire confidence (satellite data)
    burn=False       # Whether to use the burned scar product
    ofxlon = np.copy(fxlon)
    ofxlat = np.copy(fxlat)
    print 'mesh shape %s %s' % fxlon.shape
    coarsening = np.int(1+np.max(fxlon.shape)/maxsize)
    print 'maximum size is %s, coarsening %s' % (maxsize, coarsening)
    fxlon = fxlon[0::coarsening,0::coarsening]
    fxlat = fxlat[0::coarsening,0::coarsening]
    print 'coarsened %s %s' % fxlon.shape
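    # Illustrative example (hypothetical sizes): for a 1200x1000 fire mesh and maxsize=500,
    # coarsening = int(1 + 1200/500) = 3 (integer division), so the working mesh becomes
    # 400x334, while ofxlon/ofxlat keep the original resolution for the saved output.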
    # if no bounds are given, use the fire mesh bounds
    if bounds is None:
        bounds = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
    vfxlon = np.ravel(fxlon)
    vfxlat = np.ravel(fxlat)
    vfgrid = np.column_stack((vfxlon,vfxlat))
    print 'Setting up interpolation'
    stree = spatial.cKDTree(vfgrid)
    vfind = np.array(list(itertools.product(np.array(range(fxlon.shape[0])),np.array(range(fxlon.shape[1])))))
    itree = spatial.cKDTree(vfind)
    # Sort dictionary by time_num into an array of tuples (key, dictionary of values)
    print 'Sort the granules by dates'
    sdata = sort_dates(data)
    tt = [ dd[1]['time_num'] for dd in sdata ]  # array of granule times
    # Max and min time_num
    maxt = max(time_num)
    mint = min(time_num)
    # extended time scale (an alternative would be time_scale_num = time_num)
    time_scale_num = [mint-0.5*(maxt-mint), maxt+2*(maxt-mint)]
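    # Illustrative example (hypothetical numbers): with time_num = [0., 86400.] (one day),
    # time_scale_num = [0.-0.5*86400., 86400.+2*86400.] = [-43200., 259200.], i.e. the scale
    # is padded by half the window before and twice the window after the simulation period.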
    # Creating the resulting arrays
    DD = np.prod(fxlon.shape)
    U = np.empty(DD)
    U[:] = time_scale_num[1]
    L = np.empty(DD)
    L[:] = time_scale_num[0]
    T = np.empty(DD)
    T[:] = time_scale_num[1]
    if confm:
        C = np.zeros(DD)
        Cg = np.zeros(DD)

    # Confidence analysis containers, one array per fire mask label
    confanalysis = Dict({'f7': np.array([]), 'f8': np.array([]), 'f9': np.array([])})
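    # U is the upper bound of the fire arrival time (initialized to the end of the time scale),
    # L is the lower bound (initialized to the start), T is the mask time used to avoid setting
    # lower bounds at points already known to have burned, and C/Cg (when confm=True) store the
    # confidence of the fire and ground detections, respectively. All are flattened mesh arrays.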
    # For granules in order increasing in time
    GG = len(sdata)
    for gran in range(GG):
        t_init = time.time()
        print 'Loading data of granule %d/%d' % (gran+1,GG)
        print 'Granule name: %s' % sdata[gran][0]
        # Load granule lon, lat, fire arrays and time number
        slon = sdata[gran][1]['lon']
        slat = sdata[gran][1]['lat']
        ti = sdata[gran][1]['time_num']
        fire = sdata[gran][1]['fire']
        print 'Interpolation to fire grid'
        # Interpolate all the granule coordinates in bounds into the wrfout fire mesh
        # gg: mask in the granule of g-points = pixel coordinates inside the fire mesh
        # ff: the closest points in the fire mesh indexed by g-points
        (ff,gg) = nearest_scipy(slon,slat,stree,bounds)  # indices into the flattened granule array
        vfire = np.ravel(fire)                           # flatten the fire detection array
        gfire = vfire[gg]                                # the part within the fire mesh bounds
        fi = gfire >= 7                                  # where fire detected - low, nominal or high confidence (all the fire data in the granule)
        ffi = ff[fi]                                     # indices in the fire mesh where the fire detections are
        nofi = np.logical_or(gfire == 3, gfire == 5)     # where no fire detected
        unkn = np.logical_not(np.logical_or(fi,nofi))    # where unknown
        print 'fire detected %s' % fi.sum()
        print 'no fire detected %s' % nofi.sum()
        print 'unknown %s' % unkn.sum()
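        # The thresholds above follow the standard MODIS/VIIRS fire mask coding:
        # 3 = water (no fire), 5 = land (no fire), 7/8/9 = fire with low/nominal/high confidence;
        # everything else (missing data, clouds, unknown) is left as "unknown" here.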
        if fi.any():   # at fire points
            rfire = gfire[gfire>=7]
            conf = sdata[gran][1]['conf_fire']   # confidence of the fire detections
            # collect the confidences by fire mask label for the confidence analysis
            confanalysis.f7 = np.concatenate((confanalysis.f7,conf[rfire==7]))
            confanalysis.f8 = np.concatenate((confanalysis.f8,conf[rfire==8]))
            confanalysis.f9 = np.concatenate((confanalysis.f9,conf[rfire==9]))
            flc = conf >= confl      # fire pixels with large enough confidence
            ffa = U[ffi][flc] > ti   # first fire arrival: upper bound still later than this granule

            # lon, lat, scan and track of the fire detections with large enough confidence
            lon = sdata[gran][1]['lon_fire'][flc][ffa]
            lat = sdata[gran][1]['lat_fire'][flc][ffa]
            scan = sdata[gran][1]['scan_fire'][flc][ffa]
            track = sdata[gran][1]['track_fire'][flc][ffa]
            # upper bound indices
            if ut == 1:
                # indices with high confidence: centers of the detection pixels
                iu = ffi[flc][ffa]
            elif ut == 2:
                # indices of all the mesh points inside the ellipse inscribed in each pixel
                iu = neighbor_indices_ellipse(vfxlon,vfxlat,lon,lat,scan,track)
            else:
                print 'ERROR: invalid ut option.'
                sys.exit(1)

            mu = U[iu] > ti   # only upper bounds not set yet
            if confm:
                if ut == 1:
                    C[iu[mu]] = conf[flc][ffa][mu]
                else:
                    print 'ERROR: ut=2 and confm=True not implemented!'
                    sys.exit(1)
            print 'U set at %s points' % mu.sum()
            if ut == 1:
                U[iu[mu]] = ti   # set U to granule time where fire detected and not detected before
            else:
                # here iu is a boolean mask, so convert it to integer indices for an in-place
                # update (U[iu][mu] = ti would assign into a copy and leave U unchanged)
                U[np.where(iu)[0][mu]] = ti
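            # Since the granules are processed in increasing time order and only entries with
            # U[iu] > ti are overwritten, U ends up holding the earliest detection time seen
            # at each mesh point, i.e. an upper bound on the fire arrival time.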
            # mask indices: all the mesh neighbours of the detections
            if mt == 1:
                # ball neighbours of the upper bound indices (in grid-index space)
                kk = neighbor_indices_ball(itree,np.unique(ffi[flc]),fxlon.shape,dist)
                im = np.array(sorted(np.unique([x[0]+x[1]*fxlon.shape[0] for x in vfind[kk]])))
            elif mt == 2:
                # pixel neighbours of the upper bound indices
                im = neighbor_indices_pixel(vfxlon,vfxlat,lon,lat,scan,track)
            elif mt == 3:
                # ellipse neighbours of the upper bound indices
                im = neighbor_indices_ellipse(vfxlon,vfxlat,lon,lat,scan,track,mm)
            else:
                print 'ERROR: invalid mt option.'
                sys.exit(1)

            if mt > 1:
                # im is a boolean mask over the flattened mesh
                ind = np.where(im)[0]
                mmt = ind[ti < T[im]]   # only where the mask was not set yet
                print 'T set at %s points' % mmt.shape
                T[mmt] = ti             # update mask T
            else:
                # im is an array of integer indices
                print 'T set at %s points' % im[T[im] > ti].shape
                T[im[T[im] > ti]] = ti  # update mask T
        # Set mask from burned scar data
        if burn:
            if 'burned' in sdata[gran][1].keys():
                # if the burned scar product exists, set the mask at the burned scar pixels
                burned = sdata[gran][1]['burned']
                bm = ff[np.ravel(burned)[gg]]
                T[bm] = ti
        if nofi.any():   # set L at no-fire points that are not masked
            if lt == 1:
                # indices of clear ground: no-fire pixels not masked yet
                jj = np.logical_and(nofi,ti<T[ff])
                il = ff[jj]
            elif lt == 2:
                # lon, lat, scan and track of the ground detections
                lon = sdata[gran][1]['lon_nofire']
                lat = sdata[gran][1]['lat_nofire']
                scan = sdata[gran][1]['scan_nofire']
                track = sdata[gran][1]['track_nofire']
                # creating the indices for all the pixel neighbours of the lower bound indices
                nofi = neighbor_indices_pixel(vfxlon,vfxlat,lon,lat,scan,track)
                il = np.logical_and(nofi,ti<T)
            else:
                print 'ERROR: invalid lt option.'
                sys.exit(1)
            if confm:
                if lt == 1:
                    mask_nofi = gg[np.logical_or(vfire == 3, vfire == 5)]
                    try:
                        # get the no-fire confidence if we have it
                        confg = sdata[gran][1]['conf_nofire'][mask_nofi]
                    except:
                        # if not, use the default conf_nofire value
                        confg = conf_nofire*np.ones(nofi.sum())
                    # keep only the confidences at the clear-ground pixels that passed the mask check
                    Cg[il] = confg[(ti<T[ff])[nofi]]
                else:
                    print 'ERROR: lt=2 and confm=True not implemented!'
                    sys.exit(1)
            L[il] = ti   # set L to granule time where clear ground detected
            print 'L set at %s points' % jj.sum()

        t_final = time.time()
        print 'elapsed time: %ss.' % str(t_final-t_init)
    # summary of the resulting bounds
    print "L<U: %s" % (L<U).sum()
    print "L=U: %s" % (L==U).sum()
    print "L>U: %s" % (L>U).sum()
    print "average U-L %s" % ((U-L).sum()/np.prod(U.shape))
    print np.histogram((U-L)/(24*3600))   # histogram of U-L in days
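    # L>U means a clear-ground detection was recorded at a point after its first fire detection;
    # these lower bounds are treated as inconsistent and reset to the start of the time scale.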
    if (L>U).sum() > 0:
        print "Inconsistency in the data, removing lower bounds..."
        L[L>U] = time_scale_num[0]
        print "L<U: %s" % (L<U).sum()
        print "L=U: %s" % (L==U).sum()
        print "L>U: %s" % (L>U).sum()
        print "average U-L %s" % ((U-L).sum()/np.prod(U.shape))
        print np.histogram((U-L)/(24*3600))
    print 'Confidence analysis'
    if confa:
        plt.subplot(1,3,1)
        plt.hist(x=confanalysis.f7,bins='auto',color='#ff0000',alpha=0.7,rwidth=0.85)
        plt.xlabel('Confidence')
        plt.ylabel('Frequency')
        plt.title('Fire label 7: %d' % len(confanalysis.f7))
        plt.subplot(1,3,2)
        plt.hist(x=confanalysis.f8,bins='auto',color='#00ff00',alpha=0.7,rwidth=0.85)
        plt.xlabel('Confidence')
        plt.ylabel('Frequency')
        plt.title('Fire label 8: %d' % len(confanalysis.f8))
        plt.subplot(1,3,3)
        plt.hist(x=confanalysis.f9,bins='auto',color='#0000ff',alpha=0.7,rwidth=0.85)
        plt.xlabel('Confidence')
        plt.ylabel('Frequency')
        plt.title('Fire label 9: %d' % len(confanalysis.f9))
        plt.show()
    print 'Saving results'
    # reshape the flattened bounds back to the (coarsened) fire mesh and shift the time origin
    U = np.transpose(np.reshape(U-time_scale_num[0],fxlon.shape))
    L = np.transpose(np.reshape(L-time_scale_num[0],fxlon.shape))
    T = np.transpose(np.reshape(T-time_scale_num[0],fxlon.shape))

    print 'U L T are shifted so that the zero of the time scale is time_scale_num[0] = %s' % time_scale_num[0]

    result = {'U': U, 'L': L, 'T': T, 'fxlon': fxlon, 'fxlat': fxlat,
              'time_num': time_num, 'time_scale_num': time_scale_num,
              'time_num_granules': tt, 'ofxlon': ofxlon, 'ofxlat': ofxlat}
    if confm:
        C = np.transpose(np.reshape(C,fxlon.shape))
        Cg = np.transpose(np.reshape(Cg,fxlon.shape))
        result.update({'C': C, 'Cg': Cg})

    sio.savemat('result.mat', mdict=result)

    print 'To visualize, run in Matlab the script plot_results.m'
    print 'For multigrid postprocessing, run in fire_interpolation the script jpss_mg.m'

    result['fxlon'] = np.transpose(result['fxlon'])
    result['fxlat'] = np.transpose(result['fxlat'])

    return result
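# A quick Python-side check of the saved file (instead of the Matlab scripts) could look like:
#   from scipy.io import loadmat
#   r = loadmat('result.mat')
#   print r['U'].shape, r['L'].shape, r['T'].shape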
if __name__ == "__main__":

    sat_file = 'data'   # pickled satellite data file, matching the sl.load call below

    if os.path.isfile(sat_file) and os.access(sat_file,os.R_OK):
        print 'Loading the data...'
        data,fxlon,fxlat,time_num = sl.load('data')
    else:
        print 'Error: file %s does not exist or is not readable' % sat_file
        sys.exit(1)

    process_detections(data,fxlon,fxlat,time_num)
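# Overall flow: the script expects a pickled satellite data file ('data', produced by the JPSSD
# acquisition step and loaded with saveload), computes the upper/lower bounds U, L and the mask T
# on the (possibly coarsened) fire mesh, and writes result.mat for plot_results.m / jpss_mg.m.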