warnings.filterwarnings("ignore")
from scipy import spatial
import matplotlib.pyplot as plt
import numpy as np
import scipy.io as sio
# assumed imports for the helper names used below (sl.load and Dict); in the
# JPSSD codebase these are expected to come from saveload.py and utils.py
import saveload as sl
from utils import Dict
from interpolation import sort_dates, nearest_scipy, neighbor_indices_ball, neighbor_indices_pixel, neighbor_indices_ellipse
import os, sys, time, itertools
def process_detections(data,fxlon,fxlat,time_num):
    """
    Process detections to obtain upper and lower bounds

    :param data: data obtained from JPSSD
    :param fxlon: longitude coordinates of the fire mesh (from wrfout)
    :param fxlat: latitude coordinates of the fire mesh (from wrfout)
    :param time_num: numerical value of the starting and ending time
    :return result: upper and lower bounds with some parameters

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2019-04-01
    """
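    # Summary of the arrays computed below (as inferred from the code):
    #   U: upper bound - earliest granule time at which a confident fire
    #      detection covers each fire mesh point
    #   L: lower bound - latest granule time at which clear ground was seen at
    #      the point while the mask T still allowed it
    #   T: mask - time after which the point is considered affected by fire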
    # process satellite settings
    maxsize=400 # Max size of the fire mesh
    ut=1 # Upper bound technique, ut=1: Center of the pixel -- ut=2: Ellipse inscribed in the pixel
    lt=1 # Lower bound technique, lt=1: Center of the pixel -- lt=2: Ellipse inscribed in the pixel (very slow)
    mt=2 # Mask technique, mt=1: Ball -- mt=2: Pixel -- mt=3: Ellipse
    dist=8 # If mt=1 (ball neighbours), radius of the balls is R=sqrt(2*dist^2)
    mm=5 # If mt=3 (ellipse neighbours), larger ellipses constant: (x/a)^2+(y/b)^2<=mm
    confl=70. # Minimum confidence level for the pixels
    confa=False # Histogram plot of the confidence level distribution
    confm=True # Store confidence of each fire detection
    burn=False # Using or not the burned scar product
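    # With the defaults above: dist=8 gives a ball radius R=sqrt(2*8^2)=sqrt(128),
    # about 11.3 grid points, and mm=5 keeps points out to sqrt(5) (about 2.2)
    # times the semi-axes of the ellipse inscribed in the pixel.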
    print 'mesh shape %s %s' % fxlon.shape
    coarsening=np.int(1+np.max(fxlon.shape)/maxsize)
    print 'maximum size is %s, coarsening %s' % (maxsize, coarsening)
    fxlon = fxlon[0::coarsening,0::coarsening]
    fxlat = fxlat[0::coarsening,0::coarsening]
    print 'coarsened %s %s' % fxlon.shape
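    # Example of the coarsening above: a 700x1200 mesh with maxsize=400 gives
    # coarsening=int(1+1200/400)=4, so the subsampled mesh is 175x300.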
    bounds=[fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
    vfxlon=np.reshape(fxlon,np.prod(fxlon.shape))
    vfxlat=np.reshape(fxlat,np.prod(fxlat.shape))
    vfgrid=np.column_stack((vfxlon,vfxlat))
    print 'Setting up interpolation'
    stree=spatial.cKDTree(vfgrid)
    vfind=np.array(list(itertools.product(np.array(range(0,fxlon.shape[0])),np.array(range(0,fxlon.shape[1])))))
    itree=spatial.cKDTree(vfind)
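    # Two KD-trees are used below: stree, on the (lon,lat) node coordinates, lets
    # nearest_scipy snap granule pixels to their closest fire mesh node, while
    # itree, on the integer (i,j) index pairs, lets neighbor_indices_ball collect
    # all nodes within a radius measured in grid indices.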
    # Sort dictionary by time_num into an array of tuples (key, dictionary of values)
    print 'Sort the granules by dates'
    sdata=sort_dates(data)
    tt=[ dd[1]['time_num'] for dd in sdata ] # array of times
    # Max and min time_num
    # (assumed reconstruction of two missing lines: mint and maxt are the start
    # and end of the time window given in time_num)
    mint=time_num[0]
    maxt=time_num[1]
    # time_scale_num = time_num
    time_scale_num=[mint-0.5*(maxt-mint),maxt+2*(maxt-mint)]
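    # Example of the extended time scale: for mint=0 and maxt=1 (any time unit),
    # time_scale_num=[-0.5,3], i.e. half a window before the start and two
    # windows past the end.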
    # Creating the resulting arrays
    DD=np.prod(fxlon.shape)
    # U, L, T are flat arrays over the fire mesh (allocations assumed as np.empty)
    U=np.empty(DD)
    U[:]=time_scale_num[1]
    L=np.empty(DD)
    L[:]=time_scale_num[0]
    T=np.empty(DD)
    T[:]=time_scale_num[1]
    # C holds the confidence of each fire detection (allocation assumed as
    # np.zeros; only used when confm is True)
    C=np.zeros(DD)

    confanalysis=Dict({'f7': np.array([]),'f8': np.array([]), 'f9': np.array([])})
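    # U and T start at the far-future end of time_scale_num and L at the
    # far-past end, so comparisons against a granule time behave as "not set yet"
    # until a detection updates them.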
    # For granules in order increasing in time
    GG=len(sdata) # number of granules (reconstructed)
    for gran in range(GG):
        t_init = time.time() # start timer for this granule (reconstructed)
        print 'Loading data of granule %d/%d' % (gran+1,GG)
        print 'Granule name: %s' % sdata[gran][0]
        # Load granule lon, lat, fire arrays and time number
        slon=sdata[gran][1]['lon']
        slat=sdata[gran][1]['lat']
        ti=sdata[gran][1]['time_num']
        fire=sdata[gran][1]['fire']
        print 'Interpolation to fire grid'
        # Interpolate all the granule coordinates in bounds in the wrfout fire mesh
        # gg: mask in the granule of g-points = pixel coordinates inside the fire mesh
        # ff: the closest points in fire mesh indexed by g-points
        (ff,gg)=nearest_scipy(slon,slat,stree,bounds) ## indices to flattened granule array
        vfire=np.reshape(fire,np.prod(fire.shape)) ## flatten the fire detection array
        gfire=vfire[gg] # the part within the fire mesh bounds
        fi=gfire >= 7 # where fire detected - low, nominal or high confidence (all the fire data in the granule)
        ffi=ff[fi] # indices in the fire mesh where the fire detections are
        nofi=np.logical_or(gfire == 3, gfire == 5) # where no fire detected
        unkn=np.logical_not(np.logical_or(fi,nofi)) # where unknown
        print 'fire detected %s' % fi.sum()
        print 'no fire detected %s' % nofi.sum()
        print 'unknown %s' % unkn.sum()
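        # The fire mask values follow the MODIS/VIIRS active fire coding:
        # 3 and 5 are non-fire (water and land), 7/8/9 are fire with
        # low/nominal/high confidence, and the rest (clouds, missing data) are
        # treated as unknown.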
        if fi.any(): # at fire points
            rfire=gfire[gfire>=7]
            conf=sdata[gran][1]['conf_fire'] # confidence of the fire detections
            confanalysis.f7=np.concatenate((confanalysis.f7,conf[rfire==7]))
            confanalysis.f8=np.concatenate((confanalysis.f8,conf[rfire==8]))
            confanalysis.f9=np.concatenate((confanalysis.f9,conf[rfire==9]))
            flc=conf>confl # indices of the fire detections with large enough confidence
            # taking lon, lat, scan and track of the fire detections with large enough confidence
            lon=sdata[gran][1]['lon_fire'][flc]
            lat=sdata[gran][1]['lat_fire'][flc]
            scan=sdata[gran][1]['scan_fire'][flc]
            track=sdata[gran][1]['track_fire'][flc]
            # (the if/elif/else branches, the exit on error, and the ut=1 index
            # computation are reconstructed from the ut setting above)
            if ut==1:
                # indices with high confidence
                iu=ffi[flc]
            elif ut==2:
                # creating the indices for all the pixel neighbours of the upper bound
                iu=neighbor_indices_ellipse(vfxlon,vfxlat,lon,lat,scan,track)
            else:
                print 'ERROR: invalid ut option.'
                sys.exit(1)
            mu = U[iu] > ti # only where the upper bound is not set yet
            if confm:
                C[iu[mu]]=conf[flc][mu]
            U[iu[mu]]=ti # set U to granule time where fire detected and not detected before
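            # Granules are processed in increasing time order and U is only
            # written where it was still unset, so U ends up holding the earliest
            # confident detection time at each mesh point.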
            # (the if/elif/else branches and the exit on error are reconstructed
            # from the mt setting above: 1 ball, 2 pixel, 3 ellipse neighbours)
            if mt==1:
                # creating the indices for all the ball neighbours of the upper bound indices
                kk=neighbor_indices_ball(itree,ffi[flc],fxlon.shape,dist)
                im=sorted(np.unique([x[0]+x[1]*fxlon.shape[0] for x in vfind[kk]]))
            elif mt==2:
                # creating the indices for all the pixel neighbours of the upper bound indices
                im=neighbor_indices_pixel(vfxlon,vfxlat,lon,lat,scan,track)
            elif mt==3:
                # creating the indices for all the ellipse neighbours of the upper bound indices
                im=neighbor_indices_ellipse(vfxlon,vfxlat,lon,lat,scan,track,mm)
            else:
                print 'ERROR: invalid mt option.'
                sys.exit(1)
            mmt = T > ti # only where the mask is not set yet
            T[im]=ti # update mask T
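            # T acts as a mask in time: once a point falls in the neighbourhood
            # of a detection its T drops to the granule time, and later granules
            # only update the lower bound L where their time is still below T.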
            # Set mask from burned scar data
            # (the burn guard and the mask update on bm are reconstructed; the
            # burned scar product is only used when burn is True)
            if burn:
                if 'burned' in sdata[gran][1].keys():
                    # if burned scar exists, set the mask in the burned scar pixels
                    burned=sdata[gran][1]['burned']
                    bm=ff[np.reshape(burned,np.prod(burned.shape))[gg]]
                    T[bm]=ti

        if nofi.any(): # set L at no-fire points and not masked
            # (the if/elif/else branches, the exit on error, and the lt=1 index
            # computation are reconstructed from the lt setting above)
            if lt==1:
                # indices of clear ground
                jj=np.logical_and(nofi,ti<T[ff])
                il=ff[jj]
            elif lt==2:
                # taking lon, lat, scan and track of the ground detections
                lon=sdata[gran][1]['lon_nofire']
                lat=sdata[gran][1]['lat_nofire']
                scan=sdata[gran][1]['scan_nofire']
                track=sdata[gran][1]['track_nofire']
                # creating the indices for all the pixel neighbours of lower bound indices
                nofi=neighbor_indices_pixel(vfxlon,vfxlat,lon,lat,scan,track)
                il=np.logical_and(nofi,ti<T)
            else:
                print 'ERROR: invalid lt option.'
                sys.exit(1)
            L[il]=ti # set L to granule time where clear ground is seen and the mask is not set yet
            print 'L set at %s points' % jj.sum()
        t_final = time.time()
        print 'elapsed time: %ss.' % str(t_final-t_init)
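        # Since granule times increase, L keeps being overwritten and finishes as
        # the latest time clear ground was observed at a point before it was
        # masked by T.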
    print "L<U: %s" % (L<U).sum()
    print "L=U: %s" % (L==U).sum()
    print "L>U: %s" % (L>U).sum()
    print "average U-L %s" % ((U-L).sum()/np.prod(U.shape))
    print np.histogram((U-L)/(24*3600))
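    # Diagnostics on the bounds: counts of points with L<U, L=U and L>U, the
    # average gap U-L, and a histogram of U-L converted to days via 24*3600
    # (assuming time_num is given in seconds).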
    # Confidence analysis histograms (assumed to be guarded by the confa flag
    # and laid out as three subplots; the subplot/show calls are reconstructed)
    if confa:
        print 'Confidence analysis'
        plt.subplot(1,3,1)
        plt.hist(x=confanalysis.f7,bins='auto',color='#ff0000',alpha=0.7, rwidth=0.85)
        plt.xlabel('Confidence')
        plt.ylabel('Frequency')
        plt.title('Fire label 7: %d' % len(confanalysis.f7))
        plt.subplot(1,3,2)
        plt.hist(x=confanalysis.f8,bins='auto',color='#00ff00',alpha=0.7, rwidth=0.85)
        plt.xlabel('Confidence')
        plt.ylabel('Frequency')
        plt.title('Fire label 8: %d' % len(confanalysis.f8))
        plt.subplot(1,3,3)
        plt.hist(x=confanalysis.f9,bins='auto',color='#0000ff',alpha=0.7, rwidth=0.85)
        plt.xlabel('Confidence')
        plt.ylabel('Frequency')
        plt.title('Fire label 9: %d' % len(confanalysis.f9))
        plt.show()
    print 'Saving results'
    U=np.transpose(np.reshape(U-time_scale_num[0],fxlon.shape))
    L=np.transpose(np.reshape(L-time_scale_num[0],fxlon.shape))
    T=np.transpose(np.reshape(T-time_scale_num[0],fxlon.shape))

    print 'U, L, T are shifted so that zero corresponds to time_scale_num[0] = %s' % time_scale_num[0]

    if confm: # (guard reconstructed) include the per-point confidence in the output
        C=np.transpose(np.reshape(C,fxlon.shape))
        result = {'U':U, 'L':L, 'T':T, 'fxlon': fxlon, 'fxlat': fxlat,
                  'time_num':time_num, 'time_scale_num' : time_scale_num,
                  'time_num_granules' : tt, 'C':C}
    else:
        result = {'U':U, 'L':L, 'T':T, 'fxlon': fxlon, 'fxlat': fxlat,
                  'time_num':time_num, 'time_scale_num' : time_scale_num,
                  'time_num_granules' : tt}
    sio.savemat('result.mat', mdict=result)

    print 'To visualize, run in Matlab the script plot_results.m'
    print 'For multigrid, run the script jpss_mg.m in fire_interpolation'
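    # The saved file can also be inspected from Python; a minimal sketch,
    # assuming only that scipy is installed:
    #   from scipy.io import loadmat
    #   r = loadmat('result.mat')
    #   print r['U'].shape, r['L'].shape, r['T'].shape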
    result['fxlon'] = np.transpose(result['fxlon'])
    result['fxlat'] = np.transpose(result['fxlat'])

    return result
if __name__ == "__main__":
    # (sat_file name reconstructed to match the 'data' file loaded below)
    sat_file = 'data'
    if os.path.isfile(sat_file) and os.access(sat_file,os.R_OK):
        print 'Loading the data...'
        data,fxlon,fxlat,time_num=sl.load('data')
    else:
        print 'Error: file %s does not exist or is not readable' % sat_file
        sys.exit(1) # stop: without the data file nothing can be processed
    process_detections(data,fxlon,fxlat,time_num)