import warnings
warnings.filterwarnings("ignore")
import scipy.io as sio
import numpy as np
from scipy import spatial
import matplotlib.pyplot as plt
import saveload as sl
from utils import Dict
from interpolation import sort_dates, nearest_scipy, neighbor_indices_ball, neighbor_indices_pixel, neighbor_indices_ellipse
import os, sys, time, itertools

def process_detections(data,fxlon,fxlat,time_num):
    """
    Process detections to obtain upper and lower bounds

    :param data: data obtained from JPSSD
    :param fxlon: longitude coordinates of the fire mesh (from wrfout)
    :param fxlat: latitude coordinates of the fire mesh (from wrfout)
    :param time_num: numerical value of the starting and ending time
    :return result: upper and lower bounds with some parameters

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2019-04-01
    """

    # process satellite settings
    maxsize=400 # Max size of the fire mesh
    ut=1 # Upper bound technique, ut=1: Center of the pixel -- ut=2: Ellipse inscribed in the pixel
    lt=1 # Lower bound technique, lt=1: Center of the pixel -- lt=2: Ellipse inscribed in the pixel (very slow)
    mt=2 # Mask technique, mt=1: Ball -- mt=2: Pixel -- mt=3: Ellipse
    dist=8 # If mt=1 (ball neighbours), radius of the balls is R=sqrt(2*dist^2)
    mm=5 # If mt=3 (ellipse neighbours), larger ellipses constant: (x/a)^2+(y/b)^2<=mm
    confl=70. # Minimum confidence level for the pixels
    confa=False # Histogram plot of the confidence level distribution
    confm=True # Store the confidence of each fire detection
    burn=False # Whether or not to use the burned scar product
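
    # Coarsen the fire mesh by subsampling so that neither dimension exceeds roughly maxsize nodes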
    print 'mesh shape %s %s' % fxlon.shape
    coarsening=np.int(1+np.max(fxlon.shape)/maxsize)
    print 'maximum size is %s, coarsening %s' % (maxsize, coarsening)
    fxlon = fxlon[0::coarsening,0::coarsening]
    fxlat = fxlat[0::coarsening,0::coarsening]
    print 'coarsened %s %s' % fxlon.shape

    bounds=[fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
    vfxlon=np.reshape(fxlon,np.prod(fxlon.shape))
    vfxlat=np.reshape(fxlat,np.prod(fxlat.shape))
    vfgrid=np.column_stack((vfxlon,vfxlat))
    print 'Setting up interpolation'
    stree=spatial.cKDTree(vfgrid)
    vfind=np.array(list(itertools.product(np.array(range(0,fxlon.shape[0])),np.array(range(0,fxlon.shape[1])))))
    itree=spatial.cKDTree(vfind)
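    # Two KD-trees are used: stree indexes the fire mesh nodes by (lon,lat), so each
    # granule pixel can be matched to its nearest mesh node, and itree indexes the
    # integer (row,col) node positions, so ball neighbourhoods in index space can be
    # queried when mt=1.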

    # Sort dictionary by time_num into an array of tuples (key, dictionary of values)
    print 'Sort the granules by dates'
    sdata=sort_dates(data)
    tt=[ dd[1]['time_num'] for dd in sdata ] # array of times
    stt=sorted(tt)
    print 'Sorted? %s' % (tt==stt)

    # Max and min time_num
    maxt=time_num[1]
    mint=time_num[0]
    # time_scale_num = time_num
    time_scale_num=[mint-0.5*(maxt-mint),maxt+2*(maxt-mint)]
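    # The time scale is padded around the simulation window: it starts half an interval
    # before the requested start time and ends two intervals after the requested end time,
    # so nodes never updated by a detection keep values outside the observed window.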

    # Creating the resulting arrays
    DD=np.prod(fxlon.shape)
    U=np.empty(DD)
    U[:]=time_scale_num[1] # upper bound starts at the end of the time scale
    L=np.empty(DD)
    L[:]=time_scale_num[0] # lower bound starts at the beginning of the time scale
    T=np.empty(DD)
    T[:]=time_scale_num[1] # mask times start at the end of the time scale
    if confm:
        C=np.zeros(DD)

    # Confidence analysis
    confanalysis=Dict({'f7': np.array([]),'f8': np.array([]), 'f9': np.array([])})
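    # f7, f8 and f9 accumulate, over all granules, the confidence values of the detections
    # labelled 7, 8 and 9 (low, nominal and high confidence fire); they feed the optional
    # histograms plotted when confa=True.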

    # For granules in order increasing in time
    GG=len(sdata)
    for gran in range(GG):
        t_init = time.time()
        print 'Loading data of granule %d/%d' % (gran+1,GG)
        print 'Granule name: %s' % sdata[gran][0]
        # Load granule lon, lat, fire arrays and time number
        slon=sdata[gran][1]['lon']
        slat=sdata[gran][1]['lat']
        ti=sdata[gran][1]['time_num']
        fire=sdata[gran][1]['fire']
        print 'Interpolation to fire grid'
        sys.stdout.flush()
        # Interpolate all the granule coordinates in bounds in the wrfout fire mesh
        # gg: mask in the granule of g-points = pixel coordinates inside the fire mesh
        # ff: the closest points in the fire mesh indexed by g-points
        (ff,gg)=nearest_scipy(slon,slat,stree,bounds) ## indices to flattened granule array
        vfire=np.reshape(fire,np.prod(fire.shape)) ## flatten the fire detection array
        gfire=vfire[gg] # the part within the fire mesh bounds
        fi=gfire >= 7 # where fire detected - low, nominal or high confidence (all the fire data in the granule)
        ffi=ff[fi] # indices in the fire mesh where the fire detections are
        nofi=np.logical_or(gfire == 3, gfire == 5) # where no fire detected
        unkn=np.logical_not(np.logical_or(fi,nofi)) # where unknown
        print 'fire detected %s' % fi.sum()
        print 'no fire detected %s' % nofi.sum()
        print 'unknown %s' % unkn.sum()
        if fi.any(): # at fire points
            rfire=gfire[gfire>=7]
            conf=sdata[gran][1]['conf_fire'] # confidence of the fire detections
            confanalysis.f7=np.concatenate((confanalysis.f7,conf[rfire==7]))
            confanalysis.f8=np.concatenate((confanalysis.f8,conf[rfire==8]))
            confanalysis.f9=np.concatenate((confanalysis.f9,conf[rfire==9]))
            flc=conf>confl # fire detections with confidence above confl
            if ut>1 or mt>1:
                # take lon, lat, scan and track of the fire detections with confidence above confl
                lon=sdata[gran][1]['lon_fire'][flc]
                lat=sdata[gran][1]['lat_fire'][flc]
                scan=sdata[gran][1]['scan_fire'][flc]
                track=sdata[gran][1]['track_fire'][flc]

            # Set upper bounds
            if ut==1:
                # indices with high confidence
                iu=ffi[flc]
                if confm:
                    C[iu]=conf[flc]
            elif ut==2:
                # creating the indices for all the ellipse neighbours of the upper bound
                iu=neighbor_indices_ellipse(vfxlon,vfxlat,lon,lat,scan,track)
            else:
                print 'ERROR: invalid ut option.'
                sys.exit()
            U[iu]=np.minimum(U[iu],ti) # set U to granule time where fire detected and not detected before

            # Set mask
            if mt==1:
                # creating the indices for all the ball neighbours of the upper bound indices
                kk=neighbor_indices_ball(itree,ffi[flc],fxlon.shape,dist)
                im=sorted(np.unique([x[0]+x[1]*fxlon.shape[0] for x in vfind[kk]]))
            elif mt==2:
                # creating the indices for all the pixel neighbours of the upper bound indices
                im=neighbor_indices_pixel(vfxlon,vfxlat,lon,lat,scan,track)
            elif mt==3:
                # creating the indices for all the ellipse neighbours of the upper bound indices
                im=neighbor_indices_ellipse(vfxlon,vfxlat,lon,lat,scan,track,mm)
            else:
                print 'ERROR: invalid mt option.'
                sys.exit()
            T[im]=ti # update mask T

            # Set mask from burned scar data
            if burn:
                if 'burned' in sdata[gran][1].keys():
                    # if burned scar exists, set the mask in the burned scar pixels
                    burned=sdata[gran][1]['burned']
                    bm=ff[np.reshape(burned,np.prod(burned.shape))[gg]]
                    T[bm]=ti

        if nofi.any(): # set L at no-fire points and not masked
            if lt==1:
                # indices of clear ground
                jj=np.logical_and(nofi,ti<T[ff])
                il=ff[jj]
            elif lt==2:
                # taking lon, lat, scan and track of the ground detections
                lon=sdata[gran][1]['lon_nofire']
                lat=sdata[gran][1]['lat_nofire']
                scan=sdata[gran][1]['scan_nofire']
                track=sdata[gran][1]['track_nofire']
                # creating the indices for all the pixel neighbours of lower bound indices
                nofi=neighbor_indices_pixel(vfxlon,vfxlat,lon,lat,scan,track)
                il=np.logical_and(nofi,ti<T)
                jj=il # reuse jj so the summary print below also works for this branch
            else:
                print 'ERROR: invalid lt option.'
                sys.exit()
            L[il]=ti # set L to granule time where clear ground detected and not masked yet
            print 'L set at %s points' % jj.sum()
        t_final = time.time()
        print 'elapsed time: %ss.' % str(t_final-t_init)
182 print "L<U: %s" % (L<U).sum()
183 print "L=U: %s" % (L==U).sum()
184 print "L>U: %s" % (L>U).sum()
185 print "average U-L %s" % ((U-L).sum()/np.prod(U.shape))
186 print np.histogram((U-L)/(24*3600))

    print 'Confidence analysis'
    if confa:
        plt.subplot(1,3,1)
        plt.hist(x=confanalysis.f7,bins='auto',color='#ff0000',alpha=0.7, rwidth=0.85)
        plt.xlabel('Confidence')
        plt.ylabel('Frequency')
        plt.title('Fire label 7: %d' % len(confanalysis.f7))
        plt.subplot(1,3,2)
        plt.hist(x=confanalysis.f8,bins='auto',color='#00ff00',alpha=0.7, rwidth=0.85)
        plt.xlabel('Confidence')
        plt.ylabel('Frequency')
        plt.title('Fire label 8: %d' % len(confanalysis.f8))
        plt.subplot(1,3,3)
        plt.hist(x=confanalysis.f9,bins='auto',color='#0000ff',alpha=0.7, rwidth=0.85)
        plt.xlabel('Confidence')
        plt.ylabel('Frequency')
        plt.title('Fire label 9: %d' % len(confanalysis.f9))
        plt.show()

    print 'Saving results'
    # Result
    U=np.transpose(np.reshape(U-time_scale_num[0],fxlon.shape))
    L=np.transpose(np.reshape(L-time_scale_num[0],fxlon.shape))
    T=np.transpose(np.reshape(T-time_scale_num[0],fxlon.shape))

    print 'U L T are shifted so that zero is time_scale_num[0] = %s' % time_scale_num[0]

    if confm:
        C=np.transpose(np.reshape(C,fxlon.shape))
        result = {'U':U, 'L':L, 'T':T, 'fxlon': fxlon, 'fxlat': fxlat,
            'time_num':time_num, 'time_scale_num' : time_scale_num,
            'time_num_granules' : tt, 'C':C}
    else:
        result = {'U':U, 'L':L, 'T':T, 'fxlon': fxlon, 'fxlat': fxlat,
            'time_num':time_num, 'time_scale_num' : time_scale_num,
            'time_num_granules' : tt}

    sio.savemat('result.mat', mdict=result)
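    # To inspect the saved file from Python instead of Matlab, it can be reloaded with,
    # for example: r = sio.loadmat('result.mat'); print r['U'].shape, r['L'].shape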

    print 'To visualize, run in Matlab the script plot_results.m'
    print 'For multigrid, use the script jpss_mg.m in fire_interpolation'

    result['fxlon'] = np.transpose(result['fxlon'])
    result['fxlat'] = np.transpose(result['fxlat'])

    return result

if __name__ == "__main__":

    sat_file = 'data'

    if os.path.isfile(sat_file) and os.access(sat_file,os.R_OK):
        print 'Loading the data...'
        data,fxlon,fxlat,time_num=sl.load(sat_file)
    else:
        print 'Error: file %s does not exist or is not readable' % sat_file
        sys.exit(1)

    process_detections(data,fxlon,fxlat,time_num)