# Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
# Angel Farguell (angel.farguell@gmail.com)

# conda install scikit-learn
# conda install scikit-image

from sklearn import svm
from scipy import interpolate, spatial
import matplotlib.pyplot as plt
import matplotlib.font_manager
import matplotlib.colors as colors
from mpl_toolkits.mplot3d import axes3d
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
import numpy as np
from infrared_perimeters import process_infrared_perimeters
def preprocess_data_svm(lons, lats, U, L, T, scale, time_num_granules, C=None):
    """
    Preprocess satellite data from JPSSD and set it up for use in the Support Vector Machine.

    :param lons: longitude grid
    :param lats: latitude grid
    :param U: upper bound grid
    :param L: lower bound grid
    :param scale: time scales
    :param time_num_granules: times of the granules
    :return X: matrix of features for SVM
    :return y: vector of labels for SVM

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2019-04-01
    """
    # Confidence of nofire detections

    lon = np.reshape(lons, np.prod(lons.shape)).astype(float)
    lat = np.reshape(lats, np.prod(lats.shape)).astype(float)

    # Temporal scale to days

    # Ensuring U >= L always
    print 'U>L: ', (U > L).sum()
    print 'U<L: ', (U < L).sum()
    print 'U==L: ', (U == L).sum()
    uu = np.reshape(U, np.prod(U.shape))
    ll = np.reshape(L, np.prod(L.shape))
    tt = np.reshape(T, np.prod(T.shape))

    # Set the maximum and minimum values to NaN
    uu[uu == uu.max()] = np.nan
    ll[ll == ll.min()] = np.nan
    # Mask created during computation of lower and upper bounds
    mk = tt == scale[1]-scale[0]
    # Masking upper bounds outside the mask

    # Creating the maximum value considered for the upper bounds
    nuu = uu[~np.isnan(uu)]
    muu = nuu.max()  # could be a different value, like a mean value
    # Create a mask with lower bounds less than the previous maximum upper bound value
    with np.errstate(invalid='ignore'):
        low = ll < muu

    # Create an all-False mask of the size of the selected lower bounds
    mask = np.repeat(False, len(low[low == True]))
    # Take just a subset of the nodes
    mask[0::clear_level] = True

    low[low == True] = mask
    # Eliminate all the previous elements from the mask

    # Masking lower bounds outside the mask

    # Values different from NaN in the upper and lower bounds
    um = np.array(~np.isnan(uu))
    lm = np.array(~np.isnan(ll))
    # Define all the x, y, and z components of upper and lower bounds

    # Create the data to call the SVM3 function from svm3test.py
    X = np.c_[np.concatenate((lx, ux)), np.concatenate((ly, uy)), np.concatenate((lz, uz))]
    y = np.concatenate((-np.ones(len(lx)), np.ones(len(ux))))
    # Print the shape of the data
    print 'shape X: ', X.shape
    print 'shape y: ', y.shape
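    # Worked example of the assembly above (toy numbers, for illustration only):
    # with lower-bound components lx = [0,1], ly = [0,1], lz = [0,0] and a single
    # upper-bound point ux = [0], uy = [0], uz = [1], X becomes
    #   [[0,0,0], [1,1,0], [0,0,1]]
    # and y becomes [-1,-1,+1]: lower (ground) bounds are labeled -1 and
    # upper (fire) bounds are labeled +1 for the SVM.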
    c = 80*np.ones(y.shape)
    if C is not None:
        c = np.concatenate((nofire_conf*np.ones(len(lx)), np.reshape(C, np.prod(C.shape))[um]))
    # Clean data if not in bounding box
    bbox = (lon.min(), lon.max(), lat.min(), lat.max(), time_num_granules)
    geo_mask = np.logical_and(np.logical_and(np.logical_and(X[:,0] > bbox[0], X[:,0] < bbox[1]), X[:,1] > bbox[2]), X[:,1] < bbox[3])
    btime = (0, (scale[1]-scale[0])/tscale)
    time_mask = np.logical_and(X[:,2] > btime[0], X[:,2] < btime[1])
    whole_mask = np.logical_and(geo_mask, time_mask)
def make_fire_mesh(fxlon, fxlat, it, nt):
    """
    Create a mesh of points to evaluate the decision function

    :param fxlon: data to base x-axis meshgrid on
    :param fxlat: data to base y-axis meshgrid on
    :param it: data to base z-axis meshgrid on
    :param nt: number of nodes in the z (time) direction
    :return xx, yy, zz: ndarray

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2019-04-01
    """
    xx = np.repeat(fxlon[:, :, np.newaxis], nt, axis=2)
    yy = np.repeat(fxlat[:, :, np.newaxis], nt, axis=2)
    tt = np.linspace(it[0], it[1], nt)
    zz = np.swapaxes(np.swapaxes(np.array([np.ones(fxlon.shape)*t for t in tt]), 0, 1), 1, 2)
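
# Illustrative sketch of the shape logic used in make_fire_mesh above, with toy
# sizes chosen only for this example: a 2D grid is repeated along a new time axis
# and one constant time level is stacked per linspace value.
def _demo_fire_mesh_shapes():
    fxlon, fxlat = np.meshgrid(np.linspace(0., 1., 4), np.linspace(0., 1., 3))
    nt = 5
    # one copy of the 2D grid per time level
    xx = np.repeat(fxlon[:, :, np.newaxis], nt, axis=2)
    tt = np.linspace(0., 1., nt)
    # stack of constant time levels, rearranged to the (row, col, time) layout
    zz = np.swapaxes(np.swapaxes(np.array([np.ones(fxlon.shape)*t for t in tt]), 0, 1), 1, 2)
    # both arrays end up with shape (3, 4, 5)
    print 'xx shape: ', xx.shape, ' zz shape: ', zz.shape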
def make_meshgrid(x, y, z, s=(50,50,50), exp=.1):
    """
    Create a mesh of points to evaluate the decision function

    :param x: data to base x-axis meshgrid on
    :param y: data to base y-axis meshgrid on
    :param z: data to base z-axis meshgrid on
    :param s: tuple of number of nodes in each direction, optional
    :param exp: extra percentage of steps in each direction (between 0 and 1), optional
    :return xx, yy, zz: ndarray

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2019-02-20
    https://scikit-learn.org/stable/auto_examples/svm/plot_iris.html#sphx-glr-auto-examples-svm-plot-iris-py
    """
    if not isinstance(s, tuple):
        print 'The number of nodes in each direction is not a tuple: ', s

    # number of nodes in each direction
    sx, sy, sz = np.array(s).astype(int)
    # extra step sizes in each direction

    # grid lengths in each direction
    lx = x.max() - x.min()
    ly = y.max() - y.min()
    lz = z.max() - z.min()
    # grid resolutions in each direction
    hx = lx / (sx - 2*brx - 1)
    hy = ly / (sy - 2*bry - 1)
    hz = lz / (sz - 2*brz - 1)
    # extreme values for each dimension
    x_min, x_max = x.min() - brx*hx, x.max() + brx*hx
    y_min, y_max = y.min() - bry*hy, y.max() + bry*hy
    z_min, z_max = z.min() - brz*hz, z.max() + brz*hz
    # generating the mesh grid
    xx, yy, zz = np.meshgrid(np.linspace(y_min, y_max, sy),
                             np.linspace(x_min, x_max, sx),
                             np.linspace(z_min, z_max, sz))
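
# Illustrative sketch of the extra border used in make_meshgrid above, with toy
# values chosen only for this example; the border size brx used here is an
# assumption (about exp*sx nodes per side), not the module's own definition.
def _demo_meshgrid_border():
    x = np.array([0., 1., 2., 3., 4.])
    sx = 50
    brx = int(np.ceil(.1 * sx))         # assumed border: about 10% of the nodes per side
    lx = x.max() - x.min()
    hx = lx / (sx - 2*brx - 1)          # resolution so the interior nodes span the data
    x_min, x_max = x.min() - brx*hx, x.max() + brx*hx
    # the evaluation range extends about half a unit beyond the data on each side
    print 'expanded range: ', (x_min, x_max)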
def frontier(clf, xx, yy, zz, bal=.5, plot_poly=False):
    """
    Compute the surface decision frontier for a classifier.

    :param clf: a classifier
    :param xx: meshgrid ndarray
    :param yy: meshgrid ndarray
    :param zz: meshgrid ndarray
    :param bal: number between 0 and 1, balance between lower and upper bounds in the decision function (in case not level 0)
    :param plot_poly: boolean, plot the polynomial approximation (if tech=='poly')
    :return F: 2D meshes with xx, yy coordinates and the hyperplane z which gives decision function 0

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2019-02-20
    https://www.semipol.de/2015/10/29/SVM-separating-hyperplane-3d-matplotlib.html
    """
    # Creating the 3D grid
    XX = np.c_[xx.ravel(), yy.ravel(), zz.ravel()]

    # Evaluating the decision function
    print '>> Evaluating the decision function...'

    ZZ = clf.decision_function(XX)

    print 'elapsed time: %ss.' % str(abs(t_2-t_1))
    hist = np.histogram(ZZ)
    print 'counts: ', hist[0]
    print 'values: ', hist[1]
    print 'decision function range: ', ZZ.min(), '~', ZZ.max()

    # Reshaping decision function volume
    Z = ZZ.reshape(xx.shape)
    print 'decision function shape: ', Z.shape
    # Computing fire arrival time from previous decision function
    print '>> Computing fire arrival time...'

    # xx 2-dimensional array

    # yy 2-dimensional array

    # zz 1-dimensional array

    # Initializing fire arrival time
    Fz = np.zeros(Fx.shape)
    for k1 in range(Fx.shape[0]):
        for k2 in range(Fx.shape[1]):
            # Approximate the vertical decision function by a piecewise polynomial (cubic spline interpolation)
            pz = interpolate.CubicSpline(zr, Z[k1,k2])
            # Compute the real roots of the piecewise polynomial

            # Just take the real roots between min(zz) and max(zz)
            realr = rr.real[np.logical_and(abs(rr.imag) < 1e-5, np.logical_and(rr.real > zr.min(), rr.real < zr.max()))]

            # Take the minimum root
            Fz[k1,k2] = realr.min()

            # Plotting the approximated polynomial with the decision function
            plt.plot(Z[k1,k2], zr, '+')
            plt.plot(np.zeros(len(realr)), realr, 'o', c='g')
            plt.plot(0, Fz[k1,k2], 'o', markersize=3, c='r')
            plt.title('Polynomial approximation of decision_function(%f,%f,z)' % (Fx[k1,k2], Fy[k1,k2]))
            plt.xlabel('decision function value')
            plt.legend(['polynomial', 'decision values', 'roots', 'fire arrival time'])
            plt.xlim([Z.min(), Z.max()])
            plt.ylim([zz.min(), zz.max()])

            # If there is not a real root of the polynomial between zz.min() and zz.max(), just define it as NaN

    print 'elapsed time: %ss.' % str(abs(t_2-t_1))
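
# Illustrative sketch of the zero-crossing step used in frontier() above, with toy
# values chosen only for this example: fit a cubic spline to decision function
# values along z and take the smallest root inside the z range as the fire arrival time.
def _demo_spline_zero_crossing():
    zr = np.linspace(0., 1., 11)
    dvals = zr - .37                      # fake decision values crossing zero near z = 0.37
    pz = interpolate.CubicSpline(zr, dvals)
    rr = pz.roots()                       # real roots of the piecewise cubic
    realr = rr[np.logical_and(rr > zr.min(), rr < zr.max())]
    print 'fire arrival time estimate: ', realr.min()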
def SVM3(X, y, C=1., kgam=1., norm=True, fire_grid=None, weights=None):
    """
    3D Support Vector Machine analysis and plot

    :param X: Training vectors, where n_samples is the number of samples and n_features is the number of features.
    :param y: Target values
    :param C: Penalty weight for outliers (argument of the svm.SVC class), optional
    :param kgam: Scalar multiplier for gamma (increase it to capture more detail)
    :param norm: Normalize the data into the interval (0,1) in all the directions, optional
    :param fire_grid: The longitude and latitude grid where to have the fire arrival time
    :return F: tuple with (longitude grid, latitude grid, fire arrival time grid)

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2019-02-20
    https://scikit-learn.org/stable/auto_examples/svm/plot_iris.html#sphx-glr-auto-examples-svm-plot-iris-py
    """
    # plot scaled data with artificial data

    # plot polynomial approximation (if postech=='poly')

    # plot full hyperplane vs detections with support vectors

    # plot resulting fire arrival time vs detections

    # number of vertical nodes per observation

    # interpolate into the original fire mesh

    # if no NaNs in the data are wanted (all NaNs are going to be replaced by the maximum value)

    # Options better not to change
    # number of horizontal nodes per observation (if fire_grid==None)

    # creation of over and under artificial upper and lower bounds in the pre-processing

    # resolution of artificial upper bounds vertical to the fire detections

    # resolution of artificial lower bounds vertical to the ground detections

    # creation of an artificial mesh of top upper bounds

    # proportion over max of the z direction for the artificial upper bound creation

    # creation of an artificial mesh of bottom lower bounds

    # proportion below min of the z direction for the artificial lower bound creation
    X = np.array(X).astype(float)

    oX = np.array(X).astype(float)

    # Visualization of the data
    X0, X1, X2 = X[:, 0], X[:, 1], X[:, 2]
        ax = fig.gca(projection='3d')
        fig.suptitle("Plotting the original data to fit")
        ax.scatter(X0, X1, X2, c=y, cmap=plt.cm.coolwarm, s=20, edgecolors='k', vmin=y.min(), vmax=y.max())
        ax.set_xlabel("Longitude")
        ax.set_ylabel("Latitude")
        ax.set_zlabel("Time (days)")
        plt.savefig('original_data.png')
    except Exception as e:
        print 'Warning: something went wrong when plotting...'
    # Normalization of the data into [0,1]^3

    xmin = X0.min()
    xlen = X0.max() - X0.min()
    x0 = np.divide(X0 - xmin, xlen)
    ymin = X1.min()
    ylen = X1.max() - X1.min()
    x1 = np.divide(X1 - ymin, ylen)
    zmin = X2.min()
    zlen = X2.max() - X2.min()
    x2 = np.divide(X2 - zmin, zlen)
    X0, X1, X2 = x0, x1, x2
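    # For instance (toy numbers, for illustration only): if X0 ranges over
    # [10., 30.], then xmin = 10. and xlen = 20., so a value of 15. maps to
    # (15.-10.)/20. = 0.25 and every coordinate ends up in [0,1].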
    # Creation of fire and ground artificial detections

    # Extreme values at z direction

    # Division of lower and upper bounds
    fl = X[y == np.unique(y)[0]]
    fu = X[y == np.unique(y)[1]]
    # Create artificial lower bounds
    flz = np.array([np.unique(np.append(np.arange(f[2], minz, -hartil), f[2])) for f in fl])
    # Create artificial upper bounds
    fuz = np.array([np.unique(np.append(np.arange(f[2], maxz, hartiu), f[2])) for f in fu])
    # Definition of new ground detections after artificial detections added
    Xg = np.concatenate([np.c_[(np.repeat(fl[k][0], len(flz[k])), np.repeat(fl[k][1], len(flz[k])), flz[k])] for k in range(len(flz))])
    # Definition of new fire detections after artificial detections added
    Xf = np.concatenate([np.c_[(np.repeat(fu[k][0], len(fuz[k])), np.repeat(fu[k][1], len(fuz[k])), fuz[k])] for k in range(len(fuz))])
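    # Worked example of the artificial bounds above (toy numbers, for illustration
    # only): a fire detection f = [.3, .7, .6] with maxz = 1. and hartiu = .1 gives
    #   np.unique(np.append(np.arange(.6, 1., .1), .6)) -> [.6, .7, .8, .9]
    # so each detection is extended in z up to (but not including) maxz, and its
    # (x, y) coordinates are repeated for every artificial sample.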
    # Top artificial upper bounds

    # Creation of the x,y new mesh of artificial upper bounds
    xn, yn = np.meshgrid(np.linspace(X[:, 0].min(), X[:, 0].max(), 20),
                         np.linspace(X[:, 1].min(), X[:, 1].max(), 20))
    # All the artificial new mesh points are going to be over the data
    znf = np.repeat(maxz+dmaxz, len(xn.ravel()))
    # Artificial upper bounds
    Xfa = np.c_[(xn.ravel(), yn.ravel(), znf.ravel())]
    # Definition of new fire detections after top artificial upper detections
    Xfn = np.concatenate((Xf, Xfa))
    # Bottom artificial lower bounds

    # Creation of the x,y new mesh of artificial lower bounds
    xn, yn = np.meshgrid(np.linspace(X[:, 0].min(), X[:, 0].max(), 20),
                         np.linspace(X[:, 1].min(), X[:, 1].max(), 20))
    # All the artificial new mesh points are going to be below the data
    zng = np.repeat(minz-dminz, len(xn.ravel()))
    # Artificial lower bounds
    Xga = np.c_[(xn.ravel(), yn.ravel(), zng.ravel())]
    # Definition of new ground detections after bottom artificial lower detections
    Xgn = np.concatenate((Xg, Xga))
    # New definition of the training vectors
    X = np.concatenate((Xgn, Xfn))
    # New definition of the target values
    y = np.concatenate((np.repeat(np.unique(y)[0], len(Xgn)), np.repeat(np.unique(y)[1], len(Xfn))))
    # New definition of each feature vector
    X0, X1, X2 = X[:, 0], X[:, 1], X[:, 2]
    # Printing number of samples and features
    n0 = (y == np.unique(y)[0]).sum().astype(float)
    n1 = (y == np.unique(y)[1]).sum().astype(float)
    n_samples, n_features = X.shape
    print 'n_samples =', n_samples
    print 'n_samples_{-1} =', int(n0)
    print 'n_samples_{+1} =', int(n1)
    print 'n_features =', n_features
    # Visualization of scaled data

        ax = fig.gca(projection='3d')
        fig.suptitle("Plotting the data scaled to fit")
        ax.scatter(X0, X1, X2, c=y, cmap=plt.cm.coolwarm, s=20, edgecolors='k', vmin=y.min(), vmax=y.max())
        ax.set_xlabel("Longitude normalized")
        ax.set_ylabel("Latitude normalized")
        ax.set_zlabel("Time normalized")
        plt.savefig('scaled_data.png')
    except Exception as e:
        print 'Warning: something went wrong when plotting...'
    # Rescaling gamma to include more detailed results
    gamma = kgam / (n_features * X.std())
    print 'gamma =', gamma

    # Creating the SVM model
    print '>> Creating the SVM model...'

    clf = svm.SVC(C=C, kernel="rbf", gamma=gamma, cache_size=1000, class_weight="balanced")  # default kernel: exp(-gamma*||x-x'||^2)
    # Fitting the data using the Support Vector Machine technique
    print '>> Fitting the SVM model...'

    print 'elapsed time: %ss.' % str(abs(t_2-t_1))

    # Check if the classification failed

        print 'Failed fitting the data'

    print 'number of support vectors: ', clf.n_support_
    print 'score of trained data: ', clf.score(X, y)

    # Creating the mesh grid to evaluate the classification
    print '>> Creating mesh grid to evaluate the classification...'
    nnodes = np.ceil(np.power(n_samples, 1./n_features))
    if fire_grid is None:
        # Number of necessary nodes

        print 'number of horizontal nodes (%d meshgrid nodes for each observation): %d' % (hN, hnodes)
        print 'number of vertical nodes (%d meshgrid nodes for each observation): %d' % (vN, vnodes)
        # Computing resolution of the mesh to evaluate
        sdim = (hnodes, hnodes, vnodes)
        print 'grid_size = %dx%dx%d = %d' % (sdim[0], sdim[1], sdim[2], np.prod(sdim))

        xx, yy, zz = make_meshgrid(X0, X1, X2, s=sdim)
    else:
        fxlon = np.divide(fire_grid[0] - xmin, xlen)
        fxlat = np.divide(fire_grid[1] - ymin, ylen)
        it = (X2.min(), X2.max())

        sdim = (fxlon.shape[0], fxlon.shape[1], vnodes)
        print 'fire_grid_size = %dx%dx%d = %d' % (sdim + (np.prod(sdim),))

        xx, yy, zz = make_fire_mesh(fxlon, fxlat, it, sdim[2])

    print 'grid_created = %dx%dx%d = %d' % (zz.shape + (np.prod(zz.shape),))
    print 'elapsed time: %ss.' % str(abs(t_2-t_1))
    # Computing the 2D fire arrival time, F
    print '>> Computing the 2D fire arrival time, F...'

    F = frontier(clf, xx, yy, zz, plot_poly=plot_poly)

    print '>> Creating final results...'
    # Plotting the Separating Hyperplane of the SVM classification with the support vectors

        ax = fig.gca(projection='3d')
        fig.suptitle("Plotting the 3D Separating Hyperplane of an SVM")
        # plotting the separating hyperplane
        ax.plot_wireframe(F[0], F[1], F[2], color='orange')
        # computing the indices where there are no support vectors
        rr = np.array(range(len(y)))
        ms = np.isin(rr, clf.support_)

        # plotting non-support vectors (smaller)
        ax.scatter(X0[nsupp], X1[nsupp], X2[nsupp], c=y[nsupp], cmap=plt.cm.coolwarm, s=.5, vmin=y.min(), vmax=y.max())
        # plotting support vectors (bigger)
        ax.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], clf.support_vectors_[:, 2], c=y[clf.support_], cmap=plt.cm.coolwarm, s=20, edgecolors='k')
        ax.set_xlim(xx.min(), xx.max())
        ax.set_ylim(yy.min(), yy.max())
        ax.set_zlim(zz.min(), zz.max())
        ax.set_xlabel("Longitude normalized")
        ax.set_ylabel("Latitude normalized")
        ax.set_zlabel("Time normalized")
        plt.savefig('support.png')
    except Exception as e:
        print 'Warning: something went wrong when plotting...'
    # Plot the fire arrival time resulting from the SVM classification, normalized

    Fx, Fy, Fz = np.array(F[0]), np.array(F[1]), np.array(F[2])
    with np.errstate(invalid='ignore'):
        Fz[Fz > X2.max()] = np.nan

    Fz[np.isnan(Fz)] = X2.max()
    Fz = np.minimum(Fz, X2.max())

        ax = fig.gca(projection='3d')
        fig.suptitle("Fire arrival time normalized")
        # plotting fire arrival time
        p = ax.plot_surface(Fx, Fy, Fz, cmap=plt.cm.coolwarm,
                            linewidth=0, antialiased=False)
        ax.set_xlim(xx.min(), xx.max())
        ax.set_ylim(yy.min(), yy.max())
        ax.set_zlim(zz.min(), zz.max())
        cbar = fig.colorbar(p)
        cbar.set_label('Fire arrival time normalized', labelpad=20, rotation=270)
        ax.set_xlabel("Longitude normalized")
        ax.set_ylabel("Latitude normalized")
        ax.set_zlabel("Time normalized")
        plt.savefig('tign_g.png')
    except Exception as e:
        print 'Warning: something went wrong when plotting...'
    # Translate the result back into the initial data scale

    f0 = F[0] * xlen + xmin
    f1 = F[1] * ylen + ymin
    f2 = F[2] * zlen + zmin
    # Set all the larger values at the end to be the same maximum value
    oX0, oX1, oX2 = oX[:, 0], oX[:, 1], oX[:, 2]
    FFx, FFy, FFz = FF[0], FF[1], FF[2]

    with np.errstate(invalid='ignore'):
        FFz[FFz > oX2.max()] = np.nan

    FFz[np.isnan(FFz)] = oX2.max()
    FFz = np.minimum(FFz, oX2.max())

    if (fire_grid is not None) and interp:
        print '>> Interpolating the results in the fire mesh'

        points = np.c_[Fx.ravel(), Fy.ravel()]

        Ffire = interpolate.griddata(points, values, (Flon, Flat))
        FF = [Flon, Flat, Ffire]
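        # Minimal usage sketch of scipy.interpolate.griddata (toy values, for
        # illustration only): for scattered points p = [[0,0],[1,0],[0,1]] with
        # values v = [0., 1., 2.], interpolate.griddata(p, v, (.5, .25)) linearly
        # interpolates to 1.0; the call above does the same to transfer the fire
        # arrival times from the SVM evaluation mesh onto the fire mesh (Flon, Flat).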
    # Plot the fire arrival time resulting from the SVM classification

    # Plotting the result

        ax = fig.gca(projection='3d')
        fig.suptitle("Plotting the 3D graph function of an SVM")
        FFx, FFy, FFz = np.array(FF[0]), np.array(FF[1]), np.array(FF[2])
        # plotting original data
        ax.scatter(oX0, oX1, oX2, c=oy, cmap=plt.cm.coolwarm, s=2, vmin=y.min(), vmax=y.max())
        # plotting fire arrival time
        ax.plot_wireframe(FFx, FFy, FFz, color='orange')
        ax.set_xlabel("Longitude")
        ax.set_ylabel("Latitude")
        ax.set_zlabel("Time (days)")
        plt.savefig('result.png')
    except Exception as e:
        print 'Warning: something went wrong when plotting...'
    print '>> SUCCESS <<'

    print 'TOTAL elapsed time: %ss.' % str(abs(t_final-t_init))
if __name__ == "__main__":

    # Defining ground and fire detections

        Xg = [[0, 0, 0], [2, 2, 0], [2, 0, 0], [0, 2, 0]]
        Xf = [[0, 0, 1], [1, 1, 0], [2, 2, 1], [2, 0, 1], [0, 2, 1]]

        Xg = [[0, 0, 0], [2, 2, 0], [2, 0, 0], [0, 2, 0], [4, 2, 0], [4, 0, 0], [2, 1, 0.5]]
        Xf = [[0, 0, 1], [1, 1, 0.25], [2, 2, 1], [2, 0, 1], [0, 2, 1], [3, 1, 0.25], [4, 2, 1], [4, 0, 1]]

    # Creating the options
    options = {1: exp1, 2: exp2}

    # Defining the option depending on the experiment
    Xg, Xf = options[exp]()

    # Creating the data necessary to run the SVM3 function
    X = np.concatenate((Xg, Xf))
    y = np.concatenate((-np.ones(len(Xg)), np.ones(len(Xf))))

    # Running SVM classification
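    # A minimal sketch of the call that would follow here (the argument values
    # are assumptions for illustration, not the original settings):
    #   F = SVM3(X, y, C=10., kgam=5., fire_grid=None)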