Replaced the nextDouble function with a Fibonacci random number generator from Boost...
[PaGMO.git] / GOclasses / algorithms / SGA.cpp
blob 358a881d241d86cd7280027f83b9dc40417d4961
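The commit message above refers to one of Boost's lagged Fibonacci engines. A minimal sketch of how such a generator is seeded and drawn from is shown below; the exact engine type behind PaGMO's drng member is an assumption here, not something this file confirms.

    #include <boost/random/lagged_fibonacci.hpp>

    // Sketch only: assumes drng is a Boost lagged Fibonacci engine such as
    // boost::lagged_fibonacci607, which returns doubles uniformly in [0,1).
    boost::lagged_fibonacci607 drng;
    drng.seed(1234u);      // same seed(uint32_t) interface used in initSGA below
    double r = drng();     // uniform draw in [0,1), replacing the old nextDouble()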
/*
 *  SGA.cpp
 *  PaGMO
 *
 *  Created by Dario Izzo on 10/5/08.
 *  Copyright 2008 __MyCompanyName__. All rights reserved.
 *
 */

#include "SGA.h"
#include <cmath>    // log, pow
#include <vector>

using namespace std;
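// Set up the algorithm parameters: number of generations, solution dimension
// (SolDim), crossover rate (CR), per-gene mutation rate (M), the reinsertion
// period for the best individual (insert_best), and the common seed for the
// integer (rng) and floating-point (drng) generators.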
void SGAalgorithm::initSGA(int generationsInit, int SolDimInit, double CRInit, double MInit, int insert_bestInit, uint32_t randomSeed){
    generations = generationsInit;
    SolDim = SolDimInit;
    CR = CRInit;
    M = MInit;
    insert_best = insert_bestInit;
    rng.seed(randomSeed);
    drng.seed(randomSeed);
}
Population SGAalgorithm::evolve(Population deme, GOProblem& problem){

    const std::vector<double>& LB = problem.getLB();
    const std::vector<double>& UB = problem.getUB();

    int NP = deme.size();
    int D = LB.size();

    vector<double> dummy(D,0);      //used for initialisation purposes
    vector< vector<double> > X(NP,dummy), Xnew(NP,dummy);

    vector<double> fit(NP);         //fitness

    double bestfit = 0;
    vector<double> bestX(D,0);

    vector<double> selectionfitness(NP), cumsum(NP), cumsumTemp(NP);
    vector<int> selection(NP);
    // Initialise the chromosomes and their fitness to that of the initial deme
    for ( int i = 0; i < NP; i++ ){
        X[i] = deme[i].getDecisionVector();
        fit[i] = deme[i].getFitness();
    }

    // Find the best member and store it in bestX and bestfit
    bestfit = fit[0];
    bestX = X[0];
    for (int i = 1; i < NP; i++){   // i = 1 skips the first member, already taken as the best
        if (fit[i] < bestfit){
            bestfit = fit[i];
            bestX = X[i];
        }
    }

    // Main SGA loop
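    // Each generation applies, in order: roulette-wheel selection, crossover with a
    // random partner, uniform mutation within the box bounds, re-evaluation of the
    // objective function, and (every insert_best generations) elitist reinsertion
    // of the best individual found so far.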
    for (int j = 0; j < generations; j++){

        //1 - Selection
        selectionfitness.clear();
        cumsum.clear();
        cumsumTemp.clear();
        double worstfit = fit[0];
        for (int i = 1; i < NP; i++){
            if (fit[i] > worstfit) worstfit = fit[i];
        }

        double factor = log((double)(j+1))/10;
        for (int i = 0; i < NP; i++){
            selectionfitness.push_back(pow((worstfit - fit[i]), 1.0 + factor)); //works for minimisation
        } // selectionfitness contains a modification of the objective function (something like a fitness) used for the solution ranking
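        // The weight of each individual is (worstfit - fit[i])^(1 + factor); as factor
        // grows logarithmically with the generation count, selection pressure towards
        // better solutions slowly increases over the run.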
        cumsumTemp.push_back(selectionfitness[0]);
        for (int i = 1; i < NP; i++){
            cumsumTemp.push_back(cumsumTemp[i - 1] + selectionfitness[i]);
        } // cumsumTemp contains the cumulative sum of the selection fitnesses

        for (int i = 0; i < NP; i++){
            cumsum.push_back(cumsumTemp[i]/cumsumTemp[NP-1]);
        } // cumsum is cumsumTemp normalised so that it increases from 0 to 1
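        // Roulette-wheel sampling: draw r2 uniformly in [0,1) and pick the first
        // index whose cumulative share exceeds it, so fitter members are selected
        // with proportionally higher probability.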
        selection.resize(NP,0);
        double r2;
        for (int i = 0; i < NP; i++){
            r2 = drng();
            for (int j = 0; j < NP; j++){
                if (cumsum[j] > r2){
                    selection[i] = j;
                    break;
                }
            }
        } //selection contains the roulette selection indexes

        for (int i = 0; i < NP; i++){
            Xnew[i] = X[selection[i]];
        } //Xnew is the selected population

        //2 - Crossover
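        // Each selected member is mated with a randomly chosen, distinct partner:
        // starting at a random gene n, genes are copied from the partner one at a
        // time, continuing with probability CR (and for at most D genes), similar in
        // spirit to the exponential crossover used in differential evolution.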
        int r1, n, L;
        vector<double> member1, member2;

        for (int i = 0; i < NP; i++){
            //for each member selected from pold (i.e. in pnew)
            member1 = Xnew[i];
            //we select a mating partner different from the member itself
            do { //FIXME: [MaRu] rejection sampling is a clumsy way to pick a distinct partner
                r1 = rng() % NP;
            } while ( r1 == i );
            member2 = Xnew[r1];
            //and we operate crossover
            n = rng() % D;
            L = 0;
            do {
                member1[n] = member2[n];
                n = (n+1) % D;
                L++;
            } while ( (drng() < CR) && (L < D) );
            Xnew[i] = member1;
        }

        //3 - Mutation
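        // Each gene is, with probability M, replaced by a value drawn uniformly at
        // random between its lower and upper bound.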
        for (int i = 0; i < NP; i++){
            //generate a random mutation vector
            for (int j = 0; j < D; j++)
                if (drng() < M)
                    Xnew[i][j] = (UB[j]-LB[j])*drng() + LB[j];
        }

        //4 - Evaluate Xnew
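        // Re-evaluate the objective function for the new population and keep track of
        // the best decision vector found so far.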
        for (int i = 0; i < NP; i++){
            fit[i] = problem.objfun(Xnew[i]);
            if (fit[i] < bestfit){
                bestfit = fit[i];
                bestX = Xnew[i];
            }
        }

        //5 - Reinsert best individual every insert_best generations (TODO: sort the population?)
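        // Elitism: overwrite the currently worst member with the best individual found
        // so far, so the best solution cannot be lost to crossover or mutation for long.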
        if (j % insert_best == 0){
            int worst = 0;
            for (int i = 1; i < NP; i++){
                if (fit[i] > fit[worst]) worst = i;
            }
            Xnew[worst] = bestX;
            fit[worst] = bestfit;
        }
        X = Xnew;
    } // end of main SGA loop
    //we end by constructing the object Population containing the final results
    Population popout;
    Individual dummy2;
    for (int i = 0; i < NP; i++){
        dummy2.setDecisionVector(X[i]);
        dummy2.setFitness(fit[i]);
        //dummy2.setVelocity(V[i]);
        popout.addIndividual(dummy2);
    }
    return popout;
}