COPASI API  4.16.103
COptMethodHGASA.cpp
1 // Begin CVS Header
2 // $Source: /Volumes/Home/Users/shoops/cvs/copasi_dev/copasi/optimization/COptMethodHGASA.cpp,v $
3 // $Revision: 1.9 $
4 // $Name: $
5 // $Author: shoops $
6 // $Date: 2012/04/23 21:11:20 $
7 // End CVS Header
8 
9 // Copyright (C) 2012 by Pedro Mendes, Virginia Tech Intellectual
10 // Properties, Inc., University of Heidelberg, and The University
11 // of Manchester.
12 // All rights reserved.
13 
14 // Copyright (C) 2008 by Pedro Mendes, Virginia Tech Intellectual
15 // Properties, Inc., EML Research, gGmbH, University of Heidelberg,
16 // and The University of Manchester.
17 // All rights reserved.
18 
19 // Copyright (C) 2001 - 2007 by Pedro Mendes, Virginia Tech Intellectual
20 // Properties, Inc. and EML Research, gGmbH.
21 // All rights reserved.
22 
23 /***************************************************************************
24  COptMethodHGASA.cpp - Hybrid GA/SA Optimizer
25  -------------------------------------------------
26 
27  Implemented by Dingjun Chen
28 
29  Starting Date: Mar. 2004
30 
31  COPASI project group
32  ***************************************************************************/
33 
34 /*************************************************************************************/
35 #define BESTFOUNDSOFAR 2
36 #define NumDirection 10
37 #define TRUE 1
38 #define FALSE 0
39 #define PI 3.1415926
40 
41 #include <vector>
42 
43 #include "copasi.h"
44 #include "COptMethod.h"
45 #include "CRealProblem.h"
47 
48 COptMethodHGASA::COptMethodHGASA(const COptMethodHGASA & src):
49  COptMethod(src)
50 {}
51 
52 COptMethodHGASA::COptMethodHGASA()
53 {
54  addParameter("HybridGASA.Iterations",
55  CCopasiParameter::UINT,
56  (unsigned C_INT32) 10000);
57  addParameter("HybridGASA.PopulationSize",
58  CCopasiParameter::UINT,
59  (unsigned C_INT32) 600);
60  addParameter("HybridGASA.RandomGenerator.Type",
61  CCopasiParameter::INT,
62  (C_INT32) CRandom::mt19937);
63  addParameter("HybridGASA.RandomGenerator.Seed",
64  CCopasiParameter::INT,
65  (C_INT32) 0);
66 }
67 
68 /**
69  * Destructor
70  */
71 COptMethodHGASA::~COptMethodHGASA()
72 {}
73 
74 /**
75  * GA Optimizer Function:
76  * Returns: nothing
77  */
78 bool COptMethodHGASA::optimise()
79 {
80  NumGeneration = (C_INT32) getValue("HybridGASA.Iterations");
81  PopulationSize = (C_INT32) getValue("HybridGASA.PopulationSize");
82  NumParameter = mOptProblem->getVariableSize();
83 
84  /* Create a random number generator */
85  CRandom::Type Type;
86  Type = (CRandom::Type)(C_INT32) getValue("HybridGASA.RandomGenerator.Type");
87  C_INT32 Seed;
88  Seed = (C_INT32) getValue("HybridGASA.RandomGenerator.Seed");
89  CRandom * pRand = CRandom::createGenerator(Type, Seed);
90 
91  assert(pRand);
92 
93  const double ** Minimum = mOptProblem->getParameterMin().array();
94  const double ** Maximum = mOptProblem->getParameterMax().array();
95 
96  CVector< C_FLOAT64 > & Parameter = mOptProblem->getCalculateVariables();
97 
98 #ifdef XXXX
99  double **Parameter;
100  Parameter = new double * [2 * PopulationSize];
101 
102  for (int ii = 0; ii < 2*PopulationSize; ii++)
103  {
104  Parameter[ii] = new double[NumParameter];
105  }
106 
107  for (int dd = 0; dd < 2*PopulationSize; dd++)
108  Parameter[dd] = mParameters->array();
109 
110 #endif // XXXX
111 
112  double current_best_value, la;
113  int i, j, last_update, u10, u30, u50;
114  bool linear;
115 
116  // create array for function value of candidate
117  CandidateValue = new double[2 * PopulationSize];
118 
119  // create array for tournament competition strategy
120  WinScore = new int[2 * PopulationSize];
121 
122  // create array for crossover points
123  CrossPoint = new int[NumParameter];
124 
125  // create the population array
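 // (slots 0 .. PopulationSize-1 hold the parents, slots PopulationSize .. 2*PopulationSize-1 hold the offspring)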
126  individual = new double * [2 * PopulationSize];
127 
128  // create the individuals
129  for (i = 0; i < 2*PopulationSize; i++)
130  {
131  individual[i] = new double[NumParameter];
132  }
133 
134  // Prepare initial population
135  //Generate the initial population at random
136  for (i = 0; i < PopulationSize; i++)
137  {
138  for (j = 0; j < NumParameter; j++)
139  {
140  try
141  {
142  // determine if linear or log scale
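 // (a parameter whose positive range spans at least 1.8 decades is sampled log-uniformly, otherwise uniformly)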
143  linear = FALSE; la = 1.0;
144 
145  if ((*Maximum[j] <= 0.0) || (*Minimum[j] < 0.0)) linear = TRUE;
146  else
147  {
148  la = log10(*Maximum[j]) - log10(std::min(*Minimum[j], std::numeric_limits< C_FLOAT64 >::epsilon()));
149 
150  if (la < 1.8) linear = TRUE;
151  }
152 
153  // set it to a random value within the interval
154  if (linear)
155  individual[i][j] = *Minimum[j] + pRand->getRandomCC() * (*Maximum[j] - *Minimum[j]);
156  else
157  individual[i][j] = *Minimum[j] * pow(10.0, la * pRand->getRandomCC());
158  }
159  catch (int)
160  {
161  individual[i][j] = (*Maximum[j] - *Minimum[j]) * 0.5 + *Minimum[j];
162  }
163  }
164 
165  try
166  {
167  // calculate its fitness value
168  for (int kk = 0; kk < NumParameter; kk++) {Parameter[kk] = individual[i][kk];}
169 
170  CandidateValue[i] = mOptProblem->calculate();
171  }
172  catch (int)
173  {
174  CandidateValue[i] = std::numeric_limits< C_FLOAT64 >::max();
175  }
176  } // initialization ends
177 
178  // get the index of the fittest
179  BestFoundSoFar = fittest();
180 
181  // initialise the update registers
182  last_update = 0;
183  u10 = u30 = u50 = 0;
184 
185  // store that value
186  current_best_value = Get_BestFoundSoFar_candidate();
187 
188  std::ofstream finalout("debugopt.dat");
189 
190  if (!finalout)
191  {
192  std::cout << "debugopt.dat cannot be opened!" << std::endl;
193  exit(1);
194  }
195 
196  finalout << "----------------------------- the best result at each generation---------------------" << std::endl;
197  finalout << "Generation\t" << "Best candidate value for objective function\t" << "Display " << NumParameter << " parameters" << std::endl;
198  finalout << std::endl;
199  finalout.close();
200 
201  srand(time(NULL)); rand();
202 
203  //double a1=0.00000000001;
204  //double b1=0.9;
205 
206  // Iterations of GA
207  for (i = 0; i < NumGeneration; i++)
208  {
209  std::cout << std::endl;
210  std::cout << "GA is processing at generation " << i << std::endl;
211 
212  TrackDataFile(i);
213  // replicate the individuals
214  replicate();
215 
216  //Mutating some offspring
217  for (int nn = PopulationSize; nn < 2*PopulationSize; nn++)
218  {
219  double mut;
220 
221  // mutate the parameters
222  // calculate the mutated parameter; only some of the offspring are mutated
223  //if (floor(10*rand()/RAND_MAX) >= 2)
224 
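 // each offspring is mutated with probability 0.85; each of its parameters is then perturbed upward or downward at random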
225  if (pRand->getRandomCC() >= 0.15)
226  {
227  for (int j = 0; j < NumParameter; j++)
228  {
229  if (floor(10*rand() / RAND_MAX) > 5)
230  {
231  //mut = individual[nn][j] + floor(1000000*rand()/RAND_MAX)/1000000;
232  // if(i<5000) mut = individual[nn][j] + pRand->getRandomCC()*(*Maximum[j]-*Minimum[j])/(i*i);
233 
234  // if(i<NumGeneration/2) mut = individual[nn][j]*(1 + pRand->getRandomCC());
235  // mut = individual[nn][j]+(*Maximum[j]-individual[nn][j])*(1-pow((pRand->getRandomCC()),pow((1-i/NumGeneration),2)));
236  // if(i>=NumGeneration/2) mut = individual[nn][j] *(1+ (1-pow((pRand->getRandomCC()),pow((1-i/NumGeneration),2))));
237 
238  // if(i>=5000) mut = individual[nn][j] + pRand->getRandomCC()*(*Maximum[j]-*Minimum[j])/sqrt(i);
239  // if(i>=5000) mut = individual[nn][j] + pRand->getRandomCC()*(*Maximum[j]-*Minimum[j])/sqrt(i);
240  // if(i<5000) mut = individual[nn][j] *(1+ (1-pow((pRand->getRandomCC()),pow((1-i/NumGeneration),2))));
241 
242  //if(i < NumGeneration/16) mut = individual[nn][j]*(1 + pRand->getRandomCC());
243  // if(i < NumGeneration/16) mut = individual[nn][j]+ pRand->getRandomCC();
244  //else mut = individual[nn][j]*(1 + pRand->getRandomCC()*(pow((a1+1-b1),i/NumGeneration)-(1-b1)));
245  // else mut = individual[nn][j]+ pRand->getRandomCC()*(pow((a1+1-b1),i/NumGeneration)-(1-b1));
246 
247  if (i < NumGeneration*0.5) mut = individual[nn][j] * (1 + pRand->getRandomCC());
248  else
249  {
250  if (i < NumGeneration*0.6) mut = individual[nn][j] * (1 + 0.5 * pRand->getRandomCC());
251  else
252  {
253  if (i < NumGeneration*0.7) mut = individual[nn][j] * (1 + 0.25 * pRand->getRandomCC());
254  else
255  {
256  if (i < NumGeneration*0.8) mut = individual[nn][j] * (1 + 0.1 * pRand->getRandomCC());
257  else
258  {
259  if (i < NumGeneration*0.9) mut = individual[nn][j] * (1 + 0.01 * pRand->getRandomCC());
260  else
261  {
262  if (i < NumGeneration*0.95) mut = individual[nn][j] * (1 + 0.001 * pRand->getRandomCC());
263  else mut = individual[nn][j] * (1 + 0.0001 * pRand->getRandomCC());
264  }
265  }
266  }
267  }
268  }
269  }
270  else
271  {
272  //mut = individual[nn][j] - floor(1000000*rand()/RAND_MAX)/1000000;
273  // if(i<5000) mut = individual[nn][j] - pRand->getRandomCC()*(*Maximum[j]-*Minimum[j])/(i*i);
274  // if(i>=5000) mut = individual[nn][j] - pRand->getRandomCC()*(*Maximum[j]-*Minimum[j])/sqrt(i);
275 
276  // mut = individual[nn][j]+(individual[nn][j]-*Minimum[j])*(1-pow((pRand->getRandomCC()),pow((1-i/NumGeneration),2)));
277  // if(i< NumGeneration/2) mut = individual[nn][j]*(1 - pRand->getRandomCC());
278  // if(i>= NumGeneration/2) mut = individual[nn][j] *(1- (1-pow((pRand->getRandomCC()),pow((1-i/NumGeneration),2))));
279 
280  //if(i< NumGeneration/16) mut = individual[nn][j]*(1 - pRand->getRandomCC());
281  // if(i< NumGeneration/16) mut = individual[nn][j] - pRand->getRandomCC();
282  //else mut = individual[nn][j]*(1 - pRand->getRandomCC()*(pow((a1+1-b1),i/NumGeneration)-(1-b1)));
283  // else mut = individual[nn][j] - pRand->getRandomCC()*(pow((a1+1-b1),i/NumGeneration)-(1-b1));
284 
285  if (i < NumGeneration*0.5) mut = individual[nn][j] * (1 - pRand->getRandomCC());
286  else
287  {
288  if (i < NumGeneration*0.6) mut = individual[nn][j] * (1 - 0.5 * pRand->getRandomCC());
289  else
290  {
291  if (i < NumGeneration*0.7) mut = individual[nn][j] * (1 - 0.25 * pRand->getRandomCC());
292  else
293  {
294  if (i < NumGeneration*0.8) mut = individual[nn][j] * (1 - 0.1 * pRand->getRandomCC());
295  else
296  {
297  if (i < NumGeneration*0.9) mut = individual[nn][j] * (1 - 0.01 * pRand->getRandomCC());
298  else
299  {
300  if (i < NumGeneration*0.95) mut = individual[nn][j] * (1 - 0.001 * pRand->getRandomCC());
301  else mut = individual[nn][j] * (1 - 0.0001 * pRand->getRandomCC());
302  }
303  }
304  }
305  }
306  }
307 
308  // mut = individual[nn][j] * (1 + floor(1000000*rand()/RAND_MAX)/1000000);
309  //mut = individual[nn][j] * (1 + pRand->getRandomCC());
310  }
311 
312  // check boundary and force it to be within the bounds
313  if (mut <= *Minimum[j]) mut = *Minimum[j] + std::numeric_limits< C_FLOAT64 >::epsilon();
314  else
315  {
316  if (mut < *Minimum[j]) mut = *Minimum[j];
317  }
318 
319  if (mut >= *Maximum[j]) mut = *Maximum[j] - std::numeric_limits< C_FLOAT64 >::epsilon();
320  else
321  {
322  if (mut > *Maximum[j]) mut = *Maximum[j];
323  }
324 
325  // store it
326  individual[nn][j] = mut;
327  }
328  }
329 
330  // evaluate the fitness
331  for (int kk = 0; kk < NumParameter; kk++)
332  {
333  Parameter[kk] = individual[nn][kk];
334  }
335 
336  CandidateValue[nn] = mOptProblem->calculate();
337  }
338 
339  // select appropriate individuals as the new population
340  // select(1);
341  //select(2);
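 // alternate the selection strategy: tournament selection (2) on even generations, rank-based selection (3) on odd ones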
342  if ((i % 2) == 0) select(2);
343  else select(3);
344 
345  // if((i%2)==0) select(3);
346  // else select(4);
347 
348  // Here introduce SA to optimise some top individuals
349  //if(i>=0.5*NumGeneration)
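 // every 1000 generations, run a simulated annealing refinement on the first six individuals of the population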
350  if ((i >= 1) && (i % 1000 == 0))
351  {
352  for (int optKK = 0; optKK < 6; optKK++)
353  {
354  optimise(optKK);
355  }
356  }
357 
358  // get the index of the fittest
359  BestFoundSoFar = fittest();
360 
361  if (Get_BestFoundSoFar_candidate() != current_best_value)
362  {
363  last_update = i;
364  current_best_value = Get_BestFoundSoFar_candidate();
365  }
366 
367  if (u10) u10--;
368 
369  if (u30) u30--;
370 
371  if (u50) u50--;
372 
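 // stagnation handling: if the best value has not improved for more than 50 (30, 10) generations,
 // re-randomise the bottom half (30%, 10%) of the population and set a hold-off counter before checking again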
373  if ((u50 == 0) && (i - last_update > 50))
374  //if((u50==0)&&(i-last_update>150))
375  {
376  for (int mm = PopulationSize / 2; mm < PopulationSize; mm++)
377  {
378  for (int jj = 0; jj < NumParameter; jj++)
379  {
380  try
381  {
382  // determine if linear or log scale
383  linear = FALSE; la = 1.0;
384 
385  if ((*Maximum[jj] <= 0.0) || (*Minimum[jj] < 0.0)) linear = TRUE;
386  else
387  {
388  la = log10(*Maximum[jj]) - log10(std::min(*Minimum[jj], std::numeric_limits< C_FLOAT64 >::epsilon()));
389 
390  if (la < 1.8) linear = TRUE;
391  }
392 
393  // set it to a random value within the interval
394  if (linear)
395  individual[mm][jj] = *Minimum[jj] + pRand->getRandomCC() * (*Maximum[jj] - *Minimum[jj]);
396  else
397  individual[mm][jj] = *Minimum[jj] * pow(10.0, la * pRand->getRandomCC());
398  }
399  catch (int)
400  {
401  individual[mm][jj] = (*Maximum[jj] - *Minimum[jj]) * 0.5 + *Minimum[jj];
402  }
403  }
404 
405  try
406  {
407  // calculate its fitness
408  for (int kk = 0; kk < NumParameter; kk++) {Parameter[kk] = individual[mm][kk];}
409 
410  CandidateValue[mm] = mOptProblem->calculate();
411  }
412  catch (int)
413  {
414  CandidateValue[mm] = std::numeric_limits< C_FLOAT64 >::max();
415  }
416  }
417 
418  //end external for loop
420  u50 = 50; u30 = 30; u10 = 10;
421  //u50=150; u30=100; u10=50;
422  }
423  else
424  {
425  if ((u30 == 0) && (i - last_update > 30))
426  //if((u30==0)&&(i-last_update>100))
427  {
428  for (int mm = (int)floor(PopulationSize * 0.7); mm < PopulationSize; mm++)
429  {
430  for (int jj = 0; jj < NumParameter; jj++)
431  {
432  try
433  {
434  // determine if linear or log scale
435  linear = FALSE; la = 1.0;
436 
437  if ((*Maximum[jj] <= 0.0) || (*Minimum[jj] < 0.0)) linear = TRUE;
438  else
439  {
440  la = log10(*Maximum[jj]) - log10(std::min(*Minimum[jj], std::numeric_limits< C_FLOAT64 >::epsilon()));
441 
442  if (la < 1.8) linear = TRUE;
443  }
444 
445  // set it to a random value within the interval
446  if (linear)
447  individual[mm][jj] = *Minimum[jj] + pRand->getRandomCC() * (*Maximum[jj] - *Minimum[jj]);
448  else
449  individual[mm][jj] = *Minimum[jj] * pow(10.0, la * pRand->getRandomCC());
450  }
451  catch (int)
452  {
453  individual[mm][jj] = (*Maximum[jj] - *Minimum[jj]) * 0.5 + *Minimum[jj];
454  }
455  }
456 
457  try
458  {
459  // calculate its fitness
460  for (int kk = 0; kk < NumParameter; kk++) {Parameter[kk] = individual[mm][kk];}
461 
462  CandidateValue[mm] = mOptProblem->calculate();
463  }
464  catch (int)
465  {
466  CandidateValue[mm] = std::numeric_limits< C_FLOAT64 >::max();
467  }
468  }
469 
470  //end external for loop
472  u30 = 30; u10 = 10;
473  //u30=100; u10=50;
474  }
475  else
476  {
477  if ((u10 == 0) && (i - last_update > 10))
478  //if((u10==0)&&(i-last_update>50))
479  {
480  for (int mm = (int) floor(PopulationSize * 0.9); mm < PopulationSize; mm++)
481  {
482  for (int jj = 0; jj < NumParameter; jj++)
483  {
484  try
485  {
486  // determine if linear or log scale
487  linear = FALSE; la = 1.0;
488 
489  if ((*Maximum[jj] <= 0.0) || (*Minimum[jj] < 0.0)) linear = TRUE;
490  else
491  {
492  la = log10(*Maximum[jj]) - log10(std::min(*Minimum[jj], std::numeric_limits< C_FLOAT64 >::epsilon()));
493 
494  if (la < 1.8) linear = TRUE;
495  }
496 
497  // set it to a random value within the interval
498  if (linear)
499  individual[mm][jj] = *Minimum[jj] + pRand->getRandomCC() * (*Maximum[jj] - *Minimum[jj]);
500  else
501  individual[mm][jj] = *Minimum[jj] * pow(10.0, la * pRand->getRandomCC());
502  }
503  catch (int)
504  {
505  individual[mm][jj] = (*Maximum[jj] - *Minimum[jj]) * 0.5 + *Minimum[jj];
506  }
507  }
508 
509  try
510  {
511  // calculate its fitness
512  for (int kk = 0; kk < NumParameter; kk++) {Parameter[kk] = individual[mm][kk];}
513 
514  CandidateValue[mm] = mOptProblem->calculate();
515  }
516  catch (int)
517  {
518  CandidateValue[mm] = std::numeric_limits< C_FLOAT64 >::max();
519  }
520  }
521 
522  //end external for loop
524  u10 = 10;
525  //u10=50;
526  }
527  }
528  }
529  } // end iteration of generations
530 
531  for (int kk = 0; kk < NumParameter; kk++)
532  {
533  Parameter[kk] = individual[BestFoundSoFar][kk];
534  }
535 
536  //set the BestFoundSoFar function value
537  mOptProblem->setSolutionValue(Get_BestFoundSoFar_candidate());
538 
539  //store the combination of the BestFoundSoFar parameter values found so far
540  mOptProblem->getSolutionVariables() = Parameter;
541 
542  //free memory space
543  for (i = 0; i < 2*PopulationSize; i++) delete [] individual[i]; delete [] individual;
544  delete [] CrossPoint;
545  delete [] WinScore;
546  delete [] CandidateValue;
547 
548  std::cout << std::endl;
549  std::cout << "GA has finished successfully!" << std::endl;
550 
551  return 0;
552 }
553 
554 /*
555 // evaluate the fitness of one individual
556 double COptMethodHGASA::evaluate(int i)
557 {
558  int j;
559  bool outside_range = FALSE;
560  double fitness;
561  double fitness0;
562  double tmp;
563 
564  // evaluate the fitness
565  try
566  {
567  fitness0=0;
568  for(j=0; j<NumParameter; j++)
569  {
570  fitness=fitness0+pow(individual[i][j],4.0)-16.0*pow(individual[i][j],2.0)+5.0*individual[i][j];
571  fitness0=fitness;
572  }
573  fitness=fitness0/2.0;
574  }
575  catch(int)
576  {
577  fitness = std::numeric_limits< C_FLOAT64 >::max();
578  }
579 
580  return fitness;
581 }
582  */
583 
584 // copy individual o to position d
585 void COptMethodHGASA::copy(int o, int d)
586 {
587  int i;
588 
589  for (i = 0; i < NumParameter; i++)
590  {
591  individual[d][i] = individual[o][i];
592  }
593 
594  CandidateValue[d] = CandidateValue[o];
595 }
596 
597 // swap individuals o and d
598 void COptMethodHGASA::swap(int o, int d)
599 {
600  int i;
601  double tmp;
602 
603  for (i = 0; i < NumParameter; i++)
604  {
605  tmp = individual[d][i];
606  individual[d][i] = individual[o][i];
607  individual[o][i] = tmp;
608  }
609 
610  tmp = CandidateValue[d];
611  CandidateValue[d] = CandidateValue[o];
612  CandidateValue[o] = tmp;
613 
614  i = WinScore[d];
615  WinScore[d] = WinScore[o];
616  WinScore[o] = i;
617 }
618 
619 // exchange individuals o and d
620 void COptMethodHGASA::exchange(int o, int d)
621 {
622  int i;
623  double tmp;
624 
625  for (i = 0; i < NumParameter; i++)
626  {
627  tmp = individual[d][i];
628  individual[d][i] = individual[o][i];
629  individual[o][i] = tmp;
630  }
631 
632  tmp = CandidateValue[d];
633  CandidateValue[d] = CandidateValue[o];
634  CandidateValue[o] = tmp;
635 }
636 
637 void COptMethodHGASA::crossover(int p1, int p2, int c1, int c2)
638 {
639  int i, j, s, e;
640  int pp1, pp2, tmp, l;
641 
642  //srand(time(NULL)); rand();
643 
644  try
645  {
646  if (NumParameter > 1)
647  {
648  // get a random number of crossover points, always less than half the number of genes
649  NumCrossPoint = (int)fabs(floor((NumParameter / 2) * rand() / RAND_MAX));
650  // NumCrossPoint = (int)floor((NumParameter/2)*pRand->getRandomCC());
651  }
652  else NumCrossPoint = 0;
653 
654  // if there are no crossover points just copy the parents to the children
655  if (NumCrossPoint == 0)
656  {
657  for (j = 0; j < NumParameter; j++)
658  {
659  individual[c1][j] = individual[p1][j];
660  individual[c2][j] = individual[p2][j];
661  }
662 
663  return;
664  }
665 
666  // choose the first point
667  CrossPoint[0] = 1 + (int)fabs(floor((NumParameter - NumCrossPoint) * rand() / RAND_MAX));
668 
669  //CrossPoint[0] = 1 + (int)floor((NumParameter-NumCrossPoint)*pRand->getRandomCC());
670  // choose the others
671  for (i = 1; i < NumCrossPoint; i++)
672  {
673  l = NumParameter - NumCrossPoint + i - 1 - CrossPoint[i - 1];
674  CrossPoint[i] = 1 + CrossPoint[i - 1] + (l == 0 ? 0 : (int)fabs(floor(l * rand() / RAND_MAX)));
675  //CrossPoint[i] = 1 + CrossPoint[i-1] + (l==0 ? 0 : (int)floor(l*pRand->getRandomCC()));
676  }
677 
678  // copy segments
679  pp1 = p2; pp2 = p1;
680 
681  for (i = 0; i <= NumCrossPoint; i++)
682  {
683  // swap the indexes
684  tmp = pp1;
685  pp1 = pp2;
686  pp2 = tmp;
687  if (i == 0) s = 0; else s = CrossPoint[i - 1];
688  if (i == NumCrossPoint) e = NumParameter; else e = CrossPoint[i];
689 
690  for (j = s; j < e; j++)
691  {
692  individual[c1][j] = individual[pp1][j];
693  individual[c2][j] = individual[pp2][j];
694  }
695  }
696  }
697  catch (int)
698  {}}
699 
700 // replicate the individuals w/ crossover
701 void COptMethodHGASA::replicate(void)
702 {
703  int i, parent1, parent2;
704 
705  // reproduce in consecutive pairs
706  for (i = 0; i < PopulationSize / 2; i++)
707  {
708  parent1 = (int)fabs(floor(PopulationSize * rand() / RAND_MAX));
709  parent2 = (int)fabs(floor(PopulationSize * rand() / RAND_MAX));
710  crossover(parent1, parent2, PopulationSize + i*2, PopulationSize + i*2 + 1);
711  }
712 
713  // check if there is one left over and just copy it
714  if (PopulationSize % 2 > 0) copy(PopulationSize - 1, 2*PopulationSize - 1);
715 }
716 
717 // select PopulationSize individuals
718 void COptMethodHGASA::select(int SelectionStrategy)
719 {
720  int i, j, TournamentSize, RandomRival;
721  int RandomIndividual;
722  //srand(time(NULL)); rand();
723 
724  switch (SelectionStrategy)
725  {
726  case 1: // parent-offspring competition
727 
728  for (i = PopulationSize; i < 2*PopulationSize; i++)
729  {
730  // if offspring is fitter keep it
731  for (j = 0; j < PopulationSize; j++)
732  {
733  if (CandidateValue[i] < CandidateValue[j]) exchange(i, j);
734  }
735  }
736 
737  break;
738  case 2: // tournament competition
739  // compete with 20% of the population
740  TournamentSize = PopulationSize / 5;
741 
742  // but at least one
743  if (TournamentSize < 1) TournamentSize = 1;
744 
745  // parents and offspring are all in competition
746  for (i = 0; i < 2*PopulationSize; i++)
747  {
748  WinScore[i] = 0;
749 
750  for (j = 0; j < TournamentSize; j++)
751  {
752  // get random rival
753  RandomRival = (int)fabs(floor((PopulationSize * 2 - 1) * rand() / RAND_MAX));
754 
755  if (CandidateValue[i] <= CandidateValue[RandomRival]) WinScore[i]++;
756  }
757  }
758 
759  // selection of top PopulationSize winners
760  for (i = 0; i < PopulationSize; i++)
761  {
762  for (j = i + 1; j < 2*PopulationSize; j++)
763  {if (WinScore[i] < WinScore[j]) swap(i, j);}
764  }
765 
766  break;
767 
768  // ranking strategy without proportionate-fitness
769  case 3:
770 
771  for (i = 0; i < PopulationSize; i++)
772  {
773  for (j = i + 1; j < 2*PopulationSize; j++)
774  {
775  if (CandidateValue[i] > CandidateValue[j]) exchange(i, j);
776  }
777  }
778 
779  break;
780  // Randomly select P individuals from population of 2P
781  case 4:
782 
783  for (i = 0; i < PopulationSize; i++)
784  {
785  RandomIndividual = (int)fabs(floor((PopulationSize * 2 - 1) * rand() / RAND_MAX));
786  exchange(i, RandomIndividual);
787  }
788 
789  break;
790  }
791 }
792 
793 // check the best individual at this generation
794 int COptMethodHGASA::fittest(void)
795 {
796  int i, b;
797  double f;
798  f = CandidateValue[0];
799  b = 0;
800 
801  for (i = 1; i < PopulationSize; i++)
802  {
803  if (CandidateValue[i] < f)
804  {
805  b = i;
806  f = CandidateValue[i];
807  }
808  }
809 
810  return b;
811 }
812 
813 void COptMethodHGASA::TrackDataFile(int i)
814 {
815  std::ofstream finalout("debugopt.dat", std::ios::app);
816 
817  if (!finalout)
818  {
819  std::cout << "debugopt.dat cannot be opened!" << std::endl;
820  exit(1);
821  }
822 
823  finalout << "#" << i << "\t" << std::setprecision(8) << CandidateValue[BestFoundSoFar] << std::endl;
824 
825  for (int j = 0; j < NumParameter; j++)
826  {
827  finalout << std::setprecision(8) << individual[BestFoundSoFar][j] << "\t";
828  }
829 
830  finalout << std::endl;
831  finalout << std::endl;
832 
833  finalout.close();
834 }
835 
836 // SA optimization function
837 
838 bool COptMethodHGASA::optimise(int index)
839 {
840  //C_INT32 NumSignificantPoint, NumTempChange, NumIteration = (C_INT32) getValue("SimulatedAnnealing.Iterations");
841  C_INT32 NumSignificantPoint, NumTempChange, NumIteration = 1000;
842  C_INT32 j, NumParameter = mOptProblem->getVariableSize();
843 
844  //variable settings necessary for SA
845  CVector<double> candparameter(NumParameter); //one-dimensional array of candidate parameter values
846  double candFuncValue; // candidate value of objective function
847 
848  CVector<double> thisparameter(NumParameter); //current parameters
849  double thisFuncValue; //current value of the objective function
850 
851  CVector <double> newparameter(NumParameter); //new parameter value
852  double newFuncValue; //function value of new point
853 
854  CVector <double> step(NumParameter); //array of maximum steps for each parameter
855  CVector <int> NumStepAccep(NumParameter); //number of steps accepted
856 
857  double TempDecreaseRate = 0.85; //temperature decrease rate
858  double BoltzmannConstant = 1.0; //Boltzmann constant
859 
860  double InitialTemp = 1.0; //initial temperature
861  double t; //current temperature
862 
863  double ConvgCriterion = 0.01; // convergence criterion or program termination criterion
864  double ChangeValue, EnergyDifference;
865 
866  double fk[BESTFOUNDSOFAR];
867  bool ready;
868 
869  /* Create a random number generator */
870  CRandom::Type Type;
871  Type = (CRandom::Type)(C_INT32) getValue("HybridGASA.RandomGenerator.Type");
872  unsigned C_INT32 Seed;
873  Seed = (unsigned C_INT32) getValue("HybridGASA.RandomGenerator.Seed");
874  CRandom * pRandSA = CRandom::createGenerator(Type, Seed);
875 
876  assert(pRandSA);
877 
878  const double ** Minimum = mOptProblem->getParameterMin().array();
879  const double ** Maximum = mOptProblem->getParameterMax().array();
880 
881  CVector< C_FLOAT64 > & SAParameter = mOptProblem->getCalculateVariables();
882 
883  //double * Parameter;
884 
885  //dump_datafile_init()
886 
887  //initial temperature
888  t = InitialTemp;
889 
890  //initial point
891  NumSignificantPoint = 0;
892 
893  /*
894  //generate initial parameters randomly
895  for (j = 0; j < NumParameter; j++)
896  {
897  linear = false;
898  la = 1.0;
899 
900  if (*Minimum[j] == 0.0) *Minimum[j] = std::numeric_limits< C_FLOAT64 >::epsilon();
901 
902  if ((*Maximum[j] <= 0.0) || (*Minimum[j] <= 0.0)) linear = true;
903 
904  else
905  {
906  la = log10(*Maximum[j]) - log10(*Minimum[j]);
907  if (la < 1.8) linear = true;
908  }
909 
910  if (linear)
911  Parameter[j] =
912  *Minimum[j] + pRandSA->getRandomCC() * (*Maximum[j] - *Minimum[j]);
913  else
914  Parameter[j] = *Minimum[j] * pow(10.0, la * pRandSA->getRandomCC());
915  } // Initialization ends
916  */
917 
918  for (j = 0; j < NumParameter; j++)
919  {
920  SAParameter[j] = individual[index][j];
921  }
922 
923  for (int kk = 0; kk < NumParameter; kk++)
924  candparameter[kk] = thisparameter[kk] = newparameter[kk] = SAParameter[kk];
925 
926  // calculate the function fitness value
927  double FitnessValue = mOptProblem->calculate();
928  thisFuncValue = candFuncValue = FitnessValue;
929 
930  //Remember them
931  for (int mm = 0; mm < BESTFOUNDSOFAR; mm++) fk[mm] = thisFuncValue;
932 
933  //initial step sizes
934  for (int jj = 0; jj < NumParameter; jj++)
935  {
936  NumStepAccep[jj] = 0;
937  step[jj] = fabs(candparameter[jj]);
938  }
939 
940  //no temperature reductions yet
941  NumTempChange = 0;
942 
943  do
944  {
945  for (int ff = 0; ff < NumIteration; ff++) //step adjustments
946  {
947  std::cout << "New individual begins ......" << index << std::endl;
948  std::cout << "New iteration begins ......" << std::endl;
949  std::cout << "Current Temperature: " << t << std::endl;
950  std::cout << "Number of Temperature Change: " << NumTempChange << std::endl;
951 
952  for (j = 0; j < NumDirection; j++) // adjustment in all directions
953  {
954  for (int hh = 0; hh < NumParameter; hh++)
955  {
956  // ChangeValue=tan(2*PI*rand()/RAND_MAX)*(t/pow(pow(2,2.0)+t*t,(NumParameter+1)/2.0));
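 // propose a Cauchy-like random step along coordinate hh whose width depends on the current temperature t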
957  ChangeValue = tan(2 * PI * pRandSA->getRandomCC()) * (t / pow(pow(2, 2.0) + t * t, (NumParameter + 1) / 2.0));
958  newparameter[hh] = thisparameter[hh] + step[hh] * ChangeValue;
959 
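 // if the proposed value falls outside [Minimum, Maximum], resample it uniformly within the bounds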
960  if (newparameter[hh] < *Minimum[hh]) newparameter[hh] = *Minimum[hh] + pRandSA->getRandomCC() * (*Maximum[hh] - *Minimum[hh]);
961 
962  if (newparameter[hh] > *Maximum[hh]) newparameter[hh] = *Minimum[hh] + pRandSA->getRandomCC() * (*Maximum[hh] - *Minimum[hh]);
963 
964  for (int exchange = 0; exchange < NumParameter; exchange++)
965  {
966  SAParameter[exchange] = newparameter[exchange];
967  }
968 
969  // calculate the function value
970  double FitnessValue = mOptProblem->calculate();
971  newFuncValue = FitnessValue;
972 
973  //Calculate the energy difference
974  EnergyDifference = newFuncValue - thisFuncValue;
975 
976  //keep newparameter if energy is reduced
977  if (newFuncValue <= thisFuncValue)
978  {
979  for (int exchange = 0; exchange < NumParameter; exchange++)
980  {
981  thisparameter[exchange] = newparameter[exchange];
982  }
983 
984  thisFuncValue = newFuncValue;
985 
986  NumSignificantPoint++; // a new point counted
987  NumStepAccep[hh]++; //a new point in this coordinate counted
988 
989  if (thisFuncValue < candFuncValue)
990  {
991  for (int aa = 0; aa < NumParameter; aa++)
992  individual[index][aa] = SAParameter[aa] = candparameter[aa] = thisparameter[aa];
993 
994  CandidateValue[index] = candFuncValue = thisFuncValue;
995 
996  if (!mOptProblem->checkFunctionalConstraints())
997  continue;
998 
999  //set the best function value
1000  //mOptProblem->setBestValue(candFuncValue);
1001  //std::cout << "the Best value (" << NumSignificantPoint << "): " << candFuncValue << std::endl;
1002 
1003  //store the combination of the best parameter values found so far at current temperature
1004  //mOptProblem->getBestParameter() = *mParameters;
1005 
1006  //std::cout << "the Best Parameters: (";
1007  //for (int kk = 0; kk < mOptProblem->getParameterNum(); kk++)
1008  // std::cout << mOptProblem->getParameter(kk) << ", ";
1009 
1010  // std::cout << ")" << std::endl;
1011  }
1012  }
1013  else
1014  {
1015  //keep newparameter with probability, if newFuncValue is increased
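 // Metropolis criterion: accept an uphill move with probability exp(-(newFuncValue - thisFuncValue) / (BoltzmannConstant * t))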
1016  double Probability = exp(-(newFuncValue - thisFuncValue) / (BoltzmannConstant * t));
1017 
1018  if (Probability > pRandSA->getRandomCC())
1019  {
1020  //Keep the new point
1021  for (int exchange = 0; exchange < NumParameter; exchange++)
1022  {
1023  thisparameter[exchange] = newparameter[exchange];
1024  }
1025 
1026  thisFuncValue = newFuncValue;
1027  NumSignificantPoint++; // a new point counted
1028  NumStepAccep[hh]++; //a new point in this coordinate counted
1029  }
1030  }
1031  }
1032  }
1033 
1034  //update the step sizes
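 // Corana-style adaptation: widen a coordinate's step when more than 60% of its moves were accepted, shrink it when fewer than 40% were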
1035  for (int nn = 0; nn < NumParameter; nn++)
1036  {
1037  double StepAdjustment = (double) NumStepAccep[nn] / (double)NumDirection;
1038 
1039  if (StepAdjustment > 0.6) step[nn] *= 1 + 5 * (StepAdjustment - 0.6);
1040 
1041  if (StepAdjustment < 0.4) step[nn] /= 1 + 5 * (0.4 - StepAdjustment);
1042 
1043  NumStepAccep[nn] = 0;
1044  }
1045  }
1046 
1047  //update the temperature
1048  t *= TempDecreaseRate;
1049  NumTempChange++;
1050 
1051  // if this is the first cycle then ignore the convergence test
1052  if (NumTempChange == 1) ready = FALSE;
1053  else
1054  {
1055  ready = TRUE;
1056 
1057  //termination check: require little change over the last BESTFOUNDSOFAR temperature reductions
1058  for (int ii = 0; ii < BESTFOUNDSOFAR; ii++)
1059  if (fabs(fk[ii] - thisFuncValue) > ConvgCriterion)
1060  {
1061  ready = FALSE;
1062  break;
1063  }
1064 
1065  if (!ready)
1066  {
1067  for (int aa = 0; aa < BESTFOUNDSOFAR - 1; aa++)
1068  fk[aa] = fk[aa + 1];
1069 
1070  fk[BESTFOUNDSOFAR - 1] = thisFuncValue;
1071  }
1072  else
1073 
1074  //additional termination criterion: the current value must not be much larger than the best found so far
1075  if (fabs(thisFuncValue - candFuncValue) > ConvgCriterion) ready = FALSE;
1076  }
1077 
1078  if (!ready)
1079  {
1080  NumSignificantPoint++;
1081 
1082  for (int kk = 0; kk < NumParameter; kk++)
1083  thisparameter[kk] = candparameter[kk];
1084 
1085  thisFuncValue = candFuncValue;
1086  }
1087  }
1088  while (!ready); // do-while loop ends
1089 
1090  pdelete(pRandSA);
1091  return 0;
1092 }
1093 
1094 /**********************************the end **************************************/