// pso_binary.c
/***************************************************************************
Some binary PSO derivations
First version : 2004-10
Last update : 2005-02-02
email : Maurice.Clerc@WriteMe.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
/*
Special Particle Swarm Optimiser for binary problems.
Actually, this program contains several algorithms (see the "method" parameter)
based on the canonical PSO model.
The purpose of this program is precisely to show how easy it is to derive some good
algorithms from this model.
The "method" parameter is an arbitrary code.
If method<100
information links are modified at random after each iteration
if there has been no improvement. The swarm size is S.
Each particle chooses at random (K choices) which particles it informs
(see the illustrative sketch right after this comment block).
It means each particle is informed by a number of particles
between 0 and S, but which is, of course, usually more or less equal to K.
If method>=100
Methods originally designed with information links that use
the classical circular topology.
Each particle then always has exactly the same number K of informers
(including itself).
You may use a pseudo-gradient option to find the best informer
(using a cyclic distance between binary positions).
Note that this really makes sense only for some methods (like method 1).
Some binary methods are based on two cyclic algebras:
- in {-1,0,1} for the velocity
- in {0,1} for the position
(a small sketch of this arithmetic follows the TO DO comment below).
The similarity with a cellular automaton is then particularly obvious
when you use just one particle.
You will see that some methods, like method 0, are _too_ good on problems 1, 2 and 3.
Although they are often called "deceptive" problems, they are in fact,
if I dare say, just "deceptive deceptive" ones:
the structure of the solution is so particular that they are indeed extremely easy.
They can be solved very quickly with a "swarm" of size one!
So, I have added some other problems, in particular Zebra3.
The source code is a bit complicated, for I have added the possibility
to combine several methods in an adaptive way (parameter adapt_method).
My advice is to set it to 0 first, and to try the different methods individually.
Unlike in TRIBES, S and K are not adaptive (see TO DO, though)
so you have to choose them manually.
I have added a "quality measure" that gives an estimation of how good is a given
algorithm on a given problem, assuming you run it in a loop
with different maximum evaluations
(see eval_max1, eval_max2, delta_eval_max)
For some functions, I have also added a trick to save some computational time:
when the function value is progressively computed as a sum of non negative values
there is usually no need to compute it entirely. You just have to be sure it does
not improve the best solution known by the particle
Have fun and keep me posted.
P.S. 1. If your compiler provides a reasonably good pseudo-random number generator,
you don't have to use KISS.
P.S. 2. Some options and subprograms are not really useful.
Don't forget this is a research version!
P.S. 3. There is an almost separate subprogram (fundamental_hyp)
to perform some statistical tests about the hypothesis "nearer is better".
*/
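/*
 Illustrative sketch (my assumption, not necessarily the exact code used further
 below): how the random information links of the methods < 100 could be (re)built
 after an iteration without improvement. Here LINKS[i][j]==1 is read as "particle i
 informs particle j"; the real code may use another convention, and may for instance
 force each particle to inform itself.

    for (s = 0; s < S; s++)            // clear all links
        for (m = 0; m < S; m++) LINKS[s][m] = 0;
    for (s = 0; s < S; s++)            // each particle informs K particles drawn at random
        for (k = 0; k < K; k++) LINKS[s][alea_integer(0, S - 1)] = 1;
*/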
/* TO DO
- In fact, there _is_ an adaptive K option, but far too rudimentary (either 2 or 3).
Has to be improved.
- add Parisian method for some kinds of problems?
*/
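/*
 Illustrative sketch (an assumption about the arithmetic only, not about the actual
 move rules, which depend on the "method" parameter): the two cyclic algebras
 mentioned above. Velocity components live in {-1,0,1} and add modulo 3; position
 bits live in {0,1} and add modulo 2, so a non-zero velocity component flips the bit.

    int add_vel(int v1, int v2)       // cyclic addition in {-1,0,1}
    {
        return (v1 + v2 + 4) % 3 - 1; // shift into {0,1,2}, wrap, shift back
    }
    int add_pos(int x, int v)         // cyclic addition in {0,1}
    {
        return (x + v + 2) % 2;       // v = -1 and v = 1 both flip the bit x
    }
*/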
#include <stdio.h>
#include <stdlib.h>
#include "math.h"
#include <time.h>
#define D_max 150 // Max number of dimensions of the search space
#define E_max 1000 // Max number of executions for one problem
#define S_max 200 // Max swarm size
#define F_max 3 // (for multiobjective) Max number of functions
#define ulong unsigned long // For KISS pseudo random numbers generator
#define RAND_MAX_KISS ((unsigned long) 4294967295) // idem
// You don't need KISS if you have a good rand() function
#define MAX_PEAKS 500
// Structures
struct f {int size;double fi[F_max];}; // Fitnesses
struct bits {int size;int b[D_max];};
struct position {struct bits x; struct f f;};
// Sub-programs
double alea(double a,double b);
int alea_integer(int a,int b);
double alea_normal(double mean, double std_dev);
struct bits binary(unsigned int a,int size);
int best_(struct f fx,struct f fy);
int best_info(int s,int option);
double combin(int D,int k);
int diameter(); // Swarm diameter
double distance(int s1,int s2, int option);
void fundamental_hyp(int function);
void init_swarm(int option);
double number(int d,struct bits x,double scale);
double perf(int s,int function);
ulong rand_kiss();
void seed_rand_kiss(ulong seed);
void swarm_size(); // Just for plotting some curves
double total_error(struct f f);
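/*
 Illustrative sketch (assumption) of the time-saving trick mentioned in the header
 comment: when the fitness is a sum of non-negative terms and the problem is a
 minimisation, the evaluation can stop as soon as the partial sum already exceeds
 the best value known by the particle, since the complete value can no longer be an
 improvement. "term(d)" and "best_known" are hypothetical names, not functions of
 this program.

    f = 0;
    for (d = 0; d < D; d++)
    {
        f = f + term(d);              // term(d) >= 0
        if (f > best_known) break;    // cannot improve any more: stop early
    }
*/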
// Global variables
struct position best; // Best of the best position
int D; // Search space dimension
int Sp; // pseudo swarm size for problem 11
int Diam; // Swarm diameter
double E; // Useful for some test functions
struct f fmin; // Objective(s) to reach
double granul[D_max]; // Granularity for each dimension (0 = continuous, 1 = integer)
int init; // Flag. Just to know whether evaluations are in the init phase or not
int nb_eval; // Total number of evaluations
int n_exec,n_exec_max; // Nbs of executions
int LINKS[S_max][S_max]; // Information links
double mean_val;
int n_iter;
struct position P[S_max]; // Positions
struct position P_m[S_max];// Best positions found by each particle
struct position P_init[S_max];
int S; // Swarm size
double pi; // Useful for some test functions
int pivots[S_max]; // Flags to know which particles are pivots
double Q[D_max][D_max]; // Matrix for quadratic problems
double save_time; // To evaluate computational time saved
// by not entirely computing the fitness
struct bits V[S_max];// Velocities
double xmin[D_max],xmax[D_max]; // Intervals defining the search space
// For Multimodal random generator
int peaks; /* The number of peaks */
int landscape[MAX_PEAKS][D_max]; /* Holds the location of the peaks */
// File(s)
FILE *f_multi; // In case of multiobjective, save results, in order to later find
// the Pareto front
FILE *f_Q; // Matrix for quadratic problems
FILE *f_Qr; // Matrix for random quadratic problems
FILE *f_landscape; // Landscape for Multimodal problem
FILE *f_init; // If the initial position is read from a file
FILE *f_trace;
FILE *f_SSS; // Structured search space estimation. Just for information
// If the parameter SSS is set to 1, this file is generated
// and there is no optimisation
FILE *f_synth; // Summary of the results
int main()
{
int adapt_K;
double adapt_latency;
int adapt_method;
double alpha;
struct position best_prev, best_temp;
int bit,bit0,bit1,bit2,bit3;
struct bits B_mod[S_max]; // Modified bits
double c2,c3;// Confidence coefficients
double cp,cp0,cp1,cp2,cp3;
double c_pr0;
int check_hyp;
int choice[3];
int d; // Current dimension
int DD;
double density;
int deter; // Flag for deterministic or random search
int distrib[D_max]; // Just for some statistics
int dist;
double eps; // Wanted accuracy
double eps_mean; // Average accuracy
double error; // Total error for a given position
double error0,error1,error2, error3;
double error_prev; // Total error after previous iteration
double error_prev2;
double error_[E_max];
int eval_max; // Max number of evaluations
int eval_max1, eval_max2, delta_eval_max;
double eval_mean; // Mean number of evaluations
int fixed_link;
int function[F_max];
int g; // Rank of the best informer
//int g_[S_max]; // Best informers list
struct position Gp;
int i,j;
int init_links; // Flag to (re)init or not the information links
int init_option; // 0 => as usual, positions completely at random
// > 0 => see init_swarm(): some attempts to find a more even distribution
// < 0 => read from the file init.txt
int k,k0, k1,k2;
double k_tot[D_max]={0}; // Just for some distribution tests
int K; // Max number of informed particles by a given one
double K_choice[3][2];
//int K_prev;
int m;
int method;
double min; // Best result along several runs
int n1,n2,n3;
int n_adapt,n_adapt_max;
int n_adaptK;
int nb_pivots;
int n_f; // (multiobjective) number of functions to optimize
int n_failure; // Number of failures
int n_iter0,n_iter1; // Number of iterations
int method_rank;
int option_best; // Option for the best informer choice
// 0 => really the best (classical)
// 1 => using pseudo-gradient
struct position P_prev[S_max], P_temp[S_max];
struct position P_m_prev[S_max], P_m_temp[S_max];
struct position Pp;
double pr;
double pr0; // Strategy probabilities
int print_level; // > 0 => more verbose
struct bits V_prev[S_max], V_temp[S_max];
struct position Vp;
double quality, quality_ref;
double r;
int s,s1,s2; // Rank of the current particle
int SSS; // Structured Search Space option
int strategies[5];
int strat_number;
double success_rate;
double t1,t2;
int tribe; // 0 => classical. After the move, update just the previous best
// of the particle
// != 0 => update the whole tribe
//float z;
double z;
float zz;
double z7;
f_synth=fopen("f_synth.txt","w");
f_trace=fopen("f_trace.txt","w");
//swarm_size(); goto end; // Just to plot some curves
seed_rand_kiss(1); // Initialise KISS
E=exp(1);
pi=acos(-1);
// ------------------------------------------------------ Control parameters
option_best=0; // 1 or 0. Using pseudo-gradient or not
init_option=2; // 0 => positions completely at random
// 1 (removed. Was not good)
// 2 => semi-deterministic distribution
// <0 => read from file init.txt
tribe=0; // 0 => classical. After a move, update just the previous best
// of the current particle
// 1 => update for the whole current tribe
// 2 => update the worst of the current tribe
// WARNING: here "tribe" means "informant group"
method=11; // cf MOVES
// Method 0 is almost a hoax. Try to guess why ;-)
// Method 6 is simple and _sometimes_ good
// Method 7 seems to be globally the best (or method 16?)
// Have a look at method 8, particularly for quadratic problems
// Method 11 is quick on easy problems like 1, 2, 4 etc.
// Method 16 is like 11 but with "taboo". Usually works better
// Method 100 is for comparison (Kennedy's method, improved by me)
S=12; // Swarm size. Typically 35, but just 2 may be enough for particular problems
// like 1, 2, or 3.
// On the other hand, you may need more particles for high-dimension problems (>40).
// For the "optimal" swarm size, have a look at the paper on my PSO site:
// a rough approximation of the theoretical formula is
// S=INT(9.5+ 0.124*(Dimension-9))
// (for the D=30 used here, this gives INT(12.1), i.e. 12)
K=3; // Max number of particles informed by a given one
// Usually 2 or 3 for difficult problems
// although K=1 sometimes works
// if <0, K=1 or 3
// For method >= 100, you may use K=S
fixed_link=method>=100; // Mainly for methods>=100. But you may try with some others
adapt_method=0;
// 0 => always the same method
// 1 => sequential mode: if there has been no improvement for a while, tries another method
// 2 => parallel mode: from time to time tries TWO methods and keeps the best
if (adapt_method>0) // Strategy set definition. The previous "method" value is ignored
{
strat_number=2;
strategies[0]=16;
strategies[1]=0;
strategies[2]=5;
strategies[3]=5;
strategies[4]=5;
}
deter=0; // 0 => some randomness, 1 => deterministic (for binary methods)
// -1 => more randomness
// WARNING: "old" parameter. Not valid for all methods
print_level=-1; // Just more or less verbose
//--------------------------------------------------------- Problem
D=30; // Search space dimension
n_f=1; // Nb of functions to simultaneously optimise
function[0]=1; //Function code (see perf())
Sp= 10; // Pseudo swarm size for problem 11
SSS=0; // number of sampling points. If >0 => just write the SSS file. No optimisation
/*
1 Goldberg's order 3 deceptive problem (binary problem)
2 Bipolar order 6
3 Muhlenbein's order-5
4 Clerc's order 3 1
5 Clerc's order 3 2
6 Clerc's order 3 (Zebra3)
7 Parabola
8 Using Multimodal bitstring random generator. Use 100D, 20 peaks for test
9 Clerc's Trap
10 Whitney DF2
11 Look for a good initialisation for further problems (see perf() )
-1 Quadratic (data on a file)
-2 Quadratic (random matrix)
-8 Multimodal (data on a file)
99 Test
*/
//---------------------------------
eps=0.00; // Accuracy. Can be zero, since the problem is discrete
fmin.size=n_f;
for (i=0;i<n_f;i++)
fmin.fi[i]=0.0; // Objective(s) to reach
n_exec_max=100; // Numbers of runs
eval_max1=20000; // Smallest maximum number of evaluations
eval_max2=20000; // Largest maximum number of evaluations
delta_eval_max=1000; // Step
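// Illustrative sketch (assumption about how the three parameters above are used by
// the main loop): the "quality measure" presumably aggregates the results of runs
// performed with each maximum number of evaluations, e.g.
//   for (eval_max = eval_max1; eval_max <= eval_max2; eval_max += delta_eval_max)
//     { /* n_exec_max runs, accumulate the success rate and the mean error */ }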
density=1.0; // For Quadratic problem -2
peaks=20; // For Multimodal 8
check_hyp=0; //1 =>Separate test, for the fundamental hypothesis
// If set to 1, NO optimisation at all
//========================================================= Algorithm
if(K<0) adapt_K=1;else adapt_K=0; K=abs(K);
n_adapt_max=(int)((double)S/(double)(K-1)); // Number of evaluations between two adaptations
if (function[0]==-1) // Mono-objective xQx problem. Q read from a file
{
f_Q=fopen("Q100paper.txt","r"); // Read the matrix
// f_Q=fopen("Q12.txt","r"); // Read the matrix
printf("\n Q read from file");
fscanf(f_Q,"%i",&D); // WARNING. The dimension D is read from the file
printf("\n Q matrix %ix%i",D,D);
density=0;
for (i=0;i<D;i++)
for (d=0;d<D;d++)
{
fscanf(f_Q,"%f",&zz);
Q[i][d]=zz;
if(fabs(zz)>0) density=density+1;
}
z=D;
density=density/(D*D);
printf(" Density %.2f",density);
printf("\n Minimum value 0");
printf("\n");
}
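// Illustrative sketch (assumption about what perf() computes for function -1):
// presumably the quadratic form x'Qx over the binary position x, i.e. something like
//   f = 0;
//   for (i = 0; i < D; i++)
//     for (d = 0; d < D; d++)
//       f = f + P[s].x.b[i] * Q[i][d] * P[s].x.b[d];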
if(function[0]==-2) // Mono-objective xQx problem. Random Q
{
printf("\n Random Q matrix %ix%i",D,D);
printf(" Density %f",density);
printf("\n Unknown minimum value. ...");
printf("\n");
f_Qr=fopen("Qr.txt","w"); // Save the matrix
printf("\n Q saved on Qr.txt");
fprintf(f_Qr,"%i",D);
for (i=0;i<D;i++)
{
fprintf(f_Qr,"\n");
for (d=0;d<D;d++)
{
if(alea(0,1)<density)
n1= alea_integer(-3*D,3*D);
else
n1=0; // element left at zero when not drawn within the density
Q[i][d]=n1;
fprintf(f_Qr,"%i ",n1);
}
}
function[0]=-1;
}
if (function[0]==8) // Multimodal bitstring random generator
// (William Spears)
{
f_landscape=fopen("f_landscape.txt","w");
for(i = 0; i < peaks; i++)
{
for(j = 0; j < D; j++)
{
landscape[i][j] = rand()&01;
}
}
fprintf(f_landscape,"%i %i",D,peaks);
for (i=0;i<peaks;i++)
{
fprintf(f_landscape,"\n");
for (j=0;j<D;j++)
fprintf(f_landscape,"%i ", landscape[i][j]);
}
}
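// Illustrative sketch (assumption): with Spears' multimodal generator, the fitness of
// a bit string is usually defined by its similarity to the nearest peak, for instance
//   nearest = 0;
//   for (i = 0; i < peaks; i++)
//   {
//     near = 0;
//     for (j = 0; j < D; j++) if (P[s].x.b[j] == landscape[i][j]) near = near + 1;
//     if (near > nearest) nearest = near;
//   }
//   error = (double)(D - nearest) / D;  // 0 when some peak is matched exactly
// The actual perf() code for function 8 may differ.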
if (function[0]==-8) // Multimodal bitstring random generator
{
f_landscape=fopen("f_landscape_12_20.txt","r");
fscanf(f_landscape,"%i %i", &D, &peaks);
for(i = 0; i < peaks; i++)
{
for(j = 0; j < D; j++)
{
fscanf(f_landscape,"%i",&landscape[i][j]);
}
}
function[0]=8;
}
if (SSS==0) goto check_h;
printf("\n Structured Search Space on file SSS.txt");
f_SSS=fopen("SSS.txt","w");
fprintf(f_SSS,"Distance_to_solution Fitness");
printf("\nDistance_to_solution Fitness");
P[0].x.size=D;
switch (function[0])
{
default:
for (s=0;s<D;s++) P[0].x.b[s]=0;
break;
case 1:
for (s=0;s<D;s++) P[0].x.b[s]=1;
break;
case 6: // Zebra3: the known solution alternates blocks of three 0s and three 1s
m=0;
for (d=0;d<D;d=d+3)
{
m=m+1;
if(m%2==0)
{
for (i=d;i<d+3;i++) P[0].x.b[i]=1;
}
else
{
for (i=d;i<d+3;i++) P[0].x.b[i]=0;
}
}
break;
}
P[1].x.size=D;
P[1].f.size=n_f;
for (m=0;m<SSS;m++)
{
for (d=0;d<D;d++) P[1].x.b[d]=alea_integer(0,1);
for (i=0;i<n_f;i++)
P[1].f.fi[i]=fabs(perf(1,function[i])-fmin.fi[i]);
error=total_error(P[1].f);
// Compute distance to solution
r=distance(0,1,-1);
printf("\n %i %.0f %f",m,r,error);
fprintf(f_SSS,"\n%.0f %f",r,error);
}
goto end;
check_h:
if(check_hyp==0) goto optim;
fundamental_hyp(function[0]);