⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 main.c

📁 用C编写的部落寻优的优化算法
💻 C
📖 第 1 页 / 共 5 页
字号:
         // NOTE(review): interior of the main TRIBES/PSO iteration loop
         // (move each particle, update the global best, optional TSP/QAP
         // local search, then the swarm adaptation step). The enclosing
         // function definition and the i/j loop headers are outside this
         // chunk, as are the targets of some gotos (end_by_chance, the_end,
         // end_adapt).
         // ******************* MOVE ************************;

			// Move particle j of tribe i, guided by informer partg (option 0 = normal move)
			TR[level].tr[i].part[j]=move_particle(TR[level].tr[i].part[j],partg,level,0);
			if(MEMO==1)  add_memo(TR[level].tr[i].part[j].x,level);

			// Eventually update the best result
			// (better==1 iff the particle's best known position p_i beats best_result)
               better=better_than(TR[level].tr[i].part[j].p_i.f, best_result.f,level);

			// Continuous offline error: accumulate the current best error at every evaluation
			   offline_error_cont+=total_error(best_result.f);

			if (better==1)
			{
			  // For dynamic optimisation, it is better
			  // to permanently evaluate the error

				// Remove the old best's error contribution; the new best's
				// error is added back a few lines below (net effect: replace)
				offline_error+=-total_error(best_result.f);

				previous_best=best_result.f;

				best_result=TR[level].tr[i].part[j].p_i;

				// Binary problems: round every coordinate of the best position to 0/1
				if (BIN==1)
	for (d=0;d<best_result.p.size;d++)
		best_result.p.x[d]=floor(best_result.p.x[d]+0.5);

				error=total_error(best_result.f);
				offline_error+=error;
//   printf(" %f",  offline_error);

				if (DYN==1)
					error=offline_error/n_change; // For non recursive dynamic optim

                  	if (problem[level].printlevel>0)
                      {
                         printf("\n");
                         printf(" Eval: %.0f",eval_f_tot[level]);
                         printf(" Result: %.8f",error);
                      }

					if (problem[level].save==-2) // Save info for convergence curve plotting
					{
						fprintf(f_trace_run,"\n%.0f %f %f %i",eval_f_tot[level],error,offline_error_cont/(eval_f_tot[level]-n_add),n_add);
					}

                      if (problem[level].save>0)
                      {
                         fprintf(f_trace,"\n %f",best_result.f.f[0]);
                      }
                     // Static problems only: success when the error reaches the tolerance eps
                     if(DYN==0 && error<=problem[level].eps) chance=1;  else chance=0;
                      if(chance==1) goto end_by_chance; // A solution has been found

			}
   }  // next j

			// Arbitrary stop criterion
			// Max_Eval<0 encodes a budget of -Max_Eval evaluations;
			// otherwise Max_Eval is compared against elapsed clock ticks.
			ticks2=clock();
			clock_tick=ticks2-ticks1;

			// NOTE(review): the second clause reads "Max_Eval>clock_tick",
			// which stops while the elapsed time is still BELOW the budget —
			// "clock_tick>Max_Eval" (with Max_Eval>0) looks intended.
			// TODO confirm against the complete original source.
			if((Max_Eval<0 && eval_f_tot[level]>-Max_Eval) || Max_Eval>clock_tick)
			{
				failure_tot=failure_tot+1; // Just for information
      	       // if (problem[level].printlevel>0)
				printf("\n FAILURE eval_tot  %.0f > max= %.0f",eval_f_tot[level],fabs(Max_Eval));
				goto the_end;
			}

		} // next i


 	if (problem[level].printlevel>1) display_swarm(1,level);

   // Special local search for TSP
   // (try to improve each particle's position; keep it in p_i when better)
   if (TSP==1)
   {
      for (i=0;i<TR[level].size;i++) // For each tribe ...
	    {
         for (j=0;j<TR[level].tr[i].size;j++) // ... for each particle ...
		    {
			  TR[level].tr[i].part[j].x=local_search_tsp(TR[level].tr[i].part[j].x,problem[level].target,level);

			   // If improvement, memorize
               better=better_than(TR[level].tr[i].part[j].x.f, TR[level].tr[i].part[j].p_i.f,level);
               if (better==1)
               {
                 TR[level].tr[i].part[j].p_i= TR[level].tr[i].part[j].x;
               }
            }
      }
   } // end if(TSP==1)


      // Special local search for QAP
      // (same pattern as the TSP local search above)
   if (QAP==1)
   {
      for (i=0;i<TR[level].size;i++) // For each tribe ...
		{
			for (j=0;j<TR[level].tr[i].size;j++) // ... for each particle ...
			{
               TR[level].tr[i].part[j].x=local_search_qap(TR[level].tr[i].part[j].x,problem[level].target,level);
               // If improvement, memorize
               better=better_than(TR[level].tr[i].part[j].x.f, TR[level].tr[i].part[j].p_i.f,level);
               if (better==1)
               {
                 TR[level].tr[i].part[j].p_i= TR[level].tr[i].part[j].x;
               }
            }

      }
   } // end if(QAP==1)


	// Total particle number as criterion
	// (initial size plus generated minus removed particles so far)
	cycle_size=problem[level].init_size+n_add-n_remove;
	swarm_size=cycle_size;

   if (cycle_size>Max_swarmsize)
   {
     printf("\nWARNING. You should increase Max_swarmsize (%i) in def_struct.c",Max_swarmsize);
    // NOTE(review): assigning "cycle" here looks like a typo for
    // "cycle_size" (the value that was just range-checked) — TODO confirm.
    cycle=Max_swarmsize;

   }
	mean_swarm_size=mean_swarm_size+cycle_size; // Running sum; presumably divided by Iter elsewhere

  	Iter=Iter+1; // number of time steps. Just for information

	if (adapt>0) goto end_adapt; // Non adaptive option (constant swarm size)

    //                                ***********  ADAPTATION(S)  (begin) *************
  // Number of connections
  // (total size of all i-groups; used to space out adaptation steps)
  n_connect=0;
 for (i=0;i<TR[level].size;i++) // For each tribe
{
	for (j=0;j<TR[level].tr[i].size;j++)  // for each particle
	{
      n_connect=n_connect+TR[level].tr[i].part[j].ig.size;
	}
}

	cycle=cycle+1;
if (cycle<n_connect)
        goto end_adapt; // Adaptation is not made at each iteration
										 // just from time to time

		TRsize0=TR[level].size; // Memorize the current number of tribes

		// Look for Xmin and Xmax. Useful only if you use init 3 (see init_particle)
		// (bounding box of all best-known positions, per dimension)
		if (swarm_size>1) // Needs several particles
		{
			for (d=0;d<problem[level].DD;d++) // loop on dimensions
			{
				// Start from the inverted search-space bounds so any
				// particle position immediately tightens the box
				Xmin.x[d]=problem[level].H.max[d];
				Xmax.x[d]=problem[level].H.min[d];

				for (i=0;i<TRsize0;i++) // Loop on tribes
				{
					for (j=0;j<TR[level].tr[i].size;j++) // loop on particles
					{
						z=TR[level].tr[i].part[j].p_i.p.x[d];
						if(z<Xmin.x[d]) Xmin.x[d]=z;
						if(z>Xmax.x[d]) Xmax.x[d]=z;

					}
				}
			}
		}

		// If improvement, "bad" particles are not useful => remove them
		//  If deterioration, it is worthwhile to generate a very different particle


		for (i=0;i<TRsize0;i++) // Loop on tribes, to check them
		{
			// Find the best particle in the current tribe
			label_best=TR[level].tr[i].part[0].label; // Label of the best
			f0=TR[level].tr[i].part[0].p_i.f;
			rank_best=0;

			if (TR[level].tr[i].size>1)
			{
				for (j=1;j<TR[level].tr[i].size;j++)
				{
					f1=TR[level].tr[i].part[j].p_i.f;
					if (better_than(f1,f0,level)==1)
					{
						label_best=TR[level].tr[i].part[j].label;
						f0=f1;
						rank_best=j;
					}
				}
			}

         		// Find the worst particle in the current tribe
           // (maybe for future version)
           // NOTE(review): f0 is reused here after the best-particle scan;
           // the "worst" scan uses better_than(...)!=1, so ties count as worse.
				label_worst=TR[level].tr[i].part[0].label; // Label of the worst
				f0=TR[level].tr[i].part[0].p_i.f;
				rank_worst=0;

				for (j=1;j<TR[level].tr[i].size;j++)
				{
                        f1=TR[level].tr[i].part[j].p_i.f;
                        if (better_than(f1,f0,level)!=1)
                        {
                           label_worst=TR[level].tr[i].part[j].label;
                           f0=f1;
                           rank_worst=j;
                        }
				}
				//worst[i]=TR[level].tr[i].part[rank_worst].label;

			// Count how many particles of the current tribe have improved their position
			improv=local_improv(TR[level].tr[i],level);

      if (problem[level].fuzzy==0)   // Crisp decision rules. Note a tribe may be both bad an good
      {
           bad=improv<TR[level].tr[i].size; // At least one particle has NOT improved its position
		   good=improv>0; // At least one particle has improved  its position
      }
      else   // Use fuzzy rules to decide whether the tribe is bad or good
      {
         // Probabilistic rule: the fewer improvements relative to the tribe
         // size, the more likely the tribe is classified as bad
         z= TR[level].tr[i].size;
		 pr=alea_float(0,z);

		 bad=(improv<=pr);
		 good=1-bad;
      }


if (bad==0) bad=alea(0,1); // To be sure there are on the whole
							// more generations than deletions
	  if (bad==1)
        {
					// If it is the first bad tribe, generate a new empty one
					// (all particles generated during this adaptation step
					// go into this single new tribe at rank TRsize0)
					if (TR[level].size==TRsize0)
					{

						TR[level].size=TR[level].size+1;
						TR[level].tr[TRsize0].size=0;
					}

              // max_add= TR[level].tr[i].size+1;
             // max_add=alea(1,TR[level].tr[i].size); // TEST. May generate several particles
				max_add=1;
				if (add_option==1) max_add=2;

              if (good && (TR[level].tr[i].size>1 || TR[level].tr[i].part[0].status>0)) //The worst particle will be removed
                 max_add=max_add+1;

               for (m=0;m<max_add;m++)  // May generate several particles
               {

				   //Generate a particle in the new tribe (rank of the tribe=TRsize0)
                  if (TSP==1)
                  {
					TR[level].tr[TRsize0].part[TR[level].tr[TRsize0].size]=init_particle_tsp(problem[level].DD,problem[level].target,level);
					goto new_label;
                  }
                  if (QAP==1)
                  {
					TR[level].tr[TRsize0].part[TR[level].tr[TRsize0].size]=init_particle_qap(problem[level].DD,problem[level].target,level);
					goto new_label;
                  }

				  if (add_option==0 || (add_option==1 && m!=1)) // Random init
					TR[level].tr[TRsize0].part[TR[level].tr[TRsize0].size]=init_particle(0,level,dummy_part);

					if (add_option==1 && m==1) // Generate a hopefully good particle
					{
						// Find the best informer of the tribe
						part_best=TR[level].tr[i].part[0];
						for (j=0;j<TR[level].tr[i].size;j++) // ... for each particle ...
						{
						partg=best_informer(TR[level].tr[i].part[j],0,level);
					//	partg=best_informer(TR[level].tr[i].part[j],no_best,level);

						better=better_than(partg.p_i.f,part_best.p_i.f,level);
						if (better==1) part_best=partg;
						}

						// Generate a particle around the best position known by the best informer
						// NOTE(review): part[] is indexed here by label_best (a
						// label), whereas rank_best holds the array rank — if
						// labels and ranks can differ this is suspect; confirm.
						partg=TR[level].tr[i].part[label_best]; // Best particle of the tribe
						partt=move_particle(partg,part_best,level,1);
						partt=complete_part(partt,0,level);
						partt.ig.size=0;
						TR[level].tr[TRsize0].part[TR[level].tr[TRsize0].size]=partt;
					}
// ---

new_label:
					// Remember the label of the particle just written at slot "size"
					new_label=TR[level].tr[TRsize0].part[TR[level].tr[TRsize0].size].label;
					if(rand_hood>0) goto end_i_groups; // Random-neighbourhood mode: skip i-group bookkeeping

					TR[level].tr[TRsize0].size=TR[level].tr[TRsize0].size+1; // Increase the (new) tribe size
					n_add=n_add+1;

					// Begin to build its i-group:
					// with the label of the best particle that has generated the new particle

					// Add it to the i-group of the new particle
					// NOTE(review): size was incremented just above, so
					// part[...size] now addresses the slot ONE PAST the new
					// particle — part[size-1] looks intended. TODO confirm
					// against the complete original TRIBES source.
					rank=TR[level].tr[TRsize0].part[TR[level].tr[TRsize0].size].ig.size;
					TR[level].tr[TRsize0].part[TR[level].tr[TRsize0].size].ig.label[rank]=label_best;
					TR[level].tr[TRsize0].part[TR[level].tr[TRsize0].size].ig.size=rank+1; // Increase the i-group size

					// Update the i_group of the "generating" best particle ...

					rank=TR[level].tr[i].part[rank_best].ig.size;
					TR[level].tr[i].part[rank_best].ig.label[rank]=new_label;
					TR[level].tr[i].part[rank_best].ig.size=rank+1;
/*
					// ... or ... Update the i_groups of all particles of the "generating" tribe
					// (if you use this option, do not use the previous one)
					for (j=0;j<TR[level].tr[i].size;j++)
					{
						rank=TR[level].tr[i].part[j].ig.size;
						TR[level].tr[i].part[j].ig.label[rank]=new_label;
						TR[level].tr[i].part[j].ig.size=rank+1;
					}
*/

                  end_i_groups:;
               } // m<max_add
			}   // End "if (bad)"

			// If  enough improvement
		if (good==1)
			{
				n_local_remove=0;

				// If there is just one particle, check whether an informer is better
				// (a singleton tribe is only dissolved if some informer beats it)
				if (TR[level].tr[i].size==1)
				{
					// Find the best informer
					partg=best_informer(TR[level].tr[i].part[0],no_best,level);

					if (better_than(partg.p_i.f,TR[level].tr[i].part[0].p_i.f,level)==1)
          { label_best=partg.label;
            goto remove;
          }

					goto exit_good;
				}
remove:
					// Remove the worst particle of the current tribe

					n_local_remove=n_local_remove+1;
					// Temporarily keep its i-group
					ig=TR[level].tr[i].part[rank_worst].ig;

					// Remove it from the tribe (compact the tribe)
					if (rank_worst<TR[level].tr[i].size-1)
					{
						for (j=rank_worst;j<TR[level].tr[i].size-1;j++)
						{
							TR[level].tr[i].part[j]=TR[level].tr[i].part[j+1];
						}
					}
					TR[level].tr[i].size=TR[level].tr[i].size-1;
					n_remove=n_remove+1;

					// Replace it by the best in all i-groups
					// (every occurrence of label_worst is dropped; label_best
					// is appended where it was not already present)
					for (j=0;j<TR[level].size;j++) // For each i_group...
					{
						for (k=0;k<TR[level].tr[j].size;k++) // ... for each particle ...
						{
							// Check if the best is already in the i-group

							already=-1;
							for (l=0;l<TR[level].tr[j].part[k].ig.size;l++)
								if (TR[level].tr[j].part[k].ig.label[l]==label_best)
									{already=l;}

//check_inf:

							// Rebuild the i-group without the worst particle's label
							igt.size=0;
							for (l=0;l<TR[level].tr[j].part[k].ig.size;l++) //... for each informer...
							{
								labelt=TR[level].tr[j].part[k].ig.label[l];
								if (labelt!=label_worst)
								{igt.label[igt.size]=labelt; igt.size=igt.size+1;}
							}
             if(already==-1 && label_worst!=label_best)
               {igt.label[igt.size]=label_best; igt.size=igt.size+1;}
							TR[level].tr[j].part[k].ig=igt;

//next_part:;
						}
					}  // End "for each i-group"
                         if(rand_hood>0) goto end_i_groups_2;
					// Merge its i-group (except itself) into the one of the best
					// (duplicates are skipped via the inner membership scan)
					for (j=0;j<ig.size;j++) // Loop on informers
					{
						if (ig.label[j]==label_worst) continue;
						size=TR[level].tr[i].part[rank_best].ig.size;
						for (k=0;k<size;k++)
						{
							if (ig.label[j]==TR[level].tr[i].part[rank_best].ig.label[k]) goto next_informer;
						}
						TR[level].tr[i].part[rank_best].ig.label[size]=ig.label[j];
						TR[level].tr[i].part[rank_best].ig.size=size+1;

					next_informer:;
					}
               end_i_groups_2:;
				} // End "if (good)"

exit_good:;
		} // End "for each tribe"

 if (rand_hood>0) goto empty_tribe;

       		// Complete the i-groups of each particle of the new tribe (if there is one)
		// with all particles of the tribe
		// NOTE(review): the j-loop includes j==i, so each particle is also
		// added to its own i-group — presumably intentional (self-informer).
      if (TR[level].size>TRsize0)
		{
			for (i=0;i<TR[level].tr[TRsize0].size;i++) // For each particle ...
			{						
				for (j=0;j<TR[level].tr[TRsize0].size;j++) // ... add the others as informers
				{
					rank=TR[level].tr[TRsize0].part[i].ig.size;
					labelt=TR[level].tr[TRsize0].part[j].label;
					TR[level].tr[TRsize0].part[i].ig.label[rank]=labelt;
					TR[level].tr[TRsize0].part[i].ig.size=rank+1;

				}
			}
		}

empty_tribe:

		// Eventually remove empty tribes
		// (chunk is truncated here: the compacting loop's closing braces
		// lie beyond the end of this excerpt)
		for (i=0;i<TR[level].size;i++)
		{
			if (TR[level].tr[i].size>0) continue; // Not empty
                     i0=i;

				if (i<TR[level].size-1) // Compact the tribe list
				{
					for (j=i;j<TR[level].size-1;j++)
					{
						TR[level].tr[j]=TR[level].tr[j+1];


⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -