
📄 vivaldi.cc

/* $Id: vivaldi.cc,v 1.7 2006-08-21 15:02:00 jonathan Exp $
 * Jonathan Ledlie, Harvard University.
 * Copyright 2006.  All rights reserved.
 */

#include "node.h"

FILE *runLogFP = NULL;
FILE *secFP = NULL;

vector<double> sec_sysRawRE_list;
double sec_sysDD_sum;
vector<double> sec_appRawRE_list;
double sec_appDD_sum;

void Node::processSample (int o_stamp, int myId, int o_yourId, float o_rawLatency)
{
    // o_ refers to this observation
    // s_ refers to the sample being processed,
    //    as we go through our samples
    // distance refers to metric space distance
    // latency to actual observations

    if (debug)
        cout << "\n\nLINE: " << o_stamp << " me " << myId << " you "
            << o_yourId << " s " << o_rawLatency << endl;

    if (myId >= nodeCount || o_yourId >= nodeCount)
    {
        printf ("id out of range, mine %d, yours %d\n", myId, o_yourId);
        exit (-1);
    }

    // remember samples from this node
    node[myId].addSample
        (o_yourId, o_rawLatency, node[o_yourId].vec, node[o_yourId].weightedError,
        node[o_yourId].appVector, o_stamp);

    // retrieve our idea of the underlying latency for this link
    double o_latency = node[myId].getSample (o_yourId);

    // remember when we last exchanged info with this guy
    node[myId].lastUpdateFrom = o_stamp;
    node[o_yourId].lastUpdateTo = o_stamp;

    if (o_latency > 0)
    {
        if (debug)
            cout << "my coord" << node[myId].vec << endl;

        double o_distance = node[myId].vec->getEuclideanDistance (node[o_yourId].vec);
        while (o_distance == 0)
        {
            node[myId].vec->bump();
            o_distance = node[myId].vec->getEuclideanDistance (node[o_yourId].vec);
        }
        if (o_distance == 0)
        {
            cout << "my coord " << node[myId].vec
                << "your coord " << node[o_yourId].vec << endl;
        }
        assert (!isnan(o_distance));

        double o_relativeError = fabs(o_distance - o_latency) / o_latency;
        assert (!isnan(o_relativeError));
        double o_rawRelativeError = fabs(o_distance - o_rawLatency) / o_rawLatency;
        assert (!isnan(o_rawRelativeError));

        // EWMA on weightedError, based on how much we trust this current sample
        // update our confidence based on the confidence in this sample
        assert (node[myId].weightedError + node[o_yourId].weightedError > 0);
        double o_weight = node[myId].weightedError /
            (node[myId].weightedError + node[o_yourId].weightedError);
        assert (!isnan(o_weight));
        double o_alphaWeightedError = ERROR_FRACTION * o_weight;
        assert (!isnan(o_alphaWeightedError));

        // update our confidence
        node[myId].weightedError = (o_relativeError * o_alphaWeightedError) +
            (node[myId].weightedError * (1. - (o_alphaWeightedError)));
        if (debug)
        {
            printf ("weightedError = %f\n", node[myId].weightedError);
        }
        if (node[myId].weightedError > 1.) node[myId].weightedError = 1.;
        if (node[myId].weightedError < 0.) node[myId].weightedError = 0.;
        assert (!isnan(node[myId].weightedError));

        Point o_force;
        int measurementsUsed = 0;
        int o_oldestSample = o_stamp;

        // randomize processing of our neighbors
        // when SAMPLE_EXPIRATION == 0, we only process the most recent sample
        vector<int> validNodes;
        for (map<int,Samples*>::iterator samplesIter =
            node[myId].node2samples.begin();
            samplesIter != node[myId].node2samples.end(); samplesIter++)
        {
            int s_yourId = samplesIter->first;
            Samples *sample = samplesIter->second;
            if (sample->stamp < o_stamp - SAMPLE_EXPIRATION &&
                SAMPLE_EXPIRATION >= 0)
            {
                if (debug)
                    cout << "Expired id " << s_yourId << " sample " << sample << endl;
            }
            else
            {
                if (debug)
                    cout << "Valid id " << s_yourId << " sample " << sample << endl;
                validNodes.push_back (s_yourId);
                if (o_oldestSample > sample->stamp)
                {
                    o_oldestSample = sample->stamp;
                }
            }
        }

        random_shuffle (validNodes.begin(), validNodes.end());

        double o_sampleWeightSum = 0.;
        // let o_sampleWeightSum be sum of
        // relative age of each sample.
        // Then weigh each sample by its weight/o_sampleWeightSum
        for (int validNodeIndex = 0; validNodeIndex < validNodes.size();
            validNodeIndex++)
        {
            int s_yourId = validNodes[validNodeIndex];
            Samples *sample = node[myId].node2samples[s_yourId];
            o_sampleWeightSum += sample->stamp - o_oldestSample;
        }
        assert (o_sampleWeightSum >= 0);

        for (int validNodeIndex = 0; validNodeIndex < validNodes.size();
            validNodeIndex++)
        {
            int s_yourId = validNodes[validNodeIndex];
            Samples *sample = node[myId].node2samples[s_yourId];

            double s_distance = node[myId].vec->getEuclideanDistance (sample->vec);
            while (s_distance == 0.)
            {
                node[myId].vec->bump();
                s_distance = node[myId].vec->getEuclideanDistance (sample->vec);
            }
            assert (!isnan(s_distance));

            // find direction
            Point *s_unitVector = node[myId].vec->getDirection (sample->vec);
            if (s_unitVector == NULL)
            {
                s_unitVector = Point::getRandomUnitVector();
            }

            double s_latency = node[myId].getSample (s_yourId);

            if (debug)
                cout << "unitVector to id " << s_yourId << " = " << s_unitVector << endl;

            // how much to weigh this sample
            // are his coords more reliable than mine?
            if (!(node[myId].weightedError + sample->weightedError > 0.))
            {
                printf ("me %d you %d meas %d myWE %f yourWE %f\n",
                    myId, s_yourId, measurementsUsed, node[myId].weightedError,
                    sample->weightedError);
                exit (0);
            }
            assert (node[myId].weightedError + sample->weightedError > 0.);
            double s_weight = node[myId].weightedError /
                (node[myId].weightedError + sample->weightedError);

            if (debug)
            {
                printf ("sample weighted error = %f\n", s_weight);
                printf ("predicted distance = %f\n", s_distance);
                printf ("smoothed distance =    %f\n", s_latency);
            }

            // Meant to account for blips in very low latency links
            // When measurement_error == 0, this doesn't do anything
            // e_{s}
            if ( (s_distance < s_latency &&
                s_distance + measurement_error >= s_latency) ||
                (s_distance > s_latency &&
                s_distance - measurement_error <= s_latency))
            {
                printf ("within measurement error\n");
                s_distance = s_latency;
                withinMeasurementError++;
            }

            // error of sample
            double s_error = s_distance - s_latency;

            // delta
            double s_dampening = s_weight;

            if (debug)
            {
                printf ("dampening = %f for vector adjustment\n", s_dampening);
            }

            // If you don't have this weight and have a bunch of neighbors,
            // coords spin out of control (i.e. there is too much change
            // and they don't converge).
            // Can also be achieved with 1/num neighbors,
            // but this decays based on age of last sample to neighbor
            double s_sampleWeight = 1.;
            if (o_sampleWeightSum > 0)
            {
                s_sampleWeight = (sample->stamp - o_oldestSample)/o_sampleWeightSum;
            }

            s_unitVector->scale (s_error * s_dampening * s_sampleWeight);
            if (debug)
                cout << "scaled unitVector " << s_unitVector << endl;

            o_force.add (s_unitVector);
            if (debug)
                cout << "total force " << &o_force << endl;

            delete s_unitVector;
            measurementsUsed++;
        }

        // Update the local coordinate
        o_force.height = -o_force.height;
        o_force.scale (DAMPENING_FRACTION);
        if (debug)
        {
            cout << "before update " << node[myId].vec << endl;
            cout << "force " << &o_force << endl;
        }
        node[myId].vec->add (&o_force);
        node[myId].vec->checkHeight();
        if (debug)
        {
            cout << "after update " << node[myId].vec << endl;
        }

        double o_distanceDelta = fabs(o_force.length());
        node[myId].addDistanceDelta (o_distanceDelta);

        // Note: sometimes this happens toward the beginning of a large sim
        if (o_distanceDelta > 500)
        {
            fprintf (stderr, "large distance delta %.3f\n", o_distanceDelta);
        }
        if (debug)
        {
            cout << "dd " << o_distanceDelta << endl;
        }

        if (debug)
        {
            double o_distance = node[myId].vec->getEuclideanDistance (node[o_yourId].vec);
            cout << "new dist " << o_distance << endl;
        }

        double o_appCoordDistance = node[myId].appVector->getEuclideanDistance
            (node[o_yourId].appVector);

        Point *o_oldAppPosition = new Point (node[myId].appVector);

        // update nearest neighbor after changing our coordinate,
        // and before thinking about updating the appVector
        node[myId].addToNearestNeighbor (node[o_yourId].vec, o_stamp);

        double o_appComparison = 0.;
        bool o_updatedAppVector = node[myId].updateAppVector
            (o_distanceDelta, o_stamp, o_appComparison);

        if (debug)
            cout << "appCoordDistance " << o_appCoordDistance << endl;

        double o_appDistanceDelta =
            o_oldAppPosition->getAbsoluteDistance (node[myId].appVector);

        double o_appRawRelativeError =
            fabs(o_appCoordDistance - o_rawLatency) / o_rawLatency;
        double o_appRelativeError =
            fabs(o_appCoordDistance - o_latency) / o_latency;

        delete o_oldAppPosition;

        if (secFP)
        {
            sec_sysRawRE_list.push_back (o_rawRelativeError);
            sec_sysDD_sum += o_distanceDelta;
            sec_appRawRE_list.push_back (o_appRawRelativeError);
            sec_appDD_sum += o_appDistanceDelta;
        }

        if (runLogFP)
        {
            fprintf (runLogFP, "%d me %3d you %3d s %7.2f ss %7.2f confi %4.3f dd %6.3f re %5.3f sA %d ",
                o_stamp, myId, o_yourId, o_rawLatency, o_latency,
                node[myId].weightedError, o_distanceDelta,
                o_relativeError, o_oldestSample);
            fprintf (runLogFP, " [ ");
            node[myId].printCoord (runLogFP);
            fprintf (runLogFP, " ]\n");
        }
    }

    if (debug)
    {
        printf ("\n\n\n");
    }
}
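For orientation, the coordinate move that processSample() performs is the standard Vivaldi spring-relaxation rule: weight the sample by how confident each side is in its own coordinate, fold the sample's relative error into the local confidence with an EWMA, and then nudge the local coordinate along the unit vector to the neighbor in proportion to the prediction error. Below is a minimal, self-contained sketch of just that rule, assuming a flat 2-D Euclidean coordinate and omitting the height component, the per-neighbor sample cache, the age-based sample weighting, and the application-level vector that node.h supplies; the VivaldiNode class, its constant values, and the small driver in main() are illustrative only, not part of the original code.

#include <cmath>
#include <cstdio>

struct Coord { double x = 0.0, y = 0.0; };

struct VivaldiNode {
    Coord  pos;
    double weightedError = 1.0;          // confidence in our own coordinate, in [0, 1]

    static constexpr double ERROR_FRACTION     = 0.25;  // EWMA gain on the error estimate
    static constexpr double DAMPENING_FRACTION = 0.25;  // step size of the coordinate move

    // One measurement: we observed rttMs to a neighbor whose coordinate and
    // confidence were piggybacked on the reply.
    void processSample (const Coord &theirPos, double theirError, double rttMs)
    {
        double dx = pos.x - theirPos.x;
        double dy = pos.y - theirPos.y;
        double dist = std::sqrt (dx * dx + dy * dy);
        if (dist == 0.0) {                       // "bump" off a coincident coordinate
            dx = 1e-6; dy = 1e-6;
            dist = std::sqrt (dx * dx + dy * dy);
        }

        // How much to trust this sample: are their coordinates more reliable than ours?
        double w = weightedError / (weightedError + theirError);

        // EWMA on our confidence, driven by the relative error of this sample.
        double relativeError = std::fabs (dist - rttMs) / rttMs;
        double alpha = ERROR_FRACTION * w;
        weightedError = relativeError * alpha + weightedError * (1.0 - alpha);
        if (weightedError > 1.0) weightedError = 1.0;
        if (weightedError < 0.0) weightedError = 0.0;

        // Move along the unit vector away from (or toward) the neighbor so that
        // coordinate distance approaches measured latency.
        double force = DAMPENING_FRACTION * w * (rttMs - dist);
        pos.x += force * (dx / dist);
        pos.y += force * (dy / dist);
    }
};

int main ()
{
    VivaldiNode a, b;
    b.pos.x = 5.0;
    // Feed both ends a few 40 ms measurements of the same link; the coordinate
    // distance should settle near 40.
    for (int i = 0; i < 50; i++) {
        a.processSample (b.pos, b.weightedError, 40.0);
        b.processSample (a.pos, a.weightedError, 40.0);
    }
    std::printf ("a=(%.1f,%.1f) b=(%.1f,%.1f) dist=%.1f\n",
        a.pos.x, a.pos.y, b.pos.x, b.pos.y,
        std::hypot (a.pos.x - b.pos.x, a.pos.y - b.pos.y));
    return 0;
}

The confidence-weighted step is what keeps a well-settled node from being dragged around by a neighbor whose coordinate is still noisy; the s_weight, s_dampening, and s_sampleWeight bookkeeping in the listing above extends the same idea across the whole cached sample set.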
