fann_cascade.c
			/*          if(output_train_errors[j] != (ann->output[j] - data->output[i][j])){
			 * printf("difference in calculated error at %f != %f; %f = %f - %f;\n", output_train_errors[j], (ann->output[j] - data->output[i][j]), output_train_errors[j], ann->output[j], data->output[i][j]);
			 * } */

			/*
			 * output_train_errors[j] = (data->output[i][j] - ann->output[j])/2;
			 * output_train_errors[j] = ann->output[j] - data->output[i][j];
			 */

			output_train_errors[j] = (data->output[i][j] - ann->output[j]);

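			/* Note (explanatory, assumption about the rationale): symmetric
			 * activation functions produce outputs in [-1,1] rather than
			 * [0,1], so the error is halved below to keep the candidate
			 * scores comparable across the different output ranges. */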
			switch (output_neurons[j].activation_function)
			{
				case FANN_LINEAR_PIECE_SYMMETRIC:
				case FANN_SIGMOID_SYMMETRIC:
				case FANN_SIGMOID_SYMMETRIC_STEPWISE:
				case FANN_THRESHOLD_SYMMETRIC:
				case FANN_ELLIOT_SYMMETRIC:
				case FANN_GAUSSIAN_SYMMETRIC:
				case FANN_SIN_SYMMETRIC:
				case FANN_COS_SYMMETRIC:
					output_train_errors[j] /= 2.0;
					break;
				case FANN_LINEAR:
				case FANN_THRESHOLD:
				case FANN_SIGMOID:
				case FANN_SIGMOID_STEPWISE:
				case FANN_GAUSSIAN:
				case FANN_GAUSSIAN_STEPWISE:
				case FANN_ELLIOT:
				case FANN_LINEAR_PIECE:
					break;
			}
		}

		fann_update_candidate_slopes(ann);
	}

	fann_update_candidate_weights(ann, data->num_data);

	/* find the best candidate score */
	best_candidate = 0;
	best_score = ann->cascade_candidate_scores[best_candidate];
	for(i = 1; i < num_cand; i++)
	{
		/*struct fann_neuron *cand = ann->first_layer->first_neuron + ann->total_neurons + 1 + i;
		 * printf("candidate[%d] = activation: %s, steepness: %f, score: %f\n", 
		 * i, FANN_ACTIVATIONFUNC_NAMES[cand->activation_function], 
		 * cand->activation_steepness, ann->cascade_candidate_scores[i]); */

		if(ann->cascade_candidate_scores[i] > best_score)
		{
			best_candidate = i;
			best_score = ann->cascade_candidate_scores[best_candidate];
		}
	}

	ann->cascade_best_candidate = ann->total_neurons + best_candidate + 1;
#ifdef CASCADE_DEBUG_FULL
	printf("Best candidate[%d]: with score %f, real score: %f\n", best_candidate,
		   ann->MSE_value - best_score, best_score);
#endif

	return best_score;
}

/* add a layer at the position pointed to by *layer */
struct fann_layer *fann_add_layer(struct fann *ann, struct fann_layer *layer)
{
	int layer_pos = layer - ann->first_layer;
	int num_layers = ann->last_layer - ann->first_layer + 1;
	int i;

	/* allocate the layer */
	struct fann_layer *layers =
		(struct fann_layer *) realloc(ann->first_layer, num_layers * sizeof(struct fann_layer));
	if(layers == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}

	/* copy layers so that the free space is at the right location */
	for(i = num_layers - 1; i >= layer_pos; i--)
	{
		layers[i] = layers[i - 1];
	}

	/* the newly allocated layer is empty */
	layers[layer_pos].first_neuron = layers[layer_pos + 1].first_neuron;
	layers[layer_pos].last_neuron = layers[layer_pos + 1].first_neuron;

	/* Set the ann pointers correctly */
	ann->first_layer = layers;
	ann->last_layer = layers + num_layers;

#ifdef CASCADE_DEBUG_FULL
	printf("add layer at pos %d\n", layer_pos);
#endif

	return layers + layer_pos;
}
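/* Note: this helper is used by fann_install_candidate() below. Cascade
 * training grows the network by inserting each new hidden neuron into a
 * layer of its own, placed just before the output layer. */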

void fann_set_shortcut_connections(struct fann *ann)
{
	struct fann_layer *layer_it;
	struct fann_neuron *neuron_it, **neuron_pointers, *neurons;
	unsigned int num_connections = 0, i;

	neuron_pointers = ann->connections;
	neurons = ann->first_layer->first_neuron;

	for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
	{
		for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
		{

			neuron_pointers += num_connections;
			num_connections = neuron_it->last_con - neuron_it->first_con;

			for(i = 0; i != num_connections; i++)
			{
				neuron_pointers[i] = neurons + i;
			}
		}
	}
}
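/* Note: in a fully shortcut-connected network every neuron receives input
 * from all preceding neurons (including the bias neuron), which is why the
 * i-th connection pointer of each neuron can simply point at the i-th neuron
 * of the network. One illustrative way to inspect the rebuilt connectivity
 * (a sketch, not part of this file) is:
 *
 *     fann_print_connections(ann);
 */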

void fann_add_candidate_neuron(struct fann *ann, struct fann_layer *layer)
{
	unsigned int num_connections_in = layer->first_neuron - ann->first_layer->first_neuron;
	unsigned int num_connections_out =
		(ann->last_layer - 1)->last_neuron - (layer + 1)->first_neuron;
	unsigned int num_connections_move = num_connections_out + num_connections_in;

	unsigned int candidate_con, candidate_output_weight;
	int i;

	struct fann_layer *layer_it;
	struct fann_neuron *neuron_it, *neuron_place, *candidate;

	/* We know that there is enough room for the new neuron
	 * (the candidates are in the same arrays), so move
	 * the last neurons to make room for this neuron.
	 */

	/* first move the pointers to neurons in the layer structs */
	for(layer_it = ann->last_layer - 1; layer_it != layer; layer_it--)
	{
#ifdef CASCADE_DEBUG_FULL
		printf("move neuron pointers in layer %d, first(%d -> %d), last(%d -> %d)\n",
			   layer_it - ann->first_layer,
			   layer_it->first_neuron - ann->first_layer->first_neuron,
			   layer_it->first_neuron - ann->first_layer->first_neuron + 1,
			   layer_it->last_neuron - ann->first_layer->first_neuron,
			   layer_it->last_neuron - ann->first_layer->first_neuron + 1);
#endif
		layer_it->first_neuron++;
		layer_it->last_neuron++;
	}

	/* also move the last neuron in the layer that needs the neuron added */
	layer->last_neuron++;

	/* this is the place that should hold the new neuron */
	neuron_place = layer->last_neuron - 1;

#ifdef CASCADE_DEBUG_FULL
	printf("num_connections_in=%d, num_connections_out=%d\n", num_connections_in,
		   num_connections_out);
#endif

	candidate = ann->first_layer->first_neuron + ann->cascade_best_candidate;

	/* the output weights for the candidates are located after the input weights */
	candidate_output_weight = candidate->last_con;

	/* move the actual output neurons and the indexes to the connection arrays */
	for(neuron_it = (ann->last_layer - 1)->last_neuron - 1; neuron_it != neuron_place; neuron_it--)
	{
#ifdef CASCADE_DEBUG_FULL
		printf("move neuron %d -> %d\n", neuron_it - ann->first_layer->first_neuron - 1,
			   neuron_it - ann->first_layer->first_neuron);
#endif
		*neuron_it = *(neuron_it - 1);

		/* move the weights */
#ifdef CASCADE_DEBUG_FULL
		printf("move weight[%d ... %d] -> weight[%d ... %d]\n", neuron_it->first_con,
			   neuron_it->last_con - 1, neuron_it->first_con + num_connections_move - 1,
			   neuron_it->last_con + num_connections_move - 2);
#endif
		for(i = neuron_it->last_con - 1; i >= (int)neuron_it->first_con; i--)
		{
#ifdef CASCADE_DEBUG_FULL
			printf("move weight[%d] = weight[%d]\n", i + num_connections_move - 1, i);
#endif
			ann->weights[i + num_connections_move - 1] = ann->weights[i];
		}

		/* move the indexes to weights */
		neuron_it->last_con += num_connections_move;
		num_connections_move--;
		neuron_it->first_con += num_connections_move;

		/* set the new weight to the newly allocated neuron */
		ann->weights[neuron_it->last_con - 1] =
			(ann->weights[candidate_output_weight]) * ann->cascade_weight_multiplier;
		candidate_output_weight++;
	}

	/* Now initialize the actual neuron */
	neuron_place->value = 0;
	neuron_place->sum = 0;
	neuron_place->activation_function = candidate->activation_function;
	neuron_place->activation_steepness = candidate->activation_steepness;
	neuron_place->last_con = (neuron_place + 1)->first_con;
	neuron_place->first_con = neuron_place->last_con - num_connections_in;
#ifdef CASCADE_DEBUG_FULL
	printf("neuron[%d] = weights[%d ... %d] activation: %s, steepness: %f\n",
		   neuron_place - ann->first_layer->first_neuron, neuron_place->first_con,
		   neuron_place->last_con - 1, FANN_ACTIVATIONFUNC_NAMES[neuron_place->activation_function],
		   neuron_place->activation_steepness);/* TODO remove */
#endif

	candidate_con = candidate->first_con;
	/* copy the candidate's trained input weights into the new neuron */
#ifdef CASCADE_DEBUG_FULL
	printf("move cand weights[%d ... %d] -> [%d ... %d]\n", candidate_con,
		   candidate_con + num_connections_in - 1, neuron_place->first_con,
		   neuron_place->last_con - 1);
#endif

	for(i = 0; i < (int)num_connections_in; i++)
	{
		ann->weights[i + neuron_place->first_con] = ann->weights[i + candidate_con];
#ifdef CASCADE_DEBUG_FULL
		printf("move weights[%d] -> weights[%d] (%f)\n", i + candidate_con,
			   i + neuron_place->first_con, ann->weights[i + neuron_place->first_con]);
#endif
	}

	/* Change some of main variables */
	ann->total_neurons++;
	ann->total_connections += num_connections_in + num_connections_out;

	return;
}

void fann_install_candidate(struct fann *ann)
{
	struct fann_layer *layer;

	layer = fann_add_layer(ann, ann->last_layer - 1);
	fann_add_candidate_neuron(ann, layer);
	return;
}
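/* Usage sketch (illustrative, not part of this file): the helpers above are
 * driven by the public cascade-training entry point. A minimal run on a
 * shortcut network, assuming a training file "train.data", could look like:
 *
 *     struct fann *ann = fann_create_shortcut(2, 2, 1);
 *     struct fann_train_data *data = fann_read_train_from_file("train.data");
 *     fann_cascadetrain_on_data(ann, data, 30, 1, 0.001f);
 *     fann_destroy_train(data);
 *     fann_destroy(ann);
 */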

#endif /* FIXEDFANN */

FANN_EXTERNAL unsigned int FANN_API fann_get_cascade_num_candidates(struct fann *ann)
{
	return ann->cascade_activation_functions_count *
		ann->cascade_activation_steepnesses_count *
		ann->cascade_num_candidate_groups;
}
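/* Example: with 10 candidate activation functions, 4 candidate steepnesses
 * and 2 candidate groups configured, 10 * 4 * 2 = 80 candidate neurons are
 * trained in parallel during each candidate phase. */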

FANN_GET_SET(float, cascade_output_change_fraction)
FANN_GET_SET(unsigned int, cascade_output_stagnation_epochs)
FANN_GET_SET(float, cascade_candidate_change_fraction)
FANN_GET_SET(unsigned int, cascade_candidate_stagnation_epochs)
FANN_GET_SET(unsigned int, cascade_num_candidate_groups)
FANN_GET_SET(fann_type, cascade_weight_multiplier)
FANN_GET_SET(fann_type, cascade_candidate_limit)
FANN_GET_SET(unsigned int, cascade_max_out_epochs)
FANN_GET_SET(unsigned int, cascade_max_cand_epochs)

FANN_GET(unsigned int, cascade_activation_functions_count)
FANN_GET(enum fann_activationfunc_enum *, cascade_activation_functions)

FANN_EXTERNAL void FANN_API fann_set_cascade_activation_functions(struct fann *ann,
														 enum fann_activationfunc_enum *
														 cascade_activation_functions,
														 unsigned int 
														 cascade_activation_functions_count)
{
	if(ann->cascade_activation_functions_count != cascade_activation_functions_count)
	{
		ann->cascade_activation_functions_count = cascade_activation_functions_count;
		
		/* reallocate mem */
		ann->cascade_activation_functions = 
			(enum fann_activationfunc_enum *)realloc(ann->cascade_activation_functions, 
			ann->cascade_activation_functions_count * sizeof(enum fann_activationfunc_enum));
		if(ann->cascade_activation_functions == NULL)
		{
			fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
			return;
		}
	}
	
	memmove(ann->cascade_activation_functions, cascade_activation_functions, 
		ann->cascade_activation_functions_count * sizeof(enum fann_activationfunc_enum));
}
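/* Usage sketch (illustrative): restrict the candidate pool to two symmetric
 * activation functions. The values are copied by memmove above, so the caller
 * keeps ownership of the array:
 *
 *     enum fann_activationfunc_enum funcs[2] =
 *         { FANN_SIGMOID_SYMMETRIC, FANN_GAUSSIAN_SYMMETRIC };
 *     fann_set_cascade_activation_functions(ann, funcs, 2);
 */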

FANN_GET(unsigned int, cascade_activation_steepnesses_count)
FANN_GET(fann_type *, cascade_activation_steepnesses)

FANN_EXTERNAL void FANN_API fann_set_cascade_activation_steepnesses(struct fann *ann,
														   fann_type *
														   cascade_activation_steepnesses,
														   unsigned int 
														   cascade_activation_steepnesses_count)
{
	if(ann->cascade_activation_steepnesses_count != cascade_activation_steepnesses_count)
	{
		ann->cascade_activation_steepnesses_count = cascade_activation_steepnesses_count;
		
		/* reallocate mem */
		ann->cascade_activation_steepnesses = 
			(fann_type *)realloc(ann->cascade_activation_steepnesses, 
			ann->cascade_activation_steepnesses_count * sizeof(fann_type));
		if(ann->cascade_activation_steepnesses == NULL)
		{
			fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
			return;
		}
	}
	
	memmove(ann->cascade_activation_steepnesses, cascade_activation_steepnesses, 
		ann->cascade_activation_steepnesses_count * sizeof(fann_type));
}
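/* Usage sketch (illustrative): train candidates with four different
 * activation steepnesses:
 *
 *     fann_type steepnesses[4] = { 0.25, 0.5, 0.75, 1.0 };
 *     fann_set_cascade_activation_steepnesses(ann, steepnesses, 4);
 */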
