
📄 testsom.c

📁 SOM-SD is a self-organizing feature map (SOM) neural network algorithm for structured data that is currently very popular internationally; a minimal illustrative sketch of a plain SOM training step follows the file header below.
💻 C
📖 Page 1 of 5
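Before the evaluation code in testsom.c, here is a minimal sketch of a plain SOM winner search and on-line training step in C, for orientation only. It is an illustration under assumed names and layout: find_bmu, train_step, XDIM, YDIM and DIM are hypothetical and are not taken from testsom.c or the SOM-SD sources.

#include <stdio.h>
#include <stdlib.h>
#include <math.h>

#define XDIM 8   /* map width  (illustrative) */
#define YDIM 6   /* map height (illustrative) */
#define DIM  3   /* input dimensionality (illustrative) */

/* Return the index of the best matching unit: the codebook vector with the
   smallest squared Euclidean distance to the sample. */
static int find_bmu(float codes[XDIM*YDIM][DIM], const float *sample)
{
  int n, i, best = 0;
  float bestd = 1e30f;
  for (n = 0; n < XDIM*YDIM; n++){
    float d = 0.0f;
    for (i = 0; i < DIM; i++){
      float diff = codes[n][i] - sample[i];
      d += diff * diff;
    }
    if (d < bestd){ bestd = d; best = n; }
  }
  return best;
}

/* One on-line training step: pull every codebook vector towards the sample,
   weighted by a Gaussian neighbourhood centred on the winner. */
static void train_step(float codes[XDIM*YDIM][DIM], const float *sample,
                       float alpha, float sigma)
{
  int bmu = find_bmu(codes, sample);
  int bx = bmu % XDIM, by = bmu / XDIM;
  int x, y, i;
  for (y = 0; y < YDIM; y++){
    for (x = 0; x < XDIM; x++){
      float dist2 = (float)((x-bx)*(x-bx) + (y-by)*(y-by));
      float h = alpha * expf(-dist2 / (2.0f*sigma*sigma));
      for (i = 0; i < DIM; i++)
        codes[y*XDIM+x][i] += h * (sample[i] - codes[y*XDIM+x][i]);
    }
  }
}

int main(void)
{
  static float codes[XDIM*YDIM][DIM];
  float sample[DIM];
  int t, i;
  srand(1);
  for (t = 0; t < XDIM*YDIM; t++)           /* random initialisation */
    for (i = 0; i < DIM; i++)
      codes[t][i] = (float)rand() / RAND_MAX;
  for (t = 0; t < 1000; t++){               /* train on random samples */
    for (i = 0; i < DIM; i++)
      sample[i] = (float)rand() / RAND_MAX;
    train_step(codes, sample, 0.1f * (1.0f - t/1000.0f), 2.0f);
  }
  printf("codes[0] = (%f %f %f)\n", codes[0][0], codes[0][1], codes[0][2]);
  return 0;
}

Roughly speaking, SOM-SD extends this basic scheme to structured data by appending the map coordinates of a node's children's winners to the node's input vector; the routines in the listing below (ComputePrecision, ComputeRetrievalPerformance, and so on) evaluate a map trained in that fashion.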
  /* (Tail of a data-set statistics routine; its beginning is not shown on this page.) */
      for (j = 0; j < parameters.train->ldim; j++)
        sprintf(&bigbuf[strlen(bigbuf)], " %f", harray[i].node->points[j]);
      sprintf(&bigbuf[strlen(bigbuf)], " %s", harray[i].substructID);
      free(harray[i].substructID);
      harray[i].substructID = strdup(bigbuf);
    }
    qsort(harray, N, sizeof(struct AllHits), comparsubStructID);
    nsdsub = 1;
    for (i = 1; i < N; i++){
      if (strcmp(harray[i-1].substructID, harray[i].substructID))
        nsdsub++;
    }
    nl = 0;
    qsort(labelval, N, sizeof(FLOAT), comparFloat);
    for (i = 1; i < N; i++){
      if (labelval[i-1] != labelval[i]){
        nl++;
      }
    }
  }
  if (parameters.train->FanOut != maxO)
    fprintf(stderr, "Max outdegree stated: %d, but actual ", parameters.train->FanOut);
  fprintf(stderr, "max. outdegree is: %d\n", maxO);
  fprintf(stderr, "Outdegree of graphs: min %d, max %d, avg %.2E\n", minO, maxO, (float)totalO/nG);
  fprintf(stderr, "Total number of links: %d\n", nlinks);
  fprintf(stderr, "Number of unique substructures: %d (struct only)\n", nsub);
  fprintf(stderr, "Number of unique substructures: %d (struct & label)\n", nsdsub);
  fprintf(stderr, "Number of unique graphs: %d\n", V);
  fprintf(stderr, "Number of unique nodes(structure only): %d\n", Vn);
  fprintf(stderr, "Number of unique labels: %d\n", nl);
  fprintf(stderr, "%s %d\n", ctmp, yme);

  /* Cleanup */
  for (i = 0; i < N; i++)
    free(harray[i].substructID);
  free(harray);
  free(buffer);
  free(labelval);
}

/******************************************************************************
Description:  Measure how well nodes sharing the same structure (and
              sub-structure) are mapped onto the same map location; prints
              the quantisation error and the struct/sub-struct mapping
              performance.
Return value: 
******************************************************************************/
void ComputePrecision(struct Parameters parameters){
  int i, n, x, y, r;
  int flag, ni, mi, N;
  FLOAT qerr, Si, sSi;
  char *buffer;
  int olen;
  struct Graph *graph;
  struct Node *node;
  struct Map *map;
  struct AllHits *hits = NULL, *hptr, *hprev, *harray;

  if (parameters.train == NULL)
    return;
  map = &parameters.map;

  /* Find the winners for all nodes */
  if (parameters.map.topology == TOPOL_VQ)
    VQSet_ab(&parameters);
  if (KstepEnabled)
    qerr = K_Step_Approximation(map, parameters.train, 1);
  else
    qerr = GetNodeCoordinates(map, parameters.train);

  fprintf(stdout, "Qerror:%E\n", qerr);
  //  return;

  N = 0;
  Si = sSi = 0.0;
  olen = 0;
  buffer = MyCalloc(1024*1024, sizeof(char));
  for (y = 0; y < parameters.map.ydim; y++){
    for (x = 0; x < parameters.map.xdim; x++){
      ni = 0;
      for (graph = parameters.train; graph != NULL; graph = graph->next){
        for (n = 0; n < graph->numnodes; n++){
          node = graph->nodes[n];
          if (map->topology == TOPOL_VQ)   /* In VQ mode... */
            flag = (y*parameters.map.xdim+x == node->winner);
          else
            flag = (x == node->x && y == node->y);
          if (flag){
            hptr = (struct AllHits*)MyCalloc(1, sizeof(struct AllHits));
            for (r = 0; r < graph->numnodes; r++)
              if (IsRoot(graph->nodes[r]))
                break;
            if (!IsRoot(graph->nodes[r]))
              fprintf(stderr, "No root found\n");
            hptr->graph = graph;
            hptr->node = node;
            memset(buffer, 0, olen);
            GetStructID(hptr->graph->FanOut, graph->nodes[r], buffer);
            olen = strlen(buffer);
            hptr->structID = (char*)memdup(buffer, olen+1);
            memset(buffer, 0, olen);
            GetStructID(hptr->graph->FanOut, hptr->node, buffer);
            olen = strlen(buffer);
            hptr->substructID = (char*)memdup(buffer, olen+1);
            hptr->next = hits;
            hits = hptr;
            ni++;
            //  printf("%d %d %d %d\n", x, y, graph->gnum, node->nnum);
            //  fprintf(stderr, "%s\n", graph->gname);
          }
        }
      }
      //Sort list by structID
      if (ni > 0){
        harray = malloc(ni * sizeof(struct AllHits));
        for (i = 0, hptr = hits; hptr != NULL; hptr = hptr->next, i++)
          memcpy(&harray[i], hptr, sizeof(struct AllHits));
        qsort(harray, ni, sizeof(struct AllHits), comparStructID);
        mi = 1;
        n = 1;
        hprev = &harray[0];
        for (i = 1; i < ni; i++){
          if (!strcmp(harray[i].structID, hprev->structID))
            n++;
          else{
            if (mi < n)
              mi = n;
            n = 1;
            hprev = &harray[i];
          }
        }
        if (mi < n)
          mi = n;
        Si += (FLOAT)mi/ni;
      }
      //Sort list by substructID
      if (ni > 0){
        qsort(harray, ni, sizeof(struct AllHits), comparsubStructID);
        mi = 1;
        n = 1;
        hprev = &harray[0];
        for (i = 1; i < ni; i++){
          if (!strcmp(harray[i].substructID, hprev->substructID))
            n++;
          else{
            if (mi < n)
              mi = n;
            n = 1;
            hprev = &harray[i];
          }
        }
        if (mi < n)
          mi = n;
        sSi += (FLOAT)mi/ni;
        free(harray);
        harray = NULL;
      }
      //Delete list
      while(hits != NULL){
        hptr = hits->next;
        free(hits->structID);
        free(hits->substructID);
        free(hits);
        hits = hptr;
      }
      if (ni > 0)
        N++;
    } //End x-loop
  } //End y-loop
  fprintf(stdout, "Struct mapping performance (E): %f\n", Si/N);
  fprintf(stdout, "SubStruct mapping performance (e): %f\n", sSi/N);
}

/******************************************************************************
Description:  Compute best matching codebook for which
              vmap->activation[y][x] != 0
Return value: 
******************************************************************************/
void FindWinnerEucledianOnActiveOnly(struct Map *map, struct Node *node, struct Graph *gptr, struct Winner *winner, struct VMap *vmap){
  FLOAT *mu;
  UNSIGNED tend;
  UNSIGNED noc;  /* Number of codebooks in the map */
  FLOAT *codebook, *sample;
  UNSIGNED n, i;
  FLOAT diffsf, diff, difference;

  tend = gptr->dimension;
  mu = node->mu;
  noc = map->xdim * map->ydim;
  diffsf = FLT_MAX;
  sample = node->points;
  for (n = 0; n < noc; n++){  /* For every codebook of the map */
    if (vmap->activation[map->codes[n].y][map->codes[n].x] == 0)
      continue;
    codebook = map->codes[n].points;
    difference = 0.0;
    /* Compute the difference between codebook and input entry */
    for (i = 0; i < tend; i++){
      diff = codebook[i] - sample[i];
      difference += diff * diff * mu[i];
      if (difference > diffsf)
        goto big_difference;
    }
    /* Distance is smaller than previous distances */
    winner->codeno = n;
    diffsf         = difference;
  big_difference:
    continue;
  }
  winner->diff = diffsf;
  return;
}

float ComputeClassificationConfusion(int x, int y, struct VMap *vmap){
  int i, num, bestnum;

  if (vmap->classes[y][x] == NULL){
    //given y and x should point to a valid location of an activated neuron
    fprintf(stderr, "Unexpected internal error\n");
    fprintf(stderr, "Perhaps the dataset contains unlabelled root nodes?\n");
    fprintf(stderr, "Debug info: %d %d %d %d\n", x, y, vmap->numclasses, vmap->activation[y][x]);
  }
  num = 0;
  bestnum = 0;
  for (i = 0; i < vmap->numclasses; i++){
    num += vmap->classes[y][x][i];
    if (vmap->classes[y][x][i] > bestnum)
      bestnum = vmap->classes[y][x][i];
  }
  //Bestnum should actually be the same as: vmap->classes[y][x][vmap->winnerclass[y][x]-1]
  return (float)bestnum/num;
}

int **ComputeConfusionMatrix(int xdim, int ydim, struct VMap *vmap){
  int i, row, ond, offd;
  int x, y;
  int r;
  int **matrix;
  int *lo;

  matrix = (int**)malloc(vmap->numclasses * sizeof(int*));
  for (i = 0; i < vmap->numclasses; i++)
    matrix[i] = (int*)calloc(vmap->numclasses, sizeof(int));
  for (y = 0; y < ydim; y++){
    for (x = 0; x < xdim; x++){
      if (vmap->classes[y][x] == NULL)
        continue;
      row = vmap->winnerclass[y][x]-1;
      for (i = 0; i < vmap->numclasses; i++)
        matrix[i][row] += vmap->classes[y][x][i];
    }
  }
  ond = 0;
  offd = 0;
  lo = GetSortedLabelIndex();
  for (x = 0; x < vmap->numclasses; x++)
    fprintf(stdout, " %d", lo[x]);
  fprintf(stdout, "\n");
  for (x = 0; x < vmap->numclasses; x++)
    fprintf(stdout, " %s", GetLabel(lo[x]));
  fprintf(stdout, "\n");
  for (y = 0; y < vmap->numclasses; y++){
    r = 0;
    for (x = 0; x < vmap->numclasses; x++){
      r += matrix[lo[y]-1][lo[x]-1];
      if (x == y)
        ond += matrix[lo[y]-1][lo[x]-1];
      else
        offd += matrix[lo[y]-1][lo[x]-1];
      fprintf(stdout, " %4d", matrix[lo[y]-1][lo[x]-1]);
    }
    if (r > 0)
      fprintf(stdout, " #%.4f", (float)100*matrix[lo[y]-1][lo[y]-1]/r);
    fprintf(stdout, "\n");
  }
  fprintf(stdout, "On diagonal: %d\n", ond);
  fprintf(stdout, "Off diagonal: %d\n", offd);
  if (ond > 0)
    fprintf(stdout, "Confusion: %f\n", (float)offd*100/ond);
  return matrix;
}

//The following assumes a hexagonal map
float GetClusteringPerformance(struct Parameters parameters, struct VMap vmap){
  int x, y, i, j, n, W;
  int xdim, ydim;
  float Pi, P;
  int offset[9];
  int best, all, numlabels;
  int mid, id, nPi;
  struct Codebook *neuron;

  xdim = parameters.map.xdim;
  ydim = parameters.map.ydim;
  offset[0] = 0;
  offset[1] = 1;
  offset[2] = -1;
  offset[3] = xdim;
  offset[4] = -xdim+1;
  offset[5] = -xdim;
  offset[6] = -xdim-1;
  if (parameters.map.topology == TOPOL_RECT){
    n = 9;
    offset[7] = xdim-1;
    offset[8] = xdim+1;
  }
  else if (parameters.map.topology == TOPOL_HEXA)
    n = 7;
  else{
    fprintf(stderr, "Unsupported Neighbourhood in function GetClusteringPerformance()\n");
    return -1;
  }
  W = 0;
  P = 0.0;
  numlabels = GetNumLabels();
  for (y = 0; y < ydim; y++){
    for (x = 0; x < xdim; x++){
      mid = y*xdim+x;
      best = vmap.winnerclass[y][x]-1;
      if (best < 0)
        continue;
      Pi = 0.0;
      nPi = 0;
      for (i = 0; i < n; i++){
        id = mid+offset[i];
        if (id < 0 || id >= xdim*ydim)
          continue;
        neuron = &parameters.map.codes[id];
        if (abs(neuron->x - parameters.map.codes[mid].x) <= 1 &&
            abs(neuron->y - parameters.map.codes[mid].y) <= 1 &&
            vmap.classes[neuron->y][neuron->x] != NULL){
          all = 0;
          for (j = 0; j < numlabels; j++)
            all += vmap.classes[neuron->y][neuron->x][j];
          Pi += (float)vmap.classes[neuron->y][neuron->x][best]/all;
          nPi++;
        }
      }
      if (nPi > 0){
        Pi = Pi/nPi;
        P += Pi;
        W++;
      }
    }
  }
  if (W > 0)
    P = P/W;
  return P;
}
/******************************************************************************
Description:  Map the training set (and, if supplied, the test set) onto the
              trained map, then classify each root node of the test data by
              the majority class of its winning neuron.
Return value: 
******************************************************************************/
void ComputeRetrievalPerformance(struct Parameters parameters, int classifyflag){
  int flag = 0;
  struct VMap vmap;
  struct VMap tvmap; //for the test set
  float R, P;
  int C, n, nnum;
  int winnerx, winnery;
  struct Graph *gptr;
  struct Node *node;
  struct Winner winner = {0};
  struct Map *map;

  if (parameters.test == NULL){
    printf("Warning: No test file given. Will use training data for testing.\n");
    parameters.test = parameters.train;
    flag = 1;
  }
  map = &parameters.map;

  /* Compute the mapping of nodes in the training and the test dataset */
  if (KstepEnabled){
    K_Step_Approximation(map, parameters.train, 1);
    if (!flag)
      K_Step_Approximation(map, parameters.test, 1);
  }
  else{
    GetNodeCoordinates(map, parameters.train);
    if (!flag)
      GetNodeCoordinates(map, parameters.test);
  }
  vmap = GetHits(parameters.map.xdim, parameters.map.ydim, parameters.train, ROOT);
  GetClusterID(parameters.map, parameters.train, &vmap);
  if (!flag){
    tvmap = GetHits(parameters.map.xdim, parameters.map.ydim, parameters.test, ROOT);
    GetClusterID(parameters.map, parameters.test, &tvmap);
  }
  R = 0.0;
  C = 0;
  n = 0;
  //For (every node in the test set){
  for (gptr = parameters.test; gptr != NULL; gptr = gptr->next){
    for (nnum = 0; nnum < gptr->numnodes; nnum++){
      node = gptr->nodes[nnum];
      if (!IsRoot(node))
        continue;
      FindWinnerEucledianOnActiveOnly(map, node, gptr, &winner, &vmap);
      winnerx = map->codes[winner.codeno].x;
      winnery = map->codes[winner.codeno].y;
      n++;
      R += ComputeClassificationConfusion(winnerx, winnery, &vmap);
      if (classifyflag != 0)
        fprintf(stdout, "Graph:%s %s (%d,%s)", gptr->gname, GetLabel(vmap.winnerclass[winnery][winnerx]), node->label, GetLabel(node->label));
      if (node->label == vmap.winnerclass[winnery][winnerx]){
        //	fprintf(stdout, "G\n");
        C++;
      }
      //      else
      //	fprintf(stdout, "B:%E\n", winner.diff);
    }
    /* (The listing is cut off here; it continues on the following pages.) */
