📄 cluster.cpp

📁 Part of a Google OCR source package
💻 CPP
📖 Page 1 of 5
      left subcluster.  This continues until a leaf is found.
      If all samples have been found, NULL is returned.
      InitSampleSearch() must be called before NextSample()
      to initialize the search.
Return:     Pointer to the next leaf cluster (sample) or NULL.
Exceptions: None
History:    6/16/89, DSJ, Created.
****************************************************************************/
CLUSTER *NextSample(LIST *SearchState) {
  CLUSTER *Cluster;

  if (*SearchState == NIL)
    return (NULL);
  Cluster = (CLUSTER *) first_node (*SearchState);
  *SearchState = pop (*SearchState);
  while (TRUE) {
    if (Cluster->Left == NULL)
      return (Cluster);
    *SearchState = push (*SearchState, Cluster->Right);
    Cluster = Cluster->Left;
  }
}                                // NextSample

/** Mean ***********************************************************
Parameters: Proto       prototype to return mean of
            Dimension   dimension whose mean is to be returned
Globals:    none
Operation:  This routine returns the mean of the specified
            prototype in the indicated dimension.
Return:     Mean of Prototype in Dimension
Exceptions: none
History:    7/6/89, DSJ, Created.
*********************************************************************/
FLOAT32 Mean(PROTOTYPE *Proto, uinT16 Dimension) {
  return (Proto->Mean[Dimension]);
}                                // Mean

/** StandardDeviation *************************************************
Parameters: Proto       prototype to return standard deviation of
            Dimension   dimension whose stddev is to be returned
Globals:    none
Operation:  This routine returns the standard deviation of the
            prototype in the indicated dimension.
Return:     Standard deviation of Prototype in Dimension
Exceptions: none
History:    7/6/89, DSJ, Created.
**********************************************************************/
FLOAT32 StandardDeviation(PROTOTYPE *Proto, uinT16 Dimension) {
  switch (Proto->Style) {
    case spherical:
      return ((FLOAT32) sqrt ((double) Proto->Variance.Spherical));
    case elliptical:
      return ((FLOAT32)
        sqrt ((double) Proto->Variance.Elliptical[Dimension]));
    case mixed:
      switch (Proto->Distrib[Dimension]) {
        case normal:
          return ((FLOAT32)
            sqrt ((double) Proto->Variance.Elliptical[Dimension]));
        case uniform:
        case D_random:
          return (Proto->Variance.Elliptical[Dimension]);
      }
  }
  return 0.0f;
}                                // StandardDeviation

/*---------------------------------------------------------------------------
            Private Code
----------------------------------------------------------------------------*/
/** CreateClusterTree *******************************************************
Parameters: Clusterer   data structure holding samples to be clustered
Globals:    Tree        kd-tree holding samples
            TempCluster array of temporary clusters
            CurrentTemp index of next temp cluster to be used
            Heap        heap used to hold temp clusters - "best" on top
Operation:  This routine performs a bottom-up clustering on the samples
            held in the kd-tree of the Clusterer data structure.  The
            result is a cluster tree.  Each node in the tree represents
            a cluster which conceptually contains a subset of the samples.
            More precisely, the cluster contains all of the samples which
            are contained in its two sub-clusters.  The leaves of the
            tree are the individual samples themselves; they have no
            sub-clusters.  The root node of the tree conceptually contains
            all of the samples.
Return:     None (the Clusterer data structure is changed)
Exceptions: None
History:    5/29/89, DSJ, Created.
******************************************************************************/
void CreateClusterTree(CLUSTERER *Clusterer) {
  HEAPENTRY HeapEntry;
  TEMPCLUSTER *PotentialCluster;

  // save the kd-tree in a global variable so kd-tree walker can get at it
  Tree = Clusterer->KDTree;

  // allocate memory to hold all of the "potential" clusters
  TempCluster = (TEMPCLUSTER *)
    Emalloc (Clusterer->NumberOfSamples * sizeof (TEMPCLUSTER));
  CurrentTemp = 0;

  // each sample and its nearest neighbor form a "potential" cluster
  // save these in a heap with the "best" potential clusters on top
  Heap = MakeHeap (Clusterer->NumberOfSamples);
  KDWalk (Tree, (void_proc) MakePotentialClusters);

  // form potential clusters into actual clusters - always do "best" first
  while (GetTopOfHeap (Heap, &HeapEntry) != EMPTY) {
    PotentialCluster = (TEMPCLUSTER *) (HeapEntry.Data);

    // if main cluster of potential cluster is already in another cluster
    // then we don't need to worry about it
    if (PotentialCluster->Cluster->Clustered) {
      continue;
    }
    // if main cluster is not yet clustered, but its nearest neighbor is
    // then we must find a new nearest neighbor
    else if (PotentialCluster->Neighbor->Clustered) {
      PotentialCluster->Neighbor =
        FindNearestNeighbor (Tree, PotentialCluster->Cluster,
        &(HeapEntry.Key));
      if (PotentialCluster->Neighbor != NULL) {
        HeapStore(Heap, &HeapEntry);
      }
    }
    // if neither cluster is already clustered, form permanent cluster
    else {
      PotentialCluster->Cluster =
        MakeNewCluster(Clusterer, PotentialCluster);
      PotentialCluster->Neighbor =
        FindNearestNeighbor (Tree, PotentialCluster->Cluster,
        &(HeapEntry.Key));
      if (PotentialCluster->Neighbor != NULL) {
        HeapStore(Heap, &HeapEntry);
      }
    }
  }

  // the root node in the cluster tree is now the only node in the kd-tree
  Clusterer->Root = (CLUSTER *) RootOf (Clusterer->KDTree);

  // free up the memory used by the K-D tree, heap, and temp clusters
  FreeKDTree(Tree);
  Clusterer->KDTree = NULL;
  FreeHeap(Heap);
  memfree(TempCluster);
}                                // CreateClusterTree

/** MakePotentialClusters **************************************************
Parameters: Cluster     current cluster being visited in kd-tree walk
            Order       order in which cluster is being visited
            Level       level of this cluster in the kd-tree
Globals:    Tree        kd-tree to be searched for neighbors
            TempCluster array of temporary clusters
            CurrentTemp index of next temp cluster to be used
            Heap        heap used to hold temp clusters - "best" on top
Operation:  This routine is designed to be used in concert with the
            KDWalk routine.  It will create a potential cluster for
            each sample in the kd-tree that is being walked.  This
            potential cluster will then be pushed on the heap.
Return:     none
Exceptions: none
History:    5/29/89, DSJ, Created.
            7/13/89, DSJ, Removed visibility of kd-tree node data struct.
******************************************************************************/
void MakePotentialClusters(CLUSTER *Cluster, VISIT Order, inT32 Level) {
  HEAPENTRY HeapEntry;

  if ((Order == preorder) || (Order == leaf)) {
    TempCluster[CurrentTemp].Cluster = Cluster;
    HeapEntry.Data = (char *) &(TempCluster[CurrentTemp]);
    TempCluster[CurrentTemp].Neighbor =
      FindNearestNeighbor (Tree, TempCluster[CurrentTemp].Cluster,
      &(HeapEntry.Key));
    if (TempCluster[CurrentTemp].Neighbor != NULL) {
      HeapStore(Heap, &HeapEntry);
      CurrentTemp++;
    }
  }
}                                // MakePotentialClusters

/** FindNearestNeighbor *********************************************************
Parameters: Tree        kd-tree to search in for nearest neighbor
            Cluster     cluster whose nearest neighbor is to be found
            Distance    ptr to variable to report distance found
Globals:    none
Operation:  This routine searches the specified kd-tree for the nearest
            neighbor of the specified cluster.  It actually uses the
            kd routines to find the 2 nearest neighbors since one of them
            will be the original cluster.  A pointer to the nearest
            neighbor is returned, if it can be found, otherwise NULL is
            returned.  The distance between the 2 nodes is placed
            in the specified variable.
Return:     Pointer to the nearest neighbor of Cluster, or NULL
Exceptions: none
History:    5/29/89, DSJ, Created.
            7/13/89, DSJ, Removed visibility of kd-tree node data struct
********************************************************************************/
CLUSTER *FindNearestNeighbor (KDTREE * Tree, CLUSTER * Cluster, FLOAT32 * Distance)
#define MAXNEIGHBORS  2
#define MAXDISTANCE   MAX_FLOAT32
{
  CLUSTER *Neighbor[MAXNEIGHBORS];
  FLOAT32 Dist[MAXNEIGHBORS];
  inT32 NumberOfNeighbors;
  inT32 i;
  CLUSTER *BestNeighbor;

  // find the 2 nearest neighbors of the cluster
  NumberOfNeighbors = KDNearestNeighborSearch
    (Tree, Cluster->Mean, MAXNEIGHBORS, MAXDISTANCE, Neighbor, Dist);

  // search for the nearest neighbor that is not the cluster itself
  *Distance = MAXDISTANCE;
  BestNeighbor = NULL;
  for (i = 0; i < NumberOfNeighbors; i++) {
    if ((Dist[i] < *Distance) && (Neighbor[i] != Cluster)) {
      *Distance = Dist[i];
      BestNeighbor = Neighbor[i];
    }
  }
  return (BestNeighbor);
}                                // FindNearestNeighbor

/** MakeNewCluster *************************************************************
Parameters: Clusterer   current clustering environment
            TempCluster potential cluster to make permanent
Globals:    none
Operation:  This routine creates a new permanent cluster from the
            clusters specified in TempCluster.  The 2 clusters in
            TempCluster are marked as "clustered" and deleted from
            the kd-tree.  The new cluster is then added to the kd-tree.
Return:     Pointer to the new permanent cluster
Exceptions: none
History:    5/29/89, DSJ, Created.
            7/13/89, DSJ, Removed visibility of kd-tree node data struct
********************************************************************************/
CLUSTER *MakeNewCluster(CLUSTERER *Clusterer, TEMPCLUSTER *TempCluster) {
  CLUSTER *Cluster;

  // allocate the new cluster and initialize it
  Cluster = (CLUSTER *) Emalloc (sizeof (CLUSTER) +
    (Clusterer->SampleSize - 1) * sizeof (FLOAT32));
  Cluster->Clustered = FALSE;
  Cluster->Prototype = FALSE;
  Cluster->Left = TempCluster->Cluster;
  Cluster->Right = TempCluster->Neighbor;
  Cluster->CharID = -1;

  // mark the old clusters as "clustered" and delete them from the kd-tree
  Cluster->Left->Clustered = TRUE;
  Cluster->Right->Clustered = TRUE;
  KDDelete (Clusterer->KDTree, Cluster->Left->Mean, Cluster->Left);
  KDDelete (Clusterer->KDTree, Cluster->Right->Mean, Cluster->Right);

  // compute the mean and sample count for the new cluster
  Cluster->SampleCount =
    MergeClusters (Clusterer->SampleSize, Clusterer->ParamDesc,
    Cluster->Left->SampleCount, Cluster->Right->SampleCount,
    Cluster->Mean, Cluster->Left->Mean, Cluster->Right->Mean);

  // add the new cluster to the KD tree
  KDStore (Clusterer->KDTree, Cluster->Mean, Cluster);
  return (Cluster);
}                                // MakeNewCluster

/** MergeClusters ************************************************************
Parameters: N           # of dimensions (size of arrays)
            ParamDesc   array of dimension descriptions
            n1, n2      number of samples in each old cluster
            m           array to hold mean of new cluster
            m1, m2      arrays containing means of old clusters
Globals:    None
Operation:  This routine merges two clusters into one larger cluster.
            To do this it computes the number of samples in the new
            cluster and the mean of the new cluster.  The ParamDesc
            information is used to ensure that circular dimensions
            are handled correctly.
Return:     The number of samples in the new cluster.
Exceptions: None
History:    5/31/89, DSJ, Created.
*********************************************************************************/
inT32 MergeClusters (inT16 N,
                     register PARAM_DESC ParamDesc[],
                     register inT32 n1,
                     register inT32 n2,
                     register FLOAT32 m[],
                     register FLOAT32 m1[],
                     register FLOAT32 m2[]) {
  register inT32 i, n;

  n = n1 + n2;
  for (i = N; i > 0; i--, ParamDesc++, m++, m1++, m2++) {
    if (ParamDesc->Circular) {
      // if distance between means is greater than allowed
      // reduce upper point by one "rotation" to compute mean
      // then normalize the mean back into the accepted range
      if ((*m2 - *m1) > ParamDesc->HalfRange) {
        *m = (n1 * *m1 + n2 * (*m2 - ParamDesc->Range)) / n;
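The listing above breaks off inside MergeClusters because this is only page 1 of 5 of cluster.cpp. As the NextSample() comment block notes, callers first prime a LIST-based search state with InitSampleSearch() and then call NextSample() until it returns NULL, visiting every leaf (sample) of the cluster tree without recursion. A minimal usage sketch under that assumption; InitSampleSearch()'s exact form is not shown on this page, and ProcessSample() is a hypothetical caller-supplied routine:

// Illustrative only - iterate all samples (leaf clusters) under the root.
LIST SearchState;
CLUSTER *Sample;

InitSampleSearch(SearchState, Clusterer->Root);   // assumed (state, root) form
while ((Sample = NextSample(&SearchState)) != NULL) {
  ProcessSample(Sample);   // hypothetical; Sample->Mean holds the feature vector
}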

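The circular-dimension branch that the page break cuts off is just a weighted mean with a wrap-around correction: when the two means are more than HalfRange apart, one of them is shifted by a full Range before averaging, and the result is normalized back into the dimension's range. A self-contained sketch of the same idea (not the cluster.cpp routine; the parameter names only mirror PARAM_DESC):

// Weighted mean of two cluster means on a circular dimension,
// in the spirit of the Circular branch of MergeClusters.
float CircularWeightedMean(int n1, float m1, int n2, float m2,
                           float Min, float Range) {
  float HalfRange = Range / 2.0f;
  if (m2 - m1 > HalfRange)
    m2 -= Range;                      // bring m2 within half a range of m1
  else if (m1 - m2 > HalfRange)
    m1 -= Range;                      // symmetric case, not shown on this page
  float m = (n1 * m1 + n2 * m2) / (float)(n1 + n2);
  if (m < Min)
    m += Range;                       // normalize back into [Min, Min + Range)
  return m;
}

For example, merging two equal-sized clusters whose means are 350 and 10 on a 0-360 circular dimension gives 0 rather than the naive arithmetic mean of 180.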