⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 intmatcher.cpp

📁 OCR的相关资料，希望对研究OCR的朋友有所帮助。
💻 CPP
📖 第 1 页 / 共 5 页
字号:
/* Adaptive-matcher tunables (0-255): thresholds above which a proto or a
   feature is considered "good" during adaptive matching. */
make_int_var (AdaptProtoThresh, 230, MakeAdaptProtoThresh,
              16, 29, SetAdaptProtoThresh,
              "Threshold for good protos during adaptive 0-255:   ");

make_int_var (AdaptFeatureThresh, 230, MakeAdaptFeatureThresh,
              16, 30, SetAdaptFeatureThresh,
              "Threshold for good features during adaptive 0-255:   ");

/* Externs referenced by this file but declared elsewhere (kept as in the
   original source): */
//extern int display_ratings;
//extern "C" int                                        newcp_ratings_on;
//extern "C" double                             newcp_prune_threshold;
//extern "C" double                             tessedit_cp_ratio;
//extern "C" int                                        feature_prune_percentile;
//extern INT32                                  cp_maps[4];

/* NOTE(review): these look like instrumentation counters for proto/config
   table accesses; nothing in this chunk writes them — confirm their use in
   the rest of the file. */
int protoword_lookups;
int zero_protowords;
int proto_shifts;
int set_proto_bits;
int config_shifts;
int set_config_bits;

/**----------------------------------------------------------------------------
              Public Code
----------------------------------------------------------------------------**/
/*---------------------------------------------------------------------------*/
int ClassPruner(INT_TEMPLATES IntTemplates,
                INT16 NumFeatures,
                INT_FEATURE_ARRAY Features,
                CLASS_NORMALIZATION_ARRAY NormalizationFactors,
                CLASS_CUTOFF_ARRAY ExpectedNumFeatures,
                CLASS_PRUNER_RESULTS Results,
                int Debug) {
/*
 **      Parameters:
 **              IntTemplates           Class pruner tables
 **              NumFeatures            Number of features in blob
 **              Features               Array of features
 **              NormalizationFactors   Array of fudge factors from blob
 **                                     normalization process
 **                                     (by CLASS_INDEX)
 **              ExpectedNumFeatures    Array of expected number of features
 **                                     for each class
 **                                     (by CLASS_INDEX)
 **              Results                Sorted Array of pruned classes
 **                                     (by CLASS_ID)
 **              Debug                  Debugger flag: 1=debugger on
 **      Globals:
 **              ClassPrunerThreshold   Cutoff threshold
 **              ClassPrunerMultiplier  Normalization factor multiplier
 **      Operation:
 **              Prune the classes using a modified fast match table.
 **              Return a sorted list of classes along with the number
 **              of pruned classes in that list.
 **      Return: Number of pruned classes.
 **      Exceptions: none
 **      History: Tue Feb 19 10:24:24 MST 1991, RWM, Created.
 **
 **      NOTE(review): the Debug parameter is never read in this body
 **      (debug output is gated on the global display_ratings instead).
 */
  UINT32 PrunerWord;
  INT32 class_index;             //index to class
  int Word;
  UINT32 *BasePrunerAddress;
  UINT32 feature_address;        //current feature index
  INT_FEATURE feature;           //current feature
  CLASS_PRUNER *ClassPruner;     //shadows the function name (original code)
  int PrunerSet;
  int NumPruners;
  INT32 feature_index;           //current feature
  /* Static scratch arrays: avoids large stack frames but makes this
     function non-reentrant and not thread-safe. */
  /* NOTE(review): ClassCount/NormCount are sized MAX_NUM_CLASSES - 1 while
     SortKey/SortIndex are sized MAX_NUM_CLASSES — confirm this asymmetry
     is intentional. */
  static INT32 ClassCount[MAX_NUM_CLASSES - 1];
  static INT16 NormCount[MAX_NUM_CLASSES - 1];
  static INT16 SortKey[MAX_NUM_CLASSES];
  static UINT8 SortIndex[MAX_NUM_CLASSES];
  CLASS_INDEX Class;
  int out_class;
  int MaxNumClasses;
  int MaxCount;
  int NumClasses;
  FLOAT32 max_rating;            //max allowed rating
                                 //NOTE(review): set to 0 below, never read
  INT32 *ClassCountPtr;
  INT8 classch;

  MaxNumClasses = NumClassesIn (IntTemplates);

  /* Clear Class Counts */
  ClassCountPtr = &(ClassCount[0]);
  for (Class = 0; Class < MaxNumClasses; Class++) {
    *ClassCountPtr++ = 0;
  }

  /* Update Class Counts: for every feature, accumulate the 2-bit pruner
     evidence (mapped through cp_maps) into each class's count. */
  NumPruners = NumClassPrunersIn (IntTemplates);
  for (feature_index = 0; feature_index < NumFeatures; feature_index++) {
    feature = &Features[feature_index];
    feature->CP_misses = 0;
    /* Quantize X, Y and Theta into NUM_CP_BUCKETS buckets each and combine
       into a flat table index; the <<1 scales the bucket index to a word
       offset (presumably two 32-bit words per bucket entry — confirm
       against the pruner table layout). */
    feature_address = (((feature->X * NUM_CP_BUCKETS >> 8) * NUM_CP_BUCKETS
      +
      (feature->Y * NUM_CP_BUCKETS >> 8)) *
      NUM_CP_BUCKETS +
      (feature->Theta * NUM_CP_BUCKETS >> 8)) << 1;
    ClassPruner = ClassPrunersFor (IntTemplates);
    class_index = 0;
    for (PrunerSet = 0; PrunerSet < NumPruners; PrunerSet++, ClassPruner++) {
      BasePrunerAddress = (UINT32 *) (*ClassPruner) + feature_address;
      for (Word = 0; Word < WERDS_PER_CP_VECTOR; Word++) {
        /* Each 32-bit pruner word packs 2-bit values for 16 classes;
           the extraction is hand-unrolled 16 times for speed. */
        PrunerWord = *BasePrunerAddress++;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
        PrunerWord >>= 2;
        ClassCount[class_index++] += cp_maps[PrunerWord & 3];
      }
    }
  }

  /* Adjust Class Counts for Number of Expected Features: scale down the
     count of any class that expected more features than the blob has. */
  for (Class = 0; Class < MaxNumClasses; Class++)
    if (NumFeatures < ExpectedNumFeatures[Class])
      ClassCount[Class] =
        (int) (((FLOAT32) (ClassCount[Class] * NumFeatures)) /
        (NumFeatures +
        CPCutoffStrength * (ExpectedNumFeatures[Class] -
        NumFeatures)));

  /* Adjust Class Counts for Normalization Factors, tracking the maximum
     normalized count seen. */
  MaxCount = 0;
  for (Class = 0; Class < MaxNumClasses; Class++) {
    NormCount[Class] = ClassCount[Class]
      - ((ClassPrunerMultiplier * NormalizationFactors[Class]) >> 8)
      * cp_maps[3] / 3;
    if (NormCount[Class] > MaxCount)
      MaxCount = NormCount[Class];
  }

  /* Prune Classes: the cutoff is a fixed-point fraction
     (ClassPrunerThreshold / 256) of the best normalized count. */
  MaxCount *= ClassPrunerThreshold;
  MaxCount >>= 8;
  /* Select Classes */
  if (MaxCount < 1)
    MaxCount = 1;
  NumClasses = 0;
  /* Note: SortIndex/SortKey are filled 1-based (first survivor at [1]),
     matching the HeapSort call below. */
  for (Class = 0; Class < MaxNumClasses; Class++)
  if (NormCount[Class] >= MaxCount) {
    NumClasses++;
    SortIndex[NumClasses] = Class;
    SortKey[NumClasses] = NormCount[Class];
  }

  /* Sort Classes using Heapsort Algorithm (ascending by SortKey, so the
     best class ends up at index NumClasses). */
  if (NumClasses > 1)
    HeapSort(NumClasses, SortKey, SortIndex);

  /* Optional debug dump, gated on the display_ratings global. */
  if (display_ratings > 1) {
    cprintf ("CP:%d classes, %d features:\n", NumClasses, NumFeatures);
    for (Class = 0; Class < NumClasses; Class++) {
      classch =
        ClassIdForIndex (IntTemplates, SortIndex[NumClasses - Class]);
      cprintf ("%c:C=%d, E=%d, N=%d, Rat=%d\n", classch,
        ClassCount[SortIndex[NumClasses - Class]],
        ExpectedNumFeatures[SortIndex[NumClasses - Class]],
        SortKey[NumClasses - Class],
        (int) (10 +
        1000 * (1.0f -
        SortKey[NumClasses -
        Class] / ((float) cp_maps[3] *
        NumFeatures))));
    }
    if (display_ratings > 2) {
      /* Re-walk the pruner tables to show, per feature, the raw 2-bit
         evidence for every surviving class. */
      NumPruners = NumClassPrunersIn (IntTemplates);
      for (feature_index = 0; feature_index < NumFeatures;
      feature_index++) {
        cprintf ("F=%3d,", feature_index);
        feature = &Features[feature_index];
        feature->CP_misses = 0;
        feature_address =
          (((feature->X * NUM_CP_BUCKETS >> 8) * NUM_CP_BUCKETS +
          (feature->Y * NUM_CP_BUCKETS >> 8)) * NUM_CP_BUCKETS +
          (feature->Theta * NUM_CP_BUCKETS >> 8)) << 1;
        ClassPruner = ClassPrunersFor (IntTemplates);
        class_index = 0;
        for (PrunerSet = 0; PrunerSet < NumPruners;
        PrunerSet++, ClassPruner++) {
          BasePrunerAddress = (UINT32 *) (*ClassPruner)
            + feature_address;
          for (Word = 0; Word < WERDS_PER_CP_VECTOR; Word++) {
            PrunerWord = *BasePrunerAddress++;
            for (Class = 0; Class < 16; Class++, class_index++) {
              if (NormCount[class_index] >= MaxCount)
                cprintf (" %c=%d,",
                  ClassIdForIndex (IntTemplates,
                  class_index),
                  PrunerWord & 3);
              PrunerWord >>= 2;
            }
          }
        }
        cprintf ("\n");
      }
      cprintf ("Adjustments:");
      for (Class = 0; Class < MaxNumClasses; Class++) {
        if (NormCount[Class] > MaxCount)
          cprintf (" %c=%d,",
            ClassIdForIndex (IntTemplates, Class),
            -((ClassPrunerMultiplier *
            NormalizationFactors[Class]) >> 8) * cp_maps[3] /
            3);
      }
      cprintf ("\n");
    }
  }

  /* Set Up Results: convert counts to ratings (0 = perfect), best class
     first, optionally dropping classes that fail the cp_maxes/cp_ratios
     filters. */
  max_rating = 0.0f;
  for (Class = 0, out_class = 0; Class < NumClasses; Class++) {
    Results[out_class].Class =
      ClassIdForIndex (IntTemplates, SortIndex[NumClasses - Class]);
    Results[out_class].config_mask = (UINT32) - 1;  //all configs enabled
    Results[out_class].Rating =
      1.0 - SortKey[NumClasses -
      Class] / ((float) cp_maps[3] * NumFeatures);
    /**/ Results[out_class].Rating2 =
      1.0 - SortKey[NumClasses -
      Class] / ((float) cp_maps[3] * NumFeatures);
    /* NOTE(review): '&&' binds tighter than '||' below, so the cp_maxes
       test only takes effect together with the cp_ratios test — confirm
       this grouping is the intended one. */
    if (tessedit_cp_ratio == 0.0 || Class == 0
      || Results[out_class].Rating * 1000 + 10 <
      cp_maxes[Results[out_class].Class]
      && Results[out_class].Rating * 1000 + 10 <
      (Results[0].Rating * 1000 +
      10) * cp_ratios[Results[out_class].Class])
      out_class++;
  }
  NumClasses = out_class;

  /* Export the top-two results and the rating of the known answer into
     the cp_* globals (side effects used by reporting code elsewhere);
     '~' / -1 mark missing entries. */
  if (blob_type != 0) {
    cp_classes = NumClasses;
    if (NumClasses > 0) {
      cp_chars[0] = Results[0].Class;
      cp_ratings[0] = (int) (1000 * Results[0].Rating + 10);
      cp_confs[0] = (int) (1000 * Results[0].Rating2 + 10);
      if (NumClasses > 1) {
        cp_chars[1] = Results[1].Class;
        cp_ratings[1] = (int) (1000 * Results[1].Rating + 10);
        cp_confs[1] = (int) (1000 * Results[1].Rating2 + 10);
      }
      else {
        cp_chars[1] = '~';
        cp_ratings[1] = -1;
        cp_confs[1] = -1;
      }
    }
    else {
      cp_chars[0] = '~';
      cp_ratings[0] = -1;
      cp_confs[0] = -1;
    }
    cp_bestindex = -1;
    cp_bestrating = -1;
    cp_bestconf = -1;
    for (Class = 0; Class < NumClasses; Class++) {
      classch = Results[Class].Class;
      if (classch == blob_answer) {
        cp_bestindex = Class;
        cp_bestrating = (int) (1000 * Results[Class].Rating + 10);
        cp_bestconf = (int) (1000 * Results[Class].Rating2 + 10);
      }
    }
  }
  return NumClasses;
}

/*---------------------------------------------------------------------------*/
/* NOTE(review): this scraped chunk is page 1 of 5 of the original file;
   feature_pruner is cut off mid-statement below. */
int feature_pruner(INT_TEMPLATES IntTemplates,
                   INT16 NumFeatures,
                   INT_FEATURE_ARRAY Features,
                   INT32 NumClasses,
                   CLASS_PRUNER_RESULTS Results) {
/*
 **      NOTE(review): this comment block was copy-pasted from ClassPruner
 **      and does not match this signature (there are no
 **      NormalizationFactors / ExpectedNumFeatures / Debug parameters
 **      here, and NumClasses/Results are inputs: the survivors of a
 **      previous ClassPruner call).
 **
 **      Parameters:
 **              IntTemplates           Class pruner tables
 **              NumFeatures            Number of features in blob
 **              Features               Array of features
 **              NormalizationFactors   Array of fudge factors from blob
 **                                     normalization process
 **                                     (by CLASS_INDEX)
 **              ExpectedNumFeatures    Array of expected number of features
 **                                     for each class
 **                                     (by CLASS_INDEX)
 **              Results                Sorted Array of pruned classes
 **                                     (by CLASS_ID)
 **              Debug                  Debugger flag: 1=debugger on
 **      Globals:
 **              ClassPrunerThreshold   Cutoff threshold
 **              ClassPrunerMultiplier  Normalization factor multiplier
 **      Operation:
 **              Prune the classes using a modified fast match table.
 **              Return a sorted list of classes along with the number
 **              of pruned classes in that list.
 **      Return: Number of pruned classes.
 **      Exceptions: none
 **      History: Tue Feb 19 10:24:24 MST 1991, RWM, Created.
 */
  UINT32 PrunerWord;
  CLASS_PRUNER *ClassPruner;
  INT32 class_index;             //index to class
  INT32 result_index;            //CP results index
  int PrunerSet;
  int NumPruners;
  int Word;
  INT_FEATURE feature;           //current feature
  INT32 feature_index;           //current feature
  INT32 CP_misses;               //missed features
  UINT32 feature_address;        //current feature index
  UINT32 *BasePrunerAddress;
  int MaxNumClasses;
  UINT32 class_mask[CLASS_MASK_SIZE];
  INT32 miss_histogram[MAX_NUM_CLASSES];

  MaxNumClasses = NumClassesIn (IntTemplates);
  for (class_index = 0; class_index < MaxNumClasses; class_index++)
    miss_histogram[class_index] = 0;

  /* Create class mask: start with all bits set, then clear the bits of
     the classes that survived pruning (loop body truncated in this
     chunk). */
  for (class_index = 0; class_index < CLASS_MASK_SIZE; class_index++)
    class_mask[class_index] = (UINT32) - 1;
  for (result_index = 0; result_index < NumClasses; result_index++) {
    class_index =
      IndexForClassId (IntTemplates, Results[result_index].Class);
    class_mask[class_index / CLASSES_PER_CP_WERD] &=

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -