📄 adaptmatch.cpp
enables use of pre-adapted templates
 ** Operation: This routine reads in the training information needed
 **   by the adaptive classifier and saves it into global variables.
 ** Return: none
 ** Exceptions: none
 ** History: Mon Mar 11 12:49:34 1991, DSJ, Created.
 */
  int i;
  FILE *File;
  char Filename[1024];

  if (!EnableAdaptiveMatcher)
    return;

  strcpy(Filename, demodir);
  strcat(Filename, BuiltInTemplatesFile);
#ifndef SECURE_NAMES
//  cprintf( "\nReading built-in templates from %s ...",
//  Filename);
  fflush(stdout);
#endif

#ifdef __UNIX__
  File = Efopen (Filename, "r");
#else
  File = Efopen (Filename, "rb");
#endif
  PreTrainedTemplates = ReadIntTemplates (File, TRUE);
  fclose(File);

  strcpy(Filename, demodir);
  strcat(Filename, BuiltInCutoffsFile);
#ifndef SECURE_NAMES
//  cprintf( "\nReading built-in pico-feature cutoffs from %s ...",
//  Filename);
  fflush(stdout);
#endif
  ReadNewCutoffs (Filename, PreTrainedTemplates->IndexFor, CharNormCutoffs);

  GetNormProtos();

  InitIntegerMatcher();
  InitIntegerFX();

  AllProtosOn = NewBitVector (MAX_NUM_PROTOS);
  PrunedProtos = NewBitVector (MAX_NUM_PROTOS);
  AllConfigsOn = NewBitVector (MAX_NUM_CONFIGS);
  AllProtosOff = NewBitVector (MAX_NUM_PROTOS);
  AllConfigsOff = NewBitVector (MAX_NUM_CONFIGS);
  TempProtoMask = NewBitVector (MAX_NUM_PROTOS);
  set_all_bits (AllProtosOn, WordsInVectorOfSize (MAX_NUM_PROTOS));
  set_all_bits (PrunedProtos, WordsInVectorOfSize (MAX_NUM_PROTOS));
  set_all_bits (AllConfigsOn, WordsInVectorOfSize (MAX_NUM_CONFIGS));
  zero_all_bits (AllProtosOff, WordsInVectorOfSize (MAX_NUM_PROTOS));
  zero_all_bits (AllConfigsOff, WordsInVectorOfSize (MAX_NUM_CONFIGS));

  if (UsePreAdaptedTemplates) {
    strcpy(Filename, imagefile);
    strcat(Filename, ADAPT_TEMPLATE_SUFFIX);
    File = fopen (Filename, "rb");
    if (File == NULL)
      AdaptedTemplates = NewAdaptedTemplates ();
    else {
#ifndef SECURE_NAMES
      cprintf ("\nReading pre-adapted templates from %s ...", Filename);
      fflush(stdout);
#endif
      AdaptedTemplates = ReadAdaptedTemplates (File);
      cprintf ("\n");
      fclose(File);
      PrintAdaptedTemplates(stdout, AdaptedTemplates);

      for (i = 0; i < NumClassesIn (AdaptedTemplates->Templates); i++) {
        BaselineCutoffs[i] =
          CharNormCutoffs[IndexForClassId (PreTrainedTemplates,
                                           ClassIdForIndex (AdaptedTemplates->Templates, i))];
      }
    }
  }
  else
    AdaptedTemplates = NewAdaptedTemplates ();

  old_enable_learning = EnableLearning;

}  /* InitAdaptiveClassifier */


void ResetAdaptiveClassifier() {
  free_adapted_templates(AdaptedTemplates);
  AdaptedTemplates = NULL;
}


/*---------------------------------------------------------------------------*/
void InitAdaptiveClassifierVars() {
/*
 ** Parameters: none
 ** Globals: none
 ** Operation: This routine installs the control knobs used by the
 **   adaptive matcher.
 ** Return: none
 ** Exceptions: none
 ** History: Mon Mar 11 12:49:34 1991, DSJ, Created.
 */
  VALUE dummy;

  string_variable (BuiltInTemplatesFile, "BuiltInTemplatesFile",
                   BUILT_IN_TEMPLATES_FILE);
  string_variable (BuiltInCutoffsFile, "BuiltInCutoffsFile",
                   BUILT_IN_CUTOFFS_FILE);

  MakeEnableAdaptiveMatcher();
  MakeUsePreAdaptedTemplates();
  MakeSaveAdaptedTemplates();
  MakeEnableLearning();
  MakeEnableAdaptiveDebugger();
  MakeBadMatchPad();
  MakeGoodAdaptiveMatch();
  MakeGreatAdaptiveMatch();
  MakeNoiseBlobLength();
  MakeMinNumPermClasses();
  MakeReliableConfigThreshold();
  MakeMaxAngleDelta();
  MakeLearningDebugLevel();
  MakeMatcherDebugLevel();
  MakeMatchDebugFlags();
  MakeRatingMargin();
  MakePerfectRating();
  MakeEnableIntFX();
  MakeEnableNewAdaptRules();
  MakeRatingScale();
  MakeCertaintyScale();

  InitPicoFXVars();
  InitOutlineFXVars();  //?

}  /* InitAdaptiveClassifierVars */


/*---------------------------------------------------------------------------*/
void PrintAdaptiveStatistics(FILE *File) {
/*
 ** Parameters:
 **   File    open text file to print adaptive statistics to
 ** Globals: none
 ** Operation: Print to File the statistics which have been gathered
 **   for the adaptive matcher.
 ** Return: none
 ** Exceptions: none
 ** History: Thu Apr 18 14:37:37 1991, DSJ, Created.
 */
#ifndef SECURE_NAMES
  fprintf (File, "\nADAPTIVE MATCHER STATISTICS:\n");
  fprintf (File, "\tNum blobs classified = %d\n", AdaptiveMatcherCalls);
  fprintf (File, "\tNum classes output = %d (Avg = %4.2f)\n",
           NumClassesOutput,
           ((AdaptiveMatcherCalls == 0) ? (0.0) :
            ((float) NumClassesOutput / AdaptiveMatcherCalls)));
  fprintf (File, "\t\tBaseline Classifier: %4d calls (%4.2f classes/call)\n",
           BaselineClassifierCalls,
           ((BaselineClassifierCalls == 0) ? (0.0) :
            ((float) NumBaselineClassesTried / BaselineClassifierCalls)));
  fprintf (File, "\t\tCharNorm Classifier: %4d calls (%4.2f classes/call)\n",
           CharNormClassifierCalls,
           ((CharNormClassifierCalls == 0) ? (0.0) :
            ((float) NumCharNormClassesTried / CharNormClassifierCalls)));
  fprintf (File, "\t\tAmbig Classifier: %4d calls (%4.2f classes/call)\n",
           AmbigClassifierCalls,
           ((AmbigClassifierCalls == 0) ? (0.0) :
            ((float) NumAmbigClassesTried / AmbigClassifierCalls)));

  fprintf (File, "\nADAPTIVE LEARNER STATISTICS:\n");
  fprintf (File, "\tNumber of words adapted to: %d\n", NumWordsAdaptedTo);
  fprintf (File, "\tNumber of chars adapted to: %d\n", NumCharsAdaptedTo);

  PrintAdaptedTemplates(File, AdaptedTemplates);
#endif
}  /* PrintAdaptiveStatistics */


/*---------------------------------------------------------------------------*/
void SettupPass1() {
/*
 ** Parameters: none
 ** Globals:
 **   EnableLearning    set to TRUE by this routine
 ** Operation: This routine prepares the adaptive matcher for the start
 **   of the first pass.  Learning is enabled (unless it is
 **   disabled for the whole program).
 ** Return: none
 ** Exceptions: none
 ** History: Mon Apr 15 16:39:29 1991, DSJ, Created.
 */
  /* Note: this is somewhat redundant, it simply says that if learning is
     enabled then it will remain enabled on the first pass.  If it is
     disabled, then it will remain disabled.  This is only put here to make
     it very clear that learning is controlled directly by the global
     setting of EnableLearning. */
  EnableLearning = old_enable_learning;

  SettupStopperPass1();

}  /* SettupPass1 */


/*---------------------------------------------------------------------------*/
void SettupPass2() {
/*
 ** Parameters: none
 ** Globals:
 **   EnableLearning    set to FALSE by this routine
 ** Operation: This routine prepares the adaptive matcher for the start
 **   of the second pass.  Further learning is disabled.
 ** Return: none
 ** Exceptions: none
 ** History: Mon Apr 15 16:39:29 1991, DSJ, Created.
 */
  EnableLearning = FALSE;

  SettupStopperPass2();

}  /* SettupPass2 */


/*---------------------------------------------------------------------------*/
void MakeNewAdaptedClass(TBLOB *Blob,
                         LINE_STATS *LineStats,
                         CLASS_ID ClassId,
                         ADAPT_TEMPLATES Templates) {
/*
 ** Parameters:
 **   Blob        blob to model new class after
 **   LineStats   statistics for text row blob is in
 **   ClassId     id of new class to be created
 **   Templates   adapted templates to add new class to
 ** Globals:
 **   AllProtosOn          dummy mask with all 1's
 **   BaselineCutoffs      kludge needed to get cutoffs
 **   PreTrainedTemplates  kludge needed to get cutoffs
 ** Operation: This routine creates a new adapted class and uses Blob
 **   as the model for the first config in that class.
 ** Return: none
 ** Exceptions: none
 ** History: Thu Mar 14 12:49:39 1991, DSJ, Created.
 */
  FEATURE_SET Features;
  int Fid, Pid;
  FEATURE Feature;
  int NumFeatures;
  TEMP_PROTO TempProto;
  PROTO Proto;
  ADAPT_CLASS Class;
  INT_CLASS IClass;
  CLASS_INDEX ClassIndex;
  TEMP_CONFIG Config;

  NormMethod = baseline;
  Features = ExtractOutlineFeatures (Blob, LineStats);
  NumFeatures = NumFeaturesIn (Features);
  if (NumFeatures > UNLIKELY_NUM_FEAT) {
    FreeFeatureSet(Features);
    return;
  }

  Class = NewAdaptedClass ();
  ClassIndex = AddAdaptedClass (Templates, Class, ClassId);
  Config = NewTempConfig (NumFeatures - 1);
  TempConfigFor (Class, 0) = Config;

  /* this is a kludge to construct cutoffs for adapted templates */
  BaselineCutoffs[ClassIndex] =
    CharNormCutoffs[IndexForClassId (PreTrainedTemplates, ClassId)];

  IClass = ClassForClassId (Templates->Templates, ClassId);

  for (Fid = 0; Fid < NumFeaturesIn (Features); Fid++) {
    Pid = AddIntProto (IClass);
    assert (Pid != NO_PROTO);

    Feature = FeatureIn (Features, Fid);
    TempProto = NewTempProto ();
    Proto = &(TempProto->Proto);

    /* compute proto params - NOTE that Y_DIM_OFFSET must be used because
       ConvertProto assumes that the Y dimension varies from -0.5 to 0.5
       instead of the -0.25 to 0.75 used in baseline normalization */
    ProtoAngle (Proto) = ParamOf (Feature, OutlineFeatDir);
    ProtoX (Proto) = ParamOf (Feature, OutlineFeatX);
    ProtoY (Proto) = ParamOf (Feature, OutlineFeatY) - Y_DIM_OFFSET;
    ProtoLength (Proto) = ParamOf (Feature, OutlineFeatLength);
    FillABC(Proto);

    TempProto->ProtoId = Pid;
    SET_BIT (Config->Protos, Pid);

    ConvertProto(Proto, Pid, IClass);
    AddProtoToProtoPruner(Proto, Pid, IClass);

    Class->TempProtos = push (Class->TempProtos, TempProto);
  }
  FreeFeatureSet(Features);

  AddIntConfig(IClass);
  ConvertConfig (AllProtosOn, 0, IClass);

  if (LearningDebugLevel >= 1) {
    cprintf ("Added new class '%c' with index %d and %d protos.\n",
             ClassId, ClassIndex, NumFeatures);
  }

}  /* MakeNewAdaptedClass */


/*---------------------------------------------------------------------------*/
int GetAdaptiveFeatures(TBLOB *Blob,
                        LINE_STATS *LineStats,
                        INT_FEATURE_ARRAY IntFeatures,
                        FEATURE_SET *FloatFeatures) {
/*
 ** Parameters:
 **   Blob           blob to extract features from
 **   LineStats      statistics about text row blob is in
 **   IntFeatures    array to fill with integer features
 **   FloatFeatures  place to return actual floating-pt features
 ** Globals: none
 ** Operation: This routine sets up the feature extractor to extract
 **   baseline normalized pico-features.  The extracted pico-features
 **   are converted to integer form and placed in IntFeatures.  The
 **   original floating-pt. features are returned in FloatFeatures.
 ** Return: Number of pico-features returned (0 if an error occurred)
 ** Exceptions: none
 ** History: Tue Mar 12 17:55:18 1991, DSJ, Created.
 */
  FEATURE_SET Features;
  int NumFeatures;

  NormMethod = baseline;
  Features = ExtractPicoFeatures (Blob, LineStats);

  NumFeatures = NumFeaturesIn (Features);
  if (NumFeatures > UNLIKELY_NUM_FEAT) {
    FreeFeatureSet(Features);
    return (0);
  }

  ComputeIntFeatures(Features, IntFeatures);
  *FloatFeatures = Features;

  return (NumFeatures);

}  /* GetAdaptiveFeatures */


/**----------------------------------------------------------------------------
              Private Code
----------------------------------------------------------------------------**/
/*---------------------------------------------------------------------------*/
int AdaptableWord(TWERD *Word,
                  const char *BestChoice,
                  const char *BestRawChoice) {
/*
 ** Parameters:
 **   Word           current word
 **   BestChoice     best overall choice for word with context
 **   BestRawChoice  best choice for word without context
 ** Globals: none
 ** Operation: Return TRUE if the specified word is acceptable for
 **   adaptation.
 ** Return: TRUE or FALSE
 ** Exceptions: none
 ** History: Thu May 30 14:25:06 1991, DSJ, Created.
 */
  int BestChoiceLength;

  return (  /* rules that apply in general - simplest to compute first */
    /* EnableLearning && */  /* new rules */
    BestChoice != NULL && BestRawChoice != NULL && Word != NULL &&
    (BestChoiceLength = strlen (BestChoice)) > 0 &&
    BestChoiceLength == NumBlobsIn (Word) &&
    BestChoiceLength <= MAX_ADAPTABLE_WERD_SIZE && (
    EnableNewAdaptRules &&
    CurrentBestChoiceAdjustFactor () <= ADAPTABLE_WERD &&
    AlternativeChoicesWorseThan (ADAPTABLE_WERD) &&
    CurrentBestChoiceIs
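
/*
 * Illustrative sketch only -- not part of adaptmatch.cpp.  It shows how the
 * lifecycle routines above would typically be sequenced by a caller, in the
 * order their doc comments describe: install control knobs, load templates,
 * run pass 1 with learning enabled, run pass 2 with further learning
 * disabled, then report statistics and free the adapted templates.  The
 * function name RunAdaptiveClassifierSketch and the "classify each word"
 * placeholders are hypothetical; only the routines it calls come from this
 * file, and their declarations are assumed to be available via its headers.
 */
#include <stdio.h>

void RunAdaptiveClassifierSketch() {
  InitAdaptiveClassifierVars();      /* install control knobs               */
  InitAdaptiveClassifier();          /* load built-in and adapted templates */

  SettupPass1();                     /* pass 1: learning stays enabled      */
  /* ... classify each word; words that pass AdaptableWord() are adapted ... */

  SettupPass2();                     /* pass 2: further learning disabled   */
  /* ... re-classify using templates adapted during pass 1 ...              */

  PrintAdaptiveStatistics(stdout);   /* dump matcher/learner statistics     */
  ResetAdaptiveClassifier();         /* free the adapted templates          */
}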