Tesseract  3.02
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
intmatcher.cpp
Go to the documentation of this file.
1 /******************************************************************************
2  ** Filename: intmatcher.c
3  ** Purpose: Generic high level classification routines.
4  ** Author: Robert Moss
5  ** History: Wed Feb 13 17:35:28 MST 1991, RWM, Created.
6  ** Mon Mar 11 16:33:02 MST 1991, RWM, Modified to add
7  ** support for adaptive matching.
8  ** (c) Copyright Hewlett-Packard Company, 1988.
9  ** Licensed under the Apache License, Version 2.0 (the "License");
10  ** you may not use this file except in compliance with the License.
11  ** You may obtain a copy of the License at
12  ** http://www.apache.org/licenses/LICENSE-2.0
13  ** Unless required by applicable law or agreed to in writing, software
14  ** distributed under the License is distributed on an "AS IS" BASIS,
15  ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  ** See the License for the specific language governing permissions and
17  ** limitations under the License.
18  ******************************************************************************/
19 
20 // Include automatically generated configuration file if running autoconf.
21 #ifdef HAVE_CONFIG_H
22 #include "config_auto.h"
23 #endif
24 
25 /*----------------------------------------------------------------------------
26  Include Files and Type Defines
27 ----------------------------------------------------------------------------*/
28 #include "intmatcher.h"
29 #include "intproto.h"
30 #include "callcpp.h"
31 #include "scrollview.h"
32 #include "float2int.h"
33 #include "globals.h"
34 #include "helpers.h"
35 #include "classify.h"
36 #include "shapetable.h"
37 #include <math.h>
38 
39 /*----------------------------------------------------------------------------
40  Global Data Definitions and Declarations
41 ----------------------------------------------------------------------------*/
42 // Parameters of the sigmoid used to convert similarity to evidence in the
43 // similarity_evidence_table_ that is used to convert distance metric to an
44 // 8 bit evidence value in the secondary matcher. (See IntMatcher::Init).
// Center of the similarity-to-evidence sigmoid (see the comment above and
// IntegerMatcher::Init).  NOTE(review): the companion constant
// kSEExponentialMultiplier, referenced in Init(), is not visible in this
// capture of the file -- its definition was presumably on the missing line.
const float IntegerMatcher::kSimilarityCenter = 0.0075;
47 
// offset_table[b] is the bit index of the lowest set bit of the byte b
// (e.g. offset_table[0x28] == 3).  offset_table[0] == 255 is a sentinel,
// since a zero byte has no set bit.  Used together with next_table below to
// iterate over the set bits of a pruned proto word
// (see UpdateTablesForFeature).
static const uinT8 offset_table[256] = {
  255, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  6, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  7, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  6, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
  4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0
};
66 
// next_table[b] is b with its lowest set bit cleared
// (e.g. next_table[3] == 0x2, next_table[0x28] == 0x20).  Repeatedly
// applying b = next_table[b], combined with offset_table above, walks all
// set bits of a byte from lowest to highest (see UpdateTablesForFeature).
static const uinT8 next_table[256] = {
  0, 0, 0, 0x2, 0, 0x4, 0x4, 0x6, 0, 0x8, 0x8, 0x0a, 0x08, 0x0c, 0x0c, 0x0e,
  0, 0x10, 0x10, 0x12, 0x10, 0x14, 0x14, 0x16, 0x10, 0x18, 0x18, 0x1a, 0x18,
  0x1c, 0x1c, 0x1e,
  0, 0x20, 0x20, 0x22, 0x20, 0x24, 0x24, 0x26, 0x20, 0x28, 0x28, 0x2a, 0x28,
  0x2c, 0x2c, 0x2e,
  0x20, 0x30, 0x30, 0x32, 0x30, 0x34, 0x34, 0x36, 0x30, 0x38, 0x38, 0x3a,
  0x38, 0x3c, 0x3c, 0x3e,
  0, 0x40, 0x40, 0x42, 0x40, 0x44, 0x44, 0x46, 0x40, 0x48, 0x48, 0x4a, 0x48,
  0x4c, 0x4c, 0x4e,
  0x40, 0x50, 0x50, 0x52, 0x50, 0x54, 0x54, 0x56, 0x50, 0x58, 0x58, 0x5a,
  0x58, 0x5c, 0x5c, 0x5e,
  0x40, 0x60, 0x60, 0x62, 0x60, 0x64, 0x64, 0x66, 0x60, 0x68, 0x68, 0x6a,
  0x68, 0x6c, 0x6c, 0x6e,
  0x60, 0x70, 0x70, 0x72, 0x70, 0x74, 0x74, 0x76, 0x70, 0x78, 0x78, 0x7a,
  0x78, 0x7c, 0x7c, 0x7e,
  0, 0x80, 0x80, 0x82, 0x80, 0x84, 0x84, 0x86, 0x80, 0x88, 0x88, 0x8a, 0x88,
  0x8c, 0x8c, 0x8e,
  0x80, 0x90, 0x90, 0x92, 0x90, 0x94, 0x94, 0x96, 0x90, 0x98, 0x98, 0x9a,
  0x98, 0x9c, 0x9c, 0x9e,
  0x80, 0xa0, 0xa0, 0xa2, 0xa0, 0xa4, 0xa4, 0xa6, 0xa0, 0xa8, 0xa8, 0xaa,
  0xa8, 0xac, 0xac, 0xae,
  0xa0, 0xb0, 0xb0, 0xb2, 0xb0, 0xb4, 0xb4, 0xb6, 0xb0, 0xb8, 0xb8, 0xba,
  0xb8, 0xbc, 0xbc, 0xbe,
  0x80, 0xc0, 0xc0, 0xc2, 0xc0, 0xc4, 0xc4, 0xc6, 0xc0, 0xc8, 0xc8, 0xca,
  0xc8, 0xcc, 0xcc, 0xce,
  0xc0, 0xd0, 0xd0, 0xd2, 0xd0, 0xd4, 0xd4, 0xd6, 0xd0, 0xd8, 0xd8, 0xda,
  0xd8, 0xdc, 0xdc, 0xde,
  0xc0, 0xe0, 0xe0, 0xe2, 0xe0, 0xe4, 0xe4, 0xe6, 0xe0, 0xe8, 0xe8, 0xea,
  0xe8, 0xec, 0xec, 0xee,
  0xe0, 0xf0, 0xf0, 0xf2, 0xf0, 0xf4, 0xf4, 0xf6, 0xf0, 0xf8, 0xf8, 0xfa,
  0xf8, 0xfc, 0xfc, 0xfe
};
100 
101 namespace tesseract {
102 
103 // Encapsulation of the intermediate data and computations made by the class
104 // pruner. The class pruner implements a simple linear classifier on binary
105 // features by heavily quantizing the feature space, and applying
106 // NUM_BITS_PER_CLASS (2)-bit weights to the features. Lack of resolution in
107 // weights is compensated by a non-constant bias that is dependent on the
108 // number of features present.
class ClassPruner {
 public:
  ClassPruner(int max_classes) {
    // The unrolled loop in ComputeScores means that the array sizes need to
    // be rounded up so that the array is big enough to accommodate the extra
    // entries accessed by the unrolling. Each pruner word is of sized
    // BITS_PER_WERD and each entry is NUM_BITS_PER_CLASS, so there are
    // BITS_PER_WERD / NUM_BITS_PER_CLASS entries.
    // See ComputeScores.
    max_classes_ = max_classes;
    // NOTE(review): the continuation line carrying the RoundUp argument
    // list is missing from this capture of the file -- restore from the
    // original source before compiling.
    rounded_classes_ = RoundUp(
    class_count_ = new int[rounded_classes_];
    norm_count_ = new int[rounded_classes_];
    // The sort arrays are filled starting at index 1 (see PruneAndSort),
    // hence the +1 on their sizes.
    sort_key_ = new int[rounded_classes_ + 1];
    sort_index_ = new int[rounded_classes_ + 1];
    for (int i = 0; i < rounded_classes_; i++) {
      class_count_[i] = 0;
    }
    pruning_threshold_ = 0;
    num_features_ = 0;
    num_classes_ = 0;
  }

  // NOTE(review): the destructor's signature line (presumably
  // ~ClassPruner()) is missing from this capture; the deletes below
  // release the arrays allocated in the constructor.
  delete []class_count_;
  delete []norm_count_;
  delete []sort_key_;
  delete []sort_index_;
  }

  // Computes the scores for every class in the character set, by summing the
  // weights for each feature and stores the sums internally in class_count_.
  void ComputeScores(const INT_TEMPLATES_STRUCT* int_templates,
                     int num_features, const INT_FEATURE_STRUCT* features) {
    num_features_ = num_features;
    int num_pruners = int_templates->NumClassPruners;
    for (int f = 0; f < num_features; ++f) {
      const INT_FEATURE_STRUCT* feature = &features[f];
      // Quantize the feature to NUM_CP_BUCKETS*NUM_CP_BUCKETS*NUM_CP_BUCKETS.
      int x = feature->X * NUM_CP_BUCKETS >> 8;
      int y = feature->Y * NUM_CP_BUCKETS >> 8;
      int theta = feature->Theta * NUM_CP_BUCKETS >> 8;
      int class_id = 0;
      // Each CLASS_PRUNER_STRUCT only covers CLASSES_PER_CP(32) classes, so
      // we need a collection of them, indexed by pruner_set.
      for (int pruner_set = 0; pruner_set < num_pruners; ++pruner_set) {
        // Look up quantized feature in a 3-D array, an array of weights for
        // each class.
        const uinT32* pruner_word_ptr =
            int_templates->ClassPruners[pruner_set]->p[x][y][theta];
        for (int word = 0; word < WERDS_PER_CP_VECTOR; ++word) {
          uinT32 pruner_word = *pruner_word_ptr++;
          // This inner loop is unrolled to speed up the ClassPruner.
          // Currently gcc would not unroll it unless it is set to O3
          // level of optimization or -funroll-loops is specified.
          /*
          uinT32 class_mask = (1 << NUM_BITS_PER_CLASS) - 1;
          for (int bit = 0; bit < BITS_PER_WERD/NUM_BITS_PER_CLASS; bit++) {
            class_count_[class_id++] += pruner_word & class_mask;
            pruner_word >>= NUM_BITS_PER_CLASS;
          }
          */
          // 16 explicit extract-and-accumulate steps: one 32-bit pruner word
          // holds BITS_PER_WERD / NUM_BITS_PER_CLASS = 16 2-bit weights.
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
          pruner_word >>= NUM_BITS_PER_CLASS;
          class_count_[class_id++] += pruner_word & CLASS_PRUNER_CLASS_MASK;
        }
      }
    }
  }

  // Adjusts the scores according to the number of expected features. Used
  // in lieu of a constant bias, this penalizes classes that expect more
  // features than there are present. Thus an actual c will score higher for c
  // than e, even though almost all the features match e as well as c, because
  // e expects more features to be present.
  void AdjustForExpectedNumFeatures(const uinT16* expected_num_features,
                                    int cutoff_strength) {
    for (int class_id = 0; class_id < max_classes_; ++class_id) {
      if (num_features_ < expected_num_features[class_id]) {
        int deficit = expected_num_features[class_id] - num_features_;
        // Proportional penalty: the larger the deficit relative to
        // num_features_ * cutoff_strength, the larger the score reduction.
        class_count_[class_id] -= class_count_[class_id] * deficit /
          (num_features_ * cutoff_strength + deficit);
      }
    }
  }

  // Zeros the scores for classes disabled in the unicharset.
  // Implements the black-list to recognize a subset of the character set.
  void DisableDisabledClasses(const UNICHARSET& unicharset) {
    for (int class_id = 0; class_id < max_classes_; ++class_id) {
      if (!unicharset.get_enabled(class_id))
        class_count_[class_id] = 0;  // This char is disabled!
    }
  }

  // Zeros the scores of fragments.
  void DisableFragments(const UNICHARSET& unicharset) {
    for (int class_id = 0; class_id < max_classes_; ++class_id) {
      // Do not include character fragments in the class pruner
      // results if disable_character_fragments is true.
      if (unicharset.get_fragment(class_id)) {
        class_count_[class_id] = 0;
      }
    }
  }

  // Normalizes the counts for xheight, putting the normalized result in
  // norm_count_. Applies a simple subtractive penalty for incorrect vertical
  // position provided by the normalization_factors array, indexed by
  // character class, and scaled by the norm_multiplier.
  void NormalizeForXheight(int norm_multiplier,
                           const uinT8* normalization_factors) {
    for (int class_id = 0; class_id < max_classes_; class_id++) {
      norm_count_[class_id] = class_count_[class_id] -
        ((norm_multiplier * normalization_factors[class_id]) >> 8);
    }
  }

  // The nop normalization copies the class_count_ array to norm_count_.
  // NOTE(review): the signature line of this method (presumably
  // void NoNormalization()) is missing from this capture of the file.
  for (int class_id = 0; class_id < max_classes_; class_id++) {
    norm_count_[class_id] = class_count_[class_id];
  }
  }

  // Prunes the classes using <the maximum count> * pruning_factor/256 as a
  // threshold for keeping classes. If max_of_non_fragments, then ignore
  // fragments in computing the maximum count.
  void PruneAndSort(int pruning_factor, bool max_of_non_fragments,
                    const UNICHARSET& unicharset) {
    int max_count = 0;
    for (int c = 0; c < max_classes_; ++c) {
      if (norm_count_[c] > max_count &&
          // This additional check is added in order to ensure that
          // the classifier will return at least one non-fragmented
          // character match.
          // TODO(daria): verify that this helps accuracy and does not
          // hurt performance.
          (!max_of_non_fragments || !unicharset.get_fragment(c))) {
        max_count = norm_count_[c];
      }
    }
    // Prune Classes.
    pruning_threshold_ = (max_count * pruning_factor) >> 8;
    // Select Classes.
    if (pruning_threshold_ < 1)
      pruning_threshold_ = 1;
    num_classes_ = 0;
    // Survivors are stored 1-based in sort_key_/sort_index_ for HeapSort.
    for (int class_id = 0; class_id < max_classes_; class_id++) {
      if (norm_count_[class_id] >= pruning_threshold_) {
        ++num_classes_;
        sort_index_[num_classes_] = class_id;
        sort_key_[num_classes_] = norm_count_[class_id];
      }
    }

    // Sort Classes using Heapsort Algorithm.
    if (num_classes_ > 1)
      HeapSort(num_classes_, sort_key_, sort_index_);
  }

  // Prints debug info on the class pruner matches for the pruned classes only.
  void DebugMatch(const Classify& classify,
                  const INT_TEMPLATES_STRUCT* int_templates,
                  const INT_FEATURE_STRUCT* features) const {
    int num_pruners = int_templates->NumClassPruners;
    int max_num_classes = int_templates->NumClasses;
    for (int f = 0; f < num_features_; ++f) {
      const INT_FEATURE_STRUCT* feature = &features[f];
      tprintf("F=%3d(%d,%d,%d),", f, feature->X, feature->Y, feature->Theta);
      // Quantize the feature to NUM_CP_BUCKETS*NUM_CP_BUCKETS*NUM_CP_BUCKETS.
      int x = feature->X * NUM_CP_BUCKETS >> 8;
      int y = feature->Y * NUM_CP_BUCKETS >> 8;
      int theta = feature->Theta * NUM_CP_BUCKETS >> 8;
      int class_id = 0;
      for (int pruner_set = 0; pruner_set < num_pruners; ++pruner_set) {
        // Look up quantized feature in a 3-D array, an array of weights for
        // each class.
        const uinT32* pruner_word_ptr =
            int_templates->ClassPruners[pruner_set]->p[x][y][theta];
        for (int word = 0; word < WERDS_PER_CP_VECTOR; ++word) {
          uinT32 pruner_word = *pruner_word_ptr++;
          // Print only classes that survived pruning.
          for (int word_class = 0; word_class < 16 &&
               class_id < max_num_classes; ++word_class, ++class_id) {
            if (norm_count_[class_id] >= pruning_threshold_) {
              tprintf(" %s=%d,",
                      classify.ClassIDToDebugStr(int_templates,
                                                 class_id, 0).string(),
                      pruner_word & CLASS_PRUNER_CLASS_MASK);
            }
            pruner_word >>= NUM_BITS_PER_CLASS;
          }
        }
        tprintf("\n");
      }
    }
  }

  // Prints a summary of the pruner result.
  void SummarizeResult(const Classify& classify,
                       const INT_TEMPLATES_STRUCT* int_templates,
                       const uinT16* expected_num_features,
                       int norm_multiplier,
                       const uinT8* normalization_factors) const {
    tprintf("CP:%d classes, %d features:\n", num_classes_, num_features_);
    // sort arrays are 1-based and sorted ascending, so index
    // num_classes_ - i walks from the best-scoring class downwards.
    for (int i = 0; i < num_classes_; ++i) {
      int class_id = sort_index_[num_classes_ - i];
      STRING class_string = classify.ClassIDToDebugStr(int_templates,
                                                       class_id, 0);
      tprintf("%s:Initial=%d, E=%d, Xht-adj=%d, N=%d, Rat=%.2f\n",
              class_string.string(),
              class_count_[class_id],
              expected_num_features[class_id],
              (norm_multiplier * normalization_factors[class_id]) >> 8,
              sort_key_[num_classes_ - i],
              100.0 - 100.0 * sort_key_[num_classes_ - i] /
                (CLASS_PRUNER_CLASS_MASK * num_features_));
    }
  }

  // Copies the pruned, sorted classes into the output results and returns
  // the number of classes.
  int SetupResults(CP_RESULT_STRUCT* results) const {
    for (int c = 0; c < num_classes_; ++c) {
      results[c].Class = sort_index_[num_classes_ - c];
      // Rating is the fraction of the maximum possible score NOT achieved:
      // 0 = perfect match, 1 = no evidence at all.
      results[c].Rating = 1.0 - sort_key_[num_classes_ - c] /
        (static_cast<float>(CLASS_PRUNER_CLASS_MASK) * num_features_);
    }
    return num_classes_;
  }

 private:
  // Array[rounded_classes_] of initial counts for each class.
  int *class_count_;
  // Array[rounded_classes_] of modified counts for each class after normalizing
  // for expected number of features, disabled classes, fragments, and xheights.
  int *norm_count_;
  // Array[rounded_classes_ +1] of pruned counts that gets sorted
  int *sort_key_;
  // Array[rounded_classes_ +1] of classes corresponding to sort_key_.
  int *sort_index_;
  // Number of classes in this class pruner.
  int max_classes_;
  // Rounded up number of classes used for array sizes.
  int rounded_classes_;
  // Threshold count applied to prune classes.
  int pruning_threshold_;
  // The number of features used to compute the scores.
  int num_features_;
  // Final number of pruned classes.
  int num_classes_;
};
390 
391 /*----------------------------------------------------------------------------
392  Public Code
393 ----------------------------------------------------------------------------*/
394 /*---------------------------------------------------------------------------*/
395 // Runs the class pruner from int_templates on the given features, returning
396 // the number of classes output in results.
397 // int_templates Class pruner tables
398 // num_features Number of features in blob
399 // features Array of features
400 // normalization_factors Array of fudge factors from blob
401 // normalization process (by CLASS_INDEX)
402 // expected_num_features Array of expected number of features
403 // for each class (by CLASS_INDEX)
404 // results Sorted Array of pruned classes. Must be an array
405 // of size at least int_templates->NumClasses.
// NOTE(review): the opening signature line of this function is missing from
// this capture of the file.  From the body (use of shape_table_ and
// classify_debug_level) it is a Classify member returning the number of
// pruned classes -- confirm the exact signature against the original source.
                 int num_features,
                 const INT_FEATURE_STRUCT* features,
                 const uinT8* normalization_factors,
                 const uinT16* expected_num_features,
                 CP_RESULT_STRUCT* results) {
/*
 ** Operation:
 ** Prunes the classes using a modified fast match table.
 ** Returns a sorted list of classes along with the number
 ** of pruned classes in that list.
 ** Return: Number of pruned classes.
 ** Exceptions: none
 ** History: Tue Feb 19 10:24:24 MST 1991, RWM, Created.
 */
  ClassPruner pruner(int_templates->NumClasses);
  // Compute initial match scores for all classes.
  pruner.ComputeScores(int_templates, num_features, features);
  // Adjust match scores for number of expected features.
  // NOTE(review): the closing argument line of this call is missing from
  // this capture.
  pruner.AdjustForExpectedNumFeatures(expected_num_features,
  // Apply disabled classes in unicharset - only works without a shape_table.
  // NOTE(review): the statement controlled by this if is missing from this
  // capture.
  if (shape_table_ == NULL)
  // If fragments are disabled, remove them, also only without a shape table.

  // If we have good x-heights, apply the given normalization factors.
  if (normalization_factors != NULL) {
    // NOTE(review): the line opening this call's argument list is missing
    // from this capture.
                         normalization_factors);
  } else {
    pruner.NoNormalization();
  }
  // Do the actual pruning and sort the short-list.

  if (classify_debug_level > 2) {
    pruner.DebugMatch(*this, int_templates, features);
  }
  if (classify_debug_level > 1) {
    // NOTE(review): one argument line of this call is missing from this
    // capture.
    pruner.SummarizeResult(*this, int_templates, expected_num_features,
                           normalization_factors);
  }
  // Convert to the expected output format.
  return pruner.SetupResults(results);
}
456 
457 } // namespace tesseract
458 
459 /*---------------------------------------------------------------------------*/
460 void IntegerMatcher::Match(INT_CLASS ClassTemplate,
461  BIT_VECTOR ProtoMask,
462  BIT_VECTOR ConfigMask,
463  inT16 NumFeatures,
464  const INT_FEATURE_STRUCT* Features,
465  INT_RESULT Result,
466  int AdaptFeatureThreshold,
467  int Debug,
468  bool SeparateDebugWindows) {
469 /*
470  ** Parameters:
471  ** ClassTemplate Prototypes & tables for a class
472  ** BlobLength Length of unormalized blob
473  ** NumFeatures Number of features in blob
474  ** Features Array of features
475  ** NormalizationFactor Fudge factor from blob
476  ** normalization process
477  ** Result Class rating & configuration:
478  ** (0.0 -> 1.0), 0=good, 1=bad
479  ** Debug Debugger flag: 1=debugger on
480  ** Globals:
481  ** local_matcher_multiplier_ Normalization factor multiplier
482  ** Operation:
483  ** IntegerMatcher returns the best configuration and rating
484  ** for a single class. The class matched against is determined
485  ** by the uniqueness of the ClassTemplate parameter. The
486  ** best rating and its associated configuration are returned.
487  ** Return:
488  ** Exceptions: none
489  ** History: Tue Feb 19 16:36:23 MST 1991, RWM, Created.
490  */
491  ScratchEvidence *tables = new ScratchEvidence();
492  int Feature;
493  int BestMatch;
494 
495  if (MatchDebuggingOn (Debug))
496  cprintf ("Integer Matcher -------------------------------------------\n");
497 
498  tables->Clear(ClassTemplate);
499  Result->FeatureMisses = 0;
500 
501  for (Feature = 0; Feature < NumFeatures; Feature++) {
502  int csum = UpdateTablesForFeature(ClassTemplate, ProtoMask, ConfigMask,
503  Feature, &Features[Feature],
504  tables, Debug);
505  // Count features that were missed over all configs.
506  if (csum == 0)
507  Result->FeatureMisses++;
508  }
509 
510 #ifndef GRAPHICS_DISABLED
511  if (PrintProtoMatchesOn(Debug) || PrintMatchSummaryOn(Debug)) {
512  DebugFeatureProtoError(ClassTemplate, ProtoMask, ConfigMask, *tables,
513  NumFeatures, Debug);
514  }
515 
516  if (DisplayProtoMatchesOn(Debug)) {
517  DisplayProtoDebugInfo(ClassTemplate, ProtoMask, ConfigMask,
518  *tables, SeparateDebugWindows);
519  }
520 
521  if (DisplayFeatureMatchesOn(Debug)) {
522  DisplayFeatureDebugInfo(ClassTemplate, ProtoMask, ConfigMask, NumFeatures,
523  Features, AdaptFeatureThreshold, Debug,
524  SeparateDebugWindows);
525  }
526 #endif
527 
528  tables->UpdateSumOfProtoEvidences(ClassTemplate, ConfigMask, NumFeatures);
529  tables->NormalizeSums(ClassTemplate, NumFeatures, NumFeatures);
530 
531  BestMatch = FindBestMatch(ClassTemplate, *tables, Result);
532 
533 #ifndef GRAPHICS_DISABLED
534  if (PrintMatchSummaryOn(Debug))
535  DebugBestMatch(BestMatch, Result);
536 
537  if (MatchDebuggingOn(Debug))
538  cprintf("Match Complete --------------------------------------------\n");
539 #endif
540 
541  delete tables;
542 }
543 
544 
545 /*---------------------------------------------------------------------------*/
// NOTE(review): the opening line of this definition (presumably
// int IntegerMatcher::FindGoodProtos() is missing from this capture of the
// file -- confirm against the original source.
                                   INT_CLASS ClassTemplate,
                                   BIT_VECTOR ProtoMask,
                                   BIT_VECTOR ConfigMask,
                                   uinT16 BlobLength,
                                   inT16 NumFeatures,
                                   INT_FEATURE_ARRAY Features,
                                   PROTO_ID *ProtoArray,
                                   int AdaptProtoThreshold,
                                   int Debug) {
/*
 ** Parameters:
 ** ClassTemplate Prototypes & tables for a class
 ** ProtoMask AND Mask for proto word
 ** ConfigMask AND Mask for config word
 ** BlobLength Length of unormalized blob
 ** NumFeatures Number of features in blob
 ** Features Array of features
 ** ProtoArray Array of good protos
 ** AdaptProtoThreshold Threshold for good protos
 ** Debug Debugger flag: 1=debugger on
 ** Globals:
 ** local_matcher_multiplier_ Normalization factor multiplier
 ** Operation:
 ** FindGoodProtos finds all protos whose normalized proto-evidence
 ** exceed classify_adapt_proto_thresh. The list is ordered by increasing
 ** proto id number.
 ** Return:
 ** Number of good protos in ProtoArray.
 ** Exceptions: none
 ** History: Tue Mar 12 17:09:26 MST 1991, RWM, Created
 */
  ScratchEvidence *tables = new ScratchEvidence();
  int NumGoodProtos = 0;

  /* DEBUG opening heading */
  if (MatchDebuggingOn (Debug))
    cprintf
      ("Find Good Protos -------------------------------------------\n");

  tables->Clear(ClassTemplate);

  // Accumulate proto evidence over all input features.
  for (int Feature = 0; Feature < NumFeatures; Feature++)
    UpdateTablesForFeature(
        ClassTemplate, ProtoMask, ConfigMask, Feature, &(Features[Feature]),
        tables, Debug);

#ifndef GRAPHICS_DISABLED
  if (PrintProtoMatchesOn (Debug) || PrintMatchSummaryOn (Debug))
    DebugFeatureProtoError(ClassTemplate, ProtoMask, ConfigMask, *tables,
                           NumFeatures, Debug);
#endif

  /* Average Proto Evidences & Find Good Protos */
  for (int proto = 0; proto < ClassTemplate->NumProtos; proto++) {
    /* Compute Average for Actual Proto */
    int Temp = 0;
    for (int i = 0; i < ClassTemplate->ProtoLengths[proto]; i++)
      Temp += tables->proto_evidence_[proto][i];

    // NOTE(review): divides by ProtoLengths[proto]; assumes every proto
    // has a non-zero length -- confirm invariant in intproto.
    Temp /= ClassTemplate->ProtoLengths[proto];

    /* Find Good Protos */
    if (Temp >= AdaptProtoThreshold) {
      *ProtoArray = proto;
      ProtoArray++;
      NumGoodProtos++;
    }
  }

  if (MatchDebuggingOn (Debug))
    cprintf ("Match Complete --------------------------------------------\n");
  delete tables;

  return NumGoodProtos;
}
622 
623 
624 /*---------------------------------------------------------------------------*/
626  INT_CLASS ClassTemplate,
627  BIT_VECTOR ProtoMask,
628  BIT_VECTOR ConfigMask,
629  uinT16 BlobLength,
630  inT16 NumFeatures,
631  INT_FEATURE_ARRAY Features,
632  FEATURE_ID *FeatureArray,
633  int AdaptFeatureThreshold,
634  int Debug) {
635 /*
636  ** Parameters:
637  ** ClassTemplate Prototypes & tables for a class
638  ** ProtoMask AND Mask for proto word
639  ** ConfigMask AND Mask for config word
640  ** BlobLength Length of unormalized blob
641  ** NumFeatures Number of features in blob
642  ** Features Array of features
643  ** FeatureArray Array of bad features
644  ** AdaptFeatureThreshold Threshold for bad features
645  ** Debug Debugger flag: 1=debugger on
646  ** Operation:
647  ** FindBadFeatures finds all features with maximum feature-evidence <
648  ** AdaptFeatureThresh. The list is ordered by increasing feature number.
649  ** Return:
650  ** Number of bad features in FeatureArray.
651  ** History: Tue Mar 12 17:09:26 MST 1991, RWM, Created
652  */
653  ScratchEvidence *tables = new ScratchEvidence();
654  int NumBadFeatures = 0;
655 
656  /* DEBUG opening heading */
657  if (MatchDebuggingOn(Debug))
658  cprintf("Find Bad Features -------------------------------------------\n");
659 
660  tables->Clear(ClassTemplate);
661 
662  for (int Feature = 0; Feature < NumFeatures; Feature++) {
663  UpdateTablesForFeature(
664  ClassTemplate, ProtoMask, ConfigMask, Feature, &Features[Feature],
665  tables, Debug);
666 
667  /* Find Best Evidence for Current Feature */
668  int best = 0;
669  for (int i = 0; i < ClassTemplate->NumConfigs; i++)
670  if (tables->feature_evidence_[i] > best)
671  best = tables->feature_evidence_[i];
672 
673  /* Find Bad Features */
674  if (best < AdaptFeatureThreshold) {
675  *FeatureArray = Feature;
676  FeatureArray++;
677  NumBadFeatures++;
678  }
679  }
680 
681 #ifndef GRAPHICS_DISABLED
682  if (PrintProtoMatchesOn(Debug) || PrintMatchSummaryOn(Debug))
683  DebugFeatureProtoError(ClassTemplate, ProtoMask, ConfigMask, *tables,
684  NumFeatures, Debug);
685 #endif
686 
687  if (MatchDebuggingOn(Debug))
688  cprintf("Match Complete --------------------------------------------\n");
689 
690  delete tables;
691  return NumBadFeatures;
692 }
693 
694 
695 /*---------------------------------------------------------------------------*/
696 void IntegerMatcher::Init(tesseract::IntParam *classify_debug_level,
697  int classify_integer_matcher_multiplier) {
698  classify_debug_level_ = classify_debug_level;
699 
700  /* Set default mode of operation of IntegerMatcher */
701  SetCharNormMatch(classify_integer_matcher_multiplier);
702 
703  /* Initialize table for evidence to similarity lookup */
704  for (int i = 0; i < SE_TABLE_SIZE; i++) {
705  uinT32 IntSimilarity = i << (27 - SE_TABLE_BITS);
706  double Similarity = ((double) IntSimilarity) / 65536.0 / 65536.0;
707  double evidence = Similarity / kSimilarityCenter;
708  evidence = 255.0 / (evidence * evidence + 1.0);
709 
710  if (kSEExponentialMultiplier > 0.0) {
711  double scale = 1.0 - exp(-kSEExponentialMultiplier) *
712  exp(kSEExponentialMultiplier * ((double) i / SE_TABLE_SIZE));
713  evidence *= ClipToRange(scale, 0.0, 1.0);
714  }
715 
716  similarity_evidence_table_[i] = (uinT8) (evidence + 0.5);
717  }
718 
719  /* Initialize evidence computation variables */
720  evidence_table_mask_ =
721  ((1 << kEvidenceTableBits) - 1) << (9 - kEvidenceTableBits);
722  mult_trunc_shift_bits_ = (14 - kIntEvidenceTruncBits);
723  table_trunc_shift_bits_ = (27 - SE_TABLE_BITS - (mult_trunc_shift_bits_ << 1));
724  evidence_mult_mask_ = ((1 << kIntEvidenceTruncBits) - 1);
725 }
726 
727 /*--------------------------------------------------------------------------*/
  // NOTE(review): the signature line of this function (presumably
  // void IntegerMatcher::SetBaseLineMatch()) is missing from this capture.
  // Resets the normalization-factor multiplier to 0.
  local_matcher_multiplier_ = 0;
}
731 
732 
733 /*--------------------------------------------------------------------------*/
// Stores the given multiplier in local_matcher_multiplier_, the
// "normalization factor multiplier" referenced by the matching routines
// above (char-norm matching mode).
void IntegerMatcher::SetCharNormMatch(int integer_matcher_multiplier) {
  local_matcher_multiplier_ = integer_matcher_multiplier;
}
737 
738 
742 void ScratchEvidence::Clear(const INT_CLASS class_template) {
743  memset(sum_feature_evidence_, 0,
744  class_template->NumConfigs * sizeof(sum_feature_evidence_[0]));
745  memset(proto_evidence_, 0,
746  class_template->NumProtos * sizeof(proto_evidence_[0]));
747 }
748 
  // NOTE(review): the signature line of this function (presumably
  // void ScratchEvidence::ClearFeatureEvidence(const INT_CLASS class_template))
  // is missing from this capture.  Zeros the per-config evidence
  // accumulated for the current feature.
  memset(feature_evidence_, 0,
         class_template->NumConfigs * sizeof(feature_evidence_[0]));
}
753 
754 
755 
756 /*---------------------------------------------------------------------------*/
757 void IMDebugConfiguration(int FeatureNum,
758  uinT16 ActualProtoNum,
759  uinT8 Evidence,
760  BIT_VECTOR ConfigMask,
761  uinT32 ConfigWord) {
762 /*
763  ** Parameters:
764  ** Globals:
765  ** Operation:
766  ** Print debugging information for Configuations
767  ** Return:
768  ** Exceptions: none
769  ** History: Wed Feb 27 14:12:28 MST 1991, RWM, Created.
770  */
771  cprintf ("F = %3d, P = %3d, E = %3d, Configs = ",
772  FeatureNum, (int) ActualProtoNum, (int) Evidence);
773  while (ConfigWord) {
774  if (ConfigWord & 1)
775  cprintf ("1");
776  else
777  cprintf ("0");
778  ConfigWord >>= 1;
779  }
780  cprintf ("\n");
781 }
782 
783 
784 /*---------------------------------------------------------------------------*/
785 void IMDebugConfigurationSum(int FeatureNum,
786  uinT8 *FeatureEvidence,
787  inT32 ConfigCount) {
788 /*
789  ** Parameters:
790  ** Globals:
791  ** Operation:
792  ** Print debugging information for Configuations
793  ** Return:
794  ** Exceptions: none
795  ** History: Wed Feb 27 14:12:28 MST 1991, RWM, Created.
796  */
797  cprintf("F=%3d, C=", FeatureNum);
798  for (int ConfigNum = 0; ConfigNum < ConfigCount; ConfigNum++) {
799  cprintf("%4d", FeatureEvidence[ConfigNum]);
800  }
801  cprintf("\n");
802 }
803 
804 
805 
806 /*---------------------------------------------------------------------------*/
807 int IntegerMatcher::UpdateTablesForFeature(
808  INT_CLASS ClassTemplate,
809  BIT_VECTOR ProtoMask,
810  BIT_VECTOR ConfigMask,
811  int FeatureNum,
812  const INT_FEATURE_STRUCT* Feature,
813  ScratchEvidence *tables,
814  int Debug) {
815 /*
816  ** Parameters:
817  ** ClassTemplate Prototypes & tables for a class
818  ** FeatureNum Current feature number (for DEBUG only)
819  ** Feature Pointer to a feature struct
820  ** tables Evidence tables
821  ** Debug Debugger flag: 1=debugger on
822  ** Operation:
823  ** For the given feature: prune protos, compute evidence,
824  ** update Feature Evidence, Proto Evidence, and Sum of Feature
825  ** Evidence tables.
826  ** Return:
827  */
// ProtoMask/ConfigMask select which protos/configs may receive evidence.
// Returns the sum of this feature's evidence over all configurations
// (accumulated in the final loop below).
828  register uinT32 ConfigWord;
829  register uinT32 ProtoWord;
830  register uinT32 ProtoNum;
831  register uinT32 ActualProtoNum;
832  uinT8 proto_byte;
833  inT32 proto_word_offset;
834  inT32 proto_offset;
835  uinT8 config_byte;
836  inT32 config_offset;
837  PROTO_SET ProtoSet;
838  uinT32 *ProtoPrunerPtr;
839  INT_PROTO Proto;
840  int ProtoSetIndex;
841  uinT8 Evidence;
842  uinT32 XFeatureAddress;
843  uinT32 YFeatureAddress;
844  uinT32 ThetaFeatureAddress;
845  register uinT8 *UINT8Pointer;
846  register int ProtoIndex;
847  uinT8 Temp;
848  register int *IntPointer;
849  int ConfigNum;
850  register inT32 M3;
851  register inT32 A3;
852  register uinT32 A4;
853 
854  tables->ClearFeatureEvidence(ClassTemplate);
855 
856  /* Precompute Feature Address offset for Proto Pruning */
// Each coordinate is quantized to a pruner bucket (>> 2); the << 1 turns
// the bucket index into a word offset into the pruner table (presumably
// two 32-bit words of proto bits per bucket -- verify against intproto.h).
857  XFeatureAddress = ((Feature->X >> 2) << 1);
858  YFeatureAddress = (NUM_PP_BUCKETS << 1) + ((Feature->Y >> 2) << 1);
859  ThetaFeatureAddress = (NUM_PP_BUCKETS << 2) + ((Feature->Theta >> 2) << 1);
860 
// Walk all proto sets; each inner iteration handles one 32-bit pruner
// word (half a proto set), advancing ProtoMask one word in step.
861  for (ProtoSetIndex = 0, ActualProtoNum = 0;
862  ProtoSetIndex < ClassTemplate->NumProtoSets; ProtoSetIndex++) {
863  ProtoSet = ClassTemplate->ProtoSets[ProtoSetIndex];
864  ProtoPrunerPtr = (uinT32 *) ((*ProtoSet).ProtoPruner);
865  for (ProtoNum = 0; ProtoNum < PROTOS_PER_PROTO_SET;
866  ProtoNum += (PROTOS_PER_PROTO_SET >> 1), ActualProtoNum +=
867  (PROTOS_PER_PROTO_SET >> 1), ProtoMask++, ProtoPrunerPtr++) {
868  /* Prune Protos of current Proto Set */
// AND the X, Y and Theta pruner words with the caller's proto mask:
// bits remaining set are protos that pass the coarse prune.
869  ProtoWord = *(ProtoPrunerPtr + XFeatureAddress);
870  ProtoWord &= *(ProtoPrunerPtr + YFeatureAddress);
871  ProtoWord &= *(ProtoPrunerPtr + ThetaFeatureAddress);
872  ProtoWord &= *ProtoMask;
873 
874  if (ProtoWord != 0) {
875  proto_byte = ProtoWord & 0xff;
876  ProtoWord >>= 8;
877  proto_word_offset = 0;
// Scan the set bits one byte at a time: offset_table gives the index of
// the lowest set bit in the byte, next_table clears that bit.
878  while (ProtoWord != 0 || proto_byte != 0) {
879  while (proto_byte == 0) {
880  proto_byte = ProtoWord & 0xff;
881  ProtoWord >>= 8;
882  proto_word_offset += 8;
883  }
884  proto_offset = offset_table[proto_byte] + proto_word_offset;
885  proto_byte = next_table[proto_byte];
886  Proto = &(ProtoSet->Protos[ProtoNum + proto_offset]);
887  ConfigWord = Proto->Configs[0];
// A3: fixed-point distance-like measure of the feature position from the
// proto line (A*x - B*y + C form); M3: scaled angular difference.
// The inT8 cast makes the angle difference wrap to [-128, 127].
888  A3 = (((Proto->A * (Feature->X - 128)) << 1)
889  - (Proto->B * (Feature->Y - 128)) + (Proto->C << 9));
890  M3 =
891  (((inT8) (Feature->Theta - Proto->Angle)) * kIntThetaFudge) << 1;
892 
// ~x is used as a cheap absolute value for negative x (off by one from
// -x, which is acceptable here since the values are then truncated).
893  if (A3 < 0)
894  A3 = ~A3;
895  if (M3 < 0)
896  M3 = ~M3;
// Truncate and clamp both terms so the squared sum fits the evidence
// lookup table.
897  A3 >>= mult_trunc_shift_bits_;
898  M3 >>= mult_trunc_shift_bits_;
899  if (A3 > evidence_mult_mask_)
900  A3 = evidence_mult_mask_;
901  if (M3 > evidence_mult_mask_)
902  M3 = evidence_mult_mask_;
903 
// Squared distance indexes the precomputed similarity table; anything
// beyond the table range counts as zero evidence.
904  A4 = (A3 * A3) + (M3 * M3);
905  A4 >>= table_trunc_shift_bits_;
906  if (A4 > evidence_table_mask_)
907  Evidence = 0;
908  else
909  Evidence = similarity_evidence_table_[A4];
910 
911  if (PrintFeatureMatchesOn (Debug))
912  IMDebugConfiguration (FeatureNum,
913  ActualProtoNum + proto_offset,
914  Evidence, ConfigMask, ConfigWord);
915 
// Restrict the proto's configs to those the caller enabled.
916  ConfigWord &= *ConfigMask;
917 
// Record the max evidence per config. The -8 start offset pairs with the
// += 8 in the byte-scan loop below, so UINT8Pointer tracks the byte base.
918  UINT8Pointer = tables->feature_evidence_ - 8;
919  config_byte = 0;
920  while (ConfigWord != 0 || config_byte != 0) {
921  while (config_byte == 0) {
922  config_byte = ConfigWord & 0xff;
923  ConfigWord >>= 8;
924  UINT8Pointer += 8;
925  }
926  config_offset = offset_table[config_byte];
927  config_byte = next_table[config_byte];
928  if (Evidence > UINT8Pointer[config_offset])
929  UINT8Pointer[config_offset] = Evidence;
930  }
931 
// Insert Evidence into the proto's evidence list, which is kept sorted
// in descending order; smaller values shift down, and the loop stops
// early once the carried value reaches zero.
932  UINT8Pointer =
933  &(tables->proto_evidence_[ActualProtoNum + proto_offset][0]);
934  for (ProtoIndex =
935  ClassTemplate->ProtoLengths[ActualProtoNum + proto_offset];
936  ProtoIndex > 0; ProtoIndex--, UINT8Pointer++) {
937  if (Evidence > *UINT8Pointer) {
938  Temp = *UINT8Pointer;
939  *UINT8Pointer = Evidence;
940  Evidence = Temp;
941  }
942  else if (Evidence == 0)
943  break;
944  }
945  }
946  }
947  }
948  }
949 
950  if (PrintFeatureMatchesOn(Debug)) {
951  IMDebugConfigurationSum(FeatureNum, tables->feature_evidence_,
952  ClassTemplate->NumConfigs);
953  }
954 
// Fold this feature's per-config evidence into the running sums and
// return the total over all configs.
955  IntPointer = tables->sum_feature_evidence_;
956  UINT8Pointer = tables->feature_evidence_;
957  int SumOverConfigs = 0;
958  for (ConfigNum = ClassTemplate->NumConfigs; ConfigNum > 0; ConfigNum--) {
959  int evidence = *UINT8Pointer++;
960  SumOverConfigs += evidence;
961  *IntPointer++ += evidence;
962  }
963  return SumOverConfigs;
964 }
965 
966 
967 /*---------------------------------------------------------------------------*/
968 #ifndef GRAPHICS_DISABLED
969 void IntegerMatcher::DebugFeatureProtoError(
970  INT_CLASS ClassTemplate,
971  BIT_VECTOR ProtoMask,
972  BIT_VECTOR ConfigMask,
973  const ScratchEvidence& tables,
974  inT16 NumFeatures,
975  int Debug) {
976 /*
977  ** Parameters:
978  ** Globals:
979  ** Operation:
980  ** Print debugging information for Configurations
981  ** Return:
982  ** Exceptions: none
983  ** History: Wed Feb 27 14:12:28 MST 1991, RWM, Created.
984  */
985  FLOAT32 ProtoConfigs[MAX_NUM_CONFIGS];
986  int ConfigNum;
987  uinT32 ConfigWord;
988  int ProtoSetIndex;
989  uinT16 ProtoNum;
990  uinT8 ProtoWordNum;
991  PROTO_SET ProtoSet;
992  uinT16 ActualProtoNum;
993 
// Dump the config mask bits and the per-config feature error as a
// percentage: 256 per feature is the maximum possible evidence, so the
// error is 1 - sum / (NumFeatures * 256).
994  if (PrintMatchSummaryOn(Debug)) {
995  cprintf("Configuration Mask:\n");
996  for (ConfigNum = 0; ConfigNum < ClassTemplate->NumConfigs; ConfigNum++)
997  cprintf("%1d", (((*ConfigMask) >> ConfigNum) & 1));
998  cprintf("\n");
999 
1000  cprintf("Feature Error for Configurations:\n");
1001  for (ConfigNum = 0; ConfigNum < ClassTemplate->NumConfigs; ConfigNum++) {
1002  cprintf(
1003  " %5.1f",
1004  100.0 * (1.0 -
1005  (FLOAT32) tables.sum_feature_evidence_[ConfigNum]
1006  / NumFeatures / 256.0));
1007  }
1008  cprintf("\n\n\n");
1009  }
1010 
// Dump the proto mask, one line per 32-bit pruner word (half proto set).
1011  if (PrintMatchSummaryOn (Debug)) {
1012  cprintf ("Proto Mask:\n");
1013  for (ProtoSetIndex = 0; ProtoSetIndex < ClassTemplate->NumProtoSets;
1014  ProtoSetIndex++) {
1015  ActualProtoNum = (ProtoSetIndex * PROTOS_PER_PROTO_SET);
1016  for (ProtoWordNum = 0; ProtoWordNum < 2;
1017  ProtoWordNum++, ProtoMask++) {
1018  ActualProtoNum = (ProtoSetIndex * PROTOS_PER_PROTO_SET);
1019  for (ProtoNum = 0;
1020  ((ProtoNum < (PROTOS_PER_PROTO_SET >> 1))
1021  && (ActualProtoNum < ClassTemplate->NumProtos));
1022  ProtoNum++, ActualProtoNum++)
1023  cprintf ("%1d", (((*ProtoMask) >> ProtoNum) & 1));
1024  cprintf ("\n");
1025  }
1026  }
1027  cprintf ("\n");
1028  }
1029 
1030  for (int i = 0; i < ClassTemplate->NumConfigs; i++)
1031  ProtoConfigs[i] = 0;
1032 
// Print each proto's evidence list and its average, and accumulate the
// proto totals into ProtoConfigs for every config the proto belongs to.
1033  if (PrintProtoMatchesOn (Debug)) {
1034  cprintf ("Proto Evidence:\n");
1035  for (ProtoSetIndex = 0; ProtoSetIndex < ClassTemplate->NumProtoSets;
1036  ProtoSetIndex++) {
1037  ProtoSet = ClassTemplate->ProtoSets[ProtoSetIndex];
1038  ActualProtoNum = (ProtoSetIndex * PROTOS_PER_PROTO_SET);
1039  for (ProtoNum = 0;
1040  ((ProtoNum < PROTOS_PER_PROTO_SET) &&
1041  (ActualProtoNum < ClassTemplate->NumProtos));
1042  ProtoNum++, ActualProtoNum++) {
1043  cprintf ("P %3d =", ActualProtoNum);
1044  int temp = 0;
1045  for (int j = 0; j < ClassTemplate->ProtoLengths[ActualProtoNum]; j++) {
1046  uinT8 data = tables.proto_evidence_[ActualProtoNum][j];
1047  cprintf(" %d", data);
1048  temp += data;
1049  }
1050 
1051  cprintf(" = %6.4f%%\n",
1052  temp / 256.0 / ClassTemplate->ProtoLengths[ActualProtoNum]);
1053 
1054  ConfigWord = ProtoSet->Protos[ProtoNum].Configs[0];
1055  ConfigNum = 0;
1056  while (ConfigWord) {
1057  cprintf ("%5d", ConfigWord & 1 ? temp : 0);
1058  if (ConfigWord & 1)
1059  ProtoConfigs[ConfigNum] += temp;
1060  ConfigNum++;
1061  ConfigWord >>= 1;
1062  }
1063  cprintf("\n");
1064  }
1065  }
1066  }
1067 
// Per-config proto error, normalized by the config's total proto length.
1068  if (PrintMatchSummaryOn (Debug)) {
1069  cprintf ("Proto Error for Configurations:\n");
1070  for (ConfigNum = 0; ConfigNum < ClassTemplate->NumConfigs; ConfigNum++)
1071  cprintf (" %5.1f",
1072  100.0 * (1.0 -
1073  ProtoConfigs[ConfigNum] /
1074  ClassTemplate->ConfigLengths[ConfigNum] / 256.0));
1075  cprintf ("\n\n");
1076  }
1077 
1078  if (PrintProtoMatchesOn (Debug)) {
1079  cprintf ("Proto Sum for Configurations:\n");
1080  for (ConfigNum = 0; ConfigNum < ClassTemplate->NumConfigs; ConfigNum++)
1081  cprintf (" %4.1f", ProtoConfigs[ConfigNum] / 256.0);
1082  cprintf ("\n\n");
1083 
1084  cprintf ("Proto Length for Configurations:\n");
1085  for (ConfigNum = 0; ConfigNum < ClassTemplate->NumConfigs; ConfigNum++)
1086  cprintf (" %4.1f",
1087  (float) ClassTemplate->ConfigLengths[ConfigNum]);
1088  cprintf ("\n\n");
1089  }
1090 
1091 }
1092 
1093 
1094 /*---------------------------------------------------------------------------*/
1095 void IntegerMatcher::DisplayProtoDebugInfo(
1096  INT_CLASS ClassTemplate,
1097  BIT_VECTOR ProtoMask,
1098  BIT_VECTOR ConfigMask,
1099  const ScratchEvidence& tables,
1100  bool SeparateDebugWindows) {
// Draws each proto whose config set intersects ConfigMask, using the
// proto's average evidence (scaled to [0,1] via /255) as the display
// intensity.  ProtoMask is accepted but not used in the visible body.
1101  uinT16 ProtoNum;
1102  uinT16 ActualProtoNum;
1103  PROTO_SET ProtoSet;
1104  int ProtoSetIndex;
1105 
// NOTE(review): source-listing lines 1106 and 1108-1109 are missing from
// this excerpt (presumably debug-window initialization calls) -- verify
// against the original intmatcher.cpp before relying on this block.
1107  if (SeparateDebugWindows) {
1110  }
1111 
1112 
1113  for (ProtoSetIndex = 0; ProtoSetIndex < ClassTemplate->NumProtoSets;
1114  ProtoSetIndex++) {
1115  ProtoSet = ClassTemplate->ProtoSets[ProtoSetIndex];
1116  ActualProtoNum = ProtoSetIndex * PROTOS_PER_PROTO_SET;
1117  for (ProtoNum = 0;
1118  ((ProtoNum < PROTOS_PER_PROTO_SET) &&
1119  (ActualProtoNum < ClassTemplate->NumProtos));
1120  ProtoNum++, ActualProtoNum++) {
1121  /* Compute Average for Actual Proto */
1122  int temp = 0;
1123  for (int i = 0; i < ClassTemplate->ProtoLengths[ActualProtoNum]; i++)
1124  temp += tables.proto_evidence_[ActualProtoNum][i];
1125 
1126  temp /= ClassTemplate->ProtoLengths[ActualProtoNum];
1127 
// Only display protos belonging to at least one enabled configuration.
1128  if ((ProtoSet->Protos[ProtoNum]).Configs[0] & (*ConfigMask)) {
1129  DisplayIntProto(ClassTemplate, ActualProtoNum, temp / 255.0);
1130  }
1131  }
1132  }
1133 }
1134 
1135 
1136 /*---------------------------------------------------------------------------*/
1137 void IntegerMatcher::DisplayFeatureDebugInfo(
1138  INT_CLASS ClassTemplate,
1139  BIT_VECTOR ProtoMask,
1140  BIT_VECTOR ConfigMask,
1141  inT16 NumFeatures,
1142  const INT_FEATURE_STRUCT* Features,
1143  int AdaptFeatureThreshold,
1144  int Debug,
1145  bool SeparateDebugWindows) {
// Runs the matcher per feature into a scratch evidence table and draws
// each feature with an intensity derived from its best config evidence.
1146  ScratchEvidence *tables = new ScratchEvidence();
1147 
1148  tables->Clear(ClassTemplate);
1149 
// NOTE(review): source-listing lines 1150 and 1152-1153 are missing from
// this excerpt (presumably debug-window initialization calls) -- verify
// against the original intmatcher.cpp before relying on this block.
1151  if (SeparateDebugWindows) {
1154  }
1155 
1156  for (int Feature = 0; Feature < NumFeatures; Feature++) {
1157  UpdateTablesForFeature(
1158  ClassTemplate, ProtoMask, ConfigMask, Feature, &Features[Feature],
1159  tables, 0);
1160 
1161  /* Find Best Evidence for Current Feature */
1162  int best = 0;
1163  for (int i = 0; i < ClassTemplate->NumConfigs; i++)
1164  if (tables->feature_evidence_[i] > best)
1165  best = tables->feature_evidence_[i];
1166 
1167  /* Update display for current feature */
// In clipped mode the feature is shown as all-or-nothing against the
// adaptation threshold; otherwise intensity is best evidence / 255.
1168  if (ClipMatchEvidenceOn(Debug)) {
1169  if (best < AdaptFeatureThreshold)
1170  DisplayIntFeature(&Features[Feature], 0.0);
1171  else
1172  DisplayIntFeature(&Features[Feature], 1.0);
1173  } else {
1174  DisplayIntFeature(&Features[Feature], best / 255.0);
1175  }
1176  }
1177 
1178  delete tables;
1179 }
1180 #endif
1181 
1182 /*---------------------------------------------------------------------------*/
1183 // Add sum of Proto Evidences into Sum Of Feature Evidence Array
// NOTE(review): source-listing line 1184 (the function signature) is
// missing from this excerpt.  Judging by the direct use of the
// proto_evidence_ and sum_feature_evidence_ members, this is a
// ScratchEvidence member function -- verify the exact signature against
// the original intmatcher.cpp.  NumFeatures is unused in the visible body.
1185  INT_CLASS ClassTemplate, BIT_VECTOR ConfigMask, inT16 NumFeatures) {
1186 
1187  int *IntPointer;
1188  uinT32 ConfigWord;
1189  int ProtoSetIndex;
1190  uinT16 ProtoNum;
1191  PROTO_SET ProtoSet;
1192  int NumProtos;
1193  uinT16 ActualProtoNum;
1194 
1195  NumProtos = ClassTemplate->NumProtos;
1196 
1197  for (ProtoSetIndex = 0; ProtoSetIndex < ClassTemplate->NumProtoSets;
1198  ProtoSetIndex++) {
1199  ProtoSet = ClassTemplate->ProtoSets[ProtoSetIndex];
1200  ActualProtoNum = (ProtoSetIndex * PROTOS_PER_PROTO_SET);
1201  for (ProtoNum = 0;
1202  ((ProtoNum < PROTOS_PER_PROTO_SET) && (ActualProtoNum < NumProtos));
1203  ProtoNum++, ActualProtoNum++) {
// Total evidence for this proto across its evidence list.
1204  int temp = 0;
1205  for (int i = 0; i < ClassTemplate->ProtoLengths[ActualProtoNum]; i++)
1206  temp += proto_evidence_[ActualProtoNum] [i];
1207 
// Add the proto's total into the sum of every enabled config it
// belongs to, walking the config bits from the LSB up.
1208  ConfigWord = ProtoSet->Protos[ProtoNum].Configs[0];
1209  ConfigWord &= *ConfigMask;
1210  IntPointer = sum_feature_evidence_;
1211  while (ConfigWord) {
1212  if (ConfigWord & 1)
1213  *IntPointer += temp;
1214  IntPointer++;
1215  ConfigWord >>= 1;
1216  }
1217  }
1218  }
1219 }
1220 
1221 
1222 
1223 /*---------------------------------------------------------------------------*/
1224 // Normalize Sum of Proto and Feature Evidence by dividing by the sum of
1225 // the Feature Lengths and the Proto Lengths for each configuration.
// NOTE(review): source-listing lines 1226 (the function signature -- a
// ScratchEvidence member, judging by the comment and siblings) and 1230
// (the left-hand side of the normalizing assignment whose divisor appears
// on line 1231) are missing from this excerpt; verify against the
// original intmatcher.cpp.  used_features is unused in the visible body.
1227  INT_CLASS ClassTemplate, inT16 NumFeatures, inT32 used_features) {
1228 
1229  for (int i = 0; i < ClassTemplate->NumConfigs; i++) {
1231  (NumFeatures + ClassTemplate->ConfigLengths[i]);
1232  }
1233 }
1234 
1235 
1236 /*---------------------------------------------------------------------------*/
1237 int IntegerMatcher::FindBestMatch(
1238  INT_CLASS ClassTemplate,
1239  const ScratchEvidence &tables,
1240  INT_RESULT Result) {
1241 /*
1242  ** Parameters:
1243  ** Globals:
1244  ** Operation:
1245  ** Find the best match for the current class and update the Result
1246  ** with the configuration and match rating.
1247  ** Return:
1248  ** The best normalized sum of evidences
1249  ** Exceptions: none
1250  ** History: Wed Feb 27 14:12:28 MST 1991, RWM, Created.
1251  */
1252  int BestMatch = 0;
1253  int Best2Match = 0;
1254  Result->Config = 0;
1255  Result->Config2 = 0;
1256 
1257  /* Find best match */
1258  for (int ConfigNum = 0; ConfigNum < ClassTemplate->NumConfigs; ConfigNum++) {
1259  int rating = tables.sum_feature_evidence_[ConfigNum];
1260  if (*classify_debug_level_ > 2)
1261  cprintf("Config %d, rating=%d\n", ConfigNum, rating);
1262  if (rating > BestMatch) {
1263  if (BestMatch > 0) {
1264  Result->Config2 = Result->Config;
1265  Best2Match = BestMatch;
1266  } else {
1267  Result->Config2 = ConfigNum;
1268  }
1269  Result->Config = ConfigNum;
1270  BestMatch = rating;
1271  } else if (rating > Best2Match) {
1272  Result->Config2 = ConfigNum;
1273  Best2Match = rating;
1274  }
1275  }
1276 
1277  /* Compute Certainty Rating */
1278  Result->Rating = (65536.0 - BestMatch) / 65536.0;
1279 
1280  return BestMatch;
1281 }
1282 
1283 // Applies the CN normalization factor to the given rating and returns
1284 // the modified rating.
1285 float IntegerMatcher::ApplyCNCorrection(float rating, int blob_length,
1286  int normalization_factor) {
1287  return (rating * blob_length +
1288  local_matcher_multiplier_ * normalization_factor / 256.0) /
1289  (blob_length + local_matcher_multiplier_);
1290 }
1291 
1292 /*---------------------------------------------------------------------------*/
1293 #ifndef GRAPHICS_DISABLED
1294 // Print debug information about the best match for the current class.
1295 void IntegerMatcher::DebugBestMatch(
1296  int BestMatch, INT_RESULT Result) {
1297  tprintf("Rating = %5.1f%% Best Config = %3d, Distance = %5.1f\n",
1298  100.0 * Result->Rating, Result->Config,
1299  100.0 * (65536.0 - BestMatch) / 65536.0);
1300 }
1301 #endif
1302 
1303 /*---------------------------------------------------------------------------*/
void
HeapSort (int n, int ra[], int rb[]) {
/*
 ** Parameters:
 ** n Number of elements to sort
 ** ra Key array [1..n]
 ** rb Index array [1..n]
 ** Globals:
 ** Operation:
 ** Sort Key array in ascending order using heap sort
 ** algorithm. Also sort Index array that is tied to
 ** the key array.  Both arrays are 1-based: element 0
 ** is never read or written.
 ** Return:
 ** Exceptions: none
 ** History: Tue Feb 19 10:24:24 MST 1991, RWM, Created.
 */
  int i, rra, rrb;
  int l, j, ir;

  // Fewer than two elements are already sorted.  Without this guard the
  // loop below never hits the (--ir == 1) exit for n < 2 and reads and
  // writes ra[0], ra[-1], ... outside the 1-based range.
  if (n < 2)
    return;

  l = (n >> 1) + 1;
  ir = n;
  for (;;) {
    if (l > 1) {
      // Heap-building phase: take the next unheaped element.
      rra = ra[--l];
      rrb = rb[l];
    }
    else {
      // Sorting phase: move the heap root (current max) to its final
      // slot at ra[ir] and re-sift the displaced last element.
      rra = ra[ir];
      rrb = rb[ir];
      ra[ir] = ra[1];
      rb[ir] = rb[1];
      if (--ir == 1) {
        ra[1] = rra;
        rb[1] = rrb;
        return;
      }
    }
    // Sift rra down to its correct position in the heap rooted at l,
    // carrying the tied index value rrb along with it.
    i = l;
    j = l << 1;
    while (j <= ir) {
      if (j < ir && ra[j] < ra[j + 1])
        ++j;
      if (rra < ra[j]) {
        ra[i] = ra[j];
        rb[i] = rb[j];
        j += (i = j);
      }
      else
        j = ir + 1;
    }
    ra[i] = rra;
    rb[i] = rrb;
  }
}