{
  int charsetsize = it->shape_table()->unicharset().size();
  int shapesize = it->CompactCharsetSize();
  int fontsize = it->sample_set()->NumFonts();
  ErrorCounter counter(charsetsize, shapesize, fontsize);
  // Holds the classifier output for each sample. (Type assumed to be
  // GenericVector<ShapeRating>, to match the &results argument passed to
  // ClassifySample below; the declaration was missing from this excerpt.)
  GenericVector<ShapeRating> results;

  clock_t start = clock();
  int total_samples = 0;
  double unscaled_error = 0.0;
  // Number of misclassified samples on which to re-run the classifier in
  // debug mode (enabled only when report_level > 3).
  int error_samples = report_level > 3 ? report_level * report_level : 0;
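  // Iterate over all the samples, accumulating error counts.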
  for (it->Begin(); !it->AtEnd(); it->Next()) {
    TrainingSample* mutable_sample = it->MutableSample();
    // Look up the page image for this sample, if it has one.
    int page_index = mutable_sample->page_num();
    Pix* page_pix = 0 <= page_index && page_index < page_images.size()
                  ? page_images[page_index] : NULL;
    // Classify with no debug output and no forced answer.
    classifier->ClassifySample(*mutable_sample, page_pix, 0, INVALID_UNICHAR_ID,
                               &results);
    if (mutable_sample->class_id() == 0) {
      // Class id 0 marks the sample as junk, so use the junk counter.
      counter.AccumulateJunk(*it->shape_table(), results, mutable_sample);
    } else if (counter.AccumulateErrors(report_level > 3, boosting_mode,
                                        fontinfo_table, *it->shape_table(),
                                        results, mutable_sample) &&
               error_samples > 0) {
      // The sample was misclassified: re-run the classifier in debug mode,
      // keeping the correct answer, and use up one of the debug samples.
      tprintf("Error on sample %d: Classifier debug output:\n",
              it->GlobalSampleIndex());
      int keep_this = it->GetSparseClassID();
      classifier->ClassifySample(*mutable_sample, page_pix, 1, keep_this,
                                 &results);
      --error_samples;
    }
    ++total_samples;
  }
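  // Convert the elapsed clock ticks to seconds for the timing report.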
  double total_time = 1.0 * (clock() - start) / CLOCKS_PER_SEC;
  // Create the appropriate error report.
  unscaled_error = counter.ReportErrors(report_level, boosting_mode,
                                        fontinfo_table,
                                        *it, unichar_error, fonts_report);
  if (scaled_error != NULL) *scaled_error = counter.scaled_error_;
  if (report_level > 1) {
    // Report the total time and the per-character time in microseconds.
    tprintf("Errors computed in %.2fs at %.1f μs/char\n",
            total_time, 1000000.0 * total_time / total_samples);
  }
  return unscaled_error;
}