Better handling of printing results board

saundersp committed 2024-03-21 00:50:13 +01:00
parent f65c58d95c
commit 211dcad893
8 changed files with 307 additions and 188 deletions


@@ -27,10 +27,11 @@ std::tuple<np::Array<int32_t>, np::Array<uint16_t>, np::Array<uint8_t>, np::Arra
 	for (const char* const folder_name : { "models", "out" })
 		fs::create_directory(folder_name);
-	printf("| %-49s | %-18s | %-29s |\n", "Preprocessing", "Time spent (ns)", "Formatted time spent");
-	printf("|%s|%s|%s|\n", S(51), S(20), S(31));
+	const std::chrono::system_clock::time_point preproc_timestamp = perf_counter_ns();
+	const std::array<int32_t, 3> preproc_gaps = { 49, -18, 29 };
+	header({ "Preprocessing", "Time spent (ns)", "Formatted time spent" }, preproc_gaps);
-	const auto [ X_train, y_train, X_test, y_test ] = state_saver<uint8_t, 4>("Loading sets", {"X_train", "y_train", "X_test", "y_test"},
+	const auto [ X_train, y_train, X_test, y_test ] = state_saver<uint8_t, 4>("Loading sets", preproc_gaps[0], {"X_train", "y_train", "X_test", "y_test"},
 		FORCE_REDO, SAVE_STATE, OUT_DIR, load_datasets);
 #if __DEBUG
@@ -48,7 +49,7 @@ std::tuple<np::Array<int32_t>, np::Array<uint16_t>, np::Array<uint8_t>, np::Arra
 	print(y_test, { IDX_INSPECT, IDX_INSPECT + IDX_INSPECT_OFFSET });
 #endif
-	const np::Array<uint8_t> feats = state_saver<uint8_t>("Building features", "feats",
+	const np::Array<uint8_t> feats = state_saver<uint8_t>("Building features", preproc_gaps[0], "feats",
 		FORCE_REDO, SAVE_STATE, OUT_DIR, build_features, X_train.shape[1], X_train.shape[2]);
 #if __DEBUG
@@ -57,9 +58,9 @@ std::tuple<np::Array<int32_t>, np::Array<uint16_t>, np::Array<uint8_t>, np::Arra
 	print_feat(feats, { IDX_INSPECT });
 #endif
-	const np::Array<uint32_t> X_train_ii = state_saver<uint32_t>("Converting training set to integral images (" LABEL ")", "X_train_ii_" LABEL,
+	const np::Array<uint32_t> X_train_ii = state_saver<uint32_t>("Converting training set to integral images (" LABEL ")", preproc_gaps[0], "X_train_ii_" LABEL,
 		FORCE_REDO, SAVE_STATE, OUT_DIR, set_integral_image, X_train);
-	const np::Array<uint32_t> X_test_ii = state_saver<uint32_t>("Converting testing set to integral images (" LABEL ")", "X_test_ii_" LABEL,
+	const np::Array<uint32_t> X_test_ii = state_saver<uint32_t>("Converting testing set to integral images (" LABEL ")", preproc_gaps[0], "X_test_ii_" LABEL,
 		FORCE_REDO, SAVE_STATE, OUT_DIR, set_integral_image, X_test);
 #if __DEBUG
@@ -71,9 +72,9 @@ std::tuple<np::Array<int32_t>, np::Array<uint16_t>, np::Array<uint8_t>, np::Arra
 	print(X_test_ii, { IDX_INSPECT });
 #endif
-	const np::Array<int32_t> X_train_feat = state_saver<int32_t>("Applying features to training set (" LABEL ")", "X_train_feat_" LABEL,
+	const np::Array<int32_t> X_train_feat = state_saver<int32_t>("Applying features to training set (" LABEL ")", preproc_gaps[0], "X_train_feat_" LABEL,
 		FORCE_REDO, SAVE_STATE, OUT_DIR, apply_features, feats, X_train_ii);
-	const np::Array<int32_t> X_test_feat = state_saver<int32_t>("Applying features to testing set (" LABEL ")", "X_test_feat_" LABEL,
+	const np::Array<int32_t> X_test_feat = state_saver<int32_t>("Applying features to testing set (" LABEL ")", preproc_gaps[0], "X_test_feat_" LABEL,
 		FORCE_REDO, SAVE_STATE, OUT_DIR, apply_features, feats, X_test_ii);
 #if __DEBUG
@@ -92,7 +93,7 @@ std::tuple<np::Array<int32_t>, np::Array<uint16_t>, np::Array<uint8_t>, np::Arra
 	// print_feature(indices);
 #endif
-	const np::Array<uint16_t> X_train_feat_argsort = state_saver<uint16_t>("Precalculating training set argsort (" LABEL ")", "X_train_feat_argsort_" LABEL,
+	const np::Array<uint16_t> X_train_feat_argsort = state_saver<uint16_t>("Precalculating training set argsort (" LABEL ")", preproc_gaps[0], "X_train_feat_argsort_" LABEL,
 		FORCE_REDO, SAVE_STATE, OUT_DIR, argsort_2d, X_train_feat);
 #if __DEBUG
@@ -101,35 +102,38 @@ std::tuple<np::Array<int32_t>, np::Array<uint16_t>, np::Array<uint8_t>, np::Arra
 	print(X_train_feat_argsort, { IDX_INSPECT, IDX_INSPECT + IDX_INSPECT_OFFSET });
 #endif
-	const np::Array<uint16_t> X_test_feat_argsort = state_saver<uint16_t>("Precalculating testing set argsort (" LABEL ")", "X_test_feat_argsort_" LABEL,
-		FORCE_REDO, SAVE_STATE, OUT_DIR, argsort_2d, X_test_feat);
+	// const np::Array<uint16_t> X_test_feat_argsort = state_saver<uint16_t>("Precalculating testing set argsort (" LABEL ")", preproc_gaps[0], "X_test_feat_argsort_" LABEL,
+	// 	FORCE_REDO, SAVE_STATE, OUT_DIR, argsort_2d, X_test_feat);
 #if __DEBUG
-	print("X_test_feat_argsort");
-	print(X_test_feat_argsort.shape);
-	print(X_test_feat_argsort, { IDX_INSPECT, IDX_INSPECT + IDX_INSPECT_OFFSET });
+	// printf("X_test_feat_argsort\n");
+	// print(X_test_feat_argsort.shape);
+	// print(X_test_feat_argsort, { IDX_INSPECT, IDX_INSPECT + IDX_INSPECT_OFFSET });
 #endif
+	const long long time_spent = duration_ns(perf_counter_ns() - preproc_timestamp);
+	formatted_line(preproc_gaps, "", "", "", "");
+	formatted_row(preproc_gaps, { "Preprocessing summary", thousand_sep(time_spent).c_str(), format_time_ns(time_spent).c_str() });
+	footer(preproc_gaps);
 	return { X_train_feat, X_train_feat_argsort, y_train, X_test_feat, y_test };
 }
-void train(const np::Array<int32_t>& X_train_feat, const np::Array<uint16_t>& X_train_feat_argsort, const np::Array<uint8_t>& y_train) {
-	printf("\n| %-49s | %-18s | %-29s |\n", "Training", "Time spent (ns)", "Formatted time spent");
-	printf("|%s|%s|%s|\n", S(51), S(20), S(31));
+std::array<std::array<np::Array<float64_t>, 2>, TS.size()> train(const np::Array<int32_t>& X_train_feat, const np::Array<uint16_t>& X_train_feat_argsort, const np::Array<uint8_t>& y_train) noexcept {
+	const std::chrono::system_clock::time_point training_timestamp = perf_counter_ns();
+	const std::array<int32_t, 3> training_gaps = { 26, -18, 29 };
+	header({ "Training", "Time spent (ns)", "Formatted time spent" }, training_gaps);
+	std::array<std::array<np::Array<float64_t>, 2>, TS.size()> models;
+	size_t i = 0;
 	for (const size_t T : TS) {
 		char title[BUFFER_SIZE] = { 0 };
 		char alphas_title[BUFFER_SIZE] = { 0 };
 		char final_classifiers_title[BUFFER_SIZE] = { 0 };
-		sprintf(title, "ViolaJones T = %-4lu (%s)", T, LABEL);
-		sprintf(alphas_title, "alphas_%lu_%s", T, LABEL);
-		sprintf(final_classifiers_title, "final_classifiers_%lu_%s", T, LABEL);
+		snprintf(title, BUFFER_SIZE, "ViolaJones T = %-4lu (%s)", T, LABEL);
+		snprintf(alphas_title, BUFFER_SIZE, "alphas_%lu_%s", T, LABEL);
+		snprintf(final_classifiers_title, BUFFER_SIZE, "final_classifiers_%lu_%s", T, LABEL);
-#if __DEBUG
-		const auto [ alphas, final_classifiers ] = state_saver<float64_t, 2>(title, { alphas_title, final_classifiers_title },
-#else
-		state_saver<float64_t, 2>(title, { alphas_title, final_classifiers_title },
-#endif
+		const auto [ alphas, final_classifiers ] = state_saver<float64_t, 2>(title, training_gaps[0], { alphas_title, final_classifiers_title },
 			FORCE_REDO, SAVE_STATE, MODEL_DIR, train_viola_jones, T, X_train_feat, X_train_feat_argsort, y_train);
 #if __DEBUG
 		print("alphas");
@@ -137,31 +141,32 @@ void train(const np::Array<int32_t>& X_train_feat, const np::Array<uint16_t>& X_
 		print("final_classifiers");
 		print(final_classifiers);
 #endif
+		models[i++] = { alphas, final_classifiers };
 	}
+	const long long time_spent = duration_ns(perf_counter_ns() - training_timestamp);
+	formatted_line(training_gaps, "", "", "", "");
+	formatted_row(training_gaps, { "Training summary", thousand_sep(time_spent).c_str(), format_time_ns(time_spent).c_str() });
+	footer(training_gaps);
+	return models;
 }
-void testing_and_evaluating(const np::Array<int32_t>& X_train_feat, const np::Array<uint8_t>& y_train, const np::Array<int32_t>& X_test_feat, const np::Array<uint8_t>& y_test) {
-	printf("\n| %-26s | Time spent (ns) (E) | %-29s | Time spent (ns) (T) | %-29s |\n", "Testing", "Formatted time spent (E)", "Formatted time spent (T)");
-	printf("|%s|%s|%s|%s|%s|\n", S(28), S(21), S(31), S(21), S(31));
-	constexpr const size_t NT = sizeof(TS) / sizeof(size_t);
-	std::array<std::array<float64_t, 8>, NT> results;
+void testing_and_evaluating(const std::array<std::array<np::Array<float64_t>, 2>, TS.size()>& models, const np::Array<int32_t>& X_train_feat, const np::Array<uint8_t>& y_train, const np::Array<int32_t>& X_test_feat, const np::Array<uint8_t>& y_test) {
+	const std::array<int32_t, 5> testing_gaps = { 26, -19, 24, -19, 24 };
+	header({ "Testing", "Time spent (ns) (E)", "Formatted time spent (E)", "Time spent (ns) (T)", "Formatted time spent (T)" }, testing_gaps);
+	std::array<std::array<float64_t, 8>, TS.size()> results;
 	size_t i = 0;
-	for (const size_t T : TS) {
+	long long total_train_timestamp = 0;
+	long long total_test_timestamp = 0;
+	for (const auto& [ alphas, final_classifiers ] : models) {
 		char title[BUFFER_SIZE] = { 0 };
-		char alphas_title[BUFFER_SIZE] = { 0 };
-		char final_classifiers_title[BUFFER_SIZE] = { 0 };
-		sprintf(title, "ViolaJones T = %-4lu (%s)", T, LABEL);
-		sprintf(alphas_title, MODEL_DIR "/alphas_%lu_%s.bin", T, LABEL);
-		sprintf(final_classifiers_title, MODEL_DIR "/final_classifiers_%lu_%s.bin", T, LABEL);
-		const np::Array<float64_t> alphas = load<float64_t>(alphas_title);
-		const np::Array<float64_t> final_classifiers = load<float64_t>(final_classifiers_title);
+		snprintf(title, BUFFER_SIZE, "ViolaJones T = %-4i (%s)", TS[i], LABEL);
 		std::chrono::system_clock::time_point start = perf_counter_ns();
 		const np::Array<uint8_t> y_pred_train = classify_viola_jones(alphas, final_classifiers, X_train_feat);
 		const long long t_pred_train = duration_ns(perf_counter_ns() - start);
+		total_train_timestamp += t_pred_train;
 		const float64_t e_acc = accuracy_score(y_train, y_pred_train);
 		const float64_t e_f1 = f1_score(y_train, y_pred_train);
 		float64_t e_FN, e_FP;
@@ -170,48 +175,53 @@ void testing_and_evaluating(const np::Array<int32_t>& X_train_feat, const np::Ar
 		start = perf_counter_ns();
 		const np::Array<uint8_t> y_pred_test = classify_viola_jones(alphas, final_classifiers, X_test_feat);
 		const long long t_pred_test = duration_ns(perf_counter_ns() - start);
+		total_test_timestamp += t_pred_test;
 		const float64_t t_acc = accuracy_score(y_test, y_pred_test);
 		const float64_t t_f1 = f1_score(y_test, y_pred_test);
 		float64_t t_FN, t_FP;
 		std::tie(std::ignore, t_FN, t_FP, std::ignore) = confusion_matrix(y_test, y_pred_test);
 		results[i++] = { e_acc, e_f1, e_FN, e_FP, t_acc, t_f1, t_FN, t_FP };
-		printf("| %-26s | %'19lld | %-29s | %'19lld | %-29s |\n", title, t_pred_train, format_time_ns(t_pred_train).c_str(), t_pred_test, format_time_ns(t_pred_test).c_str());
+		formatted_row(testing_gaps, { title, thousand_sep(t_pred_train).c_str(), format_time_ns(t_pred_train).c_str(), thousand_sep(t_pred_test).c_str(), format_time_ns(t_pred_test).c_str() });
 	}
+	formatted_line(testing_gaps, "", "", "", "");
+	formatted_row(testing_gaps, { "Testing summary", thousand_sep(total_train_timestamp).c_str(), format_time_ns(total_train_timestamp).c_str(), thousand_sep(total_test_timestamp).c_str(), format_time_ns(total_test_timestamp).c_str() });
+	footer(testing_gaps);
-	printf("\n| %-19s | ACC (E) | F1 (E) | FN (E) | FP (E) | ACC (T) | F1 (T) | FN (T) | FP (T) |\n", "Evaluating");
-	printf("|%s|%s|%s|%s|%s|%s|%s|%s|%s|\n", S(21), S(9), S(8), S(8), S(8), S(9), S(8), S(8), S(8));
+	const std::array<int32_t, 9> evaluating_gaps = { 19, -7, -6, -6, -6, -7, -6, -6, -6 };
+	header({ "Evaluating", "ACC (E)", "F1 (E)", "FN (E)", "FP (E)", "ACC (T)", "F1 (T)", "FN (T)", "FP (T)"}, evaluating_gaps);
 	i = 0;
 	for (const size_t T : TS) {
 		char title[BUFFER_SIZE] = { 0 };
-		sprintf(title, "ViolaJones T = %-4lu", T);
+		snprintf(title, BUFFER_SIZE, "ViolaJones T = %-4lu", T);
 		const auto [e_acc, e_f1, e_FN, e_FP, t_acc, t_f1, t_FN, t_FP] = results[i++];
-		printf("| %-19s | %'6.2f%% | %'6.2f | %'6.0f | %'6.0f | %6.2f%% | %'6.2f | %'6.0f | %'6.0f |\n", title, e_acc * 100, e_f1, e_FN, e_FP, t_acc * 100, t_f1, t_FN, t_FP);
+		printf(" %-19s %'6.2f%% %'6.2f %'6.0f %'6.0f %6.2f%% %'6.2f %'6.0f %'6.0f \n", title, e_acc * 100, e_f1, e_FN, e_FP, t_acc * 100, t_f1, t_FN, t_FP);
 	}
+	footer(evaluating_gaps);
 }
 void unit_test(void) {
-	printf("\n| %-37s | %-10s | %-18s | %-29s |\n", "Unit testing", "Test state", "Time spent (ns)", "Formatted time spent");
-	printf("|%s|%s|%s|%s|\n", S(39), S(12), S(20), S(31));
+	const std::chrono::system_clock::time_point unit_timestamp = perf_counter_ns();
+	const std::array<int32_t, 4> unit_gaps = { 37, -10, -18, 29};
+	header({ "Unit testing", "Test state", "Time spent (ns)", "Formatted time spent" }, unit_gaps);
 	char title[BUFFER_SIZE] = { 0 };
 	char tmp_title[BUFFER_SIZE / 2] = { 0 };
 	char file_cpu[BUFFER_SIZE] = { 0 };
 	char file_gpu[BUFFER_SIZE] = { 0 };
-	const std::chrono::system_clock::time_point fnc_s = perf_counter_ns();
 	uint64_t n_total = 0, n_success = 0;
-	auto test_fnc = [&n_total, &n_success](const char* title, const auto& fnc) {
+	const auto test_fnc = [&unit_gaps, &n_total, &n_success](const char* const title, const auto& fnc) noexcept {
 		++n_total;
 		const std::chrono::system_clock::time_point start = perf_counter_ns();
 		const bool state = fnc();
 		const long long time_spent = duration_ns(perf_counter_ns() - start);
 		if(state){
-			printf("| %-37s | %10s | %18s | %-29s |\n", title, "Passed", thousand_sep(time_spent).c_str(), format_time_ns(time_spent).c_str());
+			formatted_row(unit_gaps, { title, "Passed", thousand_sep(time_spent).c_str(), format_time_ns(time_spent).c_str() });
 			++n_success;
 		} else
-			printf("| %-37s | %10s | %18s | %-29s |\n", title, "Failed", thousand_sep(time_spent).c_str(), format_time_ns(time_spent).c_str());
+			formatted_row(unit_gaps, { title, "Failed", thousand_sep(time_spent).c_str(), format_time_ns(time_spent).c_str() });
 	};
 	for (const char* label : { "train", "test" }) {
@@ -275,32 +285,38 @@ void unit_test(void) {
 		}
 	}
-	const long long time_spent = duration_ns(perf_counter_ns() - fnc_s);
-	sprintf(title, "%ld/%ld", n_success, n_total);
+	const long long time_spent = duration_ns(perf_counter_ns() - unit_timestamp);
+	snprintf(title, BUFFER_SIZE, "%ld/%ld", n_success, n_total);
-	printf("|%s|%s|%s|%s|\n", S(39), S(12), S(20), S(31));
-	printf("| %-37s | %10s | %18s | %-29s |\n", "Unit testing summary", title, thousand_sep(time_spent).c_str(), format_time_ns(time_spent).c_str());
+	formatted_line(unit_gaps, "", "", "", "");
+	formatted_row(unit_gaps, { "Unit testing summary", title, thousand_sep(time_spent).c_str(), format_time_ns(time_spent).c_str() });
+	footer(unit_gaps);
 }
 int main(){
 	setlocale(LC_NUMERIC, ""); // Allow proper number display
-	printf("| %-49s | %-18s | %-29s |\n", "Unit testing", "Time spent (ns)", "Formatted time spent");
-	printf("|%s|%s|%s|\n", S(51), S(20), S(31));
+	const std::chrono::system_clock::time_point unit_timestamp = perf_counter_ns();
+	const std::array<int32_t, 3> unit_gaps = { 27, -18, 29 };
+	header({ "Unit testing", "Time spent (ns)", "Formatted time spent" }, unit_gaps);
 #if GPU_BOOSTED
-	benchmark_function_void("Testing GPU capabilities 1D", test_working, 3 + (1<<29));
-	benchmark_function_void("Testing GPU capabilities 2D", test_working_2d, 3 + (1<<15), 2 + (1<<14));
-	benchmark_function_void("Testing GPU capabilities 3D", test_working_3d, 9 + (1<<10), 5 + (1<<10), 7 + (1<<9));
+	benchmark_function_void("Testing GPU capabilities 1D", unit_gaps[0], test_working, 50000);
+	benchmark_function_void("Testing GPU capabilities 2D", unit_gaps[0], test_working_2d, 200, 500);
+	benchmark_function_void("Testing GPU capabilities 3D", unit_gaps[0], test_working_3d, 30, 40, 500);
 #endif
-	benchmark_function_void("Testing format_time", format_time_test);
-	benchmark_function_void("Testing format_time_ns", format_time_ns_test);
-	benchmark_function_void("Testing format_byte_size", format_byte_size_test);
-	benchmark_function_void("Testing thousand_sep", thousand_sep_test);
-	printf("\n");
+	benchmark_function_void("Testing format_time", unit_gaps[0], format_time_test);
+	benchmark_function_void("Testing format_time_ns", unit_gaps[0], format_time_ns_test);
+	benchmark_function_void("Testing format_byte_size", unit_gaps[0], format_byte_size_test);
+	benchmark_function_void("Testing thousand_sep", unit_gaps[0], thousand_sep_test);
+	const long long time_spent = duration_ns(perf_counter_ns() - unit_timestamp);
+	formatted_line(unit_gaps, "", "", "", "");
+	formatted_row(unit_gaps, { "Unit testing summary", thousand_sep(time_spent).c_str(), format_time_ns(time_spent).c_str() });
+	footer(unit_gaps);
 	const auto [ X_train_feat, X_train_feat_argsort, y_train, X_test_feat, y_test ] = preprocessing();
-	train(X_train_feat, X_train_feat_argsort, y_train);
-	testing_and_evaluating(X_train_feat, y_train, X_test_feat, y_test);
+	const std::array<std::array<np::Array<float64_t>, 2>, TS.size()> models = train(X_train_feat, X_train_feat_argsort, y_train);
+	testing_and_evaluating(models, X_train_feat, y_train, X_test_feat, y_test);
 	unit_test();
 	return EXIT_SUCCESS;
 }
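
Note: the new board helpers (header, footer, formatted_row, formatted_line) live in one of the other seven changed files, which is not shown above. From the call sites alone, each board is described by an array of column gaps, where a positive gap appears to mean a left-aligned column of that width and a negative gap a right-aligned one (mirroring the "%-Ns" / "%Ns" printf specifiers they replace). The following is a minimal, hypothetical sketch with that behaviour, inferred only from the call sites in this diff; the actual implementations may differ.

#include <array>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>

// Hypothetical sketch of the board-printing helpers, inferred from the call
// sites above. A positive gap is a left-aligned column of that width, a
// negative gap a right-aligned one.

// Print one row: each cell is padded to |gap| characters inside "| ... |".
template<std::size_t N>
void formatted_row(const std::array<int32_t, N>& gaps, const std::array<const char*, N>& cells) noexcept {
	for (std::size_t i = 0; i < N; ++i)
		printf("| %*s ", -gaps[i], cells[i]); // negative printf width == left-aligned
	printf("|\n");
}

// Horizontal rule between rows; with empty decorations (as called in the diff)
// it degrades to "|-----|-----|", each segment being |gap| + 2 characters wide.
template<std::size_t N>
void formatted_line(const std::array<int32_t, N>& gaps, const char* const left, const char* const fill,
		const char* const sep, const char* const right) noexcept {
	printf("%s", *left ? left : "|");
	for (std::size_t i = 0; i < N; ++i) {
		for (int32_t j = 0; j < std::abs(gaps[i]) + 2; ++j)
			printf("%s", *fill ? fill : "-");
		printf("%s", i + 1 < N ? (*sep ? sep : "|") : (*right ? right : "|"));
	}
	printf("\n");
}

// Board title row followed by a rule.
template<std::size_t N>
void header(const std::array<const char*, N>& titles, const std::array<int32_t, N>& gaps) noexcept {
	formatted_row(gaps, titles);
	formatted_line(gaps, "", "", "", "");
}

// Closing rule of a board.
template<std::size_t N>
void footer(const std::array<int32_t, N>& gaps) noexcept {
	formatted_line(gaps, "", "", "", "");
}

int main() {
	// Reproduces the shape of the "Preprocessing" board with dummy timings.
	const std::array<int32_t, 3> preproc_gaps = { 49, -18, 29 };
	header({ "Preprocessing", "Time spent (ns)", "Formatted time spent" }, preproc_gaps);
	formatted_row(preproc_gaps, { "Loading sets", "123,456,789", "123.456789ms" });
	formatted_line(preproc_gaps, "", "", "", "");
	formatted_row(preproc_gaps, { "Preprocessing summary", "123,456,789", "123.456789ms" });
	footer(preproc_gaps);
	return EXIT_SUCCESS;
}

Under these assumptions the rule segments come out |gap| + 2 characters wide, which matches the S(51), S(20), S(31) separators the old printf calls produced for the { 49, -18, 29 } gaps.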