Fix log_* format strings that expect size_t but receive uint32_t.

This commit is contained in:
Iestyn Pryce
2017-05-19 22:31:56 +01:00
parent 87a76bf967
commit ecd07b18c1
8 changed files with 17 additions and 16 deletions

View File

@@ -15,7 +15,7 @@ address_dictionary_t *get_address_dictionary(void) {
address_expansion_value_t *address_dictionary_get_expansions(uint32_t i) { address_expansion_value_t *address_dictionary_get_expansions(uint32_t i) {
if (address_dict == NULL || address_dict->values == NULL || i > address_dict->values->n) { if (address_dict == NULL || address_dict->values == NULL || i > address_dict->values->n) {
log_error("i=%zu, address_dict->values->n=%zu\n", i, address_dict->values->n); log_error("i=%" PRIu32 ", address_dict->values->n=%zu\n", i, address_dict->values->n);
log_error(ADDRESS_DICTIONARY_SETUP_ERROR); log_error(ADDRESS_DICTIONARY_SETUP_ERROR);
return NULL; return NULL;
} }

View File

@@ -243,7 +243,7 @@ sparse_matrix_t *ftrl_weights_finalize_sparse(ftrl_trainer_t *self) {
double lambda2 = self->lambda2; double lambda2 = self->lambda2;
sparse_matrix_t *weights = sparse_matrix_new(); sparse_matrix_t *weights = sparse_matrix_new();
log_info("weights->m = %zu\n", weights->m); log_info("weights->m = %" PRIu32 "\n", weights->m);
size_t i_start = 0; size_t i_start = 0;
@@ -259,7 +259,7 @@ sparse_matrix_t *ftrl_weights_finalize_sparse(ftrl_trainer_t *self) {
sparse_matrix_finalize_row(weights); sparse_matrix_finalize_row(weights);
i_start = 1; i_start = 1;
} }
log_info("after intercept weights->m = %zu\n", weights->m); log_info("after intercept weights->m = %" PRIu32 "\n", weights->m);
for (size_t i = i_start; i < m; i++) { for (size_t i = i_start; i < m; i++) {
double *row = double_matrix_get_row(self->z, (size_t)i); double *row = double_matrix_get_row(self->z, (size_t)i);
@@ -275,7 +275,7 @@ sparse_matrix_t *ftrl_weights_finalize_sparse(ftrl_trainer_t *self) {
sparse_matrix_finalize_row(weights); sparse_matrix_finalize_row(weights);
if (i % 1000 == 0 && i > 0) { if (i % 1000 == 0 && i > 0) {
log_info("adding rows, weights->m = %zu\n", weights->m); log_info("adding rows, weights->m = %" PRIu32 "\n", weights->m);
} }
} }

View File

@@ -53,7 +53,7 @@ double test_accuracy(char *filename) {
} }
log_info("total=%zu\n", total); log_info("total=%" PRIu32 "\n", total);
trie_destroy(label_ids); trie_destroy(label_ids);

View File

@@ -599,13 +599,13 @@ static language_classifier_t *trainer_finalize(logistic_regression_trainer_t *tr
sparse_matrix_t *sparse_weights = logistic_regression_trainer_final_weights_sparse(trainer); sparse_matrix_t *sparse_weights = logistic_regression_trainer_final_weights_sparse(trainer);
classifier->weights_type = MATRIX_SPARSE; classifier->weights_type = MATRIX_SPARSE;
classifier->weights.sparse = sparse_weights; classifier->weights.sparse = sparse_weights;
log_info("Weights sparse: %zu rows (m=%u), %zu cols, %zu elements\n", sparse_weights->indptr->n, sparse_weights->m, sparse_weights->n, sparse_weights->data->n); log_info("Weights sparse: %zu rows (m=%u), %" PRIu32 " cols, %zu elements\n", sparse_weights->indptr->n, sparse_weights->m, sparse_weights->n, sparse_weights->data->n);
} }
} else if (trainer->optimizer_type == LOGISTIC_REGRESSION_OPTIMIZER_FTRL) { } else if (trainer->optimizer_type == LOGISTIC_REGRESSION_OPTIMIZER_FTRL) {
sparse_matrix_t *sparse_weights = logistic_regression_trainer_final_weights_sparse(trainer); sparse_matrix_t *sparse_weights = logistic_regression_trainer_final_weights_sparse(trainer);
classifier->weights_type = MATRIX_SPARSE; classifier->weights_type = MATRIX_SPARSE;
classifier->weights.sparse = sparse_weights; classifier->weights.sparse = sparse_weights;
log_info("Weights sparse: %zu rows (m=%u), %zu cols, %zu elements\n", sparse_weights->indptr->n, sparse_weights->m, sparse_weights->n, sparse_weights->data->n); log_info("Weights sparse: %zu rows (m=%u), %" PRIu32 " cols, %zu elements\n", sparse_weights->indptr->n, sparse_weights->m, sparse_weights->n, sparse_weights->data->n);
} }

View File

@@ -665,7 +665,7 @@ static bool add_affix_expansions(string_tree_t *tree, char *str, char *lang, tok
} }
} else if (have_suffix) { } else if (have_suffix) {
log_debug("suffix.start=%zu\n", suffix.start); log_debug("suffix.start=%" PRId32 "\n", suffix.start);
root_len = suffix.start; root_len = suffix.start;
root_token = (token_t){token.offset, root_len, token.type}; root_token = (token_t){token.offset, root_len, token.type};
log_debug("root_len=%zu\n", root_len); log_debug("root_len=%zu\n", root_len);
@@ -887,7 +887,7 @@ static void expand_alternative(cstring_array *strings, khash_t(str_set) *unique_
log_debug("Adding alternatives for single normalization\n"); log_debug("Adding alternatives for single normalization\n");
alternatives = add_string_alternatives(tokenized_str, options); alternatives = add_string_alternatives(tokenized_str, options);
log_debug("num strings = %zu\n", string_tree_num_strings(alternatives)); log_debug("num strings = %" PRIu32 "\n", string_tree_num_strings(alternatives));
if (alternatives == NULL) { if (alternatives == NULL) {
log_debug("alternatives = NULL\n"); log_debug("alternatives = NULL\n");

View File

@@ -3,6 +3,7 @@
#include <stdio.h> #include <stdio.h>
#include <errno.h> #include <errno.h>
#include <inttypes.h>
#include <string.h> #include <string.h>
#define LOG_LEVEL_DEBUG 10 #define LOG_LEVEL_DEBUG 10

View File

@@ -13,7 +13,7 @@ bool logistic_regression_model_expectation_sparse(sparse_matrix_t *theta, sparse
} }
if (sparse_matrix_dot_sparse(x, theta, p_y) != 0) { if (sparse_matrix_dot_sparse(x, theta, p_y) != 0) {
log_error("x->m = %zu, x->n = %zu, theta->m = %zu, theta->n = %zu, p_y->m = %zu, p_y->n = %zu\n", x->m, x->n, theta->m, theta->n, p_y->m, p_y->n); log_error("x->m = %" PRIu32 ", x->n = %" PRIu32 ", theta->m = %" PRIu32 ", theta->n = %" PRIu32 ", p_y->m = %zu, p_y->n = %zu\n", x->m, x->n, theta->m, theta->n, p_y->m, p_y->n);
return false; return false;
} }
@@ -31,7 +31,7 @@ bool logistic_regression_model_expectation(double_matrix_t *theta, sparse_matrix
} }
if (sparse_matrix_dot_dense(x, theta, p_y) != 0) { if (sparse_matrix_dot_dense(x, theta, p_y) != 0) {
log_error("x->m = %zu, x->n = %zu, theta->m = %zu, theta->n = %zu, p_y->m = %zu, p_y->n = %zu\n", x->m, x->n, theta->m, theta->n, p_y->m, p_y->n); log_error("x->m = %" PRIu32 ", x->n = %" PRIu32 ", theta->m = %" PRIu32 ", theta->n = %" PRIu32 ", p_y->m = %zu, p_y->n = %zu\n", x->m, x->n, theta->m, theta->n, p_y->m, p_y->n);
return false; return false;
} }

View File

@@ -203,7 +203,7 @@ bool stochastic_gradient_descent_update_sparse(sgd_trainer_t *self, double_matri
lambda_update = lambda / (double)batch_size * gamma_t; lambda_update = lambda / (double)batch_size * gamma_t;
if (t > self->penalties->n) { if (t > self->penalties->n) {
log_info("t = %zu, penalties->n = %zu\n", t, self->penalties->n); log_info("t = %" PRIu32 ", penalties->n = %zu\n", t, self->penalties->n);
return false; return false;
} }
penalty = self->penalties->a[t]; penalty = self->penalties->a[t];
@@ -219,7 +219,7 @@ bool stochastic_gradient_descent_update_sparse(sgd_trainer_t *self, double_matri
if (self->iterations > 0) { if (self->iterations > 0) {
if (last_updated >= self->penalties->n) { if (last_updated >= self->penalties->n) {
log_info("col = %u, t = %zu, last_updated = %zu, penalties->n = %zu\n", col, t, last_updated, self->penalties->n); log_info("col = %u, t = %" PRIu32 ", last_updated = %" PRIu32 ", penalties->n = %zu\n", col, t, last_updated, self->penalties->n);
return false; return false;
} }
@@ -376,7 +376,7 @@ bool stochastic_gradient_descent_set_regularized_weights(sgd_trainer_t *self, do
uint32_t last_updated = updates[i]; uint32_t last_updated = updates[i];
if (last_updated >= self->penalties->n) { if (last_updated >= self->penalties->n) {
log_error("last_updated (%zu) >= self->penalties-> (%zu)\n", last_updated, self->penalties->n); log_error("last_updated (%" PRIu32 ") >= self->penalties-> (%zu)\n", last_updated, self->penalties->n);
return false; return false;
} }
double last_update_penalty = penalties[last_updated]; double last_update_penalty = penalties[last_updated];