[classification] correcting cost functions in SGD and FTRL for use in parameter sweeps

Al
2017-04-05 14:08:51 -04:00
parent 6219cc6378
commit caebf4e2c9
7 changed files with 169 additions and 87 deletions


@@ -38,6 +38,7 @@ bool sgd_trainer_reset_params(sgd_trainer_t *self, double lambda, double gamma_0
bool stochastic_gradient_descent_update(sgd_trainer_t *self, double_matrix_t *gradient, size_t batch_size);
bool stochastic_gradient_descent_update_sparse(sgd_trainer_t *self, double_matrix_t *gradient, uint32_array *update_indices, size_t batch_size);
double stochastic_gradient_descent_reg_cost(sgd_trainer_t *self, uint32_array *indices, size_t batch_size);
bool stochastic_gradient_descent_set_regularized_weights(sgd_trainer_t *self, double_matrix_t *w, uint32_array *indices);
bool stochastic_gradient_descent_regularize_weights(sgd_trainer_t *self);
double_matrix_t *stochastic_gradient_descent_get_weights(sgd_trainer_t *self);
sparse_matrix_t *stochastic_gradient_descent_get_weights_sparse(sgd_trainer_t *self);
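The hunk only shows declarations, so the sketch below illustrates how the regularized-cost and weight-regularization entry points could fit into a lambda sweep of the kind the commit message describes. It is a minimal sketch under stated assumptions: "sgd_trainer.h" as the header name, compute_batch_gradient() and batch_data_cost() as placeholder helpers, and NULL indices standing in for "all columns" in the reg-cost call; none of these come from the diff itself.

```c
/* Hypothetical sweep over regularization strengths (lambda), built only on
 * the declarations shown above. "sgd_trainer.h", compute_batch_gradient(),
 * batch_data_cost(), and the NULL indices argument (taken here to mean
 * "all columns") are assumptions, not part of the commit. */
#include <math.h>
#include <stdbool.h>
#include <stddef.h>

#include "sgd_trainer.h"   /* assumed header containing the declarations above */

double_matrix_t *compute_batch_gradient(sgd_trainer_t *trainer, size_t batch);  /* hypothetical */
double batch_data_cost(sgd_trainer_t *trainer, size_t batch);                   /* hypothetical */

double sweep_lambda(sgd_trainer_t *trainer, const double *lambdas, size_t n_lambdas,
                    double gamma_0, size_t num_batches, size_t batch_size) {
    double best_lambda = lambdas[0];
    double best_cost = INFINITY;

    for (size_t i = 0; i < n_lambdas; i++) {
        /* Reset weights and hyperparameters before trying each lambda */
        if (!sgd_trainer_reset_params(trainer, lambdas[i], gamma_0)) continue;

        double total_cost = 0.0;
        for (size_t b = 0; b < num_batches; b++) {
            double_matrix_t *gradient = compute_batch_gradient(trainer, b);
            total_cost += batch_data_cost(trainer, b);
            stochastic_gradient_descent_update(trainer, gradient, batch_size);
            /* gradient memory management omitted for brevity */
        }

        /* Fold in any lazily-applied regularization, then add the
         * regularization term so costs are comparable across lambdas */
        stochastic_gradient_descent_regularize_weights(trainer);
        total_cost += stochastic_gradient_descent_reg_cost(trainer, NULL, batch_size);

        if (total_cost < best_cost) {
            best_cost = total_cost;
            best_lambda = lambdas[i];
        }
    }
    return best_lambda;
}
```

The point of the corrected cost function in a sweep like this is that the data cost alone is not comparable across different lambdas; adding the regularization term per sweep point keeps the selection criterion consistent.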