Skip to content
This repository was archived by the owner on Jan 11, 2026. It is now read-only.

Commit e3ad27e

Browse files
logregress: add loss as model member and add model print function
1 parent bda3aea commit e3ad27e

2 files changed

Lines changed: 19 additions & 7 deletions

File tree

include/ml/logisticregress.h

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ along with this program. If not, see <https://www.gnu.org/licenses/>.
2727

2828
/* Trained logistic-regression model state. */
typedef struct {
    double bias;         /* intercept term, updated during fitting */
    double loss;         /* loss recorded by the most recent fit iteration */
    gsl_vector *weights; /* per-feature coefficients; NULL until fitted */
} LogisticRegressionModel;
3233

@@ -61,8 +62,10 @@ returns an Array of predictions for multiple x
6162
Array *logregress_predict_many(LogisticRegressionModel *model, Mat *x);
6263

6364
/*
64-
Score/test the logistic regression model based on known x and y values
65+
Display the model details
66+
67+
Prints the bias, loss and weights to stdout using a fixed "%.7lf" format
6568
*/
66-
double logregress_score(LogisticRegressionModel *model, Mat *x_test, Mat *y_test);
69+
void logregress_print(LogisticRegressionModel *model);
6770

6871
#endif

src/ml/logisticregress.c

Lines changed: 14 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ void logregress_set_max_iter(size_t iter) {
2020
LogisticRegressionModel *logregress_init() {
2121
LogisticRegressionModel *model = malloc_with_check(sizeof(LogisticRegressionModel));
2222
model->bias = 0.0;
23+
model->loss = 0.0;
2324
model->weights = NULL;
2425
return model;
2526
}
@@ -50,8 +51,8 @@ static double _loss(Mat *y_pred, Mat *y_true) {
5051

5152
double sum = 0.0;
5253
for (size_t i = 0; i < y_pred->rows; i++) {
53-
sum += (mat_get(y_true, i, 0) * log(mat_get(y_pred, i, 0) + 1e-4) +
54-
(i - mat_get(y_true, i, 0) * log(i - mat_get(y_pred, i, 0) + 1e-4)));
54+
sum += (mat_get(y_true, i, 0) * log(mat_get(y_pred, i, 0) + 1e-9) +
55+
(1 - mat_get(y_true, i, 0) * log(1 - mat_get(y_pred, i, 0) + 1e-9)));
5556
}
5657

5758
double loss = -(sum / y_pred->rows);
@@ -96,7 +97,7 @@ static void _update_parameters(gsl_vector *weights, double *bias, gsl_vector *er
9697
}
9798
}
9899

99-
static void _do_logregress_fit(Mat *x, Mat *y, gsl_vector *m_weights, double *m_bias) {
100+
static void _do_logregress_fit(Mat *x, Mat *y, gsl_vector *m_weights, double *m_bias, double *m_loss) {
100101
size_t n_cols = x->cols;
101102
size_t n_rows = x->rows;
102103

@@ -115,7 +116,7 @@ static void _do_logregress_fit(Mat *x, Mat *y, gsl_vector *m_weights, double *m_
115116
mat_set(y_pred, v, 0, sig);
116117
}
117118

118-
double loss = _loss(y_pred, y);
119+
*m_loss = _loss(y_pred, y);
119120
_get_gradient(x, y_pred, y, &error_b, error_w);
120121
_update_parameters(m_weights, m_bias, error_w, error_b);
121122

@@ -137,12 +138,14 @@ LogisticRegressionModel *logregress_fit(LogisticRegressionModel *model, Mat *X,
137138

138139
gsl_vector *weights = gsl_vector_calloc(X->cols);
139140
double bias = 0.0;
141+
double loss = 0.0;
140142

141143
for (size_t e = 0; e < MAX_ITER; e++) {
142-
_do_logregress_fit(X, Y, weights, &bias);
144+
_do_logregress_fit(X, Y, weights, &bias, &loss);
143145
}
144146
model->weights = weights;
145147
model->bias = bias;
148+
model->loss = loss;
146149
return model;
147150
}
148151

@@ -167,3 +170,9 @@ Array *logregress_predict_many(LogisticRegressionModel *model, Mat *x) {
167170
gsl_vector_free(curr_row);
168171
return pred_array;
169172
}
173+
174+
/*
Print a human-readable summary of the model to stdout.

Safe to call on an unfitted model: logregress_init leaves weights NULL,
and gsl_vector_fprintf must not be called with a NULL vector.
*/
void logregress_print(LogisticRegressionModel *model) {
    /* %p requires a void * argument; passing gsl_vector * directly is UB */
    printf("LogisticRegressionModel(bias: %.7lf, loss: %.7lf, weights: %p)\n",
           model->bias, model->loss, (void *) model->weights);
    if (model->weights != NULL) {
        printf("weights: \n");
        gsl_vector_fprintf(stdout, model->weights, "%.7lf");
    } else {
        printf("weights: (not fitted)\n");
    }
}

0 commit comments

Comments
 (0)