along with this program. If not, see <http://www.gnu.org/licenses/>. */
-/*
- References:
+/*
+ References:
1. "Coding Logistic Regression with Newton-Raphson", James McCaffrey
http://msdn.microsoft.com/en-us/magazine/jj618304.aspx
2. "SPSS Statistical Algorithms" Chapter LOGISTIC REGRESSION Algorithms
- The Newton Raphson method finds successive approximations to $\bf b$ where
+ The Newton Raphson method finds successive approximations to $\bf b$ where
approximation ${\bf b}_t$ is (hopefully) better than the previous ${\bf b}_{t-1}$.
$ {\bf b}_t = {\bf b}_{t -1} + ({\bf X}^T{\bf W}_{t-1}{\bf X})^{-1}{\bf X}^T({\bf y} - {\bf \pi}_{t-1})$
where:
- $\bf X$ is the $n \times p$ design matrix, $n$ being the number of cases,
+ $\bf X$ is the $n \times p$ design matrix, $n$ being the number of cases,
$p$ the number of parameters, \par
$\bf W$ is the diagonal matrix whose diagonal elements are
  $\hat{\pi}_0(1 - \hat{\pi}_0), \, \hat{\pi}_1(1 - \hat{\pi}_1)\dots \hat{\pi}_{n-1}(1 - \hat{\pi}_{n-1})$
#include <config.h>
-#include <gsl/gsl_blas.h>
+#include <gsl/gsl_blas.h>
#include <gsl/gsl_linalg.h>
#include <gsl/gsl_cdf.h>
*/
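As an aside (not part of the patch), the update step above can be written as a compact GSL sketch. This is illustrative only: the function name newton_raphson_step is hypothetical, and it assumes a dense n x p design matrix X, an outcome vector y and current estimates b already held in GSL objects (plus <math.h> and the GSL headers included above), whereas the code in this file streams cases through a casereader rather than forming X explicitly.

/* One Newton-Raphson update:  b <- b + (X' W X)^{-1} X' (y - pi).  */
static void
newton_raphson_step (const gsl_matrix *X, const gsl_vector *y, gsl_vector *b)
{
  const size_t n = X->size1;
  const size_t p = X->size2;
  size_t i;

  gsl_vector *pi = gsl_vector_alloc (n);      /* predicted probabilities  */
  gsl_vector *resid = gsl_vector_alloc (n);   /* y - pi                   */
  gsl_matrix *WX = gsl_matrix_alloc (n, p);   /* W X, with W diagonal     */
  gsl_matrix *H = gsl_matrix_alloc (p, p);    /* Hessian  X' W X          */
  gsl_vector *g = gsl_vector_alloc (p);       /* gradient X' (y - pi)     */
  gsl_vector *d = gsl_vector_alloc (p);       /* update direction         */

  /* pi_i = logit^{-1} (x_i . b);  row i of W X is pi_i (1 - pi_i) x_i */
  for (i = 0; i < n; i++)
    {
      gsl_vector_const_view xi = gsl_matrix_const_row (X, i);
      gsl_vector_view wxi = gsl_matrix_row (WX, i);
      double eta, p_i;

      gsl_blas_ddot (&xi.vector, b, &eta);
      p_i = 1.0 / (1.0 + exp (-eta));
      gsl_vector_set (pi, i, p_i);

      gsl_vector_memcpy (&wxi.vector, &xi.vector);
      gsl_vector_scale (&wxi.vector, p_i * (1.0 - p_i));
    }

  /* H = X' (W X)   and   g = X' (y - pi) */
  gsl_blas_dgemm (CblasTrans, CblasNoTrans, 1.0, X, WX, 0.0, H);
  gsl_vector_memcpy (resid, y);
  gsl_vector_sub (resid, pi);
  gsl_blas_dgemv (CblasTrans, 1.0, X, resid, 0.0, g);

  /* Solve H d = g (H is symmetric positive definite), then b <- b + d. */
  gsl_linalg_cholesky_decomp (H);
  gsl_linalg_cholesky_solve (H, g, d);
  gsl_vector_add (b, d);

  gsl_vector_free (pi);
  gsl_vector_free (resid);
  gsl_matrix_free (WX);
  gsl_matrix_free (H);
  gsl_vector_free (g);
  gsl_vector_free (d);
}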
struct lr_result
{
- /* Used to indicate if a pass should flag a warning when
+ /* Used to indicate if a pass should flag a warning when
     invalid (i.e. negative or missing) weight values are encountered */
bool warn_bad_weight;
/* The estimates of the predictor coefficients */
gsl_vector *beta_hat;
- /* The predicted classifications:
+ /* The predicted classifications:
True Negative, True Positive, False Negative, False Positive */
double tn, tp, fn, fp;
};
static void output_depvarmap (const struct lr_spec *cmd, const struct lr_result *);
-static void output_variables (const struct lr_spec *cmd,
+static void output_variables (const struct lr_spec *cmd,
const struct lr_result *);
static void output_model_summary (const struct lr_result *,
/* Return the value of case C corresponding to the INDEX'th entry in the
model */
static double
-predictor_value (const struct ccase *c,
- const struct variable **x, size_t n_x,
+predictor_value (const struct ccase *c,
+ const struct variable **x, size_t n_x,
const struct categoricals *cats,
size_t index)
{
/* Values of the scalar predictor variables */
- if (index < n_x)
+ if (index < n_x)
return case_data (c, x[index])->f;
/* Coded values of categorical predictor variables (or interactions) */
   Return the estimated probability pi_hat (that is, the inverse logit of the
   linear predictor) corresponding to the coefficient estimates beta_hat for case C
*/
-static double
-pi_hat (const struct lr_spec *cmd,
+static double
+pi_hat (const struct lr_spec *cmd,
const struct lr_result *res,
const struct variable **x, size_t n_x,
const struct ccase *c)
pi += gsl_vector_get (res->beta_hat, res->beta_hat->size - 1);
n_coeffs--;
}
-
+
for (v0 = 0; v0 < n_coeffs; ++v0)
{
- pi += gsl_vector_get (res->beta_hat, v0) *
+ pi += gsl_vector_get (res->beta_hat, v0) *
predictor_value (c, x, n_x, res->cats, v0);
}
/*
Calculates the Hessian matrix X' V X,
where: X is the n by N_X matrix comprising the n cases in INPUT
- V is a diagonal matrix { (pi_hat_0)(1 - pi_hat_0), (pi_hat_1)(1 - pi_hat_1), ... (pi_hat_{N-1})(1 - pi_hat_{N-1})}
+ V is a diagonal matrix { (pi_hat_0)(1 - pi_hat_0), (pi_hat_1)(1 - pi_hat_1), ... (pi_hat_{N-1})(1 - pi_hat_{N-1})}
   (the derivatives of the predicted values with respect to the linear predictor)
   If ALL the predicted values are close to zero or one (so that their
   derivatives are close to zero), then CONVERGED will be set to true.
*/
static void
-hessian (const struct lr_spec *cmd,
+hessian (const struct lr_spec *cmd,
struct lr_result *res,
struct casereader *input,
const struct variable **x, size_t n_x,
/* Calculates the value X' (y - pi)
- where X is the design model,
+   where X is the design matrix,
    y is the vector of observed values of the dependent variable,
    pi is the vector of estimates for y
*freq += weight;
}
-static void
+static void
frq_destroy (const void *aux1 UNUSED, void *aux2 UNUSED, void *user_data UNUSED)
{
free (user_data);
\f
-/*
+/*
  Makes an initial pass through the data, doing the following:
* Checks that the dependent variable is dichotomous,
double weight = dict_get_case_weight (cmd->dict, c, &res->warn_bad_weight);
const union value *depval = case_data (c, cmd->dep_var);
+ if (var_is_value_missing (cmd->dep_var, depval, cmd->exclude))
+ {
+ missing = true;
+ }
+ else
for (v = 0; v < cmd->n_indep_vars; ++v)
{
const union value *val = case_data (c, cmd->indep_vars[v]);
)
{
msg (ME, _("Dependent variable's values are not dichotomous."));
+ case_unref (c);
goto error;
}
}
work.warn_bad_weight = true;
work.cats = NULL;
work.beta_hat = NULL;
+ work.hessian = NULL;
  /* Get the initial estimates of \beta and their standard errors,
     and perform other auxiliary initialisation. */
if (! initial_pass (cmd, &work, input))
- return false;
-
+ goto error;
+
for (i = 0; i < cmd->n_cat_predictors; ++i)
{
if (1 >= categoricals_n_count (work.cats, i))
{
struct string str;
ds_init_empty (&str);
-
+
interaction_to_string (cmd->cat_predictors[i], &str);
msg (ME, _("Category %s does not have at least two distinct values. Logistic regression will not be run."),
ds_cstr(&str));
ds_destroy (&str);
- return false;
+ goto error;
}
}
NULL,
NULL);
+ input = casereader_create_filter_missing (input,
+ &cmd->dep_var,
+ 1,
+ cmd->exclude,
+ NULL,
+ NULL);
work.hessian = gsl_matrix_calloc (work.beta_hat->size, work.beta_hat->size);
double min, max;
gsl_vector *v ;
-
+
hessian (cmd, &work, input,
cmd->predictor_vars, cmd->n_predictor_vars,
&converged);
if (converged)
break;
}
- casereader_destroy (input);
- if ( ! converged)
+
+ if ( ! converged)
msg (MW, _("Estimation terminated at iteration number %d because maximum iterations has been reached"), i );
output_classification_table (cmd, &work);
output_variables (cmd, &work);
+ casereader_destroy (input);
gsl_matrix_free (work.hessian);
- gsl_vector_free (work.beta_hat);
-
+ gsl_vector_free (work.beta_hat);
categoricals_destroy (work.cats);
return true;
+
+ error:
+ casereader_destroy (input);
+ gsl_matrix_free (work.hessian);
+ gsl_vector_free (work.beta_hat);
+ categoricals_destroy (work.cats);
+
+ return false;
}
struct variable_node
{
if (vn->var == var)
break;
-
+
fprintf (stderr, "Warning: Hash table collision\n");
}
-
+
return vn;
}
int
cmd_logistic (struct lexer *lexer, struct dataset *ds)
{
+ int i;
/* Temporary location for the predictor variables.
These may or may not include the categorical predictors */
const struct variable **pred_vars;
if (! (lr.dep_var = parse_variable_const (lexer, lr.dict)))
goto error;
- lex_force_match (lexer, T_WITH);
+ if (! lex_force_match (lexer, T_WITH))
+ goto error;
if (!parse_variables_const (lexer, lr.dict,
&pred_vars, &n_pred_vars,
sizeof (*lr.cat_predictors) * ++lr.n_cat_predictors);
lr.cat_predictors[lr.n_cat_predictors - 1] = 0;
}
- while (parse_design_interaction (lexer, lr.dict,
+ while (parse_design_interaction (lexer, lr.dict,
lr.cat_predictors + lr.n_cat_predictors - 1));
lr.n_cat_predictors--;
}
lr.print |= PRINT_CI;
if (lex_force_match (lexer, T_LPAREN))
{
- if (! lex_force_int (lexer))
+ if (! lex_force_num (lexer))
{
lex_error (lexer, NULL);
goto error;
}
- lr.confidence = lex_integer (lexer);
+ lr.confidence = lex_number (lexer);
lex_get (lexer);
if ( ! lex_force_match (lexer, T_RPAREN))
{
goto error;
}
cp = lex_number (lexer);
-
+
if (cp < 0 || cp > 1.0)
{
msg (ME, _("Cut point value must be in the range [0,1]"));
}
lr.ilogit_cut_point = - log (1/cp - 1);
-
- /* Copy the predictor variables from the temporary location into the
+
+ /* Copy the predictor variables from the temporary location into the
final one, dropping any categorical variables which appear there.
FIXME: This is O(NxM).
*/
{
vn = xmalloc (sizeof *vn);
vn->var = ivar;
-
+
hmap_insert (&allvars, &vn->node, hash);
}
free (vn);
}
hmap_destroy (&allvars);
- }
+ }
  /* logistic regression for each split group */
ok = proc_commit (ds) && ok;
}
+ for (i = 0 ; i < lr.n_cat_predictors; ++i)
+ {
+ interaction_destroy (lr.cat_predictors[i]);
+ }
free (lr.predictor_vars);
free (lr.cat_predictors);
free (lr.indep_vars);
error:
+ for (i = 0 ; i < lr.n_cat_predictors; ++i)
+ {
+ interaction_destroy (lr.cat_predictors[i]);
+ }
free (lr.predictor_vars);
free (lr.cat_predictors);
free (lr.indep_vars);
tab_text (t, 0, 1 + heading_rows, 0, ds_cstr (&str));
- tab_double (t, 1, 0 + heading_rows, 0, map_dependent_var (cmd, res, &res->y0), &F_8_0);
- tab_double (t, 1, 1 + heading_rows, 0, map_dependent_var (cmd, res, &res->y1), &F_8_0);
+ tab_double (t, 1, 0 + heading_rows, 0, map_dependent_var (cmd, res, &res->y0), NULL, RC_INTEGER);
+ tab_double (t, 1, 1 + heading_rows, 0, map_dependent_var (cmd, res, &res->y1), NULL, RC_INTEGER);
ds_destroy (&str);
tab_submit (t);
/* Show the Variables in the Equation box */
static void
-output_variables (const struct lr_spec *cmd,
+output_variables (const struct lr_spec *cmd,
const struct lr_result *res)
{
int row = 0;
tab_text (t, 8, row, TAB_CENTER | TAT_TITLE, _("Lower"));
tab_text (t, 9, row, TAB_CENTER | TAT_TITLE, _("Upper"));
}
-
+
for (row = heading_rows ; row < nr; ++row)
{
const int idx = row - heading_rows - idx_correction;
if (idx < cmd->n_predictor_vars)
{
- tab_text (t, 1, row, TAB_LEFT | TAT_TITLE,
+ tab_text (t, 1, row, TAB_LEFT | TAT_TITLE,
var_to_string (cmd->predictor_vars[idx]));
}
else if (i < cmd->n_cat_predictors)
/* Calculate the Wald statistic,
   which is \beta' C^-1 \beta,
   where \beta is the vector of the coefficient estimates comprising this
-  categorial variable. and C is the corresponding submatrix of the
+  categorical variable, and C is the corresponding submatrix of the
   Hessian matrix.
*/
gsl_matrix_const_view mv =
gsl_blas_dgemv (CblasTrans, 1.0, subhessian, &vv.vector, 0, temp);
gsl_blas_ddot (temp, &vv.vector, &wald);
- tab_double (t, 4, row, 0, wald, 0);
- tab_double (t, 5, row, 0, df, &F_8_0);
- tab_double (t, 6, row, 0, gsl_cdf_chisq_Q (wald, df), 0);
+ tab_double (t, 4, row, 0, wald, NULL, RC_OTHER);
+ tab_double (t, 5, row, 0, df, NULL, RC_INTEGER);
+ tab_double (t, 6, row, 0, gsl_cdf_chisq_Q (wald, df), NULL, RC_PVALUE);
idx_correction ++;
summary = true;
tab_text (t, 1, row, TAB_LEFT | TAT_TITLE, _("Constant"));
}
- tab_double (t, 2, row, 0, b, 0);
- tab_double (t, 3, row, 0, sqrt (sigma2), 0);
- tab_double (t, 4, row, 0, wald, 0);
- tab_double (t, 5, row, 0, df, &F_8_0);
- tab_double (t, 6, row, 0, gsl_cdf_chisq_Q (wald, df), 0);
- tab_double (t, 7, row, 0, exp (b), 0);
+ tab_double (t, 2, row, 0, b, NULL, RC_OTHER);
+ tab_double (t, 3, row, 0, sqrt (sigma2), NULL, RC_OTHER);
+ tab_double (t, 4, row, 0, wald, NULL, RC_OTHER);
+ tab_double (t, 5, row, 0, df, NULL, RC_INTEGER);
+ tab_double (t, 6, row, 0, gsl_cdf_chisq_Q (wald, df), NULL, RC_PVALUE);
+ tab_double (t, 7, row, 0, exp (b), NULL, RC_OTHER);
if (cmd->print & PRINT_CI)
{
+ int last_ci = nr;
double wc = gsl_cdf_ugaussian_Pinv (0.5 + cmd->confidence / 200.0);
wc *= sqrt (sigma2);
- if (idx < cmd->n_predictor_vars)
+ if (cmd->constant)
+ last_ci--;
+
+ if (row < last_ci)
{
- tab_double (t, 8, row, 0, exp (b - wc), 0);
- tab_double (t, 9, row, 0, exp (b + wc), 0);
+ tab_double (t, 8, row, 0, exp (b - wc), NULL, RC_OTHER);
+ tab_double (t, 9, row, 0, exp (b + wc), NULL, RC_OTHER);
}
}
}
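A note on the interval printed above (not part of the patch): it is the usual Wald confidence interval for the odds ratio. With the confidence level given as a percentage, $z = \Phi^{-1}(0.5 + \mathrm{confidence}/200)$, and the bounds are $\exp(b \pm z\sqrt{\sigma^2})$, which is exactly what the wc computation above evaluates.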
tab_text (t, 0, 0, TAB_LEFT | TAT_TITLE, _("Step 1"));
tab_text (t, 1, 0, TAB_CENTER | TAT_TITLE, _("-2 Log likelihood"));
- tab_double (t, 1, 1, 0, -2 * log_likelihood, 0);
+ tab_double (t, 1, 1, 0, -2 * log_likelihood, NULL, RC_OTHER);
tab_text (t, 2, 0, TAB_CENTER | TAT_TITLE, _("Cox & Snell R Square"));
cox = 1.0 - exp((initial_log_likelihood - log_likelihood) * (2 / res->cc));
- tab_double (t, 2, 1, 0, cox, 0);
+ tab_double (t, 2, 1, 0, cox, NULL, RC_OTHER);
tab_text (t, 3, 0, TAB_CENTER | TAT_TITLE, _("Nagelkerke R Square"));
- tab_double (t, 3, 1, 0, cox / ( 1.0 - exp(initial_log_likelihood * (2 / res->cc))), 0);
+ tab_double (t, 3, 1, 0, cox / ( 1.0 - exp(initial_log_likelihood * (2 / res->cc))), NULL, RC_OTHER);
tab_submit (t);
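For reference (not part of the patch), the statistics in this table follow the usual definitions. Writing $LL_0$ for the initial (intercept-only) log likelihood, $LL$ for the final log likelihood and $n$ for the weighted case count res->cc, the cells computed above are
$-2LL$, \qquad
$R^2_{CS} = 1 - \exp\!\left(\frac{2(LL_0 - LL)}{n}\right)$, \qquad
$R^2_{N} = \dfrac{R^2_{CS}}{1 - \exp\!\left(\frac{2\,LL_0}{n}\right)}$.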
tab_text (t, 0, 2, TAB_LEFT | TAT_TITLE, _("Missing Cases"));
tab_text (t, 0, 3, TAB_LEFT | TAT_TITLE, _("Total"));
- tab_double (t, 1, 1, 0, res->n_nonmissing, &F_8_0);
- tab_double (t, 1, 2, 0, res->n_missing, &F_8_0);
+ tab_double (t, 1, 1, 0, res->n_nonmissing, NULL, RC_INTEGER);
+ tab_double (t, 1, 2, 0, res->n_missing, NULL, RC_INTEGER);
total = res->n_nonmissing + res->n_missing;
- tab_double (t, 1, 3, 0, total , &F_8_0);
+ tab_double (t, 1, 3, 0, total , NULL, RC_INTEGER);
- tab_double (t, 2, 1, 0, 100 * res->n_nonmissing / (double) total, 0);
- tab_double (t, 2, 2, 0, 100 * res->n_missing / (double) total, 0);
- tab_double (t, 2, 3, 0, 100 * total / (double) total, 0);
+ tab_double (t, 2, 1, 0, 100 * res->n_nonmissing / (double) total, NULL, RC_OTHER);
+ tab_double (t, 2, 2, 0, 100 * res->n_missing / (double) total, NULL, RC_OTHER);
+ tab_double (t, 2, 3, 0, 100 * total / (double) total, NULL, RC_OTHER);
tab_submit (t);
}
nr = heading_rows + total_cats;
t = tab_create (nc, nr);
+ tab_set_format (t, RC_WEIGHT, wfmt);
+
tab_title (t, _("Categorical Variables' Codings"));
tab_headers (t, heading_columns, 0, heading_rows, 0);
struct string str;
const struct ccase *c = categoricals_get_case_by_category_real (res->cats, v, cat);
const double *freq = categoricals_get_user_data_by_category_real (res->cats, v, cat);
-
+
int x;
ds_init_empty (&str);
if (x < cat_predictors->n_vars - 1)
ds_put_cstr (&str, " ");
}
-
+
tab_text (t, 1, heading_rows + r, 0, ds_cstr (&str));
ds_destroy (&str);
- tab_double (t, 2, heading_rows + r, 0, *freq, wfmt);
+ tab_double (t, 2, heading_rows + r, 0, *freq, NULL, RC_WEIGHT);
for (x = 0; x < df; ++x)
{
- tab_double (t, heading_columns + 1 + x, heading_rows + r, 0, (cat == x), &F_8_0);
+ tab_double (t, heading_columns + 1 + x, heading_rows + r, 0, (cat == x), NULL, RC_INTEGER);
}
++r;
}
}
-static void
+static void
output_classification_table (const struct lr_spec *cmd, const struct lr_result *res)
{
const struct fmt_spec *wfmt =
const int nr = heading_rows + 3;
struct tab_table *t = tab_create (nc, nr);
+ tab_set_format (t, RC_WEIGHT, wfmt);
ds_init_empty (&sv0);
ds_init_empty (&sv1);
tab_joint_text (t, heading_columns, 0, nc - 1, 0,
TAB_CENTER | TAT_TITLE, _("Predicted"));
- tab_joint_text (t, heading_columns, 1, heading_columns + 1, 1,
+ tab_joint_text (t, heading_columns, 1, heading_columns + 1, 1,
0, var_to_string (cmd->dep_var) );
tab_joint_text (t, 1, 2, 2, 2,
ds_destroy (&sv0);
ds_destroy (&sv1);
- tab_double (t, heading_columns, 3, 0, res->tn, wfmt);
- tab_double (t, heading_columns + 1, 4, 0, res->tp, wfmt);
+ tab_double (t, heading_columns, 3, 0, res->tn, NULL, RC_WEIGHT);
+ tab_double (t, heading_columns + 1, 4, 0, res->tp, NULL, RC_WEIGHT);
- tab_double (t, heading_columns + 1, 3, 0, res->fp, wfmt);
- tab_double (t, heading_columns, 4, 0, res->fn, wfmt);
+ tab_double (t, heading_columns + 1, 3, 0, res->fp, NULL, RC_WEIGHT);
+ tab_double (t, heading_columns, 4, 0, res->fn, NULL, RC_WEIGHT);
- tab_double (t, heading_columns + 2, 3, 0, 100 * res->tn / (res->tn + res->fp), 0);
- tab_double (t, heading_columns + 2, 4, 0, 100 * res->tp / (res->tp + res->fn), 0);
+ tab_double (t, heading_columns + 2, 3, 0, 100 * res->tn / (res->tn + res->fp), NULL, RC_OTHER);
+ tab_double (t, heading_columns + 2, 4, 0, 100 * res->tp / (res->tp + res->fn), NULL, RC_OTHER);
- tab_double (t, heading_columns + 2, 5, 0,
- 100 * (res->tp + res->tn) / (res->tp + res->tn + res->fp + res->fn), 0);
+ tab_double (t, heading_columns + 2, 5, 0,
+ 100 * (res->tp + res->tn) / (res->tp + res->tn + res->fp + res->fn), NULL, RC_OTHER);
tab_submit (t);