1 /* pspp - a program for statistical analysis.
2 Copyright (C) 2012 Free Software Foundation, Inc.
4 This program is free software: you can redistribute it and/or modify
5 it under the terms of the GNU General Public License as published by
6 the Free Software Foundation, either version 3 of the License, or
7 (at your option) any later version.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU General Public License for more details.
14 You should have received a copy of the GNU General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20 1. "Coding Logistic Regression with Newton-Raphson", James McCaffrey
21 http://msdn.microsoft.com/en-us/magazine/jj618304.aspx
23 2. "SPSS Statistical Algorithms" Chapter LOGISTIC REGRESSION Algorithms
26 The Newton Raphson method finds successive approximations to $\bf b$ where
27 approximation ${\bf b}_t$ is (hopefully) better than the previous ${\bf b}_{t-1}$.
29 $ {\bf b}_t = {\bf b}_{t -1} + ({\bf X}^T{\bf W}_{t-1}{\bf X})^{-1}{\bf X}^T({\bf y} - {\bf \pi}_{t-1})$
32 $\bf X$ is the $n \times p$ design matrix, $n$ being the number of cases,
33 $p$ the number of parameters, \par
34 $\bf W$ is the diagonal matrix whose diagonal elements are
35 $\hat{\pi}_0(1 - \hat{\pi}_0), \, \hat{\pi}_1(1 - \hat{\pi}_1), \dots, \hat{\pi}_{n-1}(1 - \hat{\pi}_{n-1})$
42 #include <gsl/gsl_blas.h>
44 #include <gsl/gsl_linalg.h>
45 #include <gsl/gsl_cdf.h>
46 #include <gsl/gsl_matrix.h>
47 #include <gsl/gsl_vector.h>
50 #include "data/case.h"
51 #include "data/casegrouper.h"
52 #include "data/casereader.h"
53 #include "data/dataset.h"
54 #include "data/dictionary.h"
55 #include "data/format.h"
56 #include "data/value.h"
57 #include "language/command.h"
58 #include "language/dictionary/split-file.h"
59 #include "language/lexer/lexer.h"
60 #include "language/lexer/value-parser.h"
61 #include "language/lexer/variable-parser.h"
62 #include "libpspp/assertion.h"
63 #include "libpspp/ll.h"
64 #include "libpspp/message.h"
65 #include "libpspp/misc.h"
66 #include "math/categoricals.h"
67 #include "output/tab.h"
70 #define _(msgid) gettext (msgid)
75 #define PRINT_EACH_STEP 0x01
76 #define PRINT_SUMMARY 0x02
77 #define PRINT_CORR 0x04
78 #define PRINT_ITER 0x08
79 #define PRINT_GOODFIT 0x10
83 #define PRINT_DEFAULT (PRINT_SUMMARY | PRINT_EACH_STEP)
86 The constant parameters of the procedure.
87 That is, those which are set by the user.
91 /* The dependent variable */
92 const struct variable *dep_var;
94 size_t n_predictor_vars;
95 const struct variable **predictor_vars;
97 /* Which classes of missing vars are to be excluded */
98 enum mv_class exclude;
100 /* The weight variable */
101 const struct variable *wv;
103 const struct dictionary *dict;
105 /* True iff the constant (intercept) is to be included in the model */
108 /* The maximum number of iterations */
111 /* Other iteration limiting conditions */
116 /* The confidence interval (in percent) */
119 /* What results should be presented */
125 /* The results and intermediate result of the procedure.
126 These are mutated as the procedure runs. Used for
127 temporary variables etc.
131 bool warn_bad_weight;
134 /* The two values of the dependent variable. */
139 /* The sum of caseweights */
142 casenumber n_missing;
143 casenumber n_nonmissing;
148 Convert INPUT into a dichotomous scalar. For simple cases, this is a 1:1 mapping
149 The return value is always either 0 or 1
/* Maps a raw value of the dependent variable to the internal 0/1 coding
   by comparing INPUT against the two observed values res->y0 and res->y1.
   NOTE(review): this extract is truncated (gaps in the embedded numbering);
   the return statements for the two branches are not visible here.
   Presumably y0 maps to 0 and y1 maps to 1 — confirm against full source. */
152 map_dependent_var (const struct lr_spec *cmd, const struct lr_result *res, const union value *input)
154 int width = var_get_width (cmd->dep_var);
155 if (value_equal (input, &res->y0, width))
158 if (value_equal (input, &res->y1, width))
168 static void output_depvarmap (const struct lr_spec *cmd, const struct lr_result *);
170 static void output_variables (const struct lr_spec *cmd,
174 static void output_model_summary (const struct lr_result *,
175 double initial_likelihood, double likelihood);
177 static void case_processing_summary (const struct lr_result *);
181 Return the probability estimator (that is the estimator of logit(y) )
182 corresponding to the coefficient estimator beta_hat for case C
/* NOTE(review): despite the wording above, the value returned after the
   logistic transform below is pi (a probability in (0,1)), not logit(y). */
185 pi_hat (const struct lr_spec *cmd,
186 const gsl_vector *beta_hat,
187 const struct variable **x, size_t n_x,
188 const struct ccase *c)
/* Linear predictor: dot product of the coefficients with the case's
   predictor values. */
192 for (v0 = 0; v0 < n_x; ++v0)
194 pi += gsl_vector_get (beta_hat, v0) *
195 case_data (c, x[v0])->f;
/* The last element of beta_hat is presumably the intercept term —
   the guard selecting this path is not visible in this extract. */
199 pi += gsl_vector_get (beta_hat, beta_hat->size - 1);
/* Logistic (inverse-logit) link: pi = 1 / (1 + e^-eta). */
201 pi = 1.0 / (1.0 + exp(-pi));
208 Calculates the Hessian matrix X' V X,
209 where: X is the n by N_X matrix comprising the n cases in INPUT
210 V is a diagonal matrix { (pi_hat_0)(1 - pi_hat_0), (pi_hat_1)(1 - pi_hat_1), ... (pi_hat_{N-1})(1 - pi_hat_{N-1})}
211 (the partial derivative of the predicted values)
213 If ALL predicted values derivatives are close to zero or one, then CONVERGED
217 hessian (const struct lr_spec *cmd,
218 struct lr_result *res,
219 struct casereader *input,
220 const struct variable **x, size_t n_x,
221 const gsl_vector *beta_hat,
225 struct casereader *reader;
/* Accumulator for X'VX; calloc gives an all-zero matrix to sum into. */
227 gsl_matrix *output = gsl_matrix_calloc (beta_hat->size, beta_hat->size);
/* Tracks the largest diagonal weight seen; used below to detect the
   degenerate all-0/all-1 prediction case. */
229 double max_w = -DBL_MAX;
/* One pass over a clone of INPUT so the caller's reader is untouched. */
231 for (reader = casereader_clone (input);
232 (c = casereader_read (reader)) != NULL; case_unref (c))
235 double pi = pi_hat (cmd, beta_hat, x, n_x, c);
237 double weight = dict_get_case_weight (cmd->dict, c, &res->warn_bad_weight);
/* w = pi(1-pi): this case's diagonal element of V. */
238 double w = pi * (1 - pi);
/* Indices beyond n_x address the constant column (value 1.0). */
243 for (v0 = 0; v0 < beta_hat->size; ++v0)
245 double in0 = v0 < n_x ? case_data (c, x[v0])->f : 1.0;
246 for (v1 = 0; v1 < beta_hat->size; ++v1)
248 double in1 = v1 < n_x ? case_data (c, x[v1])->f : 1.0 ;
249 double *o = gsl_matrix_ptr (output, v0, v1);
254 casereader_destroy (reader);
/* All weights effectively zero => every predicted value is at 0 or 1.
   NOTE(review): the statement setting the CONVERGED flag is not visible
   in this extract. */
257 if ( max_w < cmd->min_epsilon)
260 msg (MN, _("All predicted values are either 1 or 0"));
267 /* Calculates the value X' (y - pi)
268 where X is the design model,
269 y is the vector of observed dependent variables
270 pi is the vector of estimates for y
272 As a side effect, the likelihood is stored in LIKELIHOOD
275 xt_times_y_pi (const struct lr_spec *cmd,
276 struct lr_result *res,
277 struct casereader *input,
278 const struct variable **x, size_t n_x,
279 const struct variable *y_var,
280 const gsl_vector *beta_hat,
283 struct casereader *reader;
/* Accumulator for X'(y - pi); calloc zero-initializes. */
285 gsl_vector *output = gsl_vector_calloc (beta_hat->size);
/* One pass over a clone of INPUT so the caller's reader is untouched. */
288 for (reader = casereader_clone (input);
289 (c = casereader_read (reader)) != NULL; case_unref (c))
292 double pi = pi_hat (cmd, beta_hat, x, n_x, c);
293 double weight = dict_get_case_weight (cmd->dict, c, &res->warn_bad_weight);
/* y is the 0/1 internal coding of the dependent value for this case. */
296 double y = map_dependent_var (cmd, res, case_data (c, y_var));
/* Accumulate the (weighted) Bernoulli likelihood contribution:
   pi^(w*y) * (1-pi)^(w*(1-y)). */
298 *likelihood *= pow (pi, weight * y) * pow (1 - pi, weight * (1 - y));
/* Indices beyond n_x address the constant column (value 1.0). */
300 for (v0 = 0; v0 < beta_hat->size; ++v0)
302 double in0 = v0 < n_x ? case_data (c, x[v0])->f : 1.0;
303 double *o = gsl_vector_ptr (output, v0);
304 *o += in0 * (y - pi) * weight;
308 casereader_destroy (reader);
315 Makes an initial pass through the data, checks that the dependent variable is
316 dichotomous, and calculates necessary initial values.
318 Returns an initial value for \hat\beta the vector of estimators of \beta
/* NOTE(review): this extract is missing lines throughout (gaps in the
   embedded numbering): missing-case accounting, the v0set/v1set updates,
   the sum accumulation, and the NULL-return error paths are not visible. */
321 beta_hat_initial (const struct lr_spec *cmd, struct lr_result *res, struct casereader *input)
323 const int width = var_get_width (cmd->dep_var);
326 struct casereader *reader;
/* One coefficient per predictor; presumably plus one for the constant
   when the intercept is included — that adjustment is not visible here. */
335 size_t n_coefficients = cmd->n_predictor_vars;
339 b0 = gsl_vector_calloc (n_coefficients);
/* First pass: collect the two distinct dependent values and totals. */
342 for (reader = casereader_clone (input);
343 (c = casereader_read (reader)) != NULL; case_unref (c))
346 bool missing = false;
347 double weight = dict_get_case_weight (cmd->dict, c, &res->warn_bad_weight);
348 const union value *depval = case_data (c, cmd->dep_var);
/* Flag the case as missing if any predictor value is user/system missing
   according to the MISSING= setting. */
350 for (v = 0; v < cmd->n_predictor_vars; ++v)
352 const union value *val = case_data (c, cmd->predictor_vars[v]);
353 if (var_is_value_missing (cmd->predictor_vars[v], val, cmd->exclude))
/* First distinct dependent value becomes y0 ... */
370 value_clone (&res->y0, depval, width);
/* ... the second distinct value becomes y1. */
375 if ( !value_equal (&res->y0, depval, width))
377 value_clone (&res->y1, depval, width);
/* Any third distinct value means the variable is not dichotomous. */
383 if (! value_equal (&res->y0, depval, width)
385 ! value_equal (&res->y1, depval, width)
388 msg (ME, _("Dependent variable's values are not dichotomous."));
393 if (v0set && value_equal (&res->y0, depval, width))
396 if (v1set && value_equal (&res->y1, depval, width))
402 casereader_destroy (reader);
406 /* Ensure that Y0 is less than Y1. Otherwise the mapping gets
407 inverted, which is confusing to users */
408 if (var_is_numeric (cmd->dep_var) && value_compare_3way (&res->y0, &res->y1, width) > 0)
411 value_clone (&tmp, &res->y0, width);
412 value_copy (&res->y0, &res->y1, width);
413 value_copy (&res->y1, &tmp, width);
414 value_destroy (&tmp, width);
/* Initial intercept estimate: the log-odds of the observed mean,
   logit(mean) = log(mean / (1 - mean)); all slope coefficients stay 0. */
420 double mean = sum / res->cc;
421 gsl_vector_set (b0, b0->size - 1, log (mean / (1 - mean)));
427 casereader_destroy (reader);
/* Runs the logistic regression for one split-file group: computes the
   initial estimates, iterates Newton-Raphson to convergence (or to
   max_iter), and emits the output tables.
   NOTE(review): many lines are missing from this extract — the declarations
   of i/j/m/v/min/max, the convergence-flag assignments, and the early-exit
   paths are not visible. */
434 run_lr (const struct lr_spec *cmd, struct casereader *input,
435 const struct dataset *ds UNUSED)
439 gsl_vector *beta_hat;
442 bool converged = false;
/* Likelihoods start at -1 as a "not yet computed" sentinel (a real
   likelihood is always in [0, 1]). */
443 double likelihood = -1;
444 double prev_likelihood = -1;
445 double initial_likelihood = -1;
447 struct lr_result work;
449 work.n_nonmissing = 0;
450 work.warn_bad_weight = true;
453 /* Get the initial estimates of \beta and their standard errors */
454 beta_hat = beta_hat_initial (cmd, &work, input);
455 if (NULL == beta_hat)
458 output_depvarmap (cmd, &work);
460 se = gsl_vector_alloc (beta_hat->size);
462 case_processing_summary (&work);
/* Drop cases with missing predictor values before iterating. */
465 input = casereader_create_filter_missing (input,
467 cmd->n_predictor_vars,
473 /* Start the Newton Raphson iteration process... */
474 for( i = 0 ; i < cmd->max_iter ; ++i)
/* m = X'WX (the information matrix) ... */
480 m = hessian (cmd, &work, input,
481 cmd->predictor_vars, cmd->n_predictor_vars,
/* ... inverted in place via Cholesky (m is symmetric positive definite). */
485 gsl_linalg_cholesky_decomp (m);
486 gsl_linalg_cholesky_invert (m);
/* v = X'(y - pi); also updates the likelihood as a side effect. */
488 v = xt_times_y_pi (cmd, &work, input,
489 cmd->predictor_vars, cmd->n_predictor_vars,
/* Newton-Raphson step: delta = (X'WX)^{-1} X'(y - pi). */
496 gsl_vector *delta = gsl_vector_alloc (v->size);
497 gsl_blas_dgemv (CblasNoTrans, 1.0, m, v, 0, delta);
/* The diagonal of the inverse information matrix gives the coefficient
   variances; presumably square-rooted later (see the truncated loop at
   the end of this function) to yield standard errors. */
500 for (j = 0; j < se->size; ++j)
502 double *ptr = gsl_vector_ptr (se, j);
503 *ptr = gsl_matrix_get (m, j, j);
/* b_t = b_{t-1} + delta */
508 gsl_vector_add (beta_hat, delta);
/* BCON criterion: stop when no coefficient changed by >= bcon. */
510 gsl_vector_minmax (delta, &min, &max);
512 if ( fabs (min) < cmd->bcon && fabs (max) < cmd->bcon)
514 msg (MN, _("Estimation terminated at iteration number %d because parameter estimates changed by less than %g"),
519 gsl_vector_free (delta);
/* LCON criterion: stop when the relative change in -2LL is small. */
522 if ( prev_likelihood >= 0)
524 if (-log (likelihood) > -(1.0 - cmd->lcon) * log (prev_likelihood))
526 msg (MN, _("Estimation terminated at iteration number %d because Log Likelihood decreased by less than %g%%"), i + 1, 100 * cmd->lcon);
/* NOTE(review): initial_likelihood appears to be assigned every
   iteration here; the guard restricting it to the first iteration is
   presumably in a missing line — confirm against full source. */
531 initial_likelihood = likelihood;
532 prev_likelihood = likelihood;
537 casereader_destroy (input);
538 assert (initial_likelihood >= 0);
/* Convert the variances stored in se into standard errors. */
540 for (i = 0; i < se->size; ++i)
542 double *ptr = gsl_vector_ptr (se, i);
546 output_model_summary (&work, initial_likelihood, likelihood);
547 output_variables (cmd, beta_hat, se);
549 gsl_vector_free (beta_hat);
550 gsl_vector_free (se);
555 /* Parse the LOGISTIC REGRESSION command syntax */
/* Entry point for the LOGISTIC REGRESSION command: fills a struct lr_spec
   from the syntax, then runs the procedure once per split-file group.
   NOTE(review): this extract is missing many lines — most of the default
   initializers, the error-cleanup labels, and several closing braces are
   not visible. */
557 cmd_logistic (struct lexer *lexer, struct dataset *ds)
/* Defaults; the remaining defaults (bcon, lcon, max_iter, confidence,
   exclude, constant) are presumably set in lines missing from this
   extract. */
560 lr.dict = dataset_dict (ds);
561 lr.n_predictor_vars = 0;
562 lr.predictor_vars = NULL;
564 lr.wv = dict_get_weight (lr.dict);
568 lr.min_epsilon = 0.00000001;
572 lr.print = PRINT_DEFAULT;
/* VARIABLES = depvar WITH predictor list */
575 if (lex_match_id (lexer, "VARIABLES"))
576 lex_match (lexer, T_EQUALS);
578 if (! (lr.dep_var = parse_variable_const (lexer, lr.dict)))
581 lex_force_match (lexer, T_WITH);
583 if (!parse_variables_const (lexer, lr.dict,
584 &lr.predictor_vars, &lr.n_predictor_vars,
585 PV_NO_DUPLICATE | PV_NUMERIC))
/* Subcommand loop: each /SUBCOMMAND until the end of the command. */
589 while (lex_token (lexer) != T_ENDCMD)
591 lex_match (lexer, T_SLASH);
593 if (lex_match_id (lexer, "MISSING"))
595 lex_match (lexer, T_EQUALS);
596 while (lex_token (lexer) != T_ENDCMD
597 && lex_token (lexer) != T_SLASH)
599 if (lex_match_id (lexer, "INCLUDE"))
601 lr.exclude = MV_SYSTEM;
603 else if (lex_match_id (lexer, "EXCLUDE"))
609 lex_error (lexer, NULL);
614 else if (lex_match_id (lexer, "ORIGIN"))
618 else if (lex_match_id (lexer, "NOORIGIN"))
622 else if (lex_match_id (lexer, "NOCONST"))
626 else if (lex_match_id (lexer, "EXTERNAL"))
628 /* This is for compatibility. It does nothing */
/* /PRINT = flags controlling which output tables appear. */
630 else if (lex_match_id (lexer, "PRINT"))
632 lex_match (lexer, T_EQUALS);
633 while (lex_token (lexer) != T_ENDCMD && lex_token (lexer) != T_SLASH)
635 if (lex_match_id (lexer, "DEFAULT"))
637 lr.print |= PRINT_DEFAULT;
639 else if (lex_match_id (lexer, "SUMMARY"))
641 lr.print |= PRINT_SUMMARY;
644 else if (lex_match_id (lexer, "CORR"))
646 lr.print |= PRINT_CORR;
648 else if (lex_match_id (lexer, "ITER"))
650 lr.print |= PRINT_ITER;
652 else if (lex_match_id (lexer, "GOODFIT"))
654 lr.print |= PRINT_GOODFIT;
/* CI(n): confidence interval percentage for Exp(B). */
657 else if (lex_match_id (lexer, "CI"))
659 lr.print |= PRINT_CI;
660 if (lex_force_match (lexer, T_LPAREN))
662 if (! lex_force_int (lexer))
664 lex_error (lexer, NULL);
667 lr.confidence = lex_integer (lexer);
669 if ( ! lex_force_match (lexer, T_RPAREN))
671 lex_error (lexer, NULL);
676 else if (lex_match_id (lexer, "ALL"))
682 lex_error (lexer, NULL);
/* /CRITERIA = BCON(n) ITERATE(n) LCON(n) EPS(n): convergence knobs. */
687 else if (lex_match_id (lexer, "CRITERIA"))
689 lex_match (lexer, T_EQUALS);
690 while (lex_token (lexer) != T_ENDCMD && lex_token (lexer) != T_SLASH)
692 if (lex_match_id (lexer, "BCON"))
694 if (lex_force_match (lexer, T_LPAREN))
696 if (! lex_force_num (lexer))
698 lex_error (lexer, NULL);
701 lr.bcon = lex_number (lexer);
703 if ( ! lex_force_match (lexer, T_RPAREN))
705 lex_error (lexer, NULL);
710 else if (lex_match_id (lexer, "ITERATE"))
712 if (lex_force_match (lexer, T_LPAREN))
714 if (! lex_force_int (lexer))
716 lex_error (lexer, NULL);
719 lr.max_iter = lex_integer (lexer);
721 if ( ! lex_force_match (lexer, T_RPAREN))
723 lex_error (lexer, NULL);
728 else if (lex_match_id (lexer, "LCON"))
730 if (lex_force_match (lexer, T_LPAREN))
732 if (! lex_force_num (lexer))
734 lex_error (lexer, NULL);
737 lr.lcon = lex_number (lexer);
739 if ( ! lex_force_match (lexer, T_RPAREN))
741 lex_error (lexer, NULL);
746 else if (lex_match_id (lexer, "EPS"))
748 if (lex_force_match (lexer, T_LPAREN))
750 if (! lex_force_num (lexer))
752 lex_error (lexer, NULL);
755 lr.min_epsilon = lex_number (lexer);
757 if ( ! lex_force_match (lexer, T_RPAREN))
759 lex_error (lexer, NULL);
766 lex_error (lexer, NULL);
773 lex_error (lexer, NULL);
/* Run the procedure once per split-file group. */
781 struct casegrouper *grouper;
782 struct casereader *group;
785 grouper = casegrouper_create_splits (proc_open (ds), lr.dict);
/* NOTE(review): each group's result overwrites OK here — only the last
   group's status survives; confirm whether the full source combines
   results (e.g. with "&& ok") as proc_commit below does. */
786 while (casegrouper_get_next_group (grouper, &group))
787 ok = run_lr (&lr, group, ds);
788 ok = casegrouper_destroy (grouper);
789 ok = proc_commit (ds) && ok;
792 free (lr.predictor_vars);
/* Error path: same cleanup as above (labels not visible in this extract). */
797 free (lr.predictor_vars);
804 /* Show the Dependent Variable Encoding box.
805 This indicates how the dependent variable
806 is mapped to the internal zero/one values.
809 output_depvarmap (const struct lr_spec *cmd, const struct lr_result *res)
811 const int heading_columns = 0;
812 const int heading_rows = 1;
/* One heading row plus one row per dependent value (y0 and y1). */
817 int nr = heading_rows + 2;
819 t = tab_create (nc, nr);
820 tab_title (t, _("Dependent Variable Encoding"));
822 tab_headers (t, heading_columns, 0, heading_rows, 0);
824 tab_box (t, TAL_2, TAL_2, -1, TAL_1, 0, 0, nc - 1, nr - 1);
826 tab_hline (t, TAL_2, 0, nc - 1, heading_rows);
827 tab_vline (t, TAL_2, heading_columns, 0, nr - 1);
829 tab_text (t, 0, 0, TAB_CENTER | TAT_TITLE, _("Original Value"));
830 tab_text (t, 1, 0, TAB_CENTER | TAT_TITLE, _("Internal Value"));
/* Column 0: the user-visible names of the two dependent values. */
834 ds_init_empty (&str);
835 var_append_value_name (cmd->dep_var, &res->y0, &str);
836 tab_text (t, 0, 0 + heading_rows, 0, ds_cstr (&str));
839 var_append_value_name (cmd->dep_var, &res->y1, &str);
840 tab_text (t, 0, 1 + heading_rows, 0, ds_cstr (&str));
/* Column 1: the internal 0/1 coding assigned to each value. */
843 tab_double (t, 1, 0 + heading_rows, 0, map_dependent_var (cmd, res, &res->y0), &F_8_0);
844 tab_double (t, 1, 1 + heading_rows, 0, map_dependent_var (cmd, res, &res->y1), &F_8_0);
851 /* Show the Variables in the Equation box */
853 output_variables (const struct lr_spec *cmd,
854 const gsl_vector *beta,
855 const gsl_vector *se)
858 const int heading_columns = 1;
859 int heading_rows = 1;
/* One row per predictor; presumably plus one for the constant when the
   intercept is included — that adjustment is not visible in this extract. */
863 int n_rows = cmd->n_predictor_vars;
/* CI columns (Lower/Upper for Exp(B)) add width and a second header row. */
867 if (cmd->print & PRINT_CI)
873 nr = heading_rows + cmd->n_predictor_vars;
877 t = tab_create (nc, nr);
878 tab_title (t, _("Variables in the Equation"));
880 tab_headers (t, heading_columns, 0, heading_rows, 0);
882 tab_box (t, TAL_2, TAL_2, -1, TAL_1, 0, 0, nc - 1, nr - 1);
884 tab_hline (t, TAL_2, 0, nc - 1, heading_rows);
885 tab_vline (t, TAL_2, heading_columns, 0, nr - 1);
887 tab_text (t, 0, row + 1, TAB_CENTER | TAT_TITLE, _("Step 1"));
889 tab_text (t, 2, row, TAB_CENTER | TAT_TITLE, _("B"));
890 tab_text (t, 3, row, TAB_CENTER | TAT_TITLE, _("S.E."));
891 tab_text (t, 4, row, TAB_CENTER | TAT_TITLE, _("Wald"));
892 tab_text (t, 5, row, TAB_CENTER | TAT_TITLE, _("df"));
893 tab_text (t, 6, row, TAB_CENTER | TAT_TITLE, _("Sig."));
894 tab_text (t, 7, row, TAB_CENTER | TAT_TITLE, _("Exp(B)"));
896 if (cmd->print & PRINT_CI)
898 tab_joint_text_format (t, 8, 0, 9, 0,
899 TAB_CENTER | TAT_TITLE, _("%d%% CI for Exp(B)"), cmd->confidence);
901 tab_text (t, 8, row, TAB_CENTER | TAT_TITLE, _("Lower"));
902 tab_text (t, 9, row, TAB_CENTER | TAT_TITLE, _("Upper"));
/* One body row per coefficient. */
908 for (idx = 0 ; idx < n_rows; ++idx)
910 const int r = idx + heading_rows;
912 const double b = gsl_vector_get (beta, idx);
913 const double sigma = gsl_vector_get (se, idx);
/* Wald statistic: (B / S.E.)^2, chi-squared with DF degrees of freedom. */
914 const double wald = pow2 (b / sigma);
917 if (idx < cmd->n_predictor_vars)
918 tab_text (t, 1, r, TAB_LEFT | TAT_TITLE,
919 var_to_string (cmd->predictor_vars[idx]));
921 tab_double (t, 2, r, 0, b, 0);
922 tab_double (t, 3, r, 0, sigma, 0);
923 tab_double (t, 4, r, 0, wald, 0);
924 tab_double (t, 5, r, 0, df, &F_8_0);
925 tab_double (t, 6, r, 0, gsl_cdf_chisq_Q (wald, df), 0);
926 tab_double (t, 7, r, 0, exp (b), 0);
928 if (cmd->print & PRINT_CI)
/* wc = normal quantile for the requested confidence level. */
930 double wc = gsl_cdf_ugaussian_Pinv (0.5 + cmd->confidence / 200.0);
933 if (idx < cmd->n_predictor_vars)
/* NOTE(review): exp(b +/- wc) omits multiplication by sigma — the
   standard CI is exp(b +/- wc * S.E.).  A "wc *= sigma" statement may
   be in a line missing from this extract; confirm against full source. */
935 tab_double (t, 8, r, 0, exp (b - wc), 0);
936 tab_double (t, 9, r, 0, exp (b + wc), 0);
942 tab_text (t, 1, nr - 1, TAB_LEFT | TAT_TITLE, _("Constant"));
948 /* Show the model summary box */
/* Reports -2 log likelihood, Cox & Snell R^2, and Nagelkerke R^2,
   computed from the initial (intercept-only) and final likelihoods. */
950 output_model_summary (const struct lr_result *res,
951 double initial_likelihood, double likelihood)
953 const int heading_columns = 0;
954 const int heading_rows = 1;
958 int nr = heading_rows + 1;
961 t = tab_create (nc, nr);
962 tab_title (t, _("Model Summary"));
964 tab_headers (t, heading_columns, 0, heading_rows, 0);
966 tab_box (t, TAL_2, TAL_2, -1, TAL_1, 0, 0, nc - 1, nr - 1);
968 tab_hline (t, TAL_2, 0, nc - 1, heading_rows);
969 tab_vline (t, TAL_2, heading_columns, 0, nr - 1);
971 tab_text (t, 0, 0, TAB_LEFT | TAT_TITLE, _("Step 1"));
972 tab_text (t, 1, 0, TAB_CENTER | TAT_TITLE, _("-2 Log likelihood"));
973 tab_double (t, 1, 1, 0, -2 * log (likelihood), 0);
/* Cox & Snell R^2 = 1 - (L0 / L)^(2/n), n = sum of case weights (cc). */
976 tab_text (t, 2, 0, TAB_CENTER | TAT_TITLE, _("Cox & Snell R Square"));
977 cox = 1.0 - pow (initial_likelihood /likelihood, 2 / res->cc);
978 tab_double (t, 2, 1, 0, cox, 0);
/* Nagelkerke R^2 rescales Cox & Snell by its maximum, 1 - L0^(2/n). */
980 tab_text (t, 3, 0, TAB_CENTER | TAT_TITLE, _("Nagelkerke R Square"));
981 tab_double (t, 3, 1, 0, cox / ( 1.0 - pow (initial_likelihood, 2 / res->cc)), 0);
987 /* Show the case processing summary box */
/* Reports unweighted counts and percentages of cases included in the
   analysis, excluded as missing, and their total. */
989 case_processing_summary (const struct lr_result *res)
991 const int heading_columns = 1;
992 const int heading_rows = 1;
/* Heading row plus three body rows: included, missing, total. */
996 const int nr = heading_rows + 3;
999 t = tab_create (nc, nr);
1000 tab_title (t, _("Case Processing Summary"));
1002 tab_headers (t, heading_columns, 0, heading_rows, 0);
1004 tab_box (t, TAL_2, TAL_2, -1, TAL_1, 0, 0, nc - 1, nr - 1);
1006 tab_hline (t, TAL_2, 0, nc - 1, heading_rows);
1007 tab_vline (t, TAL_2, heading_columns, 0, nr - 1);
1009 tab_text (t, 0, 0, TAB_LEFT | TAT_TITLE, _("Unweighted Cases"));
1010 tab_text (t, 1, 0, TAB_CENTER | TAT_TITLE, _("N"));
1011 tab_text (t, 2, 0, TAB_CENTER | TAT_TITLE, _("Percent"));
1014 tab_text (t, 0, 1, TAB_LEFT | TAT_TITLE, _("Included in Analysis"));
1015 tab_text (t, 0, 2, TAB_LEFT | TAT_TITLE, _("Missing Cases"));
1016 tab_text (t, 0, 3, TAB_LEFT | TAT_TITLE, _("Total"));
1018 tab_double (t, 1, 1, 0, res->n_nonmissing, &F_8_0);
1019 tab_double (t, 1, 2, 0, res->n_missing, &F_8_0);
1021 total = res->n_nonmissing + res->n_missing;
1022 tab_double (t, 1, 3, 0, total , &F_8_0);
/* Percentages relative to the total; casts avoid integer division.
   NOTE(review): if total is 0 these divide by zero — presumably callers
   guarantee at least one case; confirm against full source. */
1024 tab_double (t, 2, 1, 0, 100 * res->n_nonmissing / (double) total, 0);
1025 tab_double (t, 2, 2, 0, 100 * res->n_missing / (double) total, 0);
1026 tab_double (t, 2, 3, 0, 100 * total / (double) total, 0);