1 /* pspp - a program for statistical analysis.
2 Copyright (C) 2012 Free Software Foundation, Inc.
4 This program is free software: you can redistribute it and/or modify
5 it under the terms of the GNU General Public License as published by
6 the Free Software Foundation, either version 3 of the License, or
7 (at your option) any later version.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU General Public License for more details.
14 You should have received a copy of the GNU General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20 1. "Coding Logistic Regression with Newton-Raphson", James McCaffrey
21 http://msdn.microsoft.com/en-us/magazine/jj618304.aspx
23 2. "SPSS Statistical Algorithms" Chapter LOGISTIC REGRESSION Algorithms
26 The Newton Raphson method finds successive approximations to $\bf b$ where
27 approximation ${\bf b}_t$ is (hopefully) better than the previous ${\bf b}_{t-1}$.
29 $ {\bf b}_t = {\bf b}_{t -1} + ({\bf X}^T{\bf W}_{t-1}{\bf X})^{-1}{\bf X}^T({\bf y} - {\bf \pi}_{t-1})$
32 $\bf X$ is the $n \times p$ design matrix, $n$ being the number of cases,
33 $p$ the number of parameters, \par
34 $\bf W$ is the diagonal matrix whose diagonal elements are
35 $\hat{\pi}_0(1 - \hat{\pi}_0), \, \hat{\pi}_1(1 - \hat{\pi}_1)\dots \hat{\pi}_{n-1}(1 - \hat{\pi}_{n-1})$
42 #include <gsl/gsl_blas.h>
44 #include <gsl/gsl_linalg.h>
45 #include <gsl/gsl_cdf.h>
46 #include <gsl/gsl_matrix.h>
47 #include <gsl/gsl_vector.h>
50 #include "data/case.h"
51 #include "data/casegrouper.h"
52 #include "data/casereader.h"
53 #include "data/dataset.h"
54 #include "data/dictionary.h"
55 #include "data/format.h"
56 #include "data/value.h"
57 #include "language/command.h"
58 #include "language/dictionary/split-file.h"
59 #include "language/lexer/lexer.h"
60 #include "language/lexer/value-parser.h"
61 #include "language/lexer/variable-parser.h"
62 #include "libpspp/assertion.h"
63 #include "libpspp/ll.h"
64 #include "libpspp/message.h"
65 #include "libpspp/misc.h"
66 #include "math/categoricals.h"
67 #include "output/tab.h"
70 #define _(msgid) gettext (msgid)
/* Bit flags selecting which output tables the PRINT subcommand requests.
   Combined into the `print` member of struct lr_spec. */
75 #define PRINT_EACH_STEP 0x01
76 #define PRINT_SUMMARY 0x02
77 #define PRINT_CORR 0x04
78 #define PRINT_ITER 0x08
79 #define PRINT_GOODFIT 0x10
/* The output produced when no explicit PRINT keywords are given. */
83 #define PRINT_DEFAULT (PRINT_SUMMARY | PRINT_EACH_STEP)
86 The constant parameters of the procedure.
87 That is, those which are set by the user.
91 /* The dependent variable */
92 const struct variable *dep_var;
/* The predictor (independent) variables, and how many there are. */
94 size_t n_predictor_vars;
95 const struct variable **predictor_vars;
97 /* Which classes of missing vars are to be excluded */
98 enum mv_class exclude;
100 /* The weight variable */
101 const struct variable *wv;
/* The dictionary to which the variables above belong. */
103 const struct dictionary *dict;
105 /* True iff the constant (intercept) is to be included in the model */
108 /* The maximum number of iterations */
111 /* Other iteration limiting conditions */
116 /* The confidence interval (in percent) */
119 /* What results should be presented */
125 /* The results and intermediate result of the procedure.
126 These are mutated as the procedure runs. Used for
127 temporary variables etc.
/* Whether to warn (once) about bad case weights; consumed by
   dict_get_case_weight. */
131 bool warn_bad_weight;
134 /* The two values of the dependent variable. */
139 /* The sum of caseweights */
/* Counts of cases excluded because of missing values, and of cases
   included in the analysis, respectively. */
142 casenumber n_missing;
143 casenumber n_nonmissing;
148 Convert INPUT into a dichotomous scalar. For simple cases, this is a 1:1 mapping
149 The return value is always either 0 or 1
152 map_dependent_var (const struct lr_spec *cmd, const struct lr_result *res, const union value *input)
154 int width = var_get_width (cmd->dep_var);
/* Compare INPUT against the two observed dependent-variable values:
   RES->y0 maps to 0 and RES->y1 maps to 1.  (NOTE(review): the return
   statements are not visible in this excerpt.) */
155 if (value_equal (input, &res->y0, width))
158 if (value_equal (input, &res->y1, width))
/* Forward declarations of the table-output routines, defined below. */
168 static void output_depvarmap (const struct lr_spec *cmd, const struct lr_result *);
170 static void output_variables (const struct lr_spec *cmd,
174 static void output_model_summary (const struct lr_result *,
175 double initial_likelihood, double likelihood);
177 static void case_processing_summary (const struct lr_result *);
181 Return the probability estimator (that is the estimator of logit(y) )
182 corresponding to the coefficient estimator beta_hat for case C
185 pi_hat (const struct lr_spec *cmd,
186 const gsl_vector *beta_hat,
187 const struct variable **x, size_t n_x,
188 const struct ccase *c)
/* Accumulate the linear predictor: sum of coefficient * predictor value
   over the N_X predictor variables. */
192 for (v0 = 0; v0 < n_x; ++v0)
194 pi += gsl_vector_get (beta_hat, v0) *
195 case_data (c, x[v0])->f;
/* The intercept is stored as the last element of BETA_HAT. */
199 pi += gsl_vector_get (beta_hat, beta_hat->size - 1);
/* Apply the logistic (inverse-logit) link. */
201 pi = 1.0 / (1.0 + exp(-pi));
208 Calculates the Hessian matrix X' V X,
209 where: X is the n by N_X matrix comprising the n cases in INPUT
210 V is a diagonal matrix { (pi_hat_0)(1 - pi_hat_0), (pi_hat_1)(1 - pi_hat_1), ... (pi_hat_{N-1})(1 - pi_hat_{N-1})}
211 (the partial derivative of the predicted values)
213 If ALL predicted values derivatives are close to zero or one, then CONVERGED
217 hessian (const struct lr_spec *cmd,
218 struct lr_result *res,
219 struct casereader *input,
220 const struct variable **x, size_t n_x,
221 const gsl_vector *beta_hat,
225 struct casereader *reader;
/* OUTPUT accumulates the (size x size) Hessian; calloc zero-initializes. */
227 gsl_matrix *output = gsl_matrix_calloc (beta_hat->size, beta_hat->size);
/* Tracks the largest diagonal weight pi*(1-pi) seen over all cases. */
229 double max_w = -DBL_MAX;
/* Iterate over a clone of INPUT so the caller's reader is undisturbed. */
231 for (reader = casereader_clone (input);
232 (c = casereader_read (reader)) != NULL; case_unref (c))
235 double pi = pi_hat (cmd, beta_hat, x, n_x, c);
237 double weight = dict_get_case_weight (cmd->dict, c, &res->warn_bad_weight);
238 double w = pi * (1 - pi);
/* Accumulate the outer product of this case's design row, weighted by w.
   Indices >= N_X refer to the intercept column, whose value is 1.0. */
243 for (v0 = 0; v0 < beta_hat->size; ++v0)
245 double in0 = v0 < n_x ? case_data (c, x[v0])->f : 1.0;
246 for (v1 = 0; v1 < beta_hat->size; ++v1)
248 double in1 = v1 < n_x ? case_data (c, x[v1])->f : 1.0 ;
249 double *o = gsl_matrix_ptr (output, v0, v1);
254 casereader_destroy (reader);
/* If every derivative was (near) zero, the fit is degenerate: warn. */
257 if ( max_w < cmd->min_epsilon)
260 msg (MN, _("All predicted values are either 1 or 0"));
267 /* Calculates the value X' (y - pi)
268 where X is the design model,
269 y is the vector of observed dependent variables
270 pi is the vector of estimates for y
272 As a side effect, the likelihood is stored in LIKELIHOOD
275 xt_times_y_pi (const struct lr_spec *cmd,
276 struct lr_result *res,
277 struct casereader *input,
278 const struct variable **x, size_t n_x,
279 const struct variable *y_var,
280 const gsl_vector *beta_hat,
283 struct casereader *reader;
/* OUTPUT accumulates X'(y - pi); calloc zero-initializes. */
285 gsl_vector *output = gsl_vector_calloc (beta_hat->size);
/* Iterate over a clone of INPUT so the caller's reader is undisturbed. */
288 for (reader = casereader_clone (input);
289 (c = casereader_read (reader)) != NULL; case_unref (c))
292 double pi = pi_hat (cmd, beta_hat, x, n_x, c);
293 double weight = dict_get_case_weight (cmd->dict, c, &res->warn_bad_weight);
/* Y is this case's dependent value mapped to 0 or 1. */
296 double y = map_dependent_var (cmd, res, case_data (c, y_var));
/* Multiply this case's Bernoulli likelihood contribution into the
   running product (weight acts as a replication exponent). */
298 *likelihood *= pow (pi, weight * y) * pow (1 - pi, weight * (1 - y));
/* Indices >= N_X refer to the intercept column, whose value is 1.0. */
300 for (v0 = 0; v0 < beta_hat->size; ++v0)
302 double in0 = v0 < n_x ? case_data (c, x[v0])->f : 1.0;
303 double *o = gsl_vector_ptr (output, v0);
304 *o += in0 * (y - pi) * weight;
308 casereader_destroy (reader);
315 Makes an initial pass through the data, checks that the dependent variable is
316 dichotomous, and calculates necessary initial values.
318 Returns an initial value for \hat\beta the vector of estimators of \beta
321 beta_hat_initial (const struct lr_spec *cmd, struct lr_result *res, struct casereader *input)
323 const int width = var_get_width (cmd->dep_var);
326 struct casereader *reader;
335 size_t n_coefficients = cmd->n_predictor_vars;
/* B0 holds the initial coefficient estimates; calloc zero-initializes. */
339 b0 = gsl_vector_calloc (n_coefficients);
/* First pass: screen for missing values and discover the (at most two)
   distinct values of the dependent variable. */
342 for (reader = casereader_clone (input);
343 (c = casereader_read (reader)) != NULL; case_unref (c))
346 bool missing = false;
347 double weight = dict_get_case_weight (cmd->dict, c, &res->warn_bad_weight);
348 const union value *depval = case_data (c, cmd->dep_var);
/* A case is missing if any predictor value is missing per CMD->exclude. */
350 for (v = 0; v < cmd->n_predictor_vars; ++v)
352 const union value *val = case_data (c, cmd->predictor_vars[v]);
353 if (var_is_value_missing (cmd->predictor_vars[v], val, cmd->exclude))
/* Record the first distinct dependent value as y0 ... */
370 value_clone (&res->y0, depval, width);
/* ... and the second distinct value as y1. */
375 if ( !value_equal (&res->y0, depval, width))
377 value_clone (&res->y1, depval, width)
383 if (! value_equal (&res->y0, depval, width)
385 ! value_equal (&res->y1, depval, width)
/* A third distinct value means the variable is not dichotomous: error. */
388 msg (ME, _("Dependent variable's values are not dichotomous."));
393 if (value_equal (&res->y0, depval, width))
396 if (value_equal (&res->y1, depval, width))
402 casereader_destroy (reader);
406 /* Ensure that Y0 is less than Y1. Otherwise the mapping gets
407 inverted, which is confusing to users */
408 if (var_is_numeric (cmd->dep_var) && value_compare_3way (&res->y0, &res->y1, width) > 0)
411 value_clone (&tmp, &res->y0, width);
412 value_copy (&res->y0, &res->y1, width);
413 value_copy (&res->y1, &tmp, width);
414 value_destroy (&tmp, width);
/* Initialize the intercept (last element of B0) to the empirical
   log-odds, logit(mean) = log(mean / (1 - mean)). */
420 double mean = sum / res->cc;
421 gsl_vector_set (b0, b0->size - 1, log (mean / (1 - mean)));
427 casereader_destroy (reader);
/* Run the logistic regression for one group of cases: estimate the
   coefficients by Newton-Raphson iteration and emit the output tables. */
434 run_lr (const struct lr_spec *cmd, struct casereader *input,
435 const struct dataset *ds UNUSED)
439 gsl_vector *beta_hat;
442 bool converged = false;
/* -1 flags "no previous iteration yet" for the likelihood test below. */
444 double prev_likelihood = -1;
445 double initial_likelihood ;
447 struct lr_result work;
449 work.n_nonmissing = 0;
450 work.warn_bad_weight = true;
453 /* Get the initial estimates of \beta and their standard errors */
454 beta_hat = beta_hat_initial (cmd, &work, input);
455 if (NULL == beta_hat)
458 output_depvarmap (cmd, &work);
460 se = gsl_vector_alloc (beta_hat->size);
462 case_processing_summary (&work);
/* Drop cases with missing predictor values before iterating. */
465 input = casereader_create_filter_missing (input,
467 cmd->n_predictor_vars,
473 /* Start the Newton Raphson iteration process... */
474 for( i = 0 ; i < cmd->max_iter ; ++i)
480 m = hessian (cmd, &work, input,
481 cmd->predictor_vars, cmd->n_predictor_vars,
/* Invert the Hessian in place via its Cholesky factorization. */
485 gsl_linalg_cholesky_decomp (m);
486 gsl_linalg_cholesky_invert (m);
488 v = xt_times_y_pi (cmd, &work, input,
489 cmd->predictor_vars, cmd->n_predictor_vars,
/* delta = (X'WX)^-1 . X'(y - pi), the Newton-Raphson step. */
496 gsl_vector *delta = gsl_vector_alloc (v->size);
497 gsl_blas_dgemv (CblasNoTrans, 1.0, m, v, 0, delta);
/* The diagonal of the inverted Hessian gives the coefficient variances. */
500 for (j = 0; j < se->size; ++j)
502 double *ptr = gsl_vector_ptr (se, j);
503 *ptr = gsl_matrix_get (m, j, j);
508 gsl_vector_add (beta_hat, delta);
/* Convergence test 1: all parameter changes smaller than BCON. */
510 gsl_vector_minmax (delta, &min, &max);
512 if ( fabs (min) < cmd->bcon && fabs (max) < cmd->bcon)
514 msg (MN, _("Estimation terminated at iteration number %d because parameter estimates changed by less than %g"),
519 gsl_vector_free (delta);
/* Convergence test 2: relative log-likelihood change smaller than LCON. */
522 if ( prev_likelihood >= 0)
524 if (-log (likelihood) > -(1.0 - cmd->lcon) * log (prev_likelihood))
526 msg (MN, _("Estimation terminated at iteration number %d because Log Likelihood decreased by less than %g%%"), i + 1, 100 * cmd->lcon);
531 initial_likelihood = likelihood;
532 prev_likelihood = likelihood;
537 casereader_destroy (input);
539 for (i = 0; i < se->size; ++i)
541 double *ptr = gsl_vector_ptr (se, i);
545 output_model_summary (&work, initial_likelihood, likelihood);
546 output_variables (cmd, beta_hat, se);
548 gsl_vector_free (beta_hat);
549 gsl_vector_free (se);
554 /* Parse the LOGISTIC REGRESSION command syntax */
556 cmd_logistic (struct lexer *lexer, struct dataset *ds)
/* Fill in the specification defaults before parsing subcommands. */
559 lr.dict = dataset_dict (ds);
560 lr.n_predictor_vars = 0;
561 lr.predictor_vars = NULL;
563 lr.wv = dict_get_weight (lr.dict);
567 lr.min_epsilon = 0.00000001;
571 lr.print = PRINT_DEFAULT;
/* Required: VARIABLES = depvar WITH predictor list. */
574 if (lex_match_id (lexer, "VARIABLES"))
575 lex_match (lexer, T_EQUALS);
577 if (! (lr.dep_var = parse_variable_const (lexer, lr.dict)))
580 lex_force_match (lexer, T_WITH);
582 if (!parse_variables_const (lexer, lr.dict,
583 &lr.predictor_vars, &lr.n_predictor_vars,
584 PV_NO_DUPLICATE | PV_NUMERIC))
/* Optional subcommands, each introduced by a slash. */
588 while (lex_token (lexer) != T_ENDCMD)
590 lex_match (lexer, T_SLASH);
592 if (lex_match_id (lexer, "MISSING"))
594 lex_match (lexer, T_EQUALS);
595 while (lex_token (lexer) != T_ENDCMD
596 && lex_token (lexer) != T_SLASH)
598 if (lex_match_id (lexer, "INCLUDE"))
600 lr.exclude = MV_SYSTEM;
602 else if (lex_match_id (lexer, "EXCLUDE"))
608 lex_error (lexer, NULL);
613 else if (lex_match_id (lexer, "ORIGIN"))
617 else if (lex_match_id (lexer, "NOORIGIN"))
621 else if (lex_match_id (lexer, "NOCONST"))
625 else if (lex_match_id (lexer, "EXTERNAL"))
627 /* This is for compatibility. It does nothing */
629 else if (lex_match_id (lexer, "PRINT"))
631 lex_match (lexer, T_EQUALS);
632 while (lex_token (lexer) != T_ENDCMD && lex_token (lexer) != T_SLASH)
634 if (lex_match_id (lexer, "DEFAULT"))
636 lr.print |= PRINT_DEFAULT;
638 else if (lex_match_id (lexer, "SUMMARY"))
640 lr.print |= PRINT_SUMMARY;
643 else if (lex_match_id (lexer, "CORR"))
645 lr.print |= PRINT_CORR;
647 else if (lex_match_id (lexer, "ITER"))
649 lr.print |= PRINT_ITER;
651 else if (lex_match_id (lexer, "GOODFIT"))
653 lr.print |= PRINT_GOODFIT;
/* CI(n) sets the confidence level (percent) for the Exp(B) interval. */
656 else if (lex_match_id (lexer, "CI"))
658 lr.print |= PRINT_CI;
659 if (lex_force_match (lexer, T_LPAREN))
661 if (! lex_force_int (lexer))
663 lex_error (lexer, NULL);
666 lr.confidence = lex_integer (lexer);
668 if ( ! lex_force_match (lexer, T_RPAREN))
670 lex_error (lexer, NULL);
675 else if (lex_match_id (lexer, "ALL"))
681 lex_error (lexer, NULL);
/* CRITERIA subcommand: numeric tuning parameters for the iteration. */
686 else if (lex_match_id (lexer, "CRITERIA"))
688 lex_match (lexer, T_EQUALS);
689 while (lex_token (lexer) != T_ENDCMD && lex_token (lexer) != T_SLASH)
691 if (lex_match_id (lexer, "BCON"))
693 if (lex_force_match (lexer, T_LPAREN))
695 if (! lex_force_num (lexer))
697 lex_error (lexer, NULL);
700 lr.bcon = lex_number (lexer);
702 if ( ! lex_force_match (lexer, T_RPAREN))
704 lex_error (lexer, NULL);
709 else if (lex_match_id (lexer, "ITERATE"))
711 if (lex_force_match (lexer, T_LPAREN))
713 if (! lex_force_int (lexer))
715 lex_error (lexer, NULL);
718 lr.max_iter = lex_integer (lexer);
720 if ( ! lex_force_match (lexer, T_RPAREN))
722 lex_error (lexer, NULL);
727 else if (lex_match_id (lexer, "LCON"))
729 if (lex_force_match (lexer, T_LPAREN))
731 if (! lex_force_num (lexer))
733 lex_error (lexer, NULL);
736 lr.lcon = lex_number (lexer);
738 if ( ! lex_force_match (lexer, T_RPAREN))
740 lex_error (lexer, NULL);
745 else if (lex_match_id (lexer, "EPS"))
747 if (lex_force_match (lexer, T_LPAREN))
749 if (! lex_force_num (lexer))
751 lex_error (lexer, NULL);
754 lr.min_epsilon = lex_number (lexer);
756 if ( ! lex_force_match (lexer, T_RPAREN))
758 lex_error (lexer, NULL);
765 lex_error (lexer, NULL);
772 lex_error (lexer, NULL);
/* Parsing succeeded: run the procedure once per SPLIT FILE group. */
780 struct casegrouper *grouper;
781 struct casereader *group;
784 grouper = casegrouper_create_splits (proc_open (ds), lr.dict);
785 while (casegrouper_get_next_group (grouper, &group))
786 ok = run_lr (&lr, group, ds);
787 ok = casegrouper_destroy (grouper);
788 ok = proc_commit (ds) && ok;
791 free (lr.predictor_vars);
/* Error path: release the predictor list before returning failure. */
796 free (lr.predictor_vars);
803 /* Show the Dependent Variable Encoding box.
804 This indicates how the dependent variable
805 is mapped to the internal zero/one values.
808 output_depvarmap (const struct lr_spec *cmd, const struct lr_result *res)
810 const int heading_columns = 0;
811 const int heading_rows = 1;
/* One body row per dependent value (y0 and y1). */
816 int nr = heading_rows + 2;
818 t = tab_create (nc, nr);
819 tab_title (t, _("Dependent Variable Encoding"));
821 tab_headers (t, heading_columns, 0, heading_rows, 0);
823 tab_box (t, TAL_2, TAL_2, -1, TAL_1, 0, 0, nc - 1, nr - 1);
825 tab_hline (t, TAL_2, 0, nc - 1, heading_rows);
826 tab_vline (t, TAL_2, heading_columns, 0, nr - 1);
828 tab_text (t, 0, 0, TAB_CENTER | TAT_TITLE, _("Original Value"));
829 tab_text (t, 1, 0, TAB_CENTER | TAT_TITLE, _("Internal Value"));
/* Column 0: the dependent variable's values, formatted as labels. */
833 ds_init_empty (&str);
834 var_append_value_name (cmd->dep_var, &res->y0, &str);
835 tab_text (t, 0, 0 + heading_rows, 0, ds_cstr (&str));
838 var_append_value_name (cmd->dep_var, &res->y1, &str);
839 tab_text (t, 0, 1 + heading_rows, 0, ds_cstr (&str));
/* Column 1: the corresponding internal 0/1 encoding. */
842 tab_double (t, 1, 0 + heading_rows, 0, map_dependent_var (cmd, res, &res->y0), &F_8_0);
843 tab_double (t, 1, 1 + heading_rows, 0, map_dependent_var (cmd, res, &res->y1), &F_8_0);
850 /* Show the Variables in the Equation box */
852 output_variables (const struct lr_spec *cmd,
853 const gsl_vector *beta,
854 const gsl_vector *se)
857 const int heading_columns = 1;
858 int heading_rows = 1;
862 int n_rows = cmd->n_predictor_vars;
/* Two extra columns (Lower/Upper) are appended when a CI was requested. */
866 if (cmd->print & PRINT_CI)
872 nr = heading_rows + cmd->n_predictor_vars;
876 t = tab_create (nc, nr);
877 tab_title (t, _("Variables in the Equation"));
879 tab_headers (t, heading_columns, 0, heading_rows, 0);
881 tab_box (t, TAL_2, TAL_2, -1, TAL_1, 0, 0, nc - 1, nr - 1);
883 tab_hline (t, TAL_2, 0, nc - 1, heading_rows);
884 tab_vline (t, TAL_2, heading_columns, 0, nr - 1);
886 tab_text (t, 0, row + 1, TAB_CENTER | TAT_TITLE, _("Step 1"));
888 tab_text (t, 2, row, TAB_CENTER | TAT_TITLE, _("B"));
889 tab_text (t, 3, row, TAB_CENTER | TAT_TITLE, _("S.E."));
890 tab_text (t, 4, row, TAB_CENTER | TAT_TITLE, _("Wald"));
891 tab_text (t, 5, row, TAB_CENTER | TAT_TITLE, _("df"));
892 tab_text (t, 6, row, TAB_CENTER | TAT_TITLE, _("Sig."));
893 tab_text (t, 7, row, TAB_CENTER | TAT_TITLE, _("Exp(B)"));
895 if (cmd->print & PRINT_CI)
897 tab_joint_text_format (t, 8, 0, 9, 0,
898 TAB_CENTER | TAT_TITLE, _("%d%% CI for Exp(B)"), cmd->confidence);
900 tab_text (t, 8, row, TAB_CENTER | TAT_TITLE, _("Lower"));
901 tab_text (t, 9, row, TAB_CENTER | TAT_TITLE, _("Upper"));
/* One row per coefficient: B, its standard error, the Wald statistic
   (B/S.E.)^2 with its chi-square significance, and Exp(B). */
907 for (idx = 0 ; idx < n_rows; ++idx)
909 const int r = idx + heading_rows;
911 const double b = gsl_vector_get (beta, idx);
912 const double sigma = gsl_vector_get (se, idx);
913 const double wald = pow2 (b / sigma);
916 if (idx < cmd->n_predictor_vars)
917 tab_text (t, 1, r, TAB_LEFT | TAT_TITLE,
918 var_to_string (cmd->predictor_vars[idx]));
920 tab_double (t, 2, r, 0, b, 0);
921 tab_double (t, 3, r, 0, sigma, 0);
922 tab_double (t, 4, r, 0, wald, 0);
923 tab_double (t, 5, r, 0, df, &F_8_0);
924 tab_double (t, 6, r, 0, gsl_cdf_chisq_Q (wald, df), 0);
925 tab_double (t, 7, r, 0, exp (b), 0);
927 if (cmd->print & PRINT_CI)
/* wc is the normal quantile for the requested two-sided confidence. */
929 double wc = gsl_cdf_ugaussian_Pinv (0.5 + cmd->confidence / 200.0);
932 if (idx < cmd->n_predictor_vars)
934 tab_double (t, 8, r, 0, exp (b - wc), 0);
935 tab_double (t, 9, r, 0, exp (b + wc), 0);
/* The final row is the model constant (intercept). */
941 tab_text (t, 1, nr - 1, TAB_LEFT | TAT_TITLE, _("Constant"));
947 /* Show the model summary box */
949 output_model_summary (const struct lr_result *res,
950 double initial_likelihood, double likelihood)
952 const int heading_columns = 0;
953 const int heading_rows = 1;
957 int nr = heading_rows + 1;
960 t = tab_create (nc, nr);
961 tab_title (t, _("Model Summary"));
963 tab_headers (t, heading_columns, 0, heading_rows, 0);
965 tab_box (t, TAL_2, TAL_2, -1, TAL_1, 0, 0, nc - 1, nr - 1);
967 tab_hline (t, TAL_2, 0, nc - 1, heading_rows);
968 tab_vline (t, TAL_2, heading_columns, 0, nr - 1);
970 tab_text (t, 0, 0, TAB_LEFT | TAT_TITLE, _("Step 1"));
971 tab_text (t, 1, 0, TAB_CENTER | TAT_TITLE, _("-2 Log likelihood"));
972 tab_double (t, 1, 1, 0, -2 * log (likelihood), 0);
/* Cox & Snell R^2 = 1 - (L0 / L)^(2 / W), with W the weighted case count. */
975 tab_text (t, 2, 0, TAB_CENTER | TAT_TITLE, _("Cox & Snell R Square"));
976 cox = 1.0 - pow (initial_likelihood /likelihood, 2 / res->cc);
977 tab_double (t, 2, 1, 0, cox, 0);
/* Nagelkerke R^2 rescales Cox & Snell by its maximum attainable value. */
979 tab_text (t, 3, 0, TAB_CENTER | TAT_TITLE, _("Nagelkerke R Square"));
980 tab_double (t, 3, 1, 0, cox / ( 1.0 - pow (initial_likelihood, 2 / res->cc)), 0);
986 /* Show the case processing summary box */
988 case_processing_summary (const struct lr_result *res)
990 const int heading_columns = 1;
991 const int heading_rows = 1;
/* Three body rows: included, missing, and total. */
995 const int nr = heading_rows + 3;
998 t = tab_create (nc, nr);
999 tab_title (t, _("Case Processing Summary"));
1001 tab_headers (t, heading_columns, 0, heading_rows, 0);
1003 tab_box (t, TAL_2, TAL_2, -1, TAL_1, 0, 0, nc - 1, nr - 1);
1005 tab_hline (t, TAL_2, 0, nc - 1, heading_rows);
1006 tab_vline (t, TAL_2, heading_columns, 0, nr - 1);
1008 tab_text (t, 0, 0, TAB_LEFT | TAT_TITLE, _("Unweighted Cases"));
1009 tab_text (t, 1, 0, TAB_CENTER | TAT_TITLE, _("N"));
1010 tab_text (t, 2, 0, TAB_CENTER | TAT_TITLE, _("Percent"));
1013 tab_text (t, 0, 1, TAB_LEFT | TAT_TITLE, _("Included in Analysis"));
1014 tab_text (t, 0, 2, TAB_LEFT | TAT_TITLE, _("Missing Cases"));
1015 tab_text (t, 0, 3, TAB_LEFT | TAT_TITLE, _("Total"));
1017 tab_double (t, 1, 1, 0, res->n_nonmissing, &F_8_0);
1018 tab_double (t, 1, 2, 0, res->n_missing, &F_8_0);
1020 total = res->n_nonmissing + res->n_missing;
1021 tab_double (t, 1, 3, 0, total , &F_8_0);
/* Percent column: each count as a percentage of the total. */
1023 tab_double (t, 2, 1, 0, 100 * res->n_nonmissing / (double) total, 0);
1024 tab_double (t, 2, 2, 0, 100 * res->n_missing / (double) total, 0);
1025 tab_double (t, 2, 3, 0, 100 * total / (double) total, 0);