lex_match (lexer, T_EQUALS);
int vars_start = lex_ofs (lexer);
- if (!parse_variables_const (lexer, dict,
- &v1, &n_v1,
- PV_NO_DUPLICATE | PV_NUMERIC))
+ if (!parse_variables_const (lexer, dict, &v1, &n_v1,
+ PV_DUPLICATE | PV_NUMERIC))
goto exit;
if (lex_match (lexer, T_WITH))
{
with = true;
- if (!parse_variables_const (lexer, dict,
- &v2, &n_v2,
- PV_NO_DUPLICATE | PV_NUMERIC))
+ if (!parse_variables_const (lexer, dict, &v2, &n_v2,
+ PV_DUPLICATE | PV_NUMERIC))
goto exit;
int vars_end = lex_ofs (lexer) - 1;
AT_CLEANUP
+AT_SETUP([T-TEST /PAIRS with duplicate variables in list])
+
+AT_DATA([ref.sps], [dnl
+data list list /id * a * b * c * d *.
+begin data.
+1 2.0 3.0 4.0 4.0
+2 1.0 2.0 5.1 3.9
+3 2.0 4.5 5.2 3.8
+4 2.0 4.5 5.3 3.7
+5 3.0 6.0 5.9 3.6
+end data.
+
+t-test /PAIRS a c a with b d c (PAIRED).
+])
+
+AT_DATA([expout], [dnl
+Table: Reading free-form data from INLINE.
+Variable,Format
+id,F8.0
+a,F8.0
+b,F8.0
+c,F8.0
+d,F8.0
+
+Table: Paired Sample Statistics
+,,N,Mean,Std. Deviation,S.E. Mean
+Pair 1,a,5,2.00,.71,.32
+,b,5,4.00,1.54,.69
+Pair 2,c,5,5.10,.69,.31
+,d,5,3.80,.16,.07
+Pair 3,a,5,2.00,.71,.32
+,c,5,5.10,.69,.31
+
+Table: Paired Samples Correlations
+,,N,Correlation,Sig.
+Pair 1,a & b,5,.918,.028
+Pair 2,c & d,5,-.918,.028
+Pair 3,a & c,5,.410,.493
+
+Table: Paired Samples Test
+,,Paired Differences,,,,,t,df,Sig. (2-tailed)
+,,Mean,Std. Deviation,S.E. Mean,95% Confidence Interval of the Difference,,,,
+,,,,,Lower,Upper,,,
+Pair 1,a - b,-2.00,.94,.42,-3.16,-.84,-4.78,4,.009
+Pair 2,c - d,1.30,.84,.37,.26,2.34,3.47,4,.025
+Pair 3,a - c,-3.10,.76,.34,-4.04,-2.16,-9.14,4,.001
+])
+
+AT_CHECK([pspp -o ref.csv ref.sps])
+AT_CHECK([cat ref.csv], [0], [expout])
+AT_CLEANUP
+
AT_SETUP([T-TEST /PAIRS with per-analysis missing values])
AT_DATA([ref.sps], [dnl