segment: Allow '.' as separate token within a line, for macros.
author: Ben Pfaff <blp@cs.stanford.edu>
Tue, 13 Apr 2021 17:35:02 +0000 (10:35 -0700)
committer: Ben Pfaff <blp@cs.stanford.edu>
Tue, 13 Apr 2021 17:35:09 +0000 (10:35 -0700)
The description of macros just says that ^ and \ can't be used as macro
delimiters, so . is probably OK.

src/language/lexer/lexer.c
src/language/lexer/scan.c
src/language/lexer/scan.h
src/language/lexer/segment.c
src/language/lexer/segment.h
tests/language/lexer/lexer.at
tests/language/lexer/scan.at

index 5044731348ecce0c6a7573dfaabd28b9b3709d67..baebda3f6d89244cdba9eea628b764d65b2aae57 100644 (file)
@@ -1607,10 +1607,6 @@ lex_source_get__ (const struct lex_source *src_)
                      token->token.string.string);
       break;
 
-    case SCAN_UNEXPECTED_DOT:
-      lex_get_error (src, _("Unexpected `.' in middle of command"));
-      break;
-
     case SCAN_UNEXPECTED_CHAR:
       {
         char c_name[16];
index 2ad467d978bde55ae2a7dc2330b75b831cb3851c..86ebb7d00675cd6c89d223d49b1924cb278473e9 100644 (file)
@@ -506,10 +506,6 @@ scan_start__ (struct scanner *scanner, enum segment_type type,
       ss_alloc_substring (&token->string, s);
       return SCAN_DONE;
 
-    case SEG_UNEXPECTED_DOT:
-      token->type = SCAN_UNEXPECTED_DOT;
-      return SCAN_DONE;
-
     case SEG_UNEXPECTED_CHAR:
       return scan_unexpected_char (&s, token);
     }
index 4327e9bb0bf02f38d79b8e970d07e2ded25187a1..866321b0c84c5c61e8dec03b29fc3901554db3b6 100644 (file)
@@ -45,7 +45,6 @@ struct token;
                                                 \
     SCAN_TYPE(EXPECTED_QUOTE)                   \
     SCAN_TYPE(EXPECTED_EXPONENT)                \
-    SCAN_TYPE(UNEXPECTED_DOT)                   \
     SCAN_TYPE(UNEXPECTED_CHAR)                  \
                                                 \
     SCAN_TYPE(SKIP)
index d2aa391748f024ed32aeb51364e8cda1442dae60..62e9970bb1cff0a6709278303230b1f1dbbca0cd 100644 (file)
@@ -676,7 +676,6 @@ next_id_in_command (const struct segmenter *s, const char *input, size_t n,
         case SEG_END:
         case SEG_EXPECTED_QUOTE:
         case SEG_EXPECTED_EXPONENT:
-        case SEG_UNEXPECTED_DOT:
         case SEG_UNEXPECTED_CHAR:
           id[0] = '\0';
           return ofs + retval;
@@ -981,7 +980,7 @@ segmenter_parse_mid_command__ (struct segmenter *s,
           s->substate = SS_START_OF_COMMAND;
         }
       else
-        *type = SEG_UNEXPECTED_DOT;
+        *type = SEG_PUNCT;
       return 1;
 
     case '0': case '1': case '2': case '3': case '4':
index bb788f42306ad0ea8dbf3c99a468fda459381da4..02a269bdd2779b53a0f0bddd00e2641ddaf184b9 100644 (file)
@@ -91,7 +91,6 @@ enum segmenter_mode
                                                 \
     SEG_TYPE(EXPECTED_QUOTE)                    \
     SEG_TYPE(EXPECTED_EXPONENT)                 \
-    SEG_TYPE(UNEXPECTED_DOT)                    \
     SEG_TYPE(UNEXPECTED_CHAR)
 
 /* Types of segments. */
index 8438bfb26db62c026842980d0f93bd4dac847ecb..c572e5fd864ad8ac042aaf0c983f3b3bd425e066 100644 (file)
@@ -68,9 +68,7 @@ lexer.sps:8.1-8.70: error: Syntax error at `'very long unterminated string that
 
 lexer.sps:9.1-9.2: error: Syntax error at `1e': Missing exponent following `1e'.
 
-lexer.sps:9.4: error: Syntax error at `.': Unexpected `.' in middle of command.
-
-lexer.sps:9: error: Unknown command `x'.
+lexer.sps:9.4: error: Syntax error at `.': expecting command name.
 
 lexer.sps:10.1: error: Syntax error at `^': Bad character `^' in input.
 
@@ -89,8 +87,6 @@ lexer.sps:1: error: Unknown command `datA dist'.
 
 lexer.sps:2: error: LIST: LIST is allowed only after the active dataset has been defined.
 
-lexer.sps:2.5: error: LIST: Syntax error at `.': Unexpected `.' in middle of command.
-
 lexer.sps:2.6: error: LIST: Syntax error at `...': Bad character U+0000 in input.
 ])
 AT_CLEANUP
index 6df0caae9e3f99d029e0fed43868cbe5d62b967e..d263352d88f686adcfe9641feab967cf59c3c999 100644 (file)
@@ -49,7 +49,7 @@ MACRO_ID "!abcd"
 SKIP
 ID "#.#"
 SKIP
-UNEXPECTED_DOT
+MACRO_PUNCT "."
 ID "x"
 SKIP
 MACRO_PUNCT "_"