+/* Tokenizes the body text of macro M, appending the resulting tokens to
+   M->body_tokens.  LEXER is consulted only for its syntax mode, which
+   configures the segmenter.
+
+   Lexing is a two-stage pipeline: the segmenter splits the raw text into
+   typed segments, and the scanner assembles segments into tokens.  The
+   scanner may consume segments speculatively; SCAN_SAVE marks a position
+   it might need to return to, and SCAN_BACK rewinds to that mark.  */
+static void
+macro_tokenize (struct macro *m, struct lexer *lexer)
+{
+ /* Bundles the segmenter with the not-yet-consumed text, so that saving
+    and restoring a scan position is a single struct assignment.  */
+ struct state
+ {
+ struct segmenter segmenter;
+ struct substring body;
+ };
+
+ struct state state = {
+ .segmenter = SEGMENTER_INIT (lex_get_syntax_mode (lexer)),
+ .body = m->body,
+ };
+ /* Backtracking mark for SCAN_BACK; initially the start of the body.  */
+ struct state saved = state;
+
+ struct token token = { .type = T_STOP };
+
+ while (state.body.length > 0)
+ {
+ /* A fresh scanner per token; 'token' receives the scanned result.  */
+ struct scanner scanner;
+ scanner_init (&scanner, &token);
+
+ /* Feed segments to the scanner until it produces one complete token.  */
+ for (;;)
+ {
+ enum segment_type type;
+ /* 'true' here presumably means the text is complete (no more input
+    follows the buffer) — confirm against segmenter_push()'s contract.  */
+ int seg_len = segmenter_push (&state.segmenter, state.body.string,
+ state.body.length, true, &type);
+ /* With the full body supplied, the segmenter should never need more
+    input, so a negative (incomplete) result would be a logic error.  */
+ assert (seg_len >= 0);
+
+ struct substring segment = ss_head (state.body, seg_len);
+ ss_advance (&state.body, seg_len);
+
+ enum scan_result result = scanner_push (&scanner, type, segment, &token);
+ if (result == SCAN_SAVE)
+ saved = state;
+ else if (result == SCAN_BACK)
+ {
+ /* Speculation failed: rewind segmenter and input to the mark.  */
+ state = saved;
+ break;
+ }
+ else if (result == SCAN_DONE)
+ break;
+ }
+
+ /* We have a token in 'token'.  Scan-level pseudo-token types (errors
+    and skippable material) are filtered out here; only real tokens are
+    added to the macro body.  */
+ if (is_scan_type (token.type))
+ {
+ if (token.type != SCAN_SKIP)
+ {
+ /* XXX report error */
+ }
+ }
+ else
+ tokens_add (&m->body_tokens, &token);
+ /* NOTE(review): assumes tokens_add() copies 'token' (or takes
+    ownership of nothing we still hold), since we destroy it in both
+    branches — verify against tokens_add()'s contract.  */
+ token_destroy (&token);
+ }
+}
+