  return true;
}
-static void
-macro_tokenize (struct macro *m, const struct substring body, struct lexer *lexer)
-{
-  struct state
-    {
-      struct segmenter segmenter;
-      struct substring body;
-    };
-
-  struct state state = {
-    .segmenter = SEGMENTER_INIT (lex_get_syntax_mode (lexer)),
-    .body = body,
-  };
-  struct state saved = state;
-
-  while (state.body.length > 0)
-    {
-      struct macro_token mt = {
-        .token = { .type = T_STOP },
-        .representation = { .string = state.body.string },
-      };
-      struct token *token = &mt.token;
-
-      struct scanner scanner;
-      scanner_init (&scanner, token);
-
-      for (;;)
-        {
-          enum segment_type type;
-          int seg_len = segmenter_push (&state.segmenter, state.body.string,
-                                        state.body.length, true, &type);
-          assert (seg_len >= 0);
-
-          struct substring segment = ss_head (state.body, seg_len);
-          ss_advance (&state.body, seg_len);
-
-          enum scan_result result = scanner_push (&scanner, type, segment, token);
-          if (result == SCAN_SAVE)
-            saved = state;
-          else if (result == SCAN_BACK)
-            {
-              state = saved;
-              break;
-            }
-          else if (result == SCAN_DONE)
-            break;
-        }
-
-      /* We have a token in 'token'. */
-      if (is_scan_type (token->type))
-        {
-          if (token->type != SCAN_SKIP)
-            {
-              /* XXX report error */
-            }
-        }
-      else
-        {
-          mt.representation.length = state.body.string - mt.representation.string;
-          macro_tokens_add (&m->body, &mt);
-        }
-      token_uninit (token);
-    }
-}
-
int
cmd_define (struct lexer *lexer, struct dataset *ds UNUSED)
{
      lex_get (lexer);
    }
-  macro_tokenize (m, body.ss, lexer);
+  macro_tokens_from_string (&m->body, body.ss, lex_get_syntax_mode (lexer));
  ds_destroy (&body);
  lex_define_macro (lexer, m);
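
For context, a minimal sketch of the replacement entry point, with the signature inferred from the new call site above: the struct macro * parameter of macro_tokenize() becomes an explicit struct macro_tokens * destination, and the struct lexer *, which the old code used only for lex_get_syntax_mode(), becomes a segment-mode argument, so the tokenizer no longer needs a live lexer at all. The body below is just the deleted function adapted to that assumed signature; the real definition belongs in macro.c and may differ in detail.

/* Sketch only: the signature (including the 'enum segment_mode' parameter
   type) is inferred from the call site, not copied from macro.c. */
void
macro_tokens_from_string (struct macro_tokens *mts, const struct substring src,
                          enum segment_mode mode)
{
  struct state
    {
      struct segmenter segmenter;
      struct substring body;
    };

  struct state state = {
    .segmenter = SEGMENTER_INIT (mode),
    .body = src,
  };
  struct state saved = state;

  while (state.body.length > 0)
    {
      struct macro_token mt = {
        .token = { .type = T_STOP },
        .representation = { .string = state.body.string },
      };
      struct token *token = &mt.token;

      struct scanner scanner;
      scanner_init (&scanner, token);

      for (;;)
        {
          /* Feed the next segment of the body into the scanner until it
             completes (or rewinds) a token. */
          enum segment_type type;
          int seg_len = segmenter_push (&state.segmenter, state.body.string,
                                        state.body.length, true, &type);
          assert (seg_len >= 0);

          struct substring segment = ss_head (state.body, seg_len);
          ss_advance (&state.body, seg_len);

          enum scan_result result = scanner_push (&scanner, type, segment, token);
          if (result == SCAN_SAVE)
            saved = state;
          else if (result == SCAN_BACK)
            {
              state = saved;
              break;
            }
          else if (result == SCAN_DONE)
            break;
        }

      if (is_scan_type (token->type))
        {
          if (token->type != SCAN_SKIP)
            {
              /* XXX report error */
            }
        }
      else
        {
          /* 'token' is complete: record the raw text it came from and
             append it to the caller's list. */
          mt.representation.length = state.body.string - mt.representation.string;
          macro_tokens_add (mts, &mt);
        }
      token_uninit (token);
    }
}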