macro_token_copy (struct macro_token *dst, const struct macro_token *src)
{
token_copy (&dst->token, &src->token);
- ss_alloc_substring (&dst->syntax, src->syntax);
+ dst->syntax = ss_clone (src->syntax);
}
void
macro_token_copy (macro_tokens_add_uninit (mts), mt);
}
-/* Tokenizes SRC according to MODE and appends the tokens to MTS. Uses STACK,
- if nonull, for error reporting. */
+/* Tokenizes SRC according to MODE and appends the tokens to MTS, using STACK
+ for error reporting. */
static void
-macro_tokens_from_string__ (struct macro_tokens *mts, const struct substring src,
- enum segmenter_mode mode,
- const struct macro_expansion_stack *stack)
+macro_tokens_from_string (struct macro_tokens *mts, const struct substring src,
+ enum segmenter_mode mode,
+ const struct macro_expansion_stack *stack)
{
struct segmenter segmenter = segmenter_init (mode, true);
struct substring body = src;
while (body.length > 0)
{
- struct macro_token mt = {
- .token = { .type = T_STOP },
- .syntax = { .string = body.string },
- };
- struct token *token = &mt.token;
-
enum segment_type type;
int seg_len = segmenter_push (&segmenter, body.string,
body.length, true, &type);
assert (seg_len >= 0);
- struct substring segment = ss_head (body, seg_len);
- enum tokenize_result result = token_from_segment (type, segment, token);
+ struct macro_token mt = {
+ .token = { .type = T_STOP },
+ .syntax = ss_head (body, seg_len),
+ };
+ enum tokenize_result result
+ = token_from_segment (type, mt.syntax, &mt.token);
ss_advance (&body, seg_len);
switch (result)
break;
case TOKENIZE_TOKEN:
- mt.syntax.length = body.string - mt.syntax.string;
macro_tokens_add (mts, &mt);
break;
case TOKENIZE_ERROR:
- mt.syntax.length = body.string - mt.syntax.string;
- macro_error (stack, &mt, "%s", token->string.string);
+ macro_error (stack, &mt, "%s", mt.token.string.string);
break;
}
- token_uninit (token);
+ token_uninit (&mt.token);
}
}
-/* Tokenizes SRC according to MODE and appends the tokens to MTS. */
-void
-macro_tokens_from_string (struct macro_tokens *mts, const struct substring src,
- enum segmenter_mode mode)
-{
- macro_tokens_from_string__ (mts, src, mode, NULL);
-}
-
void
macro_tokens_print (const struct macro_tokens *mts, FILE *stream)
{
case T_RPAREN:
case T_LBRACK:
case T_RBRACK:
+ case T_LCURLY:
+ case T_RCURLY:
return TC_PUNCT;
case T_PLUS:
case T_ASTERISK:
case T_SLASH:
case T_EQUALS:
+ case T_COLON:
case T_AND:
case T_OR:
case T_NOT:
return TC_BINOP;
case T_COMMA:
+ case T_SEMICOLON:
return TC_COMMA;
}
/* If TOKEN is the first token of a call to a macro in MACROS, creates a new
   macro expander and initializes *MCP to it. Returns 0 if more tokens are needed
and should be added via macro_call_add() or 1 if the caller should next call
- macro_call_get_expansion().
+ macro_call_expand().
If TOKEN is not the first token of a macro call, returns -1 and sets *MCP to
NULL. */
Returns a positive number to indicate that the returned number of tokens
invoke a macro. The number returned might be less than the number of tokens
added because it can take a few tokens of lookahead to determine whether the
- macro invocation is finished. The caller should call
- macro_call_get_expansion() to obtain the expansion. */
+ macro invocation is finished. The caller should call macro_call_expand() to
+ obtain the expansion. */
int
macro_call_add (struct macro_call *mc, const struct macro_token *mt,
const struct msg_location *loc)
struct string_array args = STRING_ARRAY_INITIALIZER;
size_t n_consumed = parse_function_args (me, input, n_input, mf->name, &args);
if (!n_consumed)
- return 0;
+ {
+ string_array_destroy (&args);
+ return 0;
+ }
if (args.n < mf->min_args || args.n > mf->max_args)
{
mf->name);
else
NOT_REACHED ();
+ string_array_destroy (&args);
return 0;
}
me->segmenter_mode, &tmp);
struct macro_tokens mts = { .n = 0 };
- macro_tokens_from_string__ (&mts, ss_cstr (s), me->segmenter_mode,
- me->stack);
+ macro_tokens_from_string (&mts, ss_cstr (s), me->segmenter_mode,
+ me->stack);
if (mts.n > 0)
ds_put_substring (output, mts.mts[0].syntax);
macro_tokens_uninit (&mts);
me->segmenter_mode, &tmp);
struct macro_tokens mts = { .n = 0 };
- macro_tokens_from_string__ (&mts, ss_cstr (s), me->segmenter_mode,
- me->stack);
+ macro_tokens_from_string (&mts, ss_cstr (s), me->segmenter_mode,
+ me->stack);
if (mts.n > 1)
{
struct macro_tokens tail = { .mts = mts.mts + 1, .n = mts.n - 1 };
case MF_EVAL:
{
struct macro_tokens mts = { .n = 0 };
- macro_tokens_from_string__ (&mts, ss_cstr (args.strings[0]),
- me->segmenter_mode, me->stack);
+ macro_tokens_from_string (&mts, ss_cstr (args.strings[0]),
+ me->segmenter_mode, me->stack);
struct macro_tokens exp = { .n = 0 };
struct macro_expansion_stack stack = {
.name = "!EVAL",
return false;
struct macro_tokens mts = { .n = 0 };
- macro_tokens_from_string__ (&mts, ss_cstr (s), me->segmenter_mode, me->stack);
+ macro_tokens_from_string (&mts, ss_cstr (s), me->segmenter_mode, me->stack);
if (mts.n != 1 || !token_is_number (&mts.mts[0].token))
{
macro_error (me->stack, mts.n > 0 ? &mts.mts[0] : NULL,
return 0;
struct macro_tokens items = { .n = 0 };
- macro_tokens_from_string__ (&items, ss_cstr (list), me->segmenter_mode,
- me->stack);
+ macro_tokens_from_string (&items, ss_cstr (list), me->segmenter_mode,
+ me->stack);
free (list);
const struct macro_token *do_end = find_doend (subme.stack, p, end);
macro_expand (p, do_end - p, &subme, exp);
}
+ macro_tokens_uninit (&items);
return do_end - tokens + 1;
}
else if (p < end && p->token.type == T_EQUALS)
token->string.length);
if (var)
{
- macro_tokens_from_string__ (exp, ss_cstr (var),
- me->segmenter_mode, me->stack);
+ macro_tokens_from_string (exp, ss_cstr (var),
+ me->segmenter_mode, me->stack);
return true;
}
size_t n_function = expand_macro_function (me, mts, n, &function_output);
if (n_function)
{
- macro_tokens_from_string__ (exp, function_output.ss,
- me->segmenter_mode, me->stack);
+ macro_tokens_from_string (exp, function_output.ss,
+ me->segmenter_mode, me->stack);
ds_destroy (&function_output);
return n_function;