/* The regular token information. */
struct token token;
- /* Location of token in terms of the lex_source's buffer.
+ /* For a token obtained through the lexer in an ordinary way, this is the
+ location of the token in terms of the lex_source's buffer.
+
+ For a token produced through macro expansion, this is the entire macro
+ call.
+
src->tail <= line_pos <= token_pos <= src->head. */
size_t token_pos; /* Start of token. */
size_t token_len; /* Length of source for token in bytes. */
size_t line_pos; /* Start of line containing token_pos. */
int first_line; /* Line number at token_pos. */
- bool from_macro;
+
+ /* For a token obtained through macro expansion, these identify this
+ token's text within the macro expansion below. */
+ char *macro_rep; /* The whole macro expansion. */
+ size_t ofs; /* Offset of this token in macro_rep. */
+ size_t len; /* Length of this token in macro_rep. */
+ size_t *ref_cnt; /* Number of lex_tokens that refer to macro_rep. */
};
+/* Releases the resources owned by token T: its token value, plus T's shared
+   reference to the macro representation, if T came from macro expansion.
+   macro_rep is reference-counted because every token produced by one macro
+   expansion shares the same buffer; the last release frees the buffer and
+   the count itself. */
+static void
+lex_token_uninit (struct lex_token *t)
+{
+ token_uninit (&t->token);
+ if (t->ref_cnt)
+ {
+ assert (*t->ref_cnt > 0);
+ if (!--*t->ref_cnt)
+ {
+ free (t->macro_rep);
+ free (t->ref_cnt);
+ }
+ }
+}
+
/* A source of tokens, corresponding to a syntax file.
This is conceptually a lex_reader wrapped with everything needed to convert
};
static struct lex_source *lex_source__ (const struct lexer *);
-static struct substring lex_source_get_syntax__ (const struct lex_source *,
- int n0, int n1);
+static char *lex_source_get_syntax__ (const struct lex_source *,
+ int n0, int n1);
static const struct lex_token *lex_next__ (const struct lexer *, int n);
static void lex_source_push_endcmd__ (struct lex_source *);
token = &src->tokens[deque_push_front (&src->deque)];
token->token = (struct token) { .type = T_STOP };
- token->from_macro = false;
+ token->macro_rep = NULL;
+ token->ref_cnt = NULL;
return token;
}
+/* Removes the token at the back of SRC's deque, releasing the token value
+   and (via lex_token_uninit) any macro-expansion data it references. */
static void
lex_source_pop__ (struct lex_source *src)
{
- token_uninit (&src->tokens[deque_pop_back (&src->deque)].token);
+ lex_token_uninit (&src->tokens[deque_pop_back (&src->deque)]);
}
+/* Removes the token at the front of SRC's deque, releasing the token value
+   and (via lex_token_uninit) any macro-expansion data it references. */
static void
lex_source_pop_front (struct lex_source *src)
{
- token_uninit (&src->tokens[deque_pop_front (&src->deque)].token);
+ lex_token_uninit (&src->tokens[deque_pop_front (&src->deque)]);
}
/* Advances LEXER to the next token, consuming the current token. */
return lex_next (lexer, n)->string;
}
-struct substring
+char *
lex_next_representation (const struct lexer *lexer, int n0, int n1)
{
return lex_source_get_syntax__ (lex_source__ (lexer), n0, n1);
+/* Returns true if token N in LEXER's lookahead was produced by macro
+   expansion (i.e. it carries a macro representation), false otherwise. */
bool
lex_next_is_from_macro (const struct lexer *lexer, int n)
{
- return lex_next__ (lexer, n)->from_macro;
+ return lex_next__ (lexer, n)->macro_rep != NULL;
}
static bool
: ll_data (ll_head (&lexer->sources), struct lex_source, ll));
}
-static struct substring
-lex_tokens_get_syntax__ (const struct lex_source *src,
- const struct lex_token *token0,
- const struct lex_token *token1)
+/* Returns a malloc'd null-terminated string containing the syntax for tokens
+   N0 through N1 inclusive in SRC.  Ordinary tokens contribute their text from
+   SRC's buffer; macro-expanded tokens contribute their text from the macro
+   representation they share.  Runs from different origins are joined with a
+   single space.  The caller is responsible for freeing the result. */
+static char *
+lex_source_get_syntax__ (const struct lex_source *src, int n0, int n1)
{
- size_t start = token0->token_pos;
- size_t end = token1->token_pos + token1->token_len;
+ struct string s = DS_EMPTY_INITIALIZER;
+ for (size_t i = n0; i <= n1; )
+ {
+ /* Find [I,J) as the longest sequence of tokens not produced by macro
+ expansion, or otherwise the longest sequence expanded from a single
+ macro call. */
+ const struct lex_token *first = lex_source_next__ (src, i);
+ size_t j;
+ for (j = i + 1; j <= n1; j++)
+ {
+ const struct lex_token *cur = lex_source_next__ (src, j);
+ /* NOTE(review): the first comparison is subsumed by the pointer
+ inequality below (two null pointers always compare equal). */
+ if ((first->macro_rep != NULL) != (cur->macro_rep != NULL)
+ || first->macro_rep != cur->macro_rep)
+ break;
+ }
+ const struct lex_token *last = lex_source_next__ (src, j - 1);
- return ss_buffer (&src->buffer[start - src->tail], end - start);
-}
+ /* Separate runs from different origins with a space. */
+ if (!ds_is_empty (&s))
+ ds_put_byte (&s, ' ');
+ if (!first->macro_rep)
+ {
+ /* Ordinary tokens: copy the raw source text of the run. */
+ size_t start = first->token_pos;
+ size_t end = last->token_pos + last->token_len;
+ ds_put_substring (&s, ss_buffer (&src->buffer[start - src->tail],
+ end - start));
+ }
+ else
+ {
+ /* Macro-expanded tokens: copy this run's slice of the shared
+ macro representation. */
+ size_t start = first->ofs;
+ size_t end = last->ofs + last->len;
+ ds_put_substring (&s, ss_buffer (first->macro_rep + start,
+ end - start));
+ }
-static struct substring
-lex_source_get_syntax__ (const struct lex_source *src, int n0, int n1)
-{
- return lex_tokens_get_syntax__ (src,
- lex_source_next__ (src, n0),
- lex_source_next__ (src, MAX (n0, n1)));
+ i = j;
+ }
+ return ds_steal_cstr (&s);
}
static void
strcpy (&out[out_len], out_len < in.length ? "..." : "");
}
+/* Returns true if at least one of tokens N0 through N1 inclusive in SRC was
+   produced by macro expansion. */
+static bool
+lex_source_contains_macro_call (struct lex_source *src, int n0, int n1)
+{
+ for (size_t i = n0; i <= n1; i++)
+ if (lex_source_next__ (src, i)->macro_rep)
+ return true;
+ return false;
+}
+
+/* If any of tokens N0 through N1 in SRC came from macro expansion, returns
+   the raw syntax of the macro call(s) (not the expansion) as a substring of
+   SRC's buffer; otherwise returns an empty substring.  (For a token produced
+   by macro expansion, token_pos and token_len cover the entire macro call,
+   per the struct lex_token comment.) */
+static struct substring
+lex_source_get_macro_call (struct lex_source *src, int n0, int n1)
+{
+ if (!lex_source_contains_macro_call (src, n0, n1))
+ return ss_empty ();
+
+ const struct lex_token *token0 = lex_source_next__ (src, n0);
+ const struct lex_token *token1 = lex_source_next__ (src, MAX (n0, n1));
+ size_t start = token0->token_pos;
+ size_t end = token1->token_pos + token1->token_len;
+
+ return ss_buffer (&src->buffer[start - src->tail], end - start);
+}
+
static void
lex_source_error_valist (struct lex_source *src, int n0, int n1,
const char *format, va_list args)
token = lex_source_next__ (src, n0);
if (token->token.type == T_ENDCMD)
ds_put_cstr (&s, _("Syntax error at end of command"));
- else if (token->from_macro)
- {
- /* XXX this isn't ideal, we should get the actual syntax */
- char *syntax = token_to_string (&token->token);
- if (syntax)
- ds_put_format (&s, _("Syntax error at `%s'"), syntax);
- else
- ds_put_cstr (&s, _("Syntax error"));
- free (syntax);
- }
else
{
- struct substring syntax = lex_source_get_syntax__ (src, n0, n1);
- if (!ss_is_empty (syntax))
- {
- char syntax_cstr[64];
+ /* Get the syntax that caused the error. */
+ char *syntax = lex_source_get_syntax__ (src, n0, n1);
+ char syntax_cstr[64];
+ lex_ellipsize__ (ss_cstr (syntax), syntax_cstr, sizeof syntax_cstr);
+ free (syntax);
+
+ /* Get the macro call(s) that expanded to the syntax that caused the
+ error. */
+ char call_cstr[64];
+ struct substring call = lex_source_get_macro_call (src, n0, n1);
+ lex_ellipsize__ (call, call_cstr, sizeof call_cstr);
- lex_ellipsize__ (syntax, syntax_cstr, sizeof syntax_cstr);
- ds_put_format (&s, _("Syntax error at `%s'"), syntax_cstr);
+ if (syntax_cstr[0])
+ {
+ if (call_cstr[0])
+ ds_put_format (&s, _("Syntax error at `%s' "
+ "(in expansion of `%s')"),
+ syntax_cstr, call_cstr);
+ else
+ ds_put_format (&s, _("Syntax error at `%s'"), syntax_cstr);
}
+ else if (call_cstr[0])
+ ds_put_format (&s, _("Syntax error in syntax expanded from `%s'"),
+ call_cstr);
else
ds_put_cstr (&s, _("Syntax error"));
}
}
const struct lex_token *front = lex_source_front (src);
+ size_t start = front->token_pos;
+ size_t end = front->token_pos + front->token_len;
const struct macro_token mt = {
.token = front->token,
- .representation = lex_tokens_get_syntax__ (src, front, front)
+ .representation = ss_buffer (&src->buffer[start - src->tail],
+ end - start),
};
retval = macro_expander_add (me, &mt);
}
}
/* XXX handle case where the macro invocation doesn't use all the tokens */
+ const struct lex_token *call_first = lex_source_next__ (src, old_count);
+ const struct lex_token *call_last = lex_source_front (src);
+ size_t call_pos = call_first->token_pos;
+ size_t call_len = (call_last->token_pos + call_last->token_len) - call_pos;
+ size_t line_pos = call_first->line_pos;
+ int first_line = call_first->first_line;
while (deque_count (&src->deque) > old_count)
lex_source_pop_front (src);
macro_expander_get_expansion (me, &expansion);
macro_expander_destroy (me);
+ size_t *ofs = xnmalloc (expansion.n, sizeof *ofs);
+ size_t *len = xnmalloc (expansion.n, sizeof *len);
+ struct string s = DS_EMPTY_INITIALIZER;
+ macro_tokens_to_representation (&expansion, &s, ofs, len);
+
if (settings_get_mprint ())
- {
- struct string mprint = DS_EMPTY_INITIALIZER;
- macro_tokens_to_representation (&expansion, &mprint);
- output_item_submit (text_item_create (TEXT_ITEM_LOG, ds_cstr (&mprint),
- _("Macro Expansion")));
- ds_destroy (&mprint);
- }
+ output_item_submit (text_item_create (TEXT_ITEM_LOG, ds_cstr (&s),
+ _("Macro Expansion")));
+ char *macro_rep = ds_steal_cstr (&s);
+ size_t *ref_cnt = xmalloc (sizeof *ref_cnt);
+ *ref_cnt = expansion.n;
for (size_t i = 0; i < expansion.n; i++)
{
*lex_push_token__ (src) = (struct lex_token) {
.token = expansion.mts[i].token,
- .from_macro = true,
- /* XXX the rest */
+ .token_pos = call_pos,
+ .token_len = call_len,
+ .line_pos = line_pos,
+ .first_line = first_line,
+ .macro_rep = macro_rep,
+ .ofs = ofs[i],
+ .len = len[i],
+ .ref_cnt = ref_cnt,
};
- ss_dealloc (&expansion.mts[i].representation); /* XXX should feed into lexer */
+ ss_dealloc (&expansion.mts[i].representation);
}
free (expansion.mts);
+ free (ofs);
+ free (len);
return true;
}
}
+/* Appends a syntax representation of the tokens in MTS to S, inserting a
+   space or new-line between adjacent tokens where one is needed.  OFS and
+   LEN must be both null or both nonnull; when nonnull, each must have room
+   for MTS->n elements, and on return OFS[i] and LEN[i] give the offset and
+   length, in bytes, of token i's representation within S. */
void
-macro_tokens_to_representation (struct macro_tokens *mts, struct string *s)
+macro_tokens_to_representation (struct macro_tokens *mts, struct string *s,
+ size_t *ofs, size_t *len)
{
+ assert ((ofs != NULL) == (len != NULL));
+
if (!mts->n)
return;
- macro_token_to_representation (&mts->mts[0], s);
- for (size_t i = 1; i < mts->n; i++)
+ for (size_t i = 0; i < mts->n; i++)
{
- enum token_type prev = mts->mts[i - 1].token.type;
- enum token_type next = mts->mts[i].token.type;
-
- if (prev == T_ENDCMD)
- ds_put_byte (s, '\n');
- else
+ if (i > 0)
{
- enum token_class pc = classify_token (prev);
- enum token_class nc = classify_token (next);
- if (needs_space (pc, nc))
- ds_put_byte (s, ' ');
+ enum token_type prev = mts->mts[i - 1].token.type;
+ enum token_type next = mts->mts[i].token.type;
+
+ /* A command terminator forces a line break; otherwise insert a
+ space only when the token classes require one. */
+ if (prev == T_ENDCMD)
+ ds_put_byte (s, '\n');
+ else
+ {
+ enum token_class pc = classify_token (prev);
+ enum token_class nc = classify_token (next);
+ if (needs_space (pc, nc))
+ ds_put_byte (s, ' ');
+ }
}
+ if (ofs)
+ ofs[i] = s->ss.length;
macro_token_to_representation (&mts->mts[i], s);
+ if (len)
+ len[i] = s->ss.length - ofs[i];
}
}
if (mts.n > 1)
{
struct macro_tokens tail = { .mts = mts.mts + 1, .n = mts.n - 1 };
- macro_tokens_to_representation (&tail, output);
+ macro_tokens_to_representation (&tail, output, NULL, NULL);
}
macro_tokens_uninit (&mts);
ds_destroy (&tmp);
struct macro_tokens exp = { .n = 0 };
macro_expand (&mts, ctx->nesting_countdown - 1, ctx->macros, ctx->me,
ctx->vars, ctx->expand, NULL, &exp);
- macro_tokens_to_representation (&exp, output);
+ macro_tokens_to_representation (&exp, output, NULL, NULL);
macro_tokens_uninit (&exp);
macro_tokens_uninit (&mts);
}