X-Git-Url: https://pintos-os.org/cgi-bin/gitweb.cgi?a=blobdiff_plain;f=src%2Flanguage%2Flexer%2Flexer.c;fp=src%2Flanguage%2Flexer%2Flexer.c;h=d36e41ea8d284c6fc3bf6bfb852299f1341be7e0;hb=11658379fb66972099b33f31980c3daf6005a4df;hp=8711470fb0687314f43f4ce8377cec571678edbc;hpb=5dd9df91d08bb0283bde1b82266a29f6e5e17bb3;p=pspp

diff --git a/src/language/lexer/lexer.c b/src/language/lexer/lexer.c
index 8711470fb0..d36e41ea8d 100644
--- a/src/language/lexer/lexer.c
+++ b/src/language/lexer/lexer.c
@@ -62,15 +62,40 @@ struct lex_token
     /* The regular token information. */
     struct token token;
 
-    /* Location of token in terms of the lex_source's buffer.
+    /* For a token obtained through the lexer in an ordinary way, this is the
+       location of the token in terms of the lex_source's buffer.
+
+       For a token produced through macro expansion, this is the entire macro
+       call.
+
        src->tail <= line_pos <= token_pos <= src->head. */
     size_t token_pos;           /* Start of token. */
     size_t token_len;           /* Length of source for token in bytes. */
     size_t line_pos;            /* Start of line containing token_pos. */
     int first_line;             /* Line number at token_pos. */
-    bool from_macro;
+
+    /* For a token obtained through macro expansion, this is just this token. */
+    char *macro_rep;            /* The whole macro expansion. */
+    size_t ofs;                 /* Offset of this token in macro_rep. */
+    size_t len;                 /* Length of this token in macro_rep. */
+    size_t *ref_cnt;            /* Number of lex_tokens that refer to macro_rep. */
   };
 
+static void
+lex_token_uninit (struct lex_token *t)
+{
+  token_uninit (&t->token);
+  if (t->ref_cnt)
+    {
+      assert (*t->ref_cnt > 0);
+      if (!--*t->ref_cnt)
+        {
+          free (t->macro_rep);
+          free (t->ref_cnt);
+        }
+    }
+}
+
 /* A source of tokens, corresponding to a syntax file.
 
    This is conceptually a lex_reader wrapped with everything needed to convert
@@ -114,8 +139,8 @@ struct lexer
   };
 
 static struct lex_source *lex_source__ (const struct lexer *);
-static struct substring lex_source_get_syntax__ (const struct lex_source *,
-                                                 int n0, int n1);
+static char *lex_source_get_syntax__ (const struct lex_source *,
+                                      int n0, int n1);
 static const struct lex_token *lex_next__ (const struct lexer *, int n);
 static void lex_source_push_endcmd__ (struct lex_source *);
 
@@ -216,20 +241,21 @@ lex_push_token__ (struct lex_source *src)
   token = &src->tokens[deque_push_front (&src->deque)];
   token->token = (struct token) { .type = T_STOP };
-  token->from_macro = false;
+  token->macro_rep = NULL;
+  token->ref_cnt = NULL;
 
   return token;
 }
 
 static void
 lex_source_pop__ (struct lex_source *src)
 {
-  token_uninit (&src->tokens[deque_pop_back (&src->deque)].token);
+  lex_token_uninit (&src->tokens[deque_pop_back (&src->deque)]);
 }
 
 static void
 lex_source_pop_front (struct lex_source *src)
 {
-  token_uninit (&src->tokens[deque_pop_front (&src->deque)].token);
+  lex_token_uninit (&src->tokens[deque_pop_front (&src->deque)]);
 }
 
 /* Advances LEXER to the next token, consuming the current token. */
@@ -957,7 +983,7 @@ lex_next_tokss (const struct lexer *lexer, int n)
   return lex_next (lexer, n)->string;
 }
 
-struct substring
+char *
 lex_next_representation (const struct lexer *lexer, int n0, int n1)
 {
   return lex_source_get_syntax__ (lex_source__ (lexer), n0, n1);
@@ -966,7 +992,7 @@ lex_next_representation (const struct lexer *lexer, int n0, int n1)
 bool
 lex_next_is_from_macro (const struct lexer *lexer, int n)
 {
-  return lex_next__ (lexer, n)->from_macro;
+  return lex_next__ (lexer, n)->macro_rep != NULL;
 }
 
 static bool
@@ -1343,23 +1369,46 @@ lex_source__ (const struct lexer *lexer)
           : ll_data (ll_head (&lexer->sources), struct lex_source, ll));
 }
 
-static struct substring
-lex_tokens_get_syntax__ (const struct lex_source *src,
-                         const struct lex_token *token0,
-                         const struct lex_token *token1)
+static char *
+lex_source_get_syntax__ (const struct lex_source *src, int n0, int n1)
 {
-  size_t start = token0->token_pos;
-  size_t end = token1->token_pos + token1->token_len;
+  struct string s = DS_EMPTY_INITIALIZER;
+  for (size_t i = n0; i <= n1; )
+    {
+      /* Find [I,J) as the longest sequence of tokens not produced by macro
+         expansion, or otherwise the longest sequence expanded from a single
+         macro call. */
+      const struct lex_token *first = lex_source_next__ (src, i);
+      size_t j;
+      for (j = i + 1; j <= n1; j++)
+        {
+          const struct lex_token *cur = lex_source_next__ (src, j);
+          if ((first->macro_rep != NULL) != (cur->macro_rep != NULL)
+              || first->macro_rep != cur->macro_rep)
+            break;
+        }
+      const struct lex_token *last = lex_source_next__ (src, j - 1);
 
-  return ss_buffer (&src->buffer[start - src->tail], end - start);
-}
+      if (!ds_is_empty (&s))
+        ds_put_byte (&s, ' ');
+      if (!first->macro_rep)
+        {
+          size_t start = first->token_pos;
+          size_t end = last->token_pos + last->token_len;
+          ds_put_substring (&s, ss_buffer (&src->buffer[start - src->tail],
+                                           end - start));
+        }
+      else
+        {
+          size_t start = first->ofs;
+          size_t end = last->ofs + last->len;
+          ds_put_substring (&s, ss_buffer (first->macro_rep + start,
+                                           end - start));
+        }
 
-static struct substring
-lex_source_get_syntax__ (const struct lex_source *src, int n0, int n1)
-{
-  return lex_tokens_get_syntax__ (src,
-                                  lex_source_next__ (src, n0),
-                                  lex_source_next__ (src, MAX (n0, n1)));
+      i = j;
+    }
+  return ds_steal_cstr (&s);
 }
 
 static void
@@ -1397,6 +1446,29 @@ lex_ellipsize__ (struct substring in, char *out, size_t out_size)
   strcpy (&out[out_len], out_len < in.length ? "..." : "");
 }
 
+static bool
+lex_source_contains_macro_call (struct lex_source *src, int n0, int n1)
+{
+  for (size_t i = n0; i <= n1; i++)
+    if (lex_source_next__ (src, i)->macro_rep)
+      return true;
+  return false;
+}
+
+static struct substring
+lex_source_get_macro_call (struct lex_source *src, int n0, int n1)
+{
+  if (!lex_source_contains_macro_call (src, n0, n1))
+    return ss_empty ();
+
+  const struct lex_token *token0 = lex_source_next__ (src, n0);
+  const struct lex_token *token1 = lex_source_next__ (src, MAX (n0, n1));
+  size_t start = token0->token_pos;
+  size_t end = token1->token_pos + token1->token_len;
+
+  return ss_buffer (&src->buffer[start - src->tail], end - start);
+}
+
 static void
 lex_source_error_valist (struct lex_source *src, int n0, int n1,
                          const char *format, va_list args)
@@ -1409,26 +1481,32 @@ lex_source_error_valist (struct lex_source *src, int n0, int n1,
   token = lex_source_next__ (src, n0);
   if (token->token.type == T_ENDCMD)
     ds_put_cstr (&s, _("Syntax error at end of command"));
-  else if (token->from_macro)
-    {
-      /* XXX this isn't ideal, we should get the actual syntax */
-      char *syntax = token_to_string (&token->token);
-      if (syntax)
-        ds_put_format (&s, _("Syntax error at `%s'"), syntax);
-      else
-        ds_put_cstr (&s, _("Syntax error"));
-      free (syntax);
-    }
   else
     {
-      struct substring syntax = lex_source_get_syntax__ (src, n0, n1);
-      if (!ss_is_empty (syntax))
-        {
-          char syntax_cstr[64];
+      /* Get the syntax that caused the error. */
+      char *syntax = lex_source_get_syntax__ (src, n0, n1);
+      char syntax_cstr[64];
+      lex_ellipsize__ (ss_cstr (syntax), syntax_cstr, sizeof syntax_cstr);
+      free (syntax);
+
+      /* Get the macro call(s) that expanded to the syntax that caused the
+         error. */
+      char call_cstr[64];
+      struct substring call = lex_source_get_macro_call (src, n0, n1);
+      lex_ellipsize__ (call, call_cstr, sizeof call_cstr);
 
-          lex_ellipsize__ (syntax, syntax_cstr, sizeof syntax_cstr);
-          ds_put_format (&s, _("Syntax error at `%s'"), syntax_cstr);
+      if (syntax_cstr[0])
+        {
+          if (call_cstr[0])
+            ds_put_format (&s, _("Syntax error at `%s' "
                                 "(in expansion of `%s')"),
+                           syntax_cstr, call_cstr);
+          else
+            ds_put_format (&s, _("Syntax error at `%s'"), syntax_cstr);
         }
+      else if (call_cstr[0])
+        ds_put_format (&s, _("Syntax error in syntax expanded from `%s'"),
+                       call_cstr);
       else
         ds_put_cstr (&s, _("Syntax error"));
     }
@@ -1700,9 +1778,12 @@ lex_source_get (const struct lex_source *src_)
         }
 
       const struct lex_token *front = lex_source_front (src);
+      size_t start = front->token_pos;
+      size_t end = front->token_pos + front->token_len;
       const struct macro_token mt = {
         .token = front->token,
-        .representation = lex_tokens_get_syntax__ (src, front, front)
+        .representation = ss_buffer (&src->buffer[start - src->tail],
+                                     end - start),
       };
       retval = macro_expander_add (me, &mt);
     }
@@ -1715,6 +1796,12 @@ lex_source_get (const struct lex_source *src_)
     }
 
   /* XXX handle case where the macro invocation doesn't use all the tokens */
+  const struct lex_token *call_first = lex_source_next__ (src, old_count);
+  const struct lex_token *call_last = lex_source_front (src);
+  size_t call_pos = call_first->token_pos;
+  size_t call_len = (call_last->token_pos + call_last->token_len) - call_pos;
+  size_t line_pos = call_first->line_pos;
+  int first_line = call_first->first_line;
   while (deque_count (&src->deque) > old_count)
     lex_source_pop_front (src);
 
@@ -1722,26 +1809,37 @@ lex_source_get (const struct lex_source *src_)
   macro_expander_get_expansion (me, &expansion);
   macro_expander_destroy (me);
 
+  size_t *ofs = xnmalloc (expansion.n, sizeof *ofs);
+  size_t *len = xnmalloc (expansion.n, sizeof *len);
+  struct string s = DS_EMPTY_INITIALIZER;
+  macro_tokens_to_representation (&expansion, &s, ofs, len);
+
   if (settings_get_mprint ())
-    {
-      struct string mprint = DS_EMPTY_INITIALIZER;
-      macro_tokens_to_representation (&expansion, &mprint);
-      output_item_submit (text_item_create (TEXT_ITEM_LOG, ds_cstr (&mprint),
-                                            _("Macro Expansion")));
-      ds_destroy (&mprint);
-    }
+    output_item_submit (text_item_create (TEXT_ITEM_LOG, ds_cstr (&s),
+                                          _("Macro Expansion")));
 
+  char *macro_rep = ds_steal_cstr (&s);
+  size_t *ref_cnt = xmalloc (sizeof *ref_cnt);
+  *ref_cnt = expansion.n;
   for (size_t i = 0; i < expansion.n; i++)
     {
       *lex_push_token__ (src) = (struct lex_token) {
         .token = expansion.mts[i].token,
-        .from_macro = true,
-        /* XXX the rest */
+        .token_pos = call_pos,
+        .token_len = call_len,
+        .line_pos = line_pos,
+        .first_line = first_line,
+        .macro_rep = macro_rep,
+        .ofs = ofs[i],
+        .len = len[i],
+        .ref_cnt = ref_cnt,
      };
-      ss_dealloc (&expansion.mts[i].representation); /* XXX should feed into lexer */
+      ss_dealloc (&expansion.mts[i].representation);
    }
   free (expansion.mts);
+  free (ofs);
+  free (len);
 
   return true;
 }
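
Note on the API change above: lex_next_representation() (via lex_source_get_syntax__()) now returns a heap-allocated string, possibly reassembled from a macro expansion, instead of a substring borrowed from the source buffer, so the caller owns the result and must free it. A minimal caller sketch under that assumption; debug_dump_syntax() below is a hypothetical helper, not part of this patch, and the lexer.h include path follows the usual PSPP layout.

#include <stdio.h>
#include <stdlib.h>

#include "language/lexer/lexer.h"

/* Hypothetical helper: print the syntax for tokens N0...N1 relative to the
   current token.  With this change the string may come straight from the
   syntax file or from a macro expansion, and it is malloc'd, so the caller
   frees it rather than borrowing a substring of the source buffer. */
static void
debug_dump_syntax (const struct lexer *lexer, int n0, int n1)
{
  char *syntax = lex_next_representation (lexer, n0, n1);
  printf ("syntax: %s\n", syntax);
  free (syntax);
}

Inside the lexer the same ownership question is handled with the new ref_cnt field: all tokens produced by one macro call share a single macro_rep buffer, and lex_token_uninit() frees it only when the last referring token is destroyed.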