size_t *ref_cnt; /* Number of lex_tokens that refer to macro_rep. */
};
+static struct msg_point lex_token_start_point (const struct lex_source *,
+ const struct lex_token *);
+static struct msg_point lex_token_end_point (const struct lex_source *,
+ const struct lex_token *);
+
+/* Source offset of the last byte in TOKEN. */
+/* The MAX (token_len, 1) guards the zero-length case: a token with
+ token_len == 0 still reports its own starting byte instead of
+ underflowing to token_pos - 1. */
+static size_t
+lex_token_end (const struct lex_token *token)
+{
+ return token->token_pos + MAX (token->token_len, 1) - 1;
+}
+
static void
lex_token_destroy (struct lex_token *t)
{
struct lex_source
{
struct ll ll; /* In lexer's list of sources. */
+
+ /* Reference count:
+
+ - One for struct lexer.
+
+ - One for each struct msg_location that references this source. */
+ size_t n_refs;
+
struct lex_reader *reader;
struct lexer *lexer;
struct segmenter segmenter;
static struct lex_source *lex_source_create (struct lexer *,
struct lex_reader *);
-static void lex_source_destroy (struct lex_source *);
/* Lexer. */
struct lexer
struct lex_source *source, *next;
ll_for_each_safe (source, next, struct lex_source, ll, &lexer->sources)
- lex_source_destroy (source);
+ {
+ ll_remove (&source->ll);
+ lex_source_unref (source);
+ }
macro_set_destroy (lexer->macros);
free (lexer);
}
while (src->parse_ofs == src->n_parse)
if (!lex_source_get_parse (src))
{
- lex_source_destroy (src);
+ ll_remove (&src->ll);
+ lex_source_unref (src);
src = lex_source__ (lexer);
if (src == NULL)
return;
}
static const struct lex_token *
-lex_source_next__ (const struct lex_source *src_, int n)
+lex_source_ofs__ (const struct lex_source *src_, int ofs)
{
struct lex_source *src = CONST_CAST (struct lex_source *, src_);
- if (n < 0)
+ if (ofs < 0)
{
- if (-n <= src->parse_ofs)
- return src->parse[src->parse_ofs - (-n)];
- else
- {
- static const struct lex_token endcmd_token
- = { .token = { .type = T_ENDCMD } };
- return &endcmd_token;
- }
+ static const struct lex_token endcmd_token
+ = { .token = { .type = T_ENDCMD } };
+ return &endcmd_token;
}
- while (src->n_parse - src->parse_ofs <= n)
+ while (ofs >= src->n_parse)
{
if (src->n_parse > 0)
{
lex_source_get_parse (src);
}
- return src->parse[src->parse_ofs + n];
+ return src->parse[ofs];
+}
+
+/* Returns the token N tokens past the current parse position in SRC.
+ N may be negative or past the end; lex_source_ofs__ handles both
+ (a negative absolute offset yields a synthetic T_ENDCMD token). */
+static const struct lex_token *
+lex_source_next__ (const struct lex_source *src, int n)
+{
+ return lex_source_ofs__ (src, n + src->parse_ofs);
+}
/* Returns the "struct token" of the token N after the current one in LEXER.
return lex_get_location (lexer, ofs0 - ofs, ofs1 - ofs);
}
+/* Returns the source point (1-based line and column) where the token at
+ offset OFS in LEXER's current source begins, or a zeroed msg_point if
+ there is no source. */
+struct msg_point
+lex_ofs_start_point (const struct lexer *lexer, int ofs)
+{
+ const struct lex_source *src = lex_source__ (lexer);
+ return (src
+ ? lex_token_start_point (src, lex_source_ofs__ (src, ofs))
+ : (struct msg_point) { 0, 0 });
+}
+
+struct msg_point lex_ofs_end_point (const struct lexer *, int ofs);
+
/* Returns the text of the syntax in tokens N0 ahead of the current one,
through N1 ahead of the current one, inclusive. (For example, if N0 and N1
are both zero, this requests the syntax for the current token.) The caller
return n > 0;
}
+/* Returns the 1-based line number of the source text at the byte OFFSET in
+ SRC. */
static int
-count_newlines (char *s, size_t length)
+lex_source_ofs_to_line_number (const struct lex_source *src, size_t offset)
{
- int n_newlines = 0;
- char *newline;
-
- while ((newline = memchr (s, '\n', length)) != NULL)
+ /* Binary search in src->lines[], which records the byte offset of the
+ start of each line; find the line whose [start, next start) range
+ contains OFFSET. Offsets at or past the last recorded line start
+ fall on the last line. */
+ size_t lo = 0;
+ size_t hi = src->n_lines;
+ for (;;)
{
- n_newlines++;
- length -= (newline + 1) - s;
- s = newline + 1;
+ size_t mid = (lo + hi) / 2;
+ if (mid + 1 >= src->n_lines)
+ return src->n_lines;
+ else if (offset >= src->lines[mid + 1])
+ lo = mid;
+ else if (offset < src->lines[mid])
+ hi = mid;
+ else
+ return mid + 1;
}
+}
- return n_newlines;
+/* Returns the 1-based column number of the source text at the byte OFFSET in
+ SRC. */
+static int
+lex_source_ofs_to_column_number (const struct lex_source *src, size_t offset)
+{
+ /* Find the start of OFFSET's line: one byte past the last '\n' before
+ OFFSET, or the start of the buffer if there is none. */
+ const char *newline = memrchr (src->buffer, '\n', offset);
+ size_t line_ofs = newline ? newline - src->buffer + 1 : 0;
+ /* Count display columns, not bytes, so multibyte UTF-8 sequences and
+ wide characters are measured correctly. */
+ return utf8_count_columns (&src->buffer[line_ofs], offset - line_ofs) + 1;
+}
+
+/* Returns the msg_point (1-based line and column) for the byte OFFSET in
+ SRC. */
+static struct msg_point
+lex_source_ofs_to_point__ (const struct lex_source *src, size_t offset)
+{
+ return (struct msg_point) {
+ .line = lex_source_ofs_to_line_number (src, offset),
+ .column = lex_source_ofs_to_column_number (src, offset),
+ };
}
+/* Returns the 1-based line number on which TOKEN begins in SRC. */
static int
-lex_token_get_last_line_number (const struct lex_source *src,
- const struct lex_token *token)
+lex_token_get_first_line_number (const struct lex_source *src,
+ const struct lex_token *token)
{
- size_t end = token->token_pos + token->token_len
- return lex_source_ofs_to_line_number (src,
- if (token->first_line == 0)
- return 0;
- else
- {
- char *token_str = &src->buffer[token->token_pos];
- return token->first_line + count_newlines (token_str, token->token_len) + 1;
- }
+ return lex_source_ofs_to_line_number (src, token->token_pos);
}
+/* Returns one past the 1-based line number of TOKEN's last byte in SRC.
+ The "+ 1" preserves the historical convention of this function; callers
+ that want the inclusive last line subtract 1 (see lex_token_location's
+ former "last_line ... - 1"). */
static int
-lex_token_get_column__ (const struct lex_source *src, size_t offset)
+lex_token_get_last_line_number (const struct lex_source *src,
+ const struct lex_token *token)
{
- const char *newline = memrchr (src->buffer, '\n', offset);
- size_t line_ofs = newline ? newline - src->buffer + 1 : 0;
- return utf8_count_columns (&src->buffer[line_ofs], offset - line_ofs) + 1;
+ return lex_source_ofs_to_line_number (src, lex_token_end (token)) + 1;
}
+/* Returns the 1-based column number at which TOKEN begins in SRC. */
static int
lex_token_get_first_column (const struct lex_source *src,
const struct lex_token *token)
{
- return lex_token_get_column__ (src, token->token_pos);
+ return lex_source_ofs_to_column_number (src, token->token_pos);
+}
+
+/* Returns the msg_point of TOKEN's first byte in SRC. */
+static struct msg_point
+lex_token_start_point (const struct lex_source *src,
+ const struct lex_token *token)
+{
+ return lex_source_ofs_to_point__ (src, token->token_pos);
+}
+
+/* Returns the msg_point of TOKEN's last byte in SRC (see lex_token_end for
+ the zero-length-token case). */
+static struct msg_point
+lex_token_end_point (const struct lex_source *src,
+ const struct lex_token *token)
+{
+ return lex_source_ofs_to_point__ (src, lex_token_end (token));
+}
+/* Returns the 1-based column number of the position just past TOKEN's last
+ byte (an exclusive column); callers wanting the inclusive last column
+ subtract 1, as lex_token_location formerly did. */
static int
lex_token_get_last_column (const struct lex_source *src,
const struct lex_token *token)
{
- return lex_token_get_column__ (src, token->token_pos + token->token_len);
+ return lex_source_ofs_to_column_number (
+ src, token->token_pos + token->token_len);
}
static struct msg_location
const struct lex_token *t0,
const struct lex_token *t1)
{
- int first_column = lex_token_get_first_column (src, t0);
- int last_line = lex_token_get_last_line_number (src, t1) - 1;
- int last_column = lex_token_get_last_column (src, t1) - 1;
return (struct msg_location) {
.file_name = intern_new_if_nonnull (src->reader->file_name),
- .p[0] = { .line = t0->first_line, .column = first_column },
- .p[1] = { .line = last_line, .column = last_column },
+ .p[0] = lex_token_start_point (src, t0),
+ .p[1] = lex_token_end_point (src, t1),
};
}
lex_get_first_line_number (const struct lexer *lexer, int n)
{
const struct lex_source *src = lex_source__ (lexer);
- return src ? lex_source_next__ (src, n)->first_line : 0;
+ return src ? lex_token_get_first_line_number (src,
+ lex_source_next__ (src, n)) : 0;
}
/* Returns the 1-based line number of the end of the syntax that represents the
struct msg_location *loc = lex_get_lines (lexer, n0, n1);
loc->p[0].column = lex_get_first_column (lexer, n0);
loc->p[1].column = lex_get_last_column (lexer, n1) - 1;
+ loc->src = lex_source__ (lexer);
+ lex_source_ref (loc->src);
return loc;
}
{
src->length = 0;
src->journal_pos = src->seg_pos = 0;
- src->n_newlines = 0;
+ src->n_lines = 0;
src->suppress_next_newline = false;
src->segmenter = segmenter_init (segmenter_get_mode (&src->segmenter),
false);
for (; src != NULL && src->reader->error != LEX_ERROR_TERMINAL;
src = lex_source__ (lexer))
- lex_source_destroy (src);
+ {
+ ll_remove (&src->ll);
+ lex_source_unref (src);
+ }
}
}
\f
token->macro_rep = NULL;
token->ref_cnt = NULL;
token->token_pos = src->seg_pos;
- if (src->reader->line_number > 0)
- token->first_line = src->reader->line_number + src->n_newlines;
- else
- token->first_line = 0;
/* Extract a segment. */
const char *segment;
token->token_len = seg_len;
src->seg_pos += seg_len;
if (seg_type == SEG_NEWLINE)
- src->n_newlines++;
+ {
+ if (src->n_lines >= src->allocated_lines)
+ src->lines = x2nrealloc (src->lines, &src->allocated_lines,
+ sizeof *src->lines);
+ src->lines[src->n_lines++] = src->seg_pos;
+ }
/* Get a token from the segment. */
enum tokenize_result result = token_from_segment (
}
const struct lex_token *t = lex_stage_nth (&src->pp, ofs);
- size_t start = t->token_pos;
- size_t end = t->token_pos + t->token_len;
const struct macro_token mt = {
.token = t->token,
- .syntax = ss_buffer (&src->buffer[start], end - start),
+ .syntax = ss_buffer (&src->buffer[t->token_pos], t->token_len),
};
const struct msg_location loc = lex_token_location (src, t, t);
n_call = macro_call_add (mc, &mt, &loc);
.token = expansion.mts[i].token,
.token_pos = c0->token_pos,
.token_len = (c1->token_pos + c1->token_len) - c0->token_pos,
- .first_line = c0->first_line,
.macro_rep = macro_rep,
.ofs = ofs[i],
.len = len[i],
.token = out,
.token_pos = first->token_pos,
.token_len = (last->token_pos - first->token_pos) + last->token_len,
- .first_line = first->first_line,
/* This works well if all the tokens were not expanded from macros,
or if they came from the same macro expansion. It just gives up
+/* Creates and returns a new lex_source that reads from READER on behalf of
+ LEXER. The returned source starts with one reference (see
+ lex_source_ref/lex_source_unref). */
static struct lex_source *
lex_source_create (struct lexer *lexer, struct lex_reader *reader)
{
+ /* lines[] records the byte offset of the start of each line; line 1
+ always starts at offset 0, so the table begins with one entry. */
+ size_t allocated_lines = 4;
+ size_t *lines = xmalloc (allocated_lines * sizeof *lines);
+ *lines = 0;
+
struct lex_source *src = xmalloc (sizeof *src);
*src = (struct lex_source) {
+ .n_refs = 1,
.reader = reader,
.segmenter = segmenter_init (reader->syntax, false),
.lexer = lexer,
+ .lines = lines,
+ .n_lines = 1,
+ .allocated_lines = allocated_lines,
};
lex_source_push_endcmd__ (src);
return src;
}
-static void
-lex_source_destroy (struct lex_source *src)
+/* Increments SRC's reference count. A null SRC is a no-op. Takes a const
+ pointer so that holders of a const reference can also take ownership;
+ the const is cast away only to bump the count. */
+void
+lex_source_ref (const struct lex_source *src_)
+{
+ struct lex_source *src = CONST_CAST (struct lex_source *, src_);
+ if (src)
+ {
+ assert (src->n_refs > 0);
+ src->n_refs++;
+ }
+}
+
+void
+lex_source_unref (struct lex_source *src)
{
+ if (!src)
+ return;
+
+ assert (src->n_refs > 0);
+ if (--src->n_refs > 0)
+ return;
+
char *file_name = src->reader->file_name;
char *encoding = src->reader->encoding;
if (src->reader->class->destroy != NULL)
free (file_name);
free (encoding);
free (src->buffer);
+ free (src->lines);
lex_stage_uninit (&src->pp);
lex_stage_uninit (&src->merge);
lex_source_clear_parse (src);
free (src->parse);
- ll_remove (&src->ll);
free (src);
}
\f
lex_string_read,
lex_string_close
};
+\f
+/* Returns the contents of 1-based LINE in SRC as a substring that aliases
+ SRC's buffer (no copy; includes the trailing newline when present), or
+ an empty substring if LINE is out of range. The last line extends to
+ the end of the buffered text. */
+struct substring
+lex_source_get_line (const struct lex_source *src, int line)
+{
+ if (line < 1 || line > src->n_lines)
+ return ss_empty ();
+
+ size_t ofs = src->lines[line - 1];
+ size_t end = line >= src->n_lines ? src->length : src->lines[line];
+ return ss_buffer (&src->buffer[ofs], end - ofs);
+}