+ size_t max_tail = lex_source_max_tail__ (src);
+ if (max_tail > src->tail)
+ {
+ /* Advance the tail, freeing up room at the head. */
+ memmove (src->buffer, src->buffer + (max_tail - src->tail),
+ src->head - max_tail);
+ src->tail = max_tail;
+ }
+ else
+ {
+ /* Buffer is completely full. Expand it. */
+ src->buffer = x2realloc (src->buffer, &src->allocated);
+ }
+ }
+ else
+ {
+ /* There's space available at the head of the buffer. Nothing to do. */
+ }
+}
+
/* Reads more input into SRC's buffer until at least one complete line
   (terminated by a new-line) follows SRC's current segmentation offset
   SEG_POS, or until the reader reports end of input.

   At end of input, appends a new-line (unless the buffered input already
   ends in one) followed by a null byte, as required by the segmenter
   library, and returns. */
static void
lex_source_read__ (struct lex_source *src)
{
  do
    {
      size_t head_ofs;          /* Offset of the head within the buffer. */
      size_t space;             /* Free bytes available at the head. */
      size_t n;                 /* Bytes actually read this iteration. */

      /* Make room at the head of the buffer (may slide data toward the
         tail or grow the allocation). */
      lex_source_expand__ (src);

      head_ofs = src->head - src->tail;
      space = src->allocated - head_ofs;
      n = src->reader->class->read (src->reader, &src->buffer[head_ofs],
                                    space,
                                    segmenter_get_prompt (&src->segmenter));
      assert (n <= space);

      if (n == 0)
        {
          /* End of input.

             Ensure that the input always ends in a new-line followed by a null
             byte, as required by the segmenter library. */

          if (src->head == src->tail
              || src->buffer[src->head - src->tail - 1] != '\n')
            src->buffer[src->head++ - src->tail] = '\n';

          /* Expand again in case appending the new-line consumed the last
             free byte, so there is room for the null terminator too. */
          lex_source_expand__ (src);
          src->buffer[src->head++ - src->tail] = '\0';

          return;
        }

      src->head += n;
    }
  /* Keep reading until a new-line appears somewhere in [seg_pos, head). */
  while (!memchr (&src->buffer[src->seg_pos - src->tail], '\n',
                  src->head - src->seg_pos));
}
+
+static struct lex_source *
+lex_source__ (const struct lexer *lexer)
+{
+ return (ll_is_empty (&lexer->sources) ? NULL
+ : ll_data (ll_head (&lexer->sources), struct lex_source, ll));
+}
+
+static struct substring
+lex_source_get_syntax__ (const struct lex_source *src, int n0, int n1)
+{
+ const struct lex_token *token0 = lex_source_next__ (src, n0);
+ const struct lex_token *token1 = lex_source_next__ (src, MAX (n0, n1));
+ size_t start = token0->token_pos;
+ size_t end = token1->token_pos + token1->token_len;
+
+ return ss_buffer (&src->buffer[start - src->tail], end - start);
+}
+
+static void
+lex_ellipsize__ (struct substring in, char *out, size_t out_size)
+{
+ size_t out_maxlen;
+ size_t out_len;
+ int mblen;
+
+ assert (out_size >= 16);
+ out_maxlen = out_size - (in.length >= out_size ? 3 : 0) - 1;
+ for (out_len = 0; out_len < in.length; out_len += mblen)
+ {
+ if (in.string[out_len] == '\n'
+ || (in.string[out_len] == '\r'
+ && out_len + 1 < in.length
+ && in.string[out_len + 1] == '\n'))
+ break;
+
+ mblen = u8_mblen (CHAR_CAST (const uint8_t *, in.string + out_len),
+ in.length - out_len);
+ if (out_len + mblen > out_maxlen)
+ break;
+ }
+
+ memcpy (out, in.string, out_len);
+ strcpy (&out[out_len], out_len < in.length ? "..." : "");
+}
+
/* Composes and emits a syntax-error message covering tokens N0 through N1,
   relative to SRC's current token.  FORMAT and ARGS, if FORMAT is nonnull,
   are formatted printf-style and appended as an explanation. */
static void
lex_source_error_valist (struct lex_source *src, int n0, int n1,
                         const char *format, va_list args)
{
  const struct lex_token *token;
  struct string s;
  struct msg m;

  ds_init_empty (&s);

  token = lex_source_next__ (src, n0);
  if (token->token.type == T_ENDCMD)
    ds_put_cstr (&s, _("Syntax error at end of command"));
  else
    {
      struct substring syntax = lex_source_get_syntax__ (src, n0, n1);
      if (!ss_is_empty (syntax))
        {
          char syntax_cstr[64];

          /* Quote the offending syntax, shortened with "..." if needed. */
          lex_ellipsize__ (syntax, syntax_cstr, sizeof syntax_cstr);
          ds_put_format (&s, _("Syntax error at `%s'"), syntax_cstr);
        }
      else
        ds_put_cstr (&s, _("Syntax error"));
    }

  if (format)
    {
      ds_put_cstr (&s, ": ");
      ds_put_vformat (&s, format, args);
    }
  ds_put_byte (&s, '.');

  /* Attribute the message to the tokens' location in the source file. */
  m.category = MSG_C_SYNTAX;
  m.severity = MSG_S_ERROR;
  m.file_name = src->reader->file_name;
  m.first_line = lex_source_get_first_line_number (src, n0);
  m.last_line = lex_source_get_last_line_number (src, n1);
  m.first_column = lex_source_get_first_column (src, n0);
  m.last_column = lex_source_get_last_column (src, n1);
  /* ds_steal_cstr() gives up S's buffer, so S needs no ds_destroy();
     presumably msg_emit() takes ownership of the text -- TODO confirm. */
  m.text = ds_steal_cstr (&s);
  msg_emit (&m);
}
+
+static void PRINTF_FORMAT (2, 3)
+lex_get_error (struct lex_source *src, const char *format, ...)
+{
+ va_list args;
+ int n;
+
+ va_start (args, format);
+
+ n = deque_count (&src->deque) - 1;
+ lex_source_error_valist (src, n, n, format, args);
+ lex_source_pop_front (src);
+
+ va_end (args);
+}
+
+static bool
+lex_source_get__ (const struct lex_source *src_)
+{
+ struct lex_source *src = CONST_CAST (struct lex_source *, src_);
+
+ struct state
+ {
+ struct segmenter segmenter;
+ enum segment_type last_segment;
+ int newlines;
+ size_t line_pos;
+ size_t seg_pos;
+ };
+
+ struct state state, saved;
+ enum scan_result result;
+ struct scanner scanner;
+ struct lex_token *token;
+ int n_lines;
+ int i;
+
+ if (src->eof)
+ return false;
+
+ state.segmenter = src->segmenter;
+ state.newlines = 0;
+ state.seg_pos = src->seg_pos;
+ state.line_pos = src->line_pos;
+ saved = state;
+
+ token = lex_push_token__ (src);
+ scanner_init (&scanner, &token->token);
+ token->line_pos = src->line_pos;
+ token->token_pos = src->seg_pos;
+ if (src->reader->line_number > 0)
+ token->first_line = src->reader->line_number + src->n_newlines;
+ else
+ token->first_line = 0;
+
+ for (;;)
+ {
+ enum segment_type type;
+ const char *segment;
+ size_t seg_maxlen;
+ int seg_len;
+
+ segment = &src->buffer[state.seg_pos - src->tail];
+ seg_maxlen = src->head - state.seg_pos;
+ seg_len = segmenter_push (&state.segmenter, segment, seg_maxlen, &type);
+ if (seg_len < 0)
+ {
+ lex_source_read__ (src);
+ continue;
+ }
+
+ state.last_segment = type;
+ state.seg_pos += seg_len;
+ if (type == SEG_NEWLINE)
+ {
+ state.newlines++;
+ state.line_pos = state.seg_pos;
+ }
+
+ result = scanner_push (&scanner, type, ss_buffer (segment, seg_len),
+ &token->token);
+ if (result == SCAN_SAVE)
+ saved = state;
+ else if (result == SCAN_BACK)
+ {
+ state = saved;
+ break;
+ }
+ else if (result == SCAN_DONE)
+ break;
+ }
+
+ n_lines = state.newlines;
+ if (state.last_segment == SEG_END_COMMAND && !src->suppress_next_newline)
+ {
+ n_lines++;
+ src->suppress_next_newline = true;
+ }
+ else if (n_lines > 0 && src->suppress_next_newline)
+ {
+ n_lines--;
+ src->suppress_next_newline = false;
+ }
+ for (i = 0; i < n_lines; i++)
+ {
+ const char *newline;
+ const char *line;
+ size_t line_len;
+ char *syntax;
+
+ line = &src->buffer[src->journal_pos - src->tail];
+ newline = rawmemchr (line, '\n');
+ line_len = newline - line;
+ if (line_len > 0 && line[line_len - 1] == '\r')
+ line_len--;
+
+ syntax = malloc (line_len + 2);
+ memcpy (syntax, line, line_len);
+ syntax[line_len] = '\n';
+ syntax[line_len + 1] = '\0';
+
+ text_item_submit (text_item_create_nocopy (TEXT_ITEM_SYNTAX, syntax));
+
+ src->journal_pos += newline - line + 1;
+ }
+
+ token->token_len = state.seg_pos - src->seg_pos;
+
+ src->segmenter = state.segmenter;
+ src->seg_pos = state.seg_pos;
+ src->line_pos = state.line_pos;
+ src->n_newlines += state.newlines;
+
+ switch (token->token.type)
+ {
+ default: