+ va_list args;
+ int n;
+
+ va_start (args, format);
+
+ n = deque_count (&src->deque) - 1;
+ lex_source_error_valist (src, n, n, format, args);
+ lex_source_pop_front (src);
+
+ va_end (args);
+}
+
+/* Attempts to append an additional token into SRC's deque, reading more from
+   the underlying lex_reader if necessary.  Returns true if successful, false
+   if the deque already represents (a suffix of) the whole lex_reader's
+   contents. */
+static bool
+lex_source_get__ (const struct lex_source *src_)
+{
+ struct lex_source *src = CONST_CAST (struct lex_source *, src_);
+ if (src->eof)
+ return false;
+
+ /* State maintained while scanning tokens. Usually we only need a single
+ state, but scanner_push() can return SCAN_SAVE to indicate that the state
+ needs to be saved and possibly restored later with SCAN_BACK. */
+ struct state
+ {
+ struct segmenter segmenter;
+ enum segment_type last_segment;
+ int newlines; /* Number of newlines encountered so far. */
+ /* Maintained here so we can update lex_source's similar members when we
+ finish. */
+ size_t line_pos;
+ size_t seg_pos;
+ };
+
+ /* Initialize state. */
+ struct state state =
+ {
+ .segmenter = src->segmenter,
+ .newlines = 0,
+ .seg_pos = src->seg_pos,
+ .line_pos = src->line_pos,
+ };
+ struct state saved = state;
+
+ /* Append a new token to SRC and initialize it. */
+ struct lex_token *token = lex_push_token__ (src);
+ struct scanner scanner;
+ scanner_init (&scanner, &token->token);
+ token->line_pos = src->line_pos;
+ token->token_pos = src->seg_pos;
+ if (src->reader->line_number > 0)
+ token->first_line = src->reader->line_number + src->n_newlines;
+ else
+ token->first_line = 0;
+
+ /* Extract segments and pass them through the scanner until we obtain a
+ token. */