#include <unictype.h>
#include <unistd.h>
#include <unistr.h>
-#include <uniwidth.h>
#include "language/command.h"
#include "language/lexer/macro.h"
#include "libpspp/cast.h"
#include "libpspp/deque.h"
#include "libpspp/i18n.h"
+#include "libpspp/intern.h"
#include "libpspp/ll.h"
#include "libpspp/message.h"
#include "libpspp/misc.h"
location of the token in terms of the lex_source's buffer.
For a token produced through macro expansion, this is the entire macro
- call.
-
- src->tail <= line_pos <= token_pos <= src->head. */
- size_t token_pos; /* Start of token. */
+ call. */
+ size_t token_pos; /* Offset into src->buffer of token start. */
size_t token_len; /* Length of source for token in bytes. */
- size_t line_pos; /* Start of line containing token_pos. */
int first_line; /* Line number at token_pos. */
/* For a token obtained through macro expansion, this is just this token.
static size_t lex_stage_count (const struct lex_stage *);
static bool lex_stage_is_empty (const struct lex_stage *);
-static struct lex_token *lex_stage_last (struct lex_stage *);
static struct lex_token *lex_stage_first (struct lex_stage *);
static struct lex_token *lex_stage_nth (struct lex_stage *, size_t ofs);
return deque_count (&stage->deque);
}
-/* Returns the last token in STAGE, which must be nonempty. The last token is
- the one accessed with the greatest lookahead. */
-static struct lex_token *
-lex_stage_last (struct lex_stage *stage)
-{
- return stage->tokens[deque_front (&stage->deque, 0)];
-}
-
/* Returns the first token in STAGE, which must be nonempty.
The first token is the one accessed with the least lookahead. */
static struct lex_token *
stage->tokens[deque_push_front (&stage->deque)] = token;
}
+/* Removes and returns the first token from STAGE. */
+static struct lex_token *
+lex_stage_take_first (struct lex_stage *stage)
+{
+ return stage->tokens[deque_pop_back (&stage->deque)];
+}
+
/* Removes the first token from STAGE and uninitializes it. */
static void
lex_stage_pop_first (struct lex_stage *stage)
{
  struct lex_token *first = lex_stage_take_first (stage);
  lex_token_destroy (first);
}
/* Removes the first N tokens from SRC, appending them to DST as the last
lex_stage_shift (struct lex_stage *dst, struct lex_stage *src, size_t n)
{
  /* Move N tokens one at a time, preserving their order. */
  while (n-- > 0)
    lex_stage_push_last (dst, lex_stage_take_first (src));
}
/* A source of tokens, corresponding to a syntax file.
bool eof; /* True if T_STOP was read from 'reader'. */
/* Buffer of UTF-8 bytes. */
- char *buffer;
+ char *buffer; /* Source file contents. */
+ size_t length; /* Number of bytes filled. */
size_t allocated; /* Number of bytes allocated. */
- size_t tail; /* &buffer[0] offset into UTF-8 source. */
- size_t head; /* &buffer[head - tail] offset into source. */
- /* Positions in source file, tail <= pos <= head for each member here. */
+ /* Offsets into 'buffer'. */
size_t journal_pos; /* First byte not yet output to journal. */
size_t seg_pos; /* First byte not yet scanned as token. */
- size_t line_pos; /* First byte of line containing seg_pos. */
int n_newlines; /* Number of new-lines up to seg_pos. */
bool suppress_next_newline;
in 'merge'.
- merge: Tokens that need to pass through scan_merge() to end up in
- 'lookahead'.
+ 'parse'.
+
+ - parse: Tokens available to the client for parsing.
- - lookahead: Tokens available to the client for parsing. */
+ 'pp' and 'merge' store tokens only temporarily until they pass into
+ 'parse'. Tokens then live in 'parse' until the command is fully
+ consumed, at which time they are freed together. */
struct lex_stage pp;
struct lex_stage merge;
- struct lex_stage lookahead;
+ struct lex_token **parse;
+ size_t n_parse, allocated_parse, parse_ofs;
};
static struct lex_source *lex_source_create (struct lexer *,
int n0, int n1);
static const struct lex_token *lex_next__ (const struct lexer *, int n);
static void lex_source_push_endcmd__ (struct lex_source *);
+static void lex_source_push_parse (struct lex_source *, struct lex_token *);
+static void lex_source_clear_parse (struct lex_source *);
-static bool lex_source_get_lookahead (struct lex_source *);
+static bool lex_source_get_parse (struct lex_source *);
static void lex_source_error_valist (struct lex_source *, int n0, int n1,
const char *format, va_list)
PRINTF_FORMAT (4, 0);
if (src == NULL)
return;
- if (!lex_stage_is_empty (&src->lookahead))
- lex_stage_pop_first (&src->lookahead);
+ if (src->parse_ofs < src->n_parse)
+ {
+ if (src->parse[src->parse_ofs]->token.type == T_ENDCMD)
+ lex_source_clear_parse (src);
+ else
+ src->parse_ofs++;
+ }
- while (lex_stage_is_empty (&src->lookahead))
- if (!lex_source_get_lookahead (src))
+ while (src->parse_ofs == src->n_parse)
+ if (!lex_source_get_parse (src))
{
lex_source_destroy (src);
src = lex_source__ (lexer);
return;
}
}
+
/* Advances LEXER by N tokens. */
void
lex_get_n (struct lexer *lexer, size_t n)
{
  for (size_t i = 0; i < n; i++)
    lex_get (lexer);
}
\f
/* Issuing errors. */
ds_put_cstr (&s, ": ");
ds_put_vformat (&s, format, args);
}
- ds_put_byte (&s, '.');
+ if (ds_last (&s) != '.')
+ ds_put_byte (&s, '.');
msg (SE, "%s", ds_cstr (&s));
ds_destroy (&s);
}
lex_source_next__ (const struct lex_source *src_, int n)
{
struct lex_source *src = CONST_CAST (struct lex_source *, src_);
- while (lex_stage_count (&src->lookahead) <= n)
+
+ if (n < 0)
{
- if (!lex_stage_is_empty (&src->lookahead))
+ if (-n <= src->parse_ofs)
+ return src->parse[src->parse_ofs - (-n)];
+ else
{
- const struct lex_token *t = lex_stage_last (&src->lookahead);
+ static const struct lex_token endcmd_token
+ = { .token = { .type = T_ENDCMD } };
+ return &endcmd_token;
+ }
+ }
+
+ while (src->n_parse - src->parse_ofs <= n)
+ {
+ if (src->n_parse > 0)
+ {
+ const struct lex_token *t = src->parse[src->n_parse - 1];
if (t->token.type == T_STOP || t->token.type == T_ENDCMD)
return t;
}
- lex_source_get_lookahead (src);
+ lex_source_get_parse (src);
}
- return lex_stage_nth (&src->lookahead, n);
+ return src->parse[src->parse_ofs + n];
}
/* Returns the "struct token" of the token N after the current one in LEXER.
}
}
-/* If LEXER is positioned at the sequence of tokens that may be parsed from S,
- skips it and returns true. Otherwise, returns false.
-
- S may consist of an arbitrary sequence of tokens, e.g. "KRUSKAL-WALLIS",
- "2SLS", or "END INPUT PROGRAM". Identifiers may be abbreviated to their
- first three letters. */
-bool
-lex_match_phrase (struct lexer *lexer, const char *s)
+static size_t
+lex_at_phrase__ (struct lexer *lexer, const char *s)
{
struct string_lexer slex;
struct token token;
- int i;
- i = 0;
+ size_t i = 0;
string_lexer_init (&slex, s, strlen (s), SEG_MODE_INTERACTIVE, true);
while (string_lexer_next (&slex, &token))
{
bool match = lex_tokens_match (lex_next (lexer, i++), &token);
token_uninit (&token);
if (!match)
- return false;
+ return 0;
}
+ return i;
+}
/* If LEXER is positioned at the sequence of tokens that may be parsed from S,
   returns true.  Otherwise, returns false.  Does not advance LEXER.

   S may consist of an arbitrary sequence of tokens, e.g. "KRUSKAL-WALLIS",
   "2SLS", or "END INPUT PROGRAM".  Identifiers may be abbreviated to their
   first three letters. */
bool
lex_at_phrase (struct lexer *lexer, const char *s)
{
  return lex_at_phrase__ (lexer, s) > 0;
}
+
/* If LEXER is positioned at the sequence of tokens that may be parsed from S,
   skips it and returns true.  Otherwise, returns false.

   S may consist of an arbitrary sequence of tokens, e.g. "KRUSKAL-WALLIS",
   "2SLS", or "END INPUT PROGRAM".  Identifiers may be abbreviated to their
   first three letters. */
bool
lex_match_phrase (struct lexer *lexer, const char *s)
{
  size_t n = lex_at_phrase__ (lexer, s);
  if (n == 0)
    return false;

  lex_get_n (lexer, n);
  return true;
}
static int
return 0;
else
{
- char *token_str = &src->buffer[token->token_pos - src->tail];
+ char *token_str = &src->buffer[token->token_pos];
return token->first_line + count_newlines (token_str, token->token_len) + 1;
}
}
static int
-count_columns (const char *s_, size_t length)
+lex_token_get_column__ (const struct lex_source *src, size_t offset)
{
- const uint8_t *s = CHAR_CAST (const uint8_t *, s_);
- int columns;
- size_t ofs;
- int mblen;
-
- columns = 0;
- for (ofs = 0; ofs < length; ofs += mblen)
- {
- ucs4_t uc;
-
- mblen = u8_mbtouc (&uc, s + ofs, length - ofs);
- if (uc != '\t')
- {
- int width = uc_width (uc, "UTF-8");
- if (width > 0)
- columns += width;
- }
- else
- columns = ROUND_UP (columns + 1, 8);
- }
-
- return columns + 1;
+ const char *newline = memrchr (src->buffer, '\n', offset);
+ size_t line_ofs = newline ? newline - src->buffer + 1 : 0;
+ return utf8_count_columns (&src->buffer[line_ofs], offset - line_ofs) + 1;
}
static int
lex_token_get_first_column (const struct lex_source *src,
const struct lex_token *token)
{
- return count_columns (&src->buffer[token->line_pos - src->tail],
- token->token_pos - token->line_pos);
+ return lex_token_get_column__ (src, token->token_pos);
}
/* Returns the 1-based column number just past the last character of TOKEN. */
static int
lex_token_get_last_column (const struct lex_source *src,
                           const struct lex_token *token)
{
  return lex_token_get_column__ (src, token->token_pos + token->token_len);
}
static struct msg_location
const struct lex_token *t1)
{
return (struct msg_location) {
- .file_name = src->reader->file_name,
+ .file_name = intern_new_if_nonnull (src->reader->file_name),
.first_line = t0->first_line,
.last_line = lex_token_get_last_line_number (src, t1),
.first_column = lex_token_get_first_column (src, t0),
{
struct msg_location *loc = xmalloc (sizeof *loc);
*loc = (struct msg_location) {
- .file_name = xstrdup_if_nonnull (lex_get_file_name (lexer)),
+ .file_name = intern_new_if_nonnull (lex_get_file_name (lexer)),
.first_line = lex_get_first_line_number (lexer, n0),
.last_line = lex_get_last_line_number (lexer, n1),
};
struct lex_source *src = lex_source__ (lexer);
if (src != NULL && src->reader->error == LEX_ERROR_TERMINAL)
{
- src->head = src->tail = 0;
- src->journal_pos = src->seg_pos = src->line_pos = 0;
+ src->length = 0;
+ src->journal_pos = src->seg_pos = 0;
src->n_newlines = 0;
src->suppress_next_newline = false;
src->segmenter = segmenter_init (segmenter_get_mode (&src->segmenter),
false);
lex_stage_clear (&src->pp);
lex_stage_clear (&src->merge);
- lex_stage_clear (&src->lookahead);
+ lex_source_clear_parse (src);
lex_source_push_endcmd__ (src);
}
}
{
lex_stage_clear (&src->pp);
lex_stage_clear (&src->merge);
- lex_stage_clear (&src->lookahead);
+ lex_source_clear_parse (src);
for (; src != NULL && src->reader->error != LEX_ERROR_TERMINAL;
src = lex_source__ (lexer))
}
}
\f
-static size_t
-lex_source_max_tail__ (const struct lex_source *src_)
-{
- struct lex_source *src = CONST_CAST (struct lex_source *, src_);
-
- assert (src->seg_pos >= src->line_pos);
- size_t max_tail = MIN (src->journal_pos, src->line_pos);
-
- /* Use the oldest token also. */
- struct lex_stage *stages[] = { &src->lookahead, &src->merge, &src->pp };
- for (size_t i = 0; i < sizeof stages / sizeof *stages; i++)
- if (!lex_stage_is_empty (stages[i]))
- {
- struct lex_token *first = lex_stage_first (stages[i]);
- assert (first->token_pos >= first->line_pos);
- return MIN (max_tail, first->line_pos);
- }
-
- return max_tail;
-}
-
static void
lex_source_expand__ (struct lex_source *src)
{
- if (src->head - src->tail >= src->allocated)
- {
- size_t max_tail = lex_source_max_tail__ (src);
- if (max_tail > src->tail)
- {
- /* Advance the tail, freeing up room at the head. */
- memmove (src->buffer, src->buffer + (max_tail - src->tail),
- src->head - max_tail);
- src->tail = max_tail;
- }
- else
- {
- /* Buffer is completely full. Expand it. */
- src->buffer = x2realloc (src->buffer, &src->allocated);
- }
- }
- else
- {
- /* There's space available at the head of the buffer. Nothing to do. */
- }
+ if (src->length >= src->allocated)
+ src->buffer = x2realloc (src->buffer, &src->allocated);
}
static void
{
lex_source_expand__ (src);
- size_t head_ofs = src->head - src->tail;
- size_t space = src->allocated - head_ofs;
+ size_t space = src->allocated - src->length;
enum prompt_style prompt = segmenter_get_prompt (&src->segmenter);
- size_t n = src->reader->class->read (src->reader, &src->buffer[head_ofs],
+ size_t n = src->reader->class->read (src->reader,
+ &src->buffer[src->length],
space, prompt);
assert (n <= space);
return;
}
- src->head += n;
+ src->length += n;
}
- while (!memchr (&src->buffer[src->seg_pos - src->tail], '\n',
- src->head - src->seg_pos));
+ while (!memchr (&src->buffer[src->seg_pos], '\n',
+ src->length - src->seg_pos));
}
static struct lex_source *
{
size_t start = first->token_pos;
size_t end = last->token_pos + last->token_len;
- ds_put_substring (&s, ss_buffer (&src->buffer[start - src->tail],
- end - start));
+ ds_put_substring (&s, ss_buffer (&src->buffer[start], end - start));
}
else
{
size_t start = token0->token_pos;
size_t end = token1->token_pos + token1->token_len;
- return ss_buffer (&src->buffer[start - src->tail], end - start);
+ return ss_buffer (&src->buffer[start], end - start);
}
static void
lex_get_error (struct lex_source *src, const struct lex_token *token)
{
char syntax[64];
- str_ellipsize (ss_buffer (&src->buffer[token->token_pos - src->tail],
- token->token_len),
+ str_ellipsize (ss_buffer (&src->buffer[token->token_pos], token->token_len),
syntax, sizeof syntax);
struct string s = DS_EMPTY_INITIALIZER;
token->token = (struct token) { .type = T_STOP };
token->macro_rep = NULL;
token->ref_cnt = NULL;
- token->line_pos = src->line_pos;
token->token_pos = src->seg_pos;
if (src->reader->line_number > 0)
token->first_line = src->reader->line_number + src->n_newlines;
int seg_len;
for (;;)
{
- segment = &src->buffer[src->seg_pos - src->tail];
+ segment = &src->buffer[src->seg_pos];
seg_len = segmenter_push (&src->segmenter, segment,
- src->head - src->seg_pos,
+ src->length - src->seg_pos,
src->reader->eof, &seg_type);
if (seg_len >= 0)
break;
token->token_len = seg_len;
src->seg_pos += seg_len;
if (seg_type == SEG_NEWLINE)
- {
- src->line_pos = src->seg_pos;
- src->n_newlines++;
- }
+ src->n_newlines++;
/* Get a token from the segment. */
enum tokenize_result result = token_from_segment (
for (int i = 0; i < n_lines; i++)
{
/* Beginning of line. */
- const char *line = &src->buffer[src->journal_pos - src->tail];
+ const char *line = &src->buffer[src->journal_pos];
/* Calculate line length, including \n or \r\n end-of-line if present.
converted to tokens (which is only through line_pos). That's because,
if we're emitting the line due to SEG_END_COMMAND, we want to take the
whole line through the newline, not just through the '.'. */
- size_t max_len = src->head - src->journal_pos;
+ size_t max_len = src->length - src->journal_pos;
const char *newline = memchr (line, '\n', max_len);
size_t line_len = newline ? newline - line + 1 : max_len;
size_t end = t->token_pos + t->token_len;
const struct macro_token mt = {
.token = t->token,
- .syntax = ss_buffer (&src->buffer[start - src->tail], end - start),
+ .syntax = ss_buffer (&src->buffer[start], end - start),
};
const struct msg_location loc = lex_token_location (src, t, t);
n_call = macro_call_add (mc, &mt, &loc);
_("Macro Expansion")));
/* Append the macro expansion tokens to the lookahead. */
- char *macro_rep = ds_steal_cstr (&s);
- size_t *ref_cnt = xmalloc (sizeof *ref_cnt);
- *ref_cnt = expansion.n;
- for (size_t i = 0; i < expansion.n; i++)
+ if (expansion.n > 0)
{
- struct lex_token *token = xmalloc (sizeof *token);
- *token = (struct lex_token) {
- .token = expansion.mts[i].token,
- .token_pos = c0->token_pos,
- .token_len = (c1->token_pos + c1->token_len) - c0->token_pos,
- .line_pos = c0->line_pos,
- .first_line = c0->first_line,
- .macro_rep = macro_rep,
- .ofs = ofs[i],
- .len = len[i],
- .ref_cnt = ref_cnt,
- };
- lex_stage_push_last (&src->merge, token);
+ char *macro_rep = ds_steal_cstr (&s);
+ size_t *ref_cnt = xmalloc (sizeof *ref_cnt);
+ *ref_cnt = expansion.n;
+ for (size_t i = 0; i < expansion.n; i++)
+ {
+ struct lex_token *token = xmalloc (sizeof *token);
+ *token = (struct lex_token) {
+ .token = expansion.mts[i].token,
+ .token_pos = c0->token_pos,
+ .token_len = (c1->token_pos + c1->token_len) - c0->token_pos,
+ .first_line = c0->first_line,
+ .macro_rep = macro_rep,
+ .ofs = ofs[i],
+ .len = len[i],
+ .ref_cnt = ref_cnt,
+ };
+ lex_stage_push_last (&src->merge, token);
- ss_dealloc (&expansion.mts[i].syntax);
+ ss_dealloc (&expansion.mts[i].syntax);
+ }
}
+ else
+ ds_destroy (&s);
free (expansion.mts);
free (ofs);
free (len);
Returns true if successful, false on failure. In the latter case, SRC is
exhausted and 'src->eof' is now true. */
static bool
-lex_source_get_lookahead (struct lex_source *src)
+lex_source_get_parse (struct lex_source *src)
{
struct merger m = MERGER_INIT;
struct token out;
&out);
if (!retval)
{
- lex_stage_shift (&src->lookahead, &src->merge, 1);
+ lex_source_push_parse (src, lex_stage_take_first (&src->merge));
return true;
}
else if (retval > 0)
.token = out,
.token_pos = first->token_pos,
.token_len = (last->token_pos - first->token_pos) + last->token_len,
- .line_pos = first->line_pos,
.first_line = first->first_line,
/* This works well if all the tokens were not expanded from macros,
.macro_rep = macro ? first->macro_rep : NULL,
.ofs = macro ? first->ofs : 0,
.len = macro ? (last->ofs - first->ofs) + last->len : 0,
- .ref_cnt = first->ref_cnt,
+ .ref_cnt = macro ? first->ref_cnt : NULL,
};
if (t->ref_cnt)
++*t->ref_cnt;
- lex_stage_push_last (&src->lookahead, t);
+ lex_source_push_parse (src, t);
for (int i = 0; i < retval; i++)
lex_stage_pop_first (&src->merge);
static void
lex_source_push_endcmd__ (struct lex_source *src)
{
- assert (lex_stage_is_empty (&src->lookahead));
+ assert (src->n_parse == 0);
+
struct lex_token *token = xmalloc (sizeof *token);
*token = (struct lex_token) { .token = { .type = T_ENDCMD } };
- lex_stage_push_last (&src->lookahead, token);
+ lex_source_push_parse (src, token);
+}
+
+static void
+lex_source_push_parse (struct lex_source *src, struct lex_token *token)
+{
+ if (src->n_parse >= src->allocated_parse)
+ src->parse = x2nrealloc (src->parse, &src->allocated_parse,
+ sizeof *src->parse);
+ src->parse[src->n_parse++] = token;
+}
+
+static void
+lex_source_clear_parse (struct lex_source *src)
+{
+ for (size_t i = 0; i < src->n_parse; i++)
+ lex_token_destroy (src->parse[i]);
+ src->n_parse = src->parse_ofs = 0;
}
static struct lex_source *
free (src->buffer);
lex_stage_uninit (&src->pp);
lex_stage_uninit (&src->merge);
- lex_stage_uninit (&src->lookahead);
+ lex_source_clear_parse (src);
+ free (src->parse);
ll_remove (&src->ll);
free (src);
}