use std::{
borrow::{Borrow, Cow},
- collections::{HashMap, VecDeque},
+ collections::VecDeque,
fmt::{Debug, Formatter, Result as FmtResult, Write},
fs,
io::Result as IoResult,
iter::once,
- mem::{self, take},
+ mem::take,
ops::{Bound, Range, RangeBounds, RangeInclusive},
path::Path,
+ ptr,
sync::Arc,
};
use unicode_width::{UnicodeWidthChar, UnicodeWidthStr};
use crate::{
- lex::scan::Incomplete,
macros::{macro_tokens_to_syntax, MacroSet, ParseStatus, Parser},
message::{Category, Diagnostic, Location, Point, Severity},
settings::Settings,
};

use super::{
scan::{MergeResult, ScanError, ScanToken},
- segment::{Segment, Segmenter, Syntax},
+ segment::{Segmenter, Syntax},
token::Token,
};
}
}
-/// # Token pipeline
-///
-/// Tokens pass through a pipeline with the following stages. Each token
-/// eventually made available to the parser passes through of these stages.
-/// The stages are named after the processing that happens in each one.
-///
-/// Initially, tokens come from the segmenter and scanner to `pp`:
-///
-/// - `pp`: Tokens that need to pass through the macro preprocessor to end up
-/// in `merge`.
-///
-/// - `merge`: Tokens that need to pass through
-/// [`super::scan::ScanToken::merge`] to end up in `parse`.
-///
-/// - `parse`: Tokens available to the client for parsing.
-///
-/// `pp` and `merge` store tokens only temporarily until they pass into `parse`.
-/// Tokens then live in `parse` until the command is fully consumed, at which
-/// time they are freed together.
-pub struct Source {
- /// Error-handling mode.
- error_handling: ErrorHandling,
-
- file: SourceFile,
-
- /// 0-based line number of the first line not yet written to the journal.
- journal_line: usize,
-
- /// Byte offset of first character not yet scanned as token.
- seg_pos: usize,
-
- /// Tokens that need to pass through the macro preprocessor to end up in
- /// `merge`.
- pp: VecDeque<LexToken>,
-
- /// Tokens that need to pass through [`super::scan::ScanToken::merge`] to
- /// end up in `parse`.
- merge: VecDeque<LexToken>,
-
- eof: bool,
-
- /// Tokens available to the client for parsing.
- parse: Vec<LexToken>,
-
- /// Offset in `parse` of the current token.
- parse_ofs: usize,
-
- segmenter: Segmenter,
-
- suppress_next_newline: bool,
-}
-
-impl Default for Source {
- fn default() -> Self {
- Self {
- error_handling: ErrorHandling::default(),
- file: SourceFile::default(),
- journal_line: 0,
- seg_pos: 0,
- pp: VecDeque::new(),
- merge: VecDeque::new(),
- eof: false,
- parse: Vec::new(),
- parse_ofs: 0,
- segmenter: Segmenter::new(Syntax::default(), false),
- suppress_next_newline: false,
- }
- }
-}
-
trait StripNewline {
fn strip_newline(&self) -> &str;
}
}
}
-impl Source {
- pub fn new(file: SourceFile, syntax: Syntax, error_handling: ErrorHandling) -> Self {
- Self {
- file,
- error_handling,
- segmenter: Segmenter::new(syntax, false),
- ..Source::default()
- }
- }
-
- pub fn new_default(file: SourceFile) -> Self {
- Self::new(file, Syntax::default(), ErrorHandling::default())
- }
-
- fn get_pp(&mut self, context: &Context) -> bool {
- let Some((seg_len, seg_type)) = self
- .segmenter
- .push(&self.file.buffer[self.seg_pos..], true)
- .unwrap()
- else {
- return false;
- };
-
- let pos = self.seg_pos..self.seg_pos + seg_len;
- self.seg_pos += seg_len;
-
- let scan_token = ScanToken::from_segment(&self.file.buffer[pos.clone()], seg_type);
-
- let n_lines = match (seg_type, self.suppress_next_newline) {
- (Segment::EndCommand, false) => {
- self.suppress_next_newline = true;
- 1
- }
- (Segment::Newline, true) => {
- self.suppress_next_newline = false;
- 0
- }
- (Segment::Newline, false) => 1,
- _ => 0,
- };
- for line_num in self.journal_line..self.journal_line + n_lines {
- let _line = &self.file.get_line(line_num as i32).strip_newline();
- // XXX submit the line as syntax
- }
- self.journal_line += n_lines;
-
- match scan_token {
- None => false,
- Some(ScanToken::Token(token)) => {
- self.pp.push_back(LexToken {
- token,
- pos,
- macro_rep: None,
- });
- true
- }
- Some(ScanToken::Error(error)) => {
- (context.error)(
- Location {
- file_name: self.file.file_name.clone(),
- span: Some(
- self.file.offset_to_point(pos.start)
- ..self.file.offset_to_point(pos.end),
- ),
- omit_underlines: false,
- },
- error.into(),
- );
- false
- }
- }
- }
-
- fn get_merge(&mut self, context: &Context) -> bool {
- if self.pp.is_empty() && !self.get_pp(context) {
- return false;
- }
-
- if !Settings::global().macros.expand {
- self.merge.append(&mut self.pp);
- return true;
- }
-
- // Now pass tokens one-by-one to the macro expander.
- let Some(mut parser) = Parser::new(context.macros, &self.pp[0].token) else {
- // Common case where there is no macro to expand.
- self.merge.push_back(self.pp.pop_front().unwrap());
- return true;
- };
- for ofs in 1.. {
- if self.pp.len() <= ofs && !self.get_pp(context) {
- // This should not be reachable because we always get a
- // `Token::EndCommand` at the end of an input file, which should
- // always terminate macro expansion.
- unreachable!();
- }
- let token = &self.pp[ofs];
- if parser.push(&token.token, &self.file.buffer[token.pos.clone()], &|e| {
- println!("{e:?}")
- }) == ParseStatus::Complete
- {
- break;
- }
- }
- let call = parser.finish();
- if call.len() == 0 {
- // False alarm: no macro to expand after all.
- self.merge.push_back(self.pp.pop_front().unwrap());
- return true;
- }
-
- // Expand the tokens.
- let c0 = &self.pp[0];
- let c1 = &self.pp[call.len() - 1];
- let mut expansion = Vec::new();
- call.expand(
- self.segmenter.syntax(),
- self.file.token_location(c0..=c1),
- &mut expansion,
- |e| println!("{e:?}"),
- );
- let retval = !expansion.is_empty();
-
- if Settings::global().macros.print_expansions {
- // XXX
- }
-
- // Append the macro expansion tokens to the lookahead.
- let mut macro_rep = String::new();
- let mut pos = Vec::with_capacity(expansion.len());
- for [prefix, token] in macro_tokens_to_syntax(expansion.as_slice()) {
- macro_rep.push_str(prefix);
- let len = macro_rep.len();
- pos.push(len..=len + token.len() - 1);
- }
- let macro_rep = Arc::new(macro_rep);
- for (index, token) in expansion.into_iter().enumerate() {
- let lt = LexToken {
- token: token.token,
- pos: c0.pos.start..c1.pos.end,
- macro_rep: Some(MacroRepresentation {
- expansion: Arc::clone(¯o_rep),
- pos: pos[index].clone(),
- }),
- };
- self.merge.push_back(lt);
- }
- self.pp.drain(..call.len());
- retval
- }
-
- fn get_parse(&mut self, context: &Context) -> bool {
- loop {
- match ScanToken::merge(|index| {
- if let Some(token) = self.merge.get(index) {
- Ok(Some(&token.token))
- } else if self.eof {
- Ok(None)
- } else {
- Err(Incomplete)
- }
- }) {
- Ok(Some(MergeResult::Copy)) => {
- self.parse.push(self.merge.pop_front().unwrap());
- return true;
- }
- Ok(Some(MergeResult::Expand { n, token })) => {
- let first = &self.merge[0];
- let last = &self.merge[n - 1];
- self.parse.push(LexToken {
- token,
- pos: first.pos.start..last.pos.end,
- macro_rep: match (&first.macro_rep, &last.macro_rep) {
- (Some(a), Some(b)) if Arc::ptr_eq(&a.expansion, &b.expansion) => {
- Some(MacroRepresentation {
- expansion: a.expansion.clone(),
- pos: *a.pos.start()..=*b.pos.end(),
- })
- }
- _ => None,
- },
- });
- self.merge.drain(..n);
- return true;
- }
- Ok(None) => return false,
- Err(Incomplete) => {
- debug_assert!(!self.eof);
- if !self.get_merge(context) {
- self.eof = true;
- }
- }
- }
- }
- }
-
- fn ofs_location(&self, range: RangeInclusive<usize>) -> Location {
- if *range.start() <= *range.end() && *range.end() < self.parse.len() {
- self.file
- .token_location(&self.parse[*range.start()]..=&self.parse[*range.end()])
- } else {
- Location {
- file_name: self.file.file_name.clone(),
- span: None,
- omit_underlines: false,
- }
- }
- }
-
- fn token(&self) -> &Token {
- &self.parse[self.parse_ofs].token
- }
-
- fn next(&mut self, offset: isize, context: &Context) -> &Token {
- let Some(index) = offset.checked_add(self.parse_ofs as isize) else {
- return &Token::EndCommand;
- };
- let Ok(index) = usize::try_from(index) else {
- return &Token::EndCommand;
- };
-
- while index >= self.parse.len() {
- if let Some(token) = self.parse.last() {
- match token.token {
- Token::EndCommand => return &Token::EndCommand,
- _ => (),
- }
- }
- self.get_parse(context);
- }
- &self.parse[index].token
- }
-
- /// If the tokens in `ofs` contains a macro call, this returns the raw
- /// syntax for the macro call (not for the expansion) and for any other
- /// tokens included in that range. The syntax is encoded in UTF-8 and in
- /// the original form supplied to the lexer so that, for example, it may
- /// include comments, spaces, and new-lines if it spans multiple tokens.
- ///
- /// Returns `None` if the token range doesn't include a macro call.
- fn get_macro_call(&self, ofs: RangeInclusive<usize>) -> Option<&str> {
- if self
- .parse
- .get(ofs.clone())
- .unwrap_or_default()
- .iter()
- .all(|token| token.macro_rep.is_none())
- {
- return None;
- }
-
- let token0 = &self.parse[*ofs.start()];
- let token1 = &self.parse[*ofs.end()];
- Some(&self.file.buffer[token0.pos.start..token1.pos.end])
- }
-
- fn is_empty(&self) -> bool {
- self.file.buffer.is_empty()
- }
-
- fn diagnostic(
- &self,
- severity: Severity,
- ofs: RangeInclusive<usize>,
- text: String,
- ) -> Diagnostic {
- let mut s = String::with_capacity(text.len() + 16);
- if self.is_empty() {
- s.push_str("At end of input: ");
- } else if let Some(call) = self.get_macro_call(ofs.clone()) {
- write!(&mut s, "In syntax expanded from `{}`: ", ellipsize(call)).unwrap();
- }
-
- if !text.is_empty() {
- s.push_str(&text);
- } else {
- s.push_str("Syntax error.");
- }
-
- if !s.ends_with('.') {
- s.push('.');
- }
-
- let location = self.ofs_location(ofs);
- let mut source = Vec::new();
- if let Some(Range {
- start: Point { line: l0, .. },
- end: Point { line: l1, .. },
- }) = location.span
- {
- let lines = if l1 - l0 > 3 {
- vec![l0, l0 + 1, l1]
- } else {
- (l0..=l1).collect()
- };
- for line_number in lines {
- source.push((line_number, self.file.get_line(line_number).to_string()));
- }
- }
-
- Diagnostic {
- category: Category::Syntax,
- severity,
- location,
- source,
- stack: Vec::new(),
- command_name: None, // XXX
- text: s,
- }
- }
-}
-
fn ellipsize(s: &str) -> Cow<str> {
if s.width() > 64 {
let mut out = String::new();
}
/// A token in a [`Source`].
-struct LexToken {
+struct LexToken<'a> {
/// The regular token.
token: Token,
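+
+ /// The [`SourceFile`] that the token was read from.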
+ file: &'a SourceFile,
+
/// For a token obtained through the lexer in an ordinary way, this is the
/// location of the token in the [`Source`]'s buffer.
///
pos: Range<usize>,
}
-impl Borrow<Token> for LexToken {
+impl Borrow<Token> for LexToken<'_> {
fn borrow(&self) -> &Token {
&self.token
}
}
-impl LexToken {
- fn representation<'a>(&self, source: &'a SourceFile) -> &'a str {
- &source.buffer[self.pos.clone()]
+impl LexToken<'_> {
+ fn representation(&self) -> &str {
+ &self.file.buffer[self.pos.clone()]
}
}
pos: RangeInclusive<usize>,
}
-pub struct Lexer {
- source: Source,
- stack: Vec<Source>,
- macros: MacroSet,
- error: Box<dyn Fn(Location, Error)>,
-}
-
-struct Context<'a> {
- macros: &'a MacroSet,
- error: &'a Box<dyn Fn(Location, Error)>,
-}
-
-impl Lexer {
- pub fn new(error: Box<dyn Fn(Location, Error)>) -> Self {
- Self {
- source: Source::default(),
- stack: Vec::new(),
- macros: HashMap::new(),
- error,
- }
- }
-
- pub fn get(&mut self) -> &Token {
- if self.source.parse_ofs < self.source.parse.len() {
- if let Token::EndCommand = self.source.token() {
- self.source.parse.clear();
- self.source.parse_ofs = 0;
- } else {
- self.source.parse_ofs += 1;
- }
- }
-
- while self.source.parse_ofs == self.source.parse.len() {
- let context = Context {
- macros: &self.macros,
- error: &self.error,
- };
- if !self.source.get_parse(&context) {
- if !self.pop_stack() {
- return &Token::EndCommand;
- }
- }
- }
- self.source.token()
- }
-
- fn pop_stack(&mut self) -> bool {
- if let Some(new_source) = self.stack.pop() {
- self.source = new_source;
- true
- } else {
- self.source = Source::default();
- self.source.parse.push(LexToken {
- token: Token::EndCommand,
- pos: 0..0,
- macro_rep: None,
- });
- false
- }
- }
-
- /// Inserts `source` so that the next token comes from it. This is only
- /// permitted when the lexer is either empty or at `Token::EndCommand`.
- pub fn include(&mut self, mut source: Source) {
- // XXX what's the right assertion?
- let context = Context {
- macros: &self.macros,
- error: &self.error,
- };
- source.get_parse(&context);
- let old_source = mem::replace(&mut self.source, source);
- self.stack.push(old_source);
- }
-
- /// Inserts `source` so that it will be read after all the other sources.
- pub fn append(&mut self, mut source: Source) {
- let context = Context {
- macros: &self.macros,
- error: &self.error,
- };
- source.get_parse(&context);
- self.stack.insert(0, source);
- }
-
- pub fn token(&self) -> &Token {
- self.source.token()
- }
-
- pub fn next(&mut self, offset: isize) -> &Token {
- let context = Context {
- macros: &self.macros,
- error: &self.error,
- };
- self.source.next(offset, &context)
- }
-
- pub fn error<S>(&self, text: S) -> Diagnostic
- where
- S: ToString,
- {
- self.diagnostic(
- Severity::Error,
- self.source.parse_ofs..=self.source.parse_ofs,
- text,
- )
- }
-
- pub fn diagnostic<S>(
- &self,
- severity: Severity,
- ofs: RangeInclusive<usize>,
- text: S,
- ) -> Diagnostic
- where
- S: ToString,
- {
- self.source.diagnostic(severity, ofs, text.to_string())
- }
-
- pub fn error_handling(&self) -> ErrorHandling {
- self.source.error_handling
- }
-
- /// Discards all lookahead tokens, then discards all input sources
- /// until it encounters one with error mode [ErrorHandling::Terminal] or until it
- /// runs out of input sources.
- pub fn discard_noninteractive(&mut self) {
- while self.source.error_handling != ErrorHandling::Ignore {
- self.source.pp.clear();
- self.source.merge.clear();
- self.source.parse.clear();
- self.source.parse_ofs = 0;
-
- if self.source.error_handling == ErrorHandling::Terminal || !self.pop_stack() {
- return;
- }
- }
- }
-
- /// Advances past any tokens up to [Token::EndCommand] or [Token::End].
- pub fn discard_rest_of_command(&mut self) {
- while !matches!(self.token(), Token::EndCommand) {
- self.get();
- }
- }
-
- pub fn at_end(&self) -> bool {
- match self.source.token() {
- Token::EndCommand => true,
- _ => false,
- }
- }
-
- pub fn match_(&mut self, token: &Token) -> bool {
- if self.token() == token {
- self.get();
- true
- } else {
- false
- }
- }
-}
-
#[derive(ThisError, Clone, Debug, PartialEq, Eq)]
pub enum Error {
/// Error forming tokens from the input.
}
}
*/
-pub struct Tokens {
- file: Arc<SourceFile>,
- tokens: Vec<LexToken>,
+pub struct Tokens<'a> {
+ tokens: Vec<LexToken<'a>>,
}
-impl Debug for Tokens {
+impl Debug for Tokens<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
write!(f, "Tokens {{ ")?;
for (index, token) in self.tokens.iter().enumerate() {
if index > 0 {
write!(f, ", ")?;
}
- write!(f, "{:?}", token.representation(&self.file))?;
+ write!(f, "{:?}", token.representation())?;
}
write!(f, " }}")
}
}
-impl Tokens {
- /// If the tokens in `ofs` contains a macro call, this returns the raw
+impl Tokens<'_> {}
+
+pub struct NewLexer<'a> {
+ backing: &'a Tokens<'a>,
+ tokens: &'a [LexToken<'a>],
+ start: usize,
+}
+
+impl<'a> NewLexer<'a> {
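+ /// Creates a lexer over all of `backing`'s tokens.
+ ///
+ /// A minimal usage sketch; `tokens` is assumed to have been produced
+ /// elsewhere, for example by [`NewSource::read_command`]:
+ ///
+ /// ```ignore
+ /// let lexer = NewLexer::new(&tokens);
+ /// let first: Option<&Token> = lexer.get(0);
+ /// let rest = lexer.sublexer(1..);
+ /// let error = rest.error(0..=0, "Syntax error.");
+ /// ```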
+ pub fn new(backing: &'a Tokens) -> Self {
+ Self {
+ backing,
+ tokens: backing.tokens.as_slice(),
+ start: 0,
+ }
+ }
+
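+ /// Returns the token at `index` within this lexer, if there is one.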
+ pub fn get(&self, index: usize) -> Option<&Token> {
+ self.tokens.get(index).map(|token| &token.token)
+ }
+
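+ /// Constructs an error-severity [`Diagnostic`] for the tokens in `range`,
+ /// with the given message `text`.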
+ pub fn error<S, B>(&self, range: B, text: S) -> Diagnostic
+ where
+ S: ToString,
+ B: RangeBounds<usize>,
+ {
+ self.sublexer(range)
+ .diagnostic(Severity::Error, text.to_string())
+ }
+
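+ /// Returns a new lexer covering the subrange `range` of this lexer's
+ /// tokens.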
+ pub fn sublexer<B>(&self, range: B) -> Self
+ where
+ B: RangeBounds<usize>,
+ {
+ Self {
+ backing: self.backing,
+ start: self.start
+ + match range.start_bound() {
+ Bound::Included(index) => *index,
+ Bound::Excluded(index) => *index + 1,
+ Bound::Unbounded => 0,
+ },
+ tokens: &self.backing.tokens
+ [(range.start_bound().cloned(), range.end_bound().cloned())],
+ }
+ }
+
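+ /// Returns the single [`SourceFile`] that this lexer's tokens came from,
+ /// or `None` if there is no such file (for example, if the lexer is
+ /// empty).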
+ fn file(&self) -> Option<&SourceFile> {
+ if !self.tokens.is_empty() {
+ let first = &self.tokens[0];
+ let last = &self.tokens[self.tokens.len() - 1];
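+ // All of a lexer's tokens normally come from one file; checking the
+ // first and last tokens by pointer is enough to confirm a single buffer.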
+ if ptr::eq(first.file, last.file) {
+ return Some(first.file);
+ }
+ }
+ None
+ }
+
+ /// If the tokens contain a macro call, this returns the raw
/// syntax for the macro call (not for the expansion) and for any other
/// tokens included in that range. The syntax is encoded in UTF-8 and in
/// the original form supplied to the lexer so that, for example, it may
/// include comments, spaces, and new-lines if it spans multiple tokens.
///
/// Returns `None` if the token range doesn't include a macro call.
- fn get_macro_call(&self, ofs: RangeInclusive<usize>) -> Option<&str> {
- if self
- .tokens
- .get(ofs.clone())
- .unwrap_or_default()
- .iter()
- .all(|token| token.macro_rep.is_none())
- {
- return None;
+ fn get_macro_call(&self) -> Option<&str> {
+ if self.tokens.iter().any(|token| token.macro_rep.is_some()) {
+ let token0 = &self.tokens[0];
+ let token1 = &self.tokens[self.tokens.len() - 1];
+ if let Some(file) = self.file() {
+ let start = token0.pos.start;
+ let end = token1.pos.end;
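+ // Only a non-empty byte range can carry macro call syntax.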
+ if start < end {
+ return Some(&file.buffer[start..end]);
+ }
+ }
}
-
- let token0 = &self.tokens[*ofs.start()];
- let token1 = &self.tokens[*ofs.end()];
- Some(&self.file.buffer[token0.pos.start..token1.pos.end])
+ None
}
- fn ofs_location(&self, range: RangeInclusive<usize>) -> Location {
- if *range.start() <= *range.end() && *range.end() < self.tokens.len() {
- self.file
- .token_location(&self.tokens[*range.start()]..=&self.tokens[*range.end()])
+ fn location(&self) -> Location {
+ if let Some(file) = self.file() {
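+ // `file()` returned `Some`, so the lexer has at least one token and
+ // the `unwrap`s below cannot fail.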
+ file.token_location(self.tokens.first().unwrap()..=self.tokens.last().unwrap())
} else {
- Location {
- file_name: self.file.file_name.clone(),
- span: None,
- omit_underlines: false,
- }
+ Location::default()
}
}
- pub fn diagnostic(
- &self,
- severity: Severity,
- ofs: RangeInclusive<usize>,
- text: String,
- ) -> Diagnostic {
+ pub fn diagnostic(&self, severity: Severity, text: String) -> Diagnostic {
let mut s = String::new();
- if let Some(call) = self.get_macro_call(ofs.clone()) {
+ if let Some(call) = self.get_macro_call() {
write!(&mut s, "In syntax expanded from `{}`: ", ellipsize(call)).unwrap();
}
s.push('.');
}
- let location = self.ofs_location(ofs);
+ let location = self.location();
let mut source = Vec::new();
if let Some(Range {
start: Point { line: l0, .. },
end: Point { line: l1, .. },
}) = location.span
{
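+ // `location()` sets a span only when `file()` is `Some`, so this
+ // `unwrap` is safe.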
+ let file = self.file().unwrap();
let lines = if l1 - l0 > 3 {
vec![l0, l0 + 1, l1]
} else {
(l0..=l1).collect()
};
for line_number in lines {
- source.push((line_number, self.file.get_line(line_number).to_string()));
+ source.push((line_number, file.get_line(line_number).to_string()));
}
}
}
}
-pub struct NewLexer<'a> {
- backing: &'a Tokens,
- tokens: &'a [LexToken],
- start: usize,
-}
-
-impl<'a> NewLexer<'a> {
- pub fn new(backing: &'a Tokens) -> Self {
- Self {
- backing,
- tokens: backing.tokens.as_slice(),
- start: 0,
- }
- }
-
- pub fn get(&self, index: usize) -> Option<&Token> {
- self.tokens.get(index).map(|token| &token.token)
- }
-
- pub fn error<S, B>(&self, range: B, text: S) -> Diagnostic
- where
- S: ToString,
- B: RangeBounds<usize>,
- {
- let start = match range.start_bound() {
- Bound::Included(&index) => index,
- Bound::Excluded(&index) => index + 1,
- Bound::Unbounded => 0,
- };
- let end = match range.end_bound() {
- Bound::Included(&index) => index + 1,
- Bound::Excluded(&index) => index,
- Bound::Unbounded => self.tokens.len(),
- };
- let abs_range = (start + self.start)..=(end + self.start - 1);
- self.backing
- .diagnostic(Severity::Error, abs_range, text.to_string())
- }
-
- pub fn sublexer<B>(&self, range: B) -> Self
- where
- B: RangeBounds<usize>,
- {
- Self {
- backing: self.backing,
- start: self.start
- + match range.start_bound() {
- Bound::Included(index) => *index,
- Bound::Excluded(index) => *index + 1,
- Bound::Unbounded => 0,
- },
- tokens: &self.backing.tokens
- [(range.start_bound().cloned(), range.end_bound().cloned())],
- }
- }
-}
-
-pub struct NewSource {
- file: Arc<SourceFile>,
+pub struct NewSource<'a> {
+ file: &'a SourceFile,
segmenter: Segmenter,
seg_pos: usize,
- lookahead: VecDeque<LexToken>,
+ lookahead: VecDeque<LexToken<'a>>,
}
-impl NewSource {
- pub fn new_default(file: SourceFile) -> Self {
+impl<'a> NewSource<'a> {
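+ /// Creates a source that reads commands from `file` using the default
+ /// syntax.
+ ///
+ /// A minimal sketch of reading every command from an already-loaded
+ /// [`SourceFile`], for example:
+ ///
+ /// ```ignore
+ /// let mut source = NewSource::new_default(&file);
+ /// while let Some(tokens) = source.read_command(&MacroSet::new()) {
+ ///     println!("{tokens:?}");
+ /// }
+ /// ```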
+ pub fn new_default(file: &'a SourceFile) -> Self {
Self::new(file, Syntax::default())
}
- pub fn new(file: SourceFile, syntax: Syntax) -> Self {
+ pub fn new(file: &'a SourceFile, syntax: Syntax) -> Self {
Self {
- file: Arc::new(file),
+ file,
segmenter: Segmenter::new(syntax, false),
seg_pos: 0,
lookahead: VecDeque::new(),
.position(|token| token.token == Token::EndCommand)
{
return Some(Tokens {
- file: self.file.clone(),
tokens: self.lookahead.drain(..=end).collect(),
});
}
Some(ScanToken::Token(token)) => {
let end = token == Token::EndCommand;
pp.push_back(LexToken {
+ file: self.file,
token,
pos,
macro_rep: None,
let first = &merge[0];
let last = &merge[n - 1];
self.lookahead.push_back(LexToken {
+ file: self.file,
token,
pos: first.pos.start..last.pos.end,
macro_rep: match (&first.macro_rep, &last.macro_rep) {
fn expand_macro(
&self,
macros: &MacroSet,
- src: &mut VecDeque<LexToken>,
- dst: &mut VecDeque<LexToken>,
+ src: &mut VecDeque<LexToken<'a>>,
+ dst: &mut VecDeque<LexToken<'a>>,
) {
// Now pass tokens one-by-one to the macro expander.
let Some(mut parser) = Parser::new(macros, &src[0].token) else {
let macro_rep = Arc::new(macro_rep);
for (index, token) in expansion.into_iter().enumerate() {
let lt = LexToken {
+ file: self.file,
token: token.token,
pos: c0.pos.start..c1.pos.end,
macro_rep: Some(MacroRepresentation {
Some(String::from("crosstabs.sps")),
UTF_8,
);
- let mut source = NewSource::new_default(file);
+ let mut source = NewSource::new_default(&file);
while let Some(tokens) = source.read_command(&MacroSet::new()) {
println!("{tokens:?}");
}