use std::{
borrow::Borrow,
collections::{HashMap, VecDeque},
- io::Result,
num::NonZeroU32,
ops::RangeInclusive,
sync::Arc,
pub trait LexRead {
/// Read some input from the source. If successful, returns the input that
- /// was read. At end of file, returns `Ok(None)`.
+ /// was read. At end of file or on error, returns an empty string.
///
/// `prompt` provides a hint to interactive readers as to what kind of
/// syntax is being read right now.
- fn read(&mut self, prompt: PromptStyle) -> Result<Option<String>>;
+ fn read(&mut self, prompt: PromptStyle) -> String;
}
impl LexRead for () {
- fn read(&mut self, _prompt: PromptStyle) -> Result<Option<String>> {
- Ok(None)
+ fn read(&mut self, _prompt: PromptStyle) -> String {
+ String::new()
}
}
}
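// A minimal sketch (not part of this change; the type name is hypothetical):
// a one-shot reader that hands the lexer a fixed piece of syntax, then reports
// end of input by returning an empty string, per the `read` contract above.
struct StringSource {
    syntax: Option<String>,
}

impl LexRead for StringSource {
    fn read(&mut self, _prompt: PromptStyle) -> String {
        // Yield the stored syntax once; afterwards behave as end of file.
        self.syntax.take().unwrap_or_default()
    }
}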
fn read(&mut self) {
- todo!()
+ loop {
+ let prompt = self.segmenter.prompt();
+ let s = self.reader.read(prompt);
+ if s.is_empty() {
+ self.eof = true;
+ return;
+ }
+ self.buffer.push_str(&s);
+ if self.buffer[self.seg_pos..].contains('\n') {
+ return;
+ }
+ }
}
fn try_get_pp(&mut self) -> bool {
let (seg_len, seg_type) = loop {
unreachable!();
};
let token = &self.pp[ofs];
- if parser.push(todo!(), &self.buffer[token.pos], &|e| println!("{e:?}"))
- == ParseStatus::Complete
+ if parser.push(&token.token, &self.buffer[token.pos.clone()], &|e| {
+ println!("{e:?}")
+ }) == ParseStatus::Complete
{
break;
}
&mut expansion,
|e| println!("{e:?}"),
);
+ let retval = !expansion.is_empty();
const MPRINT: bool = false;
if MPRINT {
}
// Append the macro expansion tokens to the lookahead.
- let macro_rep = Arc::new(macro_tokens_to_syntax(expansion.as_slice()).collect());
- for token in expansion {
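+ // Render the expansion back into syntax: `macro_rep` accumulates the text,
+ // and `pos[i]` records the byte range of token `i` within it so each expanded
+ // token can point back at its place in the expansion.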
+ let mut macro_rep = String::new();
+ let mut pos = Vec::with_capacity(expansion.len());
+ for [prefix, token] in macro_tokens_to_syntax(expansion.as_slice()) {
+ macro_rep.push_str(prefix);
+ let len = macro_rep.len();
+ pos.push(len..=len + token.len() - 1);
+ macro_rep.push_str(token);
+ }
+ let macro_rep = Arc::new(macro_rep);
+ for (index, token) in expansion.into_iter().enumerate() {
let lt = LexToken {
token: token.token,
- pos: todo!(),
+ pos: *c0.pos.start()..=*c1.pos.end(),
macro_rep: Some(MacroRepresentation {
expansion: Arc::clone(&macro_rep),
- pos: todo!(),
+ pos: pos[index].clone(),
}),
};
+ self.merge.push_back(lt);
}
- todo!()
+ self.pp.drain(..call.len());
+ retval
}
/// Attempts to obtain at least one new token into `self.merge`.
unreachable!();
}
fn get_parse(&mut self) -> bool {
- todo!()
+ // XXX deal with accumulate messages
+ self.get_parse__()
}
fn offset_to_point(&self, offset: usize) -> Point {
}
}
-pub fn macro_tokens_to_syntax(input: &[MacroToken]) -> impl Iterator<Item = &str> {
+pub fn macro_tokens_to_syntax(input: &[MacroToken]) -> impl Iterator<Item = [&str; 2]> {
input
.iter()
.take(1)
- .map(|token| token.syntax.as_str())
- .chain(input.windows(2).flat_map(|w| {
+ .map(|token| ["", token.syntax.as_str()])
+ .chain(input.windows(2).map(|w| {
let c0 = (&w[0].token).into();
let c1 = (&w[1].token).into();
[TokenClass::separator(c0, c1), w[1].syntax.as_str()]
subexpander.expand(&mut MacroTokens(tokens.as_slice()), &mut output);
subexpander.stack.pop();
e.stack = subexpander.stack;
- Some(macro_tokens_to_syntax(&output).collect())
+ Some(macro_tokens_to_syntax(&output).flatten().collect())
}
fn expand_head(e: &mut Expander, mut args: Vec<String>) -> Option<String> {
input.advance();
return Some(
macro_tokens_to_syntax(self.args.unwrap()[param_idx].as_ref().unwrap())
- .collect(),
+ .flatten().collect(),
);
}
if let Some(value) = self.vars.borrow().get(id) {
}
arg.extend(macro_tokens_to_syntax(
self.args.unwrap()[i].as_ref().unwrap(),
- ));
+ ).flatten());
}
input.advance();
return Some(arg);