integer::ToInteger,
lex::{
command_name::CommandMatcher,
- lexer::{NewLexer},
+ lexer::{TokenSlice},
token::{Punct, Token},
},
message::Diagnostic,
COMMANDS.get_or_init(|| new_commands()).as_slice()
}
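// (The table above is built once: `get_or_init` runs `new_commands()` on
// first use and returns the cached slice on every later call.)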
-fn parse_command_word(lexer: &mut NewLexer, s: &mut String, n: usize) -> bool {
+fn parse_command_word(lexer: &mut TokenSlice, s: &mut String, n: usize) -> bool {
let separator = match s.chars().next_back() {
Some(c) if c != '-' => " ",
_ => "",
    };
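    // e.g. appending a word after "DATA" uses a " " separator ("DATA LIST"),
    // while appending after a trailing '-' joins directly ("T-TEST").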
fn parse_command_name(
- lexer: &mut NewLexer,
+ lexer: &mut TokenSlice,
error: &Box<dyn Fn(Diagnostic)>,
) -> Result<(&'static Command, usize), ()> {
let mut s = String::new();
}
}
-fn parse_in_state(mut lexer: NewLexer, error: &Box<dyn Fn(Diagnostic)>, _state: State) {
+fn parse_in_state(mut lexer: TokenSlice, error: &Box<dyn Fn(Diagnostic)>, _state: State) {
println!("{}:{}", file!(), line!());
match lexer.get(0) {
None | Some(Token::EndCommand) => (),
if let Ok((command, n_tokens)) = parse_command_name(&mut lexer, error) {
let mut context = Context {
error,
- lexer: lexer.sublexer(n_tokens..),
+ lexer: lexer.subslice(n_tokens..),
command_name: Some(command.name),
};
(command.run)(&mut context);
}
}
-pub fn parse_command(lexer: NewLexer, error: &Box<dyn Fn(Diagnostic)>) {
+pub fn parse_command(lexer: TokenSlice, error: &Box<dyn Fn(Diagnostic)>) {
parse_in_state(lexer, error, State::Initial)
}
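// Public entry point: parsing always begins in `State::Initial`, and
// `parse_in_state` above does the actual dispatch.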
pub struct Context<'a> {
error: &'a Box<dyn Fn(Diagnostic)>,
- lexer: NewLexer<'a>,
+ lexer: TokenSlice<'a>,
command_name: Option<&'static str>,
}
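// A minimal sketch of a command body consuming its `Context` (the function
// name `run_example` is hypothetical; `lexer` holds the tokens that follow
// the command name, per `parse_in_state` above):
//
//     fn run_example(context: &mut Context) {
//         match context.lexer.get(0) {
//             None | Some(Token::EndCommand) => (),
//             Some(_) => { /* parse subcommands here */ }
//         }
//     }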
use crate::{
command::parse_command,
lex::{
- lexer::{NewLexer, NewSource},
+ lexer::{TokenSlice, NewSource},
},
macros::MacroSet,
message::Diagnostic,
let error: Box<dyn Fn(Diagnostic)> = Box::new(|diagnostic| {
println!("{diagnostic}");
});
- parse_command(NewLexer::new(&tokens), &error);
+ parse_command(TokenSlice::new(&tokens), &error);
}
}
}
io::Result as IoResult,
iter::once,
mem::take,
- ops::{Bound, Range, RangeBounds, RangeInclusive},
+ ops::{Range, RangeBounds, RangeInclusive},
path::Path,
ptr,
sync::Arc,
}
}
-impl Tokens<'_> {}
-
-pub struct NewLexer<'a> {
- backing: &'a Tokens<'a>,
+pub struct TokenSlice<'a> {
tokens: &'a [LexToken<'a>],
- start: usize,
}
-impl<'a> NewLexer<'a> {
+impl<'a> TokenSlice<'a> {
pub fn new(backing: &'a Tokens) -> Self {
Self {
- backing,
tokens: backing.tokens.as_slice(),
- start: 0,
}
}
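    // `new` covers the entire backing token sequence; narrower views come
    // from `subslice` below, so no separate (backing, start) bookkeeping
    // is needed.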
S: ToString,
B: RangeBounds<usize>,
{
- self.sublexer(range)
+ self.subslice(range)
.diagnostic(Severity::Error, text.to_string())
}
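    // Convenience wrapper: narrow to `range`, then attach an error-severity
    // diagnostic to just those tokens.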
- pub fn sublexer<B>(&self, range: B) -> Self
+ pub fn subslice<B>(&self, range: B) -> Self
where
B: RangeBounds<usize>,
{
Self {
- backing: self.backing,
- start: self.start
- + match range.start_bound() {
- Bound::Included(index) => *index,
- Bound::Excluded(index) => *index + 1,
- Bound::Unbounded => 0,
- },
- tokens: &self.backing.tokens
- [(range.start_bound().cloned(), range.end_bound().cloned())],
+ tokens: &self.tokens[(range.start_bound().cloned(), range.end_bound().cloned())],
}
}
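    // Usage sketch (hypothetical bindings): any `RangeBounds<usize>` works,
    // so callers narrow a slice with ordinary range syntax:
    //
    //     let rest = slice.subslice(1..);  // everything after the first token
    //     let pair = rest.subslice(0..2);  // a two-token window within `rest`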