#![allow(dead_code)]
-use std::{fmt::Write, sync::OnceLock};
+use std::{fmt::Write, ops::RangeFrom, sync::OnceLock};
use flagset::{flags, FlagSet};
use crate::{
- identifier::Identifier,
integer::ToInteger,
lex::{
command_name::CommandMatcher,
no_abbrev: false,
name: "ECHO",
run: Box::new(|_context| {
+
println!("hi");
}),
},
COMMANDS.get_or_init(|| new_commands()).as_slice()
}
-fn parse_command_word(lexer: &mut NewLexer, s: &mut String, n: isize) -> bool {
+fn parse_command_word(lexer: &mut NewLexer, s: &mut String, n: usize) -> bool {
let separator = match s.chars().next_back() {
Some(c) if c != '-' => " ",
_ => "",
};
- match lexer.next(n) {
- Token::Punct(Punct::Dash) => {
+ match lexer.get(n) {
+ Some(Token::Punct(Punct::Dash)) => {
s.push('-');
true
}
- Token::Id(id) => {
+ Some(Token::Id(id)) => {
write!(s, "{separator}{id}").unwrap();
true
}
- Token::Number(number) if number.is_sign_positive() => {
+ Some(Token::Number(number)) if number.is_sign_positive() => {
if let Some(integer) = number.to_exact_usize() {
write!(s, "{separator}{integer}").unwrap();
true
fn parse_command_name(
lexer: &mut NewLexer,
error: &Box<dyn Fn(Diagnostic)>,
-) -> Result<(&'static Command, isize), ()> {
+) -> Result<(&'static Command, usize), ()> {
let mut s = String::new();
let mut word = 0;
let mut missing_words = 0;
}
match command {
- Some(command) => Ok((command, (word + 1) + missing_words)),
+ Some(command) => Ok((command, ((word as isize + 1) + missing_words) as usize)),
None => {
- if s.is_empty() {
- error(lexer.error("Syntax error expecting command name"))
+ if word == 0 {
+ error(lexer.error(0..=0, "Syntax error expecting command name"))
} else {
- error(lexer.error("Unknown command `{s}`."))
+ error(lexer.error(0..=word, format!("Unknown command `{s}`.")))
};
Err(())
}
Finish,
}
-pub fn end_of_command(context: &Context) -> Result<Success, ()> {
- match context.lexer.token() {
- Token::EndCommand => Ok(Success::Success),
+pub fn end_of_command(context: &Context, range: RangeFrom<usize>) -> Result<Success, ()> {
+ match context.lexer.get(range.start) {
+ None | Some(Token::EndCommand) => Ok(Success::Success),
_ => {
context.error(
context
.lexer
- .error("Syntax error expecting end of command."),
+ .error(range, "Syntax error expecting end of command."),
);
Err(())
}
fn parse_in_state(mut lexer: NewLexer, error: &Box<dyn Fn(Diagnostic)>, _state: State) {
println!("{}:{}", file!(), line!());
- match lexer.token() {
- Token::EndCommand => (),
+ match lexer.get(0) {
+ None | Some(Token::EndCommand) => (),
_ => {
if let Ok((command, n_tokens)) = parse_command_name(&mut lexer, error) {
- for _ in 0..n_tokens {
- lexer.get();
- }
let mut context = Context {
error,
- lexer,
+ lexer: lexer.sublexer(n_tokens..),
command_name: Some(command.name),
};
(command.run)(&mut context);
- let _ = end_of_command(&context);
}
}
}
pub fn error(&self, diagnostic: Diagnostic) {
(self.error)(diagnostic);
}
-
- pub fn force_id(&mut self) -> Option<Identifier> {
- match self.lexer.token() {
- Token::Id(id) => {
- let id = id.clone();
- self.lexer.get();
- Some(id)
- }
- _ => {
- todo!()
- }
- }
- }
}
io::Result as IoResult,
iter::once,
mem::{self, take},
- ops::{Range, RangeInclusive},
+ ops::{Bound, Range, RangeBounds, RangeInclusive},
path::Path,
sync::Arc,
};
backing: &'a Tokens,
tokens: &'a [LexToken],
start: usize,
- pos: usize,
}
impl<'a> NewLexer<'a> {
backing,
tokens: backing.tokens.as_slice(),
start: 0,
- pos: 0,
}
}
- pub fn get(&mut self) {
- if !self.at_end() {
- self.pos += 1;
- }
- }
-
- pub fn at_end(&self) -> bool {
- self.pos >= self.tokens.len()
- }
-
- pub fn match_(&mut self, token: &Token) -> bool {
- if self.token() == token {
- self.get();
- true
- } else {
- false
- }
- }
-
- pub fn token(&self) -> &Token {
- self.tokens
- .get(self.pos)
- .map_or(&Token::EndCommand, |token| &token.token)
- }
-
- pub fn next(&self, ofs: isize) -> &Token {
- ofs.checked_add(self.pos as isize)
- .and_then(|index| usize::try_from(index).ok())
- .and_then(|index| self.tokens.get(index))
- .map_or(&Token::EndCommand, |token| &token.token)
+ pub fn get(&self, index: usize) -> Option<&Token> {
+ self.tokens.get(index).map(|token| &token.token)
}
-/*
- pub fn force_string(&mut self) -> Result<String, ()> {
- if let Token::String(s) = self.token() {
- let s = s.clone();
- self.
- }
- }*/
-
- pub fn error<S>(&self, text: S) -> Diagnostic
+ pub fn error<S, B>(&self, range: B, text: S) -> Diagnostic
where
S: ToString,
+ B: RangeBounds<usize>,
{
- let abs_pos = self.start + self.pos;
+ let start = match range.start_bound() {
+ Bound::Included(&index) => index,
+ Bound::Excluded(&index) => index + 1,
+ Bound::Unbounded => 0,
+ };
+ let end = match range.end_bound() {
+ Bound::Included(&index) => index + 1,
+ Bound::Excluded(&index) => index,
+ Bound::Unbounded => self.tokens.len(),
+ };
+ let abs_range = (start + self.start)..=(end + self.start).saturating_sub(1);
self.backing
- .diagnostic(Severity::Error, abs_pos..=abs_pos, text.to_string())
+ .diagnostic(Severity::Error, abs_range, text.to_string())
+ }
+
+ pub fn sublexer<B>(&self, range: B) -> Self
+ where
+ B: RangeBounds<usize>,
+ {
+ Self {
+ backing: self.backing,
+ start: self.start
+ + match range.start_bound() {
+ Bound::Included(index) => *index,
+ Bound::Excluded(index) => *index + 1,
+ Bound::Unbounded => 0,
+ },
+ tokens: &self.tokens
+ [(range.start_bound().cloned(), range.end_bound().cloned())],
+ }
}
}
Some(ScanToken::Error(error)) => errors.push(LexError { error, pos }),
}
}
+ // XXX report errors
if pp.is_empty() {
return false;
}