From 2badb82be5c2164b65624c8f9f3c6a21c9a9a137 Mon Sep 17 00:00:00 2001
From: Ben Pfaff
Date: Mon, 26 Aug 2024 08:45:36 -0700
Subject: [PATCH] Rename NewLexer to TokenSlice and simplify it to a plain
 token slice

A TokenSlice need only wrap a slice of tokens: sub-slicing composes
relative to the current slice, so the backing Tokens pointer and start
offset that NewLexer carried are no longer needed. Rename the struct to
TokenSlice to match, and rename sublexer() to subslice().

---
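Not part of the change itself: a minimal standalone sketch of why the
backing/start bookkeeping can go away. Slice and u32 below are
hypothetical stand-ins for TokenSlice and LexToken (nothing here comes
from the pspp tree); the only assumptions are the (Bound, Bound) slice
indexing that subslice() already uses, plus pointer arithmetic as one
possible way to recover an absolute offset.

    use std::ops::RangeBounds;

    /// Stand-in for TokenSlice<'a>; u32 plays the role of LexToken<'a>.
    struct Slice<'a> {
        tokens: &'a [u32],
    }

    impl<'a> Slice<'a> {
        /// Same shape as the new subslice(): just re-index the slice.
        fn subslice<B: RangeBounds<usize>>(&self, range: B) -> Self {
            Self {
                tokens: &self.tokens
                    [(range.start_bound().cloned(), range.end_bound().cloned())],
            }
        }
    }

    fn main() {
        let backing: Vec<u32> = (0..10).collect();
        let all = Slice { tokens: &backing };

        // Indices are relative to the current slice, so nested subslices
        // compose without any explicit start-offset arithmetic.
        let tail = all.subslice(3..); // backing[3..10]
        let inner = tail.subslice(2..5); // backing[5..8]
        assert_eq!(inner.tokens, &[5, 6, 7]);

        // If an absolute position in the backing buffer is ever needed,
        // pointer arithmetic recovers it without a stored `start` field
        // (speculation: perhaps why lexer.rs keeps its `ptr` import).
        let start = unsafe { inner.tokens.as_ptr().offset_from(backing.as_ptr()) };
        assert_eq!(start, 5);
    }

Every subslice() borrows the same backing storage, so composing slices
is just re-slicing; there is no per-slice offset state to keep in sync.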
 rust/pspp/src/command.rs   | 14 +++++++-------
 rust/pspp/src/engine.rs    |  4 ++--
 rust/pspp/src/lex/lexer.rs | 26 ++++++--------------------
 3 files changed, 15 insertions(+), 29 deletions(-)

diff --git a/rust/pspp/src/command.rs b/rust/pspp/src/command.rs
index dfa8c5c68e..57bb885f14 100644
--- a/rust/pspp/src/command.rs
+++ b/rust/pspp/src/command.rs
@@ -7,7 +7,7 @@ use crate::{
     integer::ToInteger,
     lex::{
         command_name::CommandMatcher,
-        lexer::{NewLexer},
+        lexer::{TokenSlice},
         token::{Punct, Token},
     },
     message::Diagnostic,
@@ -126,7 +126,7 @@ fn commands() -> &'static [Command] {
     COMMANDS.get_or_init(|| new_commands()).as_slice()
 }
 
-fn parse_command_word(lexer: &mut NewLexer, s: &mut String, n: usize) -> bool {
+fn parse_command_word(lexer: &mut TokenSlice, s: &mut String, n: usize) -> bool {
     let separator = match s.chars().next_back() {
         Some(c) if c != '-' => " ",
         _ => "",
@@ -162,7 +162,7 @@ fn find_best_match(s: &str) -> (Option<&'static Command>, isize) {
 }
 
 fn parse_command_name(
-    lexer: &mut NewLexer,
+    lexer: &mut TokenSlice,
     error: &Box<dyn Fn(Diagnostic)>,
 ) -> Result<(&'static Command, usize), ()> {
     let mut s = String::new();
@@ -215,7 +215,7 @@ pub fn end_of_command(context: &Context, range: RangeFrom<usize>) -> Result
-fn parse_in_state(mut lexer: NewLexer, error: &Box<dyn Fn(Diagnostic)>, _state: State) {
+fn parse_in_state(mut lexer: TokenSlice, error: &Box<dyn Fn(Diagnostic)>, _state: State) {
     println!("{}:{}", file!(), line!());
     match lexer.get(0) {
         None | Some(Token::EndCommand) => (),
@@ -223,7 +223,7 @@ fn parse_in_state(mut lexer: NewLexer, error: &Box<dyn Fn(Diagnostic)>, _state:
             if let Ok((command, n_tokens)) = parse_command_name(&mut lexer, error) {
                 let mut context = Context {
                     error,
-                    lexer: lexer.sublexer(n_tokens..),
+                    lexer: lexer.subslice(n_tokens..),
                     command_name: Some(command.name),
                 };
                 (command.run)(&mut context);
@@ -232,13 +232,13 @@ fn parse_in_state(mut lexer: NewLexer, error: &Box<dyn Fn(Diagnostic)>, _state:
     }
 }
 
-pub fn parse_command(lexer: NewLexer, error: &Box<dyn Fn(Diagnostic)>) {
+pub fn parse_command(lexer: TokenSlice, error: &Box<dyn Fn(Diagnostic)>) {
     parse_in_state(lexer, error, State::Initial)
 }
 
 pub struct Context<'a> {
     error: &'a Box<dyn Fn(Diagnostic)>,
-    lexer: NewLexer<'a>,
+    lexer: TokenSlice<'a>,
     command_name: Option<&'static str>,
 }
 
diff --git a/rust/pspp/src/engine.rs b/rust/pspp/src/engine.rs
index 59021a184e..abfc2da8b3 100644
--- a/rust/pspp/src/engine.rs
+++ b/rust/pspp/src/engine.rs
@@ -1,7 +1,7 @@
 use crate::{
     command::parse_command,
     lex::{
-        lexer::{NewLexer, NewSource},
+        lexer::{TokenSlice, NewSource},
     },
     macros::MacroSet,
     message::Diagnostic,
@@ -19,7 +19,7 @@ impl Engine {
             let error: Box<dyn Fn(Diagnostic)> = Box::new(|diagnostic| {
                 println!("{diagnostic}");
             });
-            parse_command(NewLexer::new(&tokens), &error);
+            parse_command(TokenSlice::new(&tokens), &error);
         }
     }
 }
diff --git a/rust/pspp/src/lex/lexer.rs b/rust/pspp/src/lex/lexer.rs
index f335f67729..3e2b342596 100644
--- a/rust/pspp/src/lex/lexer.rs
+++ b/rust/pspp/src/lex/lexer.rs
@@ -6,7 +6,7 @@ use std::{
     io::Result as IoResult,
     iter::once,
     mem::take,
-    ops::{Bound, Range, RangeBounds, RangeInclusive},
+    ops::{Range, RangeBounds, RangeInclusive},
     path::Path,
     ptr,
     sync::Arc,
@@ -345,20 +345,14 @@ impl Debug for Tokens<'_> {
     }
 }
 
-impl Tokens<'_> {}
-
-pub struct NewLexer<'a> {
-    backing: &'a Tokens<'a>,
+pub struct TokenSlice<'a> {
     tokens: &'a [LexToken<'a>],
-    start: usize,
 }
 
-impl<'a> NewLexer<'a> {
+impl<'a> TokenSlice<'a> {
     pub fn new(backing: &'a Tokens) -> Self {
         Self {
-            backing,
             tokens: backing.tokens.as_slice(),
-            start: 0,
         }
     }
 
@@ -371,24 +365,16 @@ impl<'a> NewLexer<'a> {
         S: ToString,
         B: RangeBounds<usize>,
     {
-        self.sublexer(range)
+        self.subslice(range)
             .diagnostic(Severity::Error, text.to_string())
     }
 
-    pub fn sublexer<B>(&self, range: B) -> Self
+    pub fn subslice<B>(&self, range: B) -> Self
     where
         B: RangeBounds<usize>,
     {
         Self {
-            backing: self.backing,
-            start: self.start
-                + match range.start_bound() {
-                    Bound::Included(index) => *index,
-                    Bound::Excluded(index) => *index + 1,
-                    Bound::Unbounded => 0,
-                },
-            tokens: &self.backing.tokens
-                [(range.start_bound().cloned(), range.end_bound().cloned())],
+            tokens: &self.tokens[(range.start_bound().cloned(), range.end_bound().cloned())],
         }
     }
 
-- 
2.30.2