use crate::{
command::parse_command,
lex::{
- lexer::{TokenSlice, NewSource},
+ lexer::{TokenSlice, Source},
},
macros::MacroSet,
message::Diagnostic,
pub fn new() -> Self {
Self
}
- pub fn run(&mut self, mut source: NewSource) {
+ pub fn run(&mut self, mut source: Source) {
let macros = MacroSet::new();
while let Some(tokens) = source.read_command(&macros) {
let error: Box<dyn Fn(Diagnostic)> = Box::new(|diagnostic| {
mod tests {
use encoding_rs::UTF_8;
- use crate::lex::lexer::{NewSource, SourceFile};
+ use crate::lex::lexer::{Source, SourceFile};
use super::Engine;
#[test]
fn test_echo() {
let mut engine = Engine::new();
- engine.run(NewSource::new_default(&SourceFile::for_file_contents(
+ engine.run(Source::new_default(&SourceFile::for_file_contents(
"ECHO 'hi there'.\nECHO 'bye there'.\n".to_string(),
Some("test.sps".to_string()),
UTF_8,
use std::{
borrow::{Borrow, Cow},
+ cell::Cell,
collections::VecDeque,
fmt::{Debug, Formatter, Result as FmtResult, Write},
fs,
mem::take,
ops::{Range, RangeInclusive},
path::Path,
- ptr, slice,
+ ptr,
sync::Arc,
};
macro_rep: Option<MacroRepresentation>,
}
-impl LexToken<'_> {
- pub fn force_string(&self) -> Result<&str, Diagnostic> {
- if let Token::String(s) = &self.token {
- Ok(s.as_str())
- } else {
- let slice = TokenSlice {
- tokens: slice::from_ref(self),
- };
- Err(slice.error("Syntax error expecting string."))
- }
- }
-}
-
struct LexError {
error: ScanError,
pos: Range<usize>,
}
}
+ pub fn cursor(&'a self) -> Cursor<'a> {
+ Cursor::new(self)
+ }
+
pub fn get_token(&self, index: usize) -> Option<&Token> {
self.get(index).map(|token| &token.token)
}
}
}
-pub struct NewSource<'a> {
+pub struct Cursor<'a> {
+ slice: &'a TokenSlice<'a>,
+
+ /// This allows [Self::force_string] etc. to advance while returning the
+ /// token without cloning it.
+ pos: Cell<usize>,
+}
+
+impl<'a> Cursor<'a> {
+ pub fn new(slice: &'a TokenSlice<'a>) -> Self {
+ Self { slice, pos: Cell::new(0) }
+ }
+
+ pub fn force_string(&self) -> Result<&str, Diagnostic> {
+ let pos = self.pos.get();
+ if let Some(Token::String(s)) = self.slice.get_token(pos) {
+ self.pos.set(pos + 1);
+ Ok(s.as_str())
+ } else {
+ let slice = self.slice.subslice(pos..self.slice.len());
+ Err(slice.error("Syntax error expecting string."))
+ }
+ }
+}
+
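A minimal usage sketch of the new Cursor API (illustrative only; `tokens` is the name bound in the read_command loop above, the rest of the calls are the ones introduced in this change):

    // A shared &Cursor can walk one command's tokens without &mut or
    // cloning, because force_string advances the interior Cell<usize>
    // on success.
    let cursor = tokens.cursor();
    let first = cursor.force_string()?;  // Ok(&str) on a string token, pos advances
    let second = cursor.force_string()?; // next token, or a Diagnostic at the current position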
+pub struct Source<'a> {
file: &'a SourceFile,
segmenter: Segmenter,
seg_pos: usize,
lookahead: VecDeque<LexToken<'a>>,
}
-impl<'a> NewSource<'a> {
+impl<'a> Source<'a> {
pub fn new_default(file: &'a SourceFile) -> Self {
Self::new(file, Syntax::default())
}
use crate::macros::MacroSet;
- use super::{NewSource, SourceFile};
+ use super::{Source, SourceFile};
#[test]
fn test() {
Some(String::from("crosstabs.sps")),
UTF_8,
);
- let mut source = NewSource::new_default(&file);
+ let mut source = Source::new_default(&file);
while let Some(tokens) = source.read_command(&MacroSet::new()) {
println!("{tokens:?}");
}