source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+[[package]]
+name = "either"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
+
[[package]]
name = "encoding_rs"
version = "0.8.34"
"chrono",
"clap",
"diff",
+ "either",
"encoding_rs",
"enum-map",
"finl_unicode",
let struct_attrs = StructAttrs::parse(&ast.attrs)?;
let mut body = TokenStream2::new();
let name = &ast.ident;
+ let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
if struct_attrs.selector {
let mut variants = Vec::new();
let mut default = None;
construct_fields(&variant.fields, quote! { #name::#ident }, false, None);
let fnname = format_ident!("construct{index}");
body.extend(quote! {
- fn #fnname<'a>(input: TokenSlice<'a>) -> ParseResult<'a, #name> { #construction }
+ fn #fnname #impl_generics(input: &TokenSlice) -> ParseResult<#name #ty_generics> #where_clause { let input = input.clone(); #construction }
if let Ok(p) = #fnname(input) {
return Ok(p);
}
body.extend(quote! { Err(ParseError::Mismatch(input.error("Syntax error.").into())) });
}
- let lifetime = struct_attrs.lifetime();
let output = quote! {
- impl<'a> FromTokens<'a> for #name #lifetime {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self> {
+ impl #impl_generics FromTokens for #name #ty_generics #where_clause {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self> {
#body
}
}
for (index, _field) in fields.iter().enumerate() {
let varname = format_ident!("field{index}");
construction
- .extend(quote! { let (#varname, input) = FromTokens::from_tokens(input) #convert ?.take_diagnostics(&mut diagnostics); });
+ .extend(quote! { let (#varname, input) = FromTokens::from_tokens(&input) #convert ?.take_diagnostics(&mut diagnostics); });
}
match fields {
Fields::Named(named) => {
false,
struct_attrs.syntax.as_ref(),
);
- let lifetime = struct_attrs.lifetime();
+ let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
let output = quote! {
- impl<'a> FromTokens<'a> for #name #lifetime {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self> {
+ impl #impl_generics FromTokens for #name #ty_generics #where_clause {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self> {
#construction
}
}
}
struct StructAttrs {
- add_lifetime: bool,
syntax: Option<Literal>,
selector: bool,
}
impl Default for StructAttrs {
fn default() -> Self {
Self {
- add_lifetime: false,
syntax: None,
selector: true,
}
}
impl StructAttrs {
- fn lifetime(&self) -> Option<TokenStream2> {
- self.add_lifetime.then(|| quote! { <'a> })
- }
fn parse(attributes: &[Attribute]) -> Result<Self, Error> {
//println!("{:?}", &attributes);
let mut field_attrs = Self::default();
meta.input.parse::<Token![=]>()?;
let syntax = meta.input.parse::<Literal>()?;
field_attrs.syntax = Some(syntax);
- } else if meta.path.is_ident("add_lifetime") {
- field_attrs.add_lifetime = true;
} else if meta.path.is_ident("no_selector") {
field_attrs.selector = false;
} else {
enum-map = "2.7.3"
flagset = "0.4.6"
pspp-derive = { version = "0.1.0", path = "../pspp-derive" }
+either = "1.13.0"
[target.'cfg(windows)'.dependencies]
windows-sys = { version = "0.48.0", features = ["Win32_Globalization"] }
no_abbrev: false,
name: "CROSSTABS",
run: Box::new(|context| {
- let input = context.lexer;
- match <Crosstabs>::from_tokens(input) {
+ let input = context.lexer.clone();
+ match <Crosstabs>::from_tokens(&input) {
Ok(Parsed {
value,
rest: _,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct Crosstabs<'a>(Subcommands<CrosstabsSubcommand<'a>>);
+struct Crosstabs(Subcommands<CrosstabsSubcommand>);
#[derive(Debug, pspp_derive::FromTokens)]
#[pspp(syntax = "COUNT")]
struct CountKw;
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-enum CrosstabsSubcommand<'a> {
+enum CrosstabsSubcommand {
#[pspp(default)]
- Tables(Option<Equals>, Punctuated<VarList<'a>, By>),
+ Tables(Option<Equals>, Punctuated<VarList, By>),
Missing(Equals, Missing),
Write(Option<(Equals, Write)>),
HideSmallCounts(CountKw, Equals, Integer),
Cells(Equals, Punctuated<Cell>),
Variables(
Equals,
- Punctuated<(VarRange<'a>, InParens<(Integer, Comma, Integer)>)>,
+ Punctuated<(VarRange, InParens<(Integer, Comma, Integer)>)>,
),
Format(Equals, Punctuated<Format>),
Count(Equals, Punctuated<Count>),
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct BoundedVars<'a> {
- vars: VarRange<'a>,
+struct BoundedVars {
+ vars: VarRange,
bounds: InParens<Bounds>,
}
+use either::Either;
use flagset::FlagSet;
use super::{
- And, By, Command, Equals, Gt, InSquares, Number, Plus, Punctuated, Seq1, Subcommands, VarList
+ And, Asterisk, By, Command, Dash, Equals, Exp, Gt, InSquares, Integer, Number, Plus,
+ Punctuated, Seq0, Seq1, Slash, Subcommands, VarList,
};
use crate::{
command::{FromTokens, InParens, MismatchToError, ParseError, ParseResult, Parsed, TokenSlice},
enhanced_only: false,
testing_only: false,
no_abbrev: false,
- name: "CROSSTABS",
+ name: "CTABLES",
run: Box::new(|context| {
- let input = context.lexer;
- match <CTables>::from_tokens(input) {
+ let input = context.lexer.clone();
+ match <CTables>::from_tokens(&input) {
Ok(Parsed {
value,
rest: _,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct CTables<'a>(Subcommands<CTablesSubcommand<'a>>);
+struct CTables(Subcommands<CTablesSubcommand>);
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-enum CTablesSubcommand<'a> {
- Table(Table<'a>),
- Format(Seq1<Format<'a>>),
- VLabels(Seq1<VLabels<'a>>),
+enum CTablesSubcommand {
+ Table(Table),
+ Format(Seq1<Format>),
+ VLabels(Seq1<VLabels>),
SMissing(SMissing),
- PCompute(And, &'a Identifier, Equals, keyword::Expr, InParens<Expression>),
+ PCompute(And, Identifier, Equals, keyword::Expr, InParens<Expression>),
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct Table<'a> {
- rows: Option<Axis<'a>>,
- columns: Option<(By, Option<Axis<'a>>)>,
- layers: Option<(By, Option<Axis<'a>>)>,
+struct Table {
+ rows: Option<Axis>,
+ columns: Option<(By, Option<Axis>)>,
+ layers: Option<(By, Option<Axis>)>,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-enum Axis<'a> {
- Variable(&'a Identifier, Option<InSquares<Measurement>>),
- Nest(Box<Axis<'a>>, Gt, Box<Axis<'a>>),
- Stack(Box<Axis<'a>>, Plus, Box<Axis<'a>>),
- Parens(InParens<Box<Axis<'a>>>),
- Annotate(InSquares<Punctuated<Annotation<'a>>>),
+enum Axis {
+ Variable(Identifier, Option<InSquares<Measurement>>),
+ Nest(Box<Axis>, Gt, Box<Axis>),
+ Stack(Box<Axis>, Plus, Box<Axis>),
+ Parens(InParens<Box<Axis>>),
+ Annotate(InSquares<Punctuated<Annotation>>),
}
#[derive(Debug, pspp_derive::FromTokens)]
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct Annotation<'a> {
- function: &'a Identifier,
+struct Annotation {
+ function: Identifier,
percentile: Option<Number>,
- label: Option<&'a String>,
+ label: Option<String>,
format: Option<AbstractFormat>,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-enum Format<'a> {
+enum Format {
MinColWidth(Equals, Width),
MaxColWidth(Equals, Width),
Units(Equals, Unit),
- Empty(Equals, Empty<'a>),
- Missing(Equals, &'a String),
+ Empty(Equals, Empty),
+ Missing(Equals, String),
}
#[derive(Debug, pspp_derive::FromTokens)]
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(no_selector, add_lifetime)]
-enum Empty<'a> {
+enum Empty {
Zero(keyword::Zero),
Blank(keyword::Blank),
- Value(&'a String),
+ Value(String),
}
#[derive(Debug, pspp_derive::FromTokens)]
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-enum VLabels<'a> {
- Variables(Equals, VarList<'a>),
+enum VLabels {
+ Variables(Equals, VarList),
Display(Display),
}
Listwise,
}
+/// Additive (lowest-precedence) level of a PCOMPUTE expression: one
+/// multiplicative term followed by zero or more `+`/`-` continuations.
+#[derive(Debug, pspp_derive::FromTokens)]
+struct Expression(MulExpression, Seq0<(Either<Plus, Dash>, Expression)>);
+
+/// Multiplicative level: one exponentiation term followed by zero or
+/// more `*`/`/` continuations.
+#[derive(Debug, pspp_derive::FromTokens)]
+struct MulExpression(PowExpression, Seq0<(Either<Asterisk, Slash>, PowExpression)>);
+
+/// Exponentiation level: a terminal optionally followed by `**`
+/// continuations (the recursive tail makes repeated `**` nest rightward).
+#[derive(Debug, pspp_derive::FromTokens)]
+struct PowExpression(Terminal, Seq0<(Exp, PowExpression)>);
+
+/// Atom of the expression grammar. `no_selector` makes the derived
+/// parser try each variant in declaration order rather than dispatching
+/// on a leading keyword.
+#[derive(Debug, pspp_derive::FromTokens)]
+#[pspp(no_selector)]
+enum Terminal {
+    Category(InSquares<Category>),
+    Missing(keyword::Missing),
+    OtherNm(keyword::OtherNm),
+    Subtotal(keyword::Subtotal, Option<InSquares<Integer>>),
+    Total(keyword::Total),
+    Number(Number),
+    Parens(InParens<Box<Expression>>),
+}
+
+/// A category value or range inside `[...]`: `min` alone, or
+/// `min THRU max` when the optional tail is present.
+#[derive(Debug, pspp_derive::FromTokens)]
+struct Category {
+    min: Value,
+    max: Option<(keyword::Thru, Value)>,
+}
+
#[derive(Debug, pspp_derive::FromTokens)]
#[pspp(no_selector)]
-enum Expression {
- //Category(InSquares<Category<'a>>),
- Missing,
- OtherNm,
-
+enum Value {
+ Lo(keyword::Lo),
+ Hi(keyword::Hi),
+ Number(Number),
+ String(String),
}
mod keyword {
#[derive(Debug, pspp_derive::FromTokens)]
#[pspp(syntax = "blank")]
pub struct Blank;
+
+    /// Matches the `THRU` keyword (range separator in categories).
+    #[derive(Debug, pspp_derive::FromTokens)]
+    #[pspp(syntax = "thru")]
+    pub struct Thru;
+
+    /// Matches the `HI` keyword (open upper bound of a range).
+    #[derive(Debug, pspp_derive::FromTokens)]
+    #[pspp(syntax = "hi")]
+    pub struct Hi;
+
+    /// Matches the `LO` keyword (open lower bound of a range).
+    #[derive(Debug, pspp_derive::FromTokens)]
+    #[pspp(syntax = "lo")]
+    pub struct Lo;
+
+    /// Matches the `MISSING` keyword.
+    #[derive(Debug, pspp_derive::FromTokens)]
+    #[pspp(syntax = "missing")]
+    pub struct Missing;
+
+    /// Matches the `OTHERNM` keyword.
+    #[derive(Debug, pspp_derive::FromTokens)]
+    #[pspp(syntax = "othernm")]
+    pub struct OtherNm;
+
+    /// Matches the `SUBTOTAL` keyword.
+    #[derive(Debug, pspp_derive::FromTokens)]
+    #[pspp(syntax = "subtotal")]
+    pub struct Subtotal;
+
+    /// Matches the `TOTAL` keyword.
+    #[derive(Debug, pspp_derive::FromTokens)]
+    #[pspp(syntax = "total")]
+    pub struct Total;
+}
+
+
+#[cfg(test)]
+mod tests {
+    use std::sync::Arc;
+
+    use encoding_rs::UTF_8;
+
+    use crate::{
+        engine::Engine,
+        lex::lexer::{Source, SourceFile},
+    };
+
+    /// Runs `syntax` through a fresh [`Engine`] as an in-memory UTF-8
+    /// source file named `test.sps`.  The result is not inspected, so a
+    /// test using this helper only verifies that execution completes
+    /// without panicking.
+    fn test(syntax: &str) {
+        let mut engine = Engine::new();
+        engine.run(Source::new_default(&Arc::new(
+            SourceFile::for_file_contents(syntax.to_string(), Some("test.sps".to_string()), UTF_8),
+        )));
+    }
+
+    /// Smoke test: CTABLES with two PCOMPUTE subcommands exercising a
+    /// `[min THRU max]` category and a plain numeric expression.
+    #[test]
+    fn basics() {
+        test(
+            "ctables /pcompute &all_drivers =expr([1 thru 2])
+/pcompute &all_drivers =expr(1).",
+        );
+    }
}
+use either::Either;
use flagset::FlagSet;
use super::{Comma, Command, Equals, Integer, Punctuated, Seq0, Seq1, Slash};
no_abbrev: false,
name: "DATA LIST",
run: Box::new(|context| {
- let input = context.lexer;
- match <DataList>::from_tokens(input) {
+ match <DataList>::from_tokens(&context.lexer) {
Ok(Parsed {
value,
rest: _,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct DataList<'a>(Seq1<Setting<'a>>, Seq1<Record<'a>>);
+struct DataList(Seq1<Setting>, Seq1<Record>);
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-enum Setting<'a> {
- File(Equals, File<'a>),
- Encoding(Equals, &'a String),
+enum Setting {
+ File(Equals, Either<String, Identifier>),
+ Encoding(Equals, String),
Fixed,
- Free(Option<InParens<Punctuated<Delimiter<'a>>>>),
- List(Option<InParens<Punctuated<Delimiter<'a>>>>),
+ Free(Option<InParens<Punctuated<Delimiter>>>),
+ List(Option<InParens<Punctuated<Delimiter>>>),
Records(Equals, Integer),
Skip(Equals, Integer),
Table,
NoTable,
- End(Equals, &'a Identifier),
+ End(Equals, Identifier),
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-enum Delimiter<'a> {
+enum Delimiter {
#[pspp(default)] // XXX this allows `STRING "string"`
- String(&'a String),
+ String(String),
Tab,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(no_selector, add_lifetime)]
-enum File<'a> {
- Name(&'a String),
- Handle(&'a Identifier),
-}
-
-#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct Record<'a> {
+struct Record {
slash: Slash,
record: Option<Integer>,
- variables: Seq0<Variable<'a>>,
+ variables: Seq0<Variable>,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct Variable<'a> {
- names: Seq1<&'a Identifier>,
- location: Location<'a>,
+struct Variable {
+ names: Seq1<Identifier>,
+ location: Location,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(no_selector, add_lifetime)]
-enum Location<'a> {
+#[pspp(no_selector)]
+enum Location {
Columns(
Integer,
Option<Integer>,
- Option<InParens<(&'a Identifier, Option<(Comma, Integer)>)>>,
+ Option<InParens<(Identifier, Option<(Comma, Integer)>)>>,
),
- Fortran(InParens<Punctuated<(Option<Integer>, Format<'a>)>>),
+ Fortran(InParens<Punctuated<(Option<Integer>, Format)>>),
Asterisk,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct Format<'a>(&'a Identifier);
+struct Format(Identifier);
#[cfg(test)]
mod tests {
no_abbrev: false,
name: "DESCRIPTIVES",
run: Box::new(|context| {
- let mut input = context.lexer;
+ let mut input = context.lexer.clone();
while !input.is_empty() {
- match <Subcommand<DescriptivesSubcommand>>::from_tokens(input) {
+ match <Subcommand<DescriptivesSubcommand>>::from_tokens(&input) {
Ok(Parsed {
value: subcommand,
rest,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct Descriptives<'a> {
- subcommands: Seq1<Subcommand<DescriptivesSubcommand<'a>>>,
+struct Descriptives {
+ subcommands: Seq1<Subcommand<DescriptivesSubcommand>>,
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-enum DescriptivesSubcommand<'a> {
+enum DescriptivesSubcommand {
#[pspp(default)]
- Variables(Option<Equals>, Punctuated<DescriptivesVars<'a>>),
+ Variables(Option<Equals>, Punctuated<DescriptivesVars>),
Missing(Equals, Seq1<Missing>),
Save,
Statistics(Equals, Seq1<Statistic>),
}
#[derive(Debug, pspp_derive::FromTokens)]
-#[pspp(add_lifetime)]
-struct DescriptivesVars<'a> {
- vars: VarRange<'a>,
- z_name: Option<InParens<&'a Identifier>>,
+struct DescriptivesVars {
+ vars: VarRange,
+ z_name: Option<InParens<Identifier>>,
}
#[derive(Debug, pspp_derive::FromTokens)]
use ctables::ctables_command;
use data_list::data_list_command;
use descriptives::descriptives_command;
+use either::Either;
use flagset::{flags, FlagSet};
use pspp_derive::FromTokens;
}
#[derive(Debug)]
-struct Parsed<'a, T> {
+struct Parsed<T> {
value: T,
- rest: TokenSlice<'a>,
+ rest: TokenSlice,
diagnostics: Diagnostics,
}
-impl<'a, T> Parsed<'a, T> {
- pub fn new(value: T, rest: TokenSlice<'a>, warnings: Diagnostics) -> Self {
+impl<T> Parsed<T> {
+ pub fn new(value: T, rest: TokenSlice, warnings: Diagnostics) -> Self {
Self {
value,
rest: rest,
diagnostics: warnings,
}
}
- pub fn ok(value: T, rest: TokenSlice<'a>) -> Self {
+ pub fn ok(value: T, rest: TokenSlice) -> Self {
Self {
value,
rest: rest,
diagnostics: Diagnostics::default(),
}
}
- pub fn into_tuple(self) -> (T, TokenSlice<'a>, Diagnostics) {
+ pub fn into_tuple(self) -> (T, TokenSlice, Diagnostics) {
(self.value, self.rest, self.diagnostics)
}
- pub fn take_diagnostics(self, d: &mut Diagnostics) -> (T, TokenSlice<'a>) {
+ pub fn take_diagnostics(self, d: &mut Diagnostics) -> (T, TokenSlice) {
let (value, rest, mut diagnostics) = self.into_tuple();
d.0.append(&mut diagnostics.0);
(value, rest)
}
- pub fn map<F, R>(self, f: F) -> Parsed<'a, R>
+ pub fn map<F, R>(self, f: F) -> Parsed<R>
where
F: FnOnce(T) -> R,
{
}
}
-type ParseResult<'a, T> = Result<Parsed<'a, T>, ParseError>;
+type ParseResult<T> = Result<Parsed<T>, ParseError>;
trait MismatchToError {
fn mismatch_to_error(self) -> Self;
}
-impl<'a, T> MismatchToError for ParseResult<'a, T> {
+impl<T> MismatchToError for ParseResult<T> {
fn mismatch_to_error(self) -> Self {
match self {
Err(ParseError::Mismatch(diagnostic)) => Err(ParseError::Error(diagnostic)),
}
}
-trait FromTokens<'a> {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+trait FromTokens {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized;
}
-impl<'a, T> FromTokens<'a> for Option<T>
+impl<T> FromTokens for Option<T>
where
- T: FromTokens<'a>,
+ T: FromTokens,
{
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
match T::from_tokens(input) {
Ok(p) => Ok(p.map(Some)),
- Err(ParseError::Mismatch(_)) => Ok(Parsed::ok(None, input)),
+ Err(ParseError::Mismatch(_)) => Ok(Parsed::ok(None, input.clone())),
Err(ParseError::Error(error)) => Err(ParseError::Error(error)),
}
}
}
-impl<'a, A, B> FromTokens<'a> for (A, B)
+impl<L, R> FromTokens for Either<L, R>
where
- A: FromTokens<'a>,
- B: FromTokens<'a>,
+ L: FromTokens,
+ R: FromTokens,
{
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
+ where
+ Self: Sized,
+ {
+ match L::from_tokens(input) {
+ Ok(p) => Ok(p.map(Either::Left)),
+ Err(ParseError::Mismatch(_)) => Ok(R::from_tokens(input)?.map(Either::Right)),
+ Err(ParseError::Error(error)) => Err(ParseError::Error(error)),
+ }
+ }
+}
+
+impl<A, B> FromTokens for (A, B)
+where
+ A: FromTokens,
+ B: FromTokens,
+{
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
let (a, input, mut diagnostics) = A::from_tokens(input)?.into_tuple();
- let (b, rest, mut diagnostics2) = B::from_tokens(input)?.into_tuple();
+ let (b, rest, mut diagnostics2) = B::from_tokens(&input)?.into_tuple();
diagnostics.0.append(&mut diagnostics2.0);
Ok(Parsed::new((a, b), rest, diagnostics))
}
}
-impl<'a, A, B, C> FromTokens<'a> for (A, B, C)
+impl<A, B, C> FromTokens for (A, B, C)
where
- A: FromTokens<'a>,
- B: FromTokens<'a>,
- C: FromTokens<'a>,
+ A: FromTokens,
+ B: FromTokens,
+ C: FromTokens,
{
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
let (a, input, mut diagnostics) = A::from_tokens(input)?.into_tuple();
- let (b, input, mut diagnostics2) = B::from_tokens(input)?.into_tuple();
- let (c, rest, mut diagnostics3) = C::from_tokens(input)?.into_tuple();
+ let (b, input, mut diagnostics2) = B::from_tokens(&input)?.into_tuple();
+ let (c, rest, mut diagnostics3) = C::from_tokens(&input)?.into_tuple();
diagnostics.0.append(&mut diagnostics2.0);
diagnostics.0.append(&mut diagnostics3.0);
Ok(Parsed::new((a, b, c), rest, diagnostics))
#[derive(Debug)]
pub struct Comma;
-impl<'a> FromTokens<'a> for Comma {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+impl FromTokens for Comma {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
#[pspp(syntax = "+")]
pub struct Plus;
+/// Matches a `-` token (subtraction in PCOMPUTE expressions).
+#[derive(Debug, pspp_derive::FromTokens)]
+#[pspp(syntax = "-")]
+pub struct Dash;
+
+/// Matches a `*` token (multiplication in PCOMPUTE expressions).
+#[derive(Debug, pspp_derive::FromTokens)]
+#[pspp(syntax = "*")]
+pub struct Asterisk;
+
+/// Matches a `**` token (exponentiation in PCOMPUTE expressions).
+#[derive(Debug, pspp_derive::FromTokens)]
+#[pspp(syntax = "**")]
+pub struct Exp;
+
#[derive(Debug, pspp_derive::FromTokens)]
#[pspp(syntax = "BY")]
struct By;
}
}
-impl<'a, T, P> FromTokens<'a> for Punctuated<T, P>
+impl<T, P> FromTokens for Punctuated<T, P>
where
- T: FromTokens<'a>,
- P: FromTokens<'a>,
+ T: FromTokens,
+ P: FromTokens,
{
- fn from_tokens(mut input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
let mut head = Vec::new();
let mut warnings_vec = Vec::new();
+ let mut input = input.clone();
let tail = loop {
- let t = match T::from_tokens(input) {
+ let t = match T::from_tokens(&input) {
Ok(Parsed {
value,
rest,
Err(ParseError::Mismatch(_)) => break None,
Err(ParseError::Error(e)) => return Err(ParseError::Error(e)),
};
- let p = match P::from_tokens(input) {
+ let p = match P::from_tokens(&input) {
Ok(Parsed {
value,
rest,
}
}
-impl<'a, T> FromTokens<'a> for Box<T>
+impl<T> FromTokens for Box<T>
where
- T: FromTokens<'a>,
+ T: FromTokens,
{
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
}
}
-impl<'a, T> FromTokens<'a> for Subcommands<T>
+impl<T> FromTokens for Subcommands<T>
where
- T: FromTokens<'a>,
+ T: FromTokens,
{
- fn from_tokens(mut input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
let mut items = Vec::new();
let mut diagnostics = Vec::new();
+ let mut input = input.clone();
loop {
let start = input.skip_until(|token| token != &Token::Punct(Punct::Slash));
if start.is_empty() {
}
let end = start.skip_to(&Token::Punct(Punct::Slash));
let subcommand = start.subslice(0..start.len() - end.len());
- match T::from_tokens(subcommand) {
+ match T::from_tokens(&subcommand) {
Ok(p) => {
let (value, rest, mut d) = p.into_tuple();
items.push(value);
#[derive(Debug)]
pub struct Seq0<T>(Vec<T>);
-impl<'a, T> FromTokens<'a> for Seq0<T>
+impl<T> FromTokens for Seq0<T>
where
- T: FromTokens<'a>,
+ T: FromTokens,
{
- fn from_tokens(mut input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
let mut values_vec = Vec::new();
let mut warnings_vec = Vec::new();
+ let mut input = input.clone();
while !input.is_empty() {
- match T::from_tokens(input) {
+ match T::from_tokens(&input) {
Ok(Parsed {
value,
rest,
#[derive(Debug)]
pub struct Seq1<T>(Vec<T>);
-impl<'a, T> FromTokens<'a> for Seq1<T>
+impl<T> FromTokens for Seq1<T>
where
- T: FromTokens<'a>,
+ T: FromTokens,
{
- fn from_tokens(mut input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
let mut values_vec = Vec::new();
let mut warnings_vec = Vec::new();
+ let mut input = input.clone();
while !input.is_empty() {
- match T::from_tokens(input) {
+ match T::from_tokens(&input) {
Ok(Parsed {
value,
rest,
}
/*
-impl<'a, T> FromTokens<'a> for Vec<T>
+impl<T> FromTokens for Vec<T>
where
- T: FromTokens<'a>,
+ T: FromTokens,
{
- fn from_tokens(mut input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(mut input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
}
}*/
-impl<'a> FromTokens<'a> for TokenSlice<'a> {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+impl FromTokens for TokenSlice {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
- Ok(Parsed::ok(input, input.end()))
+ Ok(Parsed::ok(input.clone(), input.end()))
}
}
#[derive(Debug)]
struct Subcommand<T>(pub T);
-impl<'a, T> FromTokens<'a> for Subcommand<T>
+impl<T> FromTokens for Subcommand<T>
where
- T: FromTokens<'a>,
+ T: FromTokens,
{
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
}
let end = start.skip_to(&Token::Punct(Punct::Slash));
let subcommand = start.subslice(0..start.len() - end.len());
- let (value, rest, mut warnings) = T::from_tokens(subcommand)?.into_tuple();
+ let (value, rest, mut warnings) = T::from_tokens(&subcommand)?.into_tuple();
if !rest.is_empty() {
warnings
.0
#[derive(Debug)]
struct InParens<T>(pub T);
-impl<'a, T> FromTokens<'a> for InParens<T>
+impl<T> FromTokens for InParens<T>
where
- T: FromTokens<'a>,
+ T: FromTokens,
{
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
let ((), rest, _) = parse_token(input, &Token::Punct(Punct::LParen))?.into_tuple();
- let (value, rest, warnings) = T::from_tokens(rest)?.into_tuple();
- let ((), rest, _) = parse_token(rest, &Token::Punct(Punct::RParen))?.into_tuple();
+ let (value, rest, warnings) = T::from_tokens(&rest)?.into_tuple();
+ let ((), rest, _) = parse_token(&rest, &Token::Punct(Punct::RParen))?.into_tuple();
Ok(Parsed {
value: Self(value),
rest,
#[derive(Debug)]
struct InSquares<T>(pub T);
-impl<'a, T> FromTokens<'a> for InSquares<T>
+impl<T> FromTokens for InSquares<T>
where
- T: FromTokens<'a>,
+ T: FromTokens,
{
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
let ((), rest, _) = parse_token(input, &Token::Punct(Punct::LSquare))?.into_tuple();
- let (value, rest, warnings) = T::from_tokens(rest)?.into_tuple();
- let ((), rest, _) = parse_token(rest, &Token::Punct(Punct::RSquare))?.into_tuple();
+ let (value, rest, warnings) = T::from_tokens(&rest)?.into_tuple();
+ let ((), rest, _) = parse_token(&rest, &Token::Punct(Punct::RSquare))?.into_tuple();
Ok(Parsed {
value: Self(value),
rest,
}
}
-fn parse_token_if<'a, F, R>(input: TokenSlice<'a>, parse: F) -> ParseResult<'a, R>
+fn parse_token_if<F, R>(input: &TokenSlice, parse: F) -> ParseResult<R>
where
F: Fn(&Token) -> Option<R>,
{
Err(ParseError::Mismatch(Diagnostics::default()))
}
-fn _parse_token<'a>(input: TokenSlice<'a>, token: &Token) -> ParseResult<'a, Token> {
+fn _parse_token(input: &TokenSlice, token: &Token) -> ParseResult<Token> {
if let Some(rest) = input.skip(token) {
Ok(Parsed::ok(input.first().token.clone(), rest))
} else {
}
}
-fn parse_token<'a>(input: TokenSlice<'a>, token: &Token) -> ParseResult<'a, ()> {
+fn parse_token(input: &TokenSlice, token: &Token) -> ParseResult<()> {
if let Some(rest) = input.skip(token) {
Ok(Parsed::ok((), rest))
} else {
}
}
-fn parse_syntax<'a>(input: TokenSlice<'a>, syntax: &str) -> ParseResult<'a, ()> {
+fn parse_syntax(input: &TokenSlice, syntax: &str) -> ParseResult<()> {
if let Some(rest) = input.skip_syntax(syntax) {
Ok(Parsed::ok((), rest))
} else {
}
}
-pub type VarList<'a> = Punctuated<VarRange<'a>>;
+pub type VarList = Punctuated<VarRange>;
#[derive(Debug)]
pub struct Number(f64);
-impl<'a> FromTokens<'a> for Number {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+impl FromTokens for Number {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
#[derive(Debug)]
pub struct Integer(i64);
-impl<'a> FromTokens<'a> for Integer {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+impl FromTokens for Integer {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
}
}
-pub enum VarRange<'a> {
- Single(&'a Identifier),
- Range(&'a Identifier, &'a Identifier),
+pub enum VarRange {
+ Single(Identifier),
+ Range(Identifier, Identifier),
All,
}
-impl<'a> Debug for VarRange<'a> {
+impl Debug for VarRange {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Single(var) => write!(f, "{var:?}"),
}
}
-impl<'a> FromTokens<'a> for VarRange<'a> {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+impl FromTokens for VarRange {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
Ok(Parsed::ok(Self::All, rest))
} else {
let (from, rest, _) = parse_id(input)?.into_tuple();
- if let Ok(Parsed { rest, .. }) = parse_token(rest, &Token::Punct(Punct::To)) {
- if let Ok(p) = parse_id(rest) {
+ if let Ok(Parsed { rest, .. }) = parse_token(&rest, &Token::Punct(Punct::To)) {
+ if let Ok(p) = parse_id(&rest) {
return Ok(p.map(|to| Self::Range(from, to)));
}
}
}
}
-fn parse_id<'a>(input: TokenSlice<'a>) -> ParseResult<'a, &'a Identifier> {
+fn parse_id(input: &TokenSlice) -> ParseResult<Identifier> {
let mut iter = input.iter();
if let Some(LexToken {
token: Token::Id(id),
..
}) = iter.next()
{
- Ok(Parsed::ok(id, iter.remainder()))
+ Ok(Parsed::ok(id.clone(), iter.remainder()))
} else {
Err(ParseError::Mismatch(
input.error("Syntax error expecting identifier.").into(),
}
}
-fn parse_format<'a>(input: TokenSlice<'a>) -> ParseResult<'a, AbstractFormat> {
+fn parse_format(input: &TokenSlice) -> ParseResult<AbstractFormat> {
let mut iter = input.iter();
if let Some(LexToken {
token: Token::Id(id),
))
}
-fn parse_string<'a>(input: TokenSlice<'a>) -> ParseResult<'a, &'a String> {
+fn parse_string(input: &TokenSlice) -> ParseResult<String> {
let mut iter = input.iter();
if let Some(LexToken {
token: Token::String(s),
..
}) = iter.next()
{
- Ok(Parsed::ok(s, iter.remainder()))
+ Ok(Parsed::ok(s.clone(), iter.remainder()))
} else {
Err(ParseError::Mismatch(
input.error("Syntax error expecting identifier.").into(),
}
}
-impl<'a> FromTokens<'a> for &'a Identifier {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+impl<'a> FromTokens for Identifier {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
}
}
-impl<'a> FromTokens<'a> for &'a String {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+impl<'a> FromTokens for String {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
}
}
-impl<'a> FromTokens<'a> for AbstractFormat {
- fn from_tokens(input: TokenSlice<'a>) -> ParseResult<'a, Self>
+impl FromTokens for AbstractFormat {
+ fn from_tokens(input: &TokenSlice) -> ParseResult<Self>
where
Self: Sized,
{
}
}
-fn collect_subcommands<'a>(src: &'a TokenSlice) -> Vec<TokenSlice<'a>> {
+fn collect_subcommands(src: TokenSlice) -> Vec<TokenSlice> {
src.split(|token| token.token == Token::Punct(Punct::Slash))
.filter(|slice| !slice.is_empty())
.collect()
pub struct Context<'a> {
error: &'a Box<dyn Fn(Diagnostic)>,
- lexer: TokenSlice<'a>,
+ lexer: TokenSlice,
command_name: Option<&'static str>,
}
-impl<'a> Context<'a> {
+impl Context<'_> {
pub fn error(&self, diagnostic: Diagnostic) {
(self.error)(diagnostic);
}
macros::MacroSet,
message::Diagnostic,
};
+use std::rc::Rc;
pub struct Engine;
let error: Box<dyn Fn(Diagnostic)> = Box::new(|diagnostic| {
println!("{diagnostic}");
});
- parse_command(TokenSlice::new(&tokens), &error);
+ parse_command(TokenSlice::new(Rc::new(tokens)), &error);
}
}
}
mem::take,
ops::{Range, RangeInclusive},
path::Path,
+ rc::Rc,
sync::Arc,
};
}
pub struct TokenSliceIter<'a> {
- tokens: &'a [LexToken],
+ slice: &'a TokenSlice,
+ rest: Range<usize>,
}
impl<'a> TokenSliceIter<'a> {
- pub fn remainder(&self) -> TokenSlice<'a> {
+ pub fn new(slice: &'a TokenSlice) -> Self {
+ Self {
+ slice,
+ rest: slice.range.clone(),
+ }
+ }
+ pub fn remainder(&self) -> TokenSlice {
TokenSlice {
- tokens: self.tokens,
+ backing: self.slice.backing.clone(),
+ range: self.rest.clone(),
}
}
}
type Item = &'a LexToken;
fn next(&mut self) -> Option<Self::Item> {
- let (first, rest) = self.tokens.split_first().unwrap();
- if !rest.is_empty() {
- self.tokens = rest;
- Some(first)
- } else {
+ if self.rest.is_empty() {
None
+ } else {
+ self.rest.start += 1;
+ Some(&self.slice.backing.tokens[self.rest.start - 1])
}
}
}
-#[derive(Copy, Clone)]
-pub struct TokenSlice<'a> {
- tokens: &'a [LexToken],
+#[derive(Clone)]
+pub struct TokenSlice {
+ backing: Rc<Tokens>,
+ range: Range<usize>,
}
-impl<'a> Debug for TokenSlice<'a> {
+impl Debug for TokenSlice {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
write!(f, "TokenSlice {{ ")?;
- for (index, token) in self.tokens[..self.tokens.len() - 1].iter().enumerate() {
+ for (index, token) in self.tokens().iter().enumerate() {
if index > 0 {
write!(f, ", ")?;
}
}
}
-impl<'a> TokenSlice<'a> {
- pub fn new(backing: &'a Tokens) -> Self {
- Self {
- tokens: backing.tokens.as_slice(),
- }
+impl TokenSlice {
+ pub fn new(backing: Rc<Tokens>) -> Self {
+ let range = 0..backing.tokens.len() - 1;
+ Self { backing, range }
}
- pub fn get_token(&self, index: usize) -> Option<&'a Token> {
- //self.get(index).map(|token| &token.token)
- if index < self.len() {
- Some(&self.tokens[index].token)
- } else {
- None
- }
+ fn tokens(&self) -> &[LexToken] {
+ &self.backing.tokens[self.range.clone()]
+ }
+ pub fn get_token(&self, index: usize) -> Option<&Token> {
+ self.get(index).map(|token| &token.token)
}
- pub fn get(&self, index: usize) -> Option<&'a LexToken> {
- if index < self.len() {
- Some(&self.tokens[index])
- } else {
- None
- }
+ pub fn get(&self, index: usize) -> Option<&LexToken> {
+ self.tokens().get(index)
}
pub fn error<S>(&self, text: S) -> Diagnostic
pub fn subslice(&self, range: Range<usize>) -> Self {
debug_assert!(range.start <= range.end);
debug_assert!(range.end <= self.len());
+ let start = self.range.start + range.start;
+ let end = start + range.len();
Self {
- tokens: &self.tokens[range.start..range.end + 1],
+ backing: self.backing.clone(),
+ range: start..end,
}
}
pub fn first(&self) -> &LexToken {
- self.tokens.first().unwrap()
+ &self.backing.tokens[self.range.start]
}
fn last(&self) -> &LexToken {
- self.tokens.last().unwrap()
+ &self.backing.tokens[self.range.end - 1]
}
pub fn end(&self) -> Self {
self.subslice(self.len()..self.len())
}
pub fn len(&self) -> usize {
- self.tokens.len() - 1
+ self.tokens().len()
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
- pub fn iter(&self) -> TokenSliceIter<'a> {
- TokenSliceIter {
- tokens: self.tokens,
- }
+ pub fn iter(&self) -> TokenSliceIter {
+ TokenSliceIter::new(self)
}
/// If the tokens contains a macro call, this returns the raw
/// Returns `None` if the token range doesn't include a macro call.
fn get_macro_call(&self) -> Option<&str> {
if self.iter().any(|token| token.macro_rep.is_some()) {
- let token0 = &self.tokens[0];
- let token1 = &self.tokens[self.tokens.len() - 1];
+ let token0 = self.first();
+ let token1 = self.last();
if let Some(file) = self.file() {
let start = token0.pos.start;
let end = token1.pos.end;
fn location(&self) -> Location {
if let Some(file) = self.file() {
- file.token_location(self.first()..=self.last())
+ file.token_location(
+ &self.backing.tokens[self.range.start]..=&self.backing.tokens[self.range.end],
+ )
} else {
// XXX support non-contiguous locations?
let first = self.first();
pub fn skip_syntax(&self, syntax: &str) -> Option<Self> {
let syntax_scanner = StringScanner::new(syntax, Syntax::Interactive, true);
- let mut input = *self;
+ let mut input = self.clone();
for scan_token in syntax_scanner {
let token = match scan_token {
ScanToken::Token(token) => token,
}
}
- pub fn split<F>(&'a self, predicate: F) -> impl Iterator<Item = Self> + 'a
+ pub fn split<F>(&self, predicate: F) -> impl Iterator<Item = Self> + use<'_, F>
where
- F: Fn(&LexToken) -> bool + 'a,
+ F: Fn(&LexToken) -> bool,
{
- (&self.tokens[..self.len()])
- .split(predicate)
- .map(move |slice| {
- // SAFETY: `slice` is inside `self.tokens`.
- let start_ofs =
- unsafe { slice.as_ptr().offset_from(self.tokens.as_ptr()) } as usize;
- self.subslice(start_ofs..start_ofs + slice.len())
- })
+ self.tokens().split(predicate).map(move |slice| {
+ // SAFETY: `slice` is inside `self.tokens`.
+ let start_ofs = unsafe { slice.as_ptr().offset_from(self.tokens().as_ptr()) } as usize;
+ self.subslice(start_ofs..start_ofs + slice.len())
+ })
}
}