}),
file_operations: None,
}),
-/*
- semantic_tokens_provider: Some(
- SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
- SemanticTokensRegistrationOptions {
- text_document_registration_options: {
- TextDocumentRegistrationOptions {
- document_selector: Some(vec![DocumentFilter {
- language: Some("pspp".to_string()),
- scheme: Some("file".to_string()),
- pattern: None,
- }]),
- }
- },
- semantic_tokens_options: SemanticTokensOptions {
- work_done_progress_options: WorkDoneProgressOptions::default(),
- legend: SemanticTokensLegend {
- token_types: LEGEND_TYPE.into(),
- token_modifiers: vec![],
- },
- range: Some(true),
- full: Some(SemanticTokensFullOptions::Bool(true)),
- },
- static_registration_options: StaticRegistrationOptions::default(),
- },
- ),
- ),
-*/
definition_provider: Some(OneOf::Left(true)),
references_provider: Some(OneOf::Left(true)),
rename_provider: Some(OneOf::Left(true)),
use flagset::{flags, FlagSet};
use crate::{
+ identifier::Identifier,
integer::ToInteger,
lex::{
command_name::CommandMatcher,
testing_only: bool,
no_abbrev: bool,
name: &'static str,
- run: Box<dyn Fn(&Context) + Send + Sync>,
+ run: Box<dyn Fn(&mut Context) + Send + Sync>, // TODO: eventually return Box<dyn ParsedCommand>.
}
+/*
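+ Draft sketch, commented out for now: parse DESCRIPTIVES into a structured representation.
+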
+struct Subcommand {
+ name: Identifier,
+ tokens: Vec<Token>,
+}
+
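+// Splits the rest of the command into subcommands: each starts with an
+// identifier (optionally followed by '=') and runs until the next '/' or the
+// end of the command.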
+fn collect_subcommands(context: &mut Context) -> Vec<Subcommand> {
+ let mut subcommands = Vec::new();
+ while !context.lexer.at_end() {
+ let Some(name) = context.force_id() else {
+ // TODO: error recovery when the subcommand name is missing.
+ todo!()
+ };
+ let mut tokens = Vec::new();
+ context.lexer.match_(&Token::Punct(Punct::Equals));
+ while !context.lexer.at_end() && !context.lexer.match_(&Token::Punct(Punct::Slash)) {
+ tokens.push(context.lexer.token().clone());
+ context.lexer.get();
+ }
+ subcommands.push(Subcommand { name, tokens });
+ }
+ subcommands
+}
+
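+// Structured form of a DESCRIPTIVES command, one field per subcommand.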
+struct DescriptivesCommand {
+ variables: Variables,
+ missing: Option<Missing>,
+ save: bool,
+ statistics: Option<Statistics>,
+ sort: Option<Sort>,
+}
+
+struct Variables(Vec<Variable>);
+
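+// One entry in VARIABLES: a single variable or a var1 TO var2 range, with an
+// optional Z-score target name.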
+struct Variable {
+ var1: Identifier,
+ var2: Option<Identifier>,
+ zname: Option<Identifier>,
+}
+
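+// Skeleton DESCRIPTIVES parser: gathers the subcommands, then (TODO) interprets each one.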
+fn parse_descriptives(context: &mut Context) {
+ let subcommands = collect_subcommands(context);
+ for subcommand in subcommands {
+ // TODO: match on subcommand.name (VARIABLES, MISSING, SAVE, STATISTICS, SORT)
+ // and fill in a DescriptivesCommand.
+ }
+}
+
+trait ParsedCommand {
+ fn format(&self) -> String;
+}
+ */
+
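+/// Returns the table of known commands, built once on first use.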
fn commands() -> &'static [Command] {
fn new_commands() -> Vec<Command> {
- vec![Command {
- allowed_states: State::Initial | State::Data,
- enhanced_only: false,
- testing_only: false,
- no_abbrev: false,
- name: "ECHO",
- run: Box::new(|_context| {
- println!("hi");
- }),
- }]
+ vec![
+ Command {
+ allowed_states: FlagSet::full(),
+ enhanced_only: false,
+ testing_only: false,
+ no_abbrev: false,
+ name: "ECHO",
+ run: Box::new(|_context| {
+ // Placeholder: a real ECHO would print its string argument to the output.
+ println!("hi");
+ }),
+ },
+/*
+ Command {
+ allowed_states: State::Data.into(),
+ enhanced_only: false,
+ testing_only: false,
+ no_abbrev: false,
+ name: "DESCRIPTIVES",
+ run: Box::new(parse_descriptives),
+ },
+*/
+ ]
}
static COMMANDS: OnceLock<Vec<Command>> = OnceLock::new();
for _ in 0..n_tokens {
lexer.get();
}
- let context = Context {
+ let mut context = Context {
error,
lexer,
command_name: Some(command.name),
};
- (command.run)(&context);
+ (command.run)(&mut context);
end_of_command(&context);
}
lexer.interactive_reset();
pub fn error(&self, diagnostic: Diagnostic) {
(self.error)(diagnostic);
}
+
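+ /// If the current token is an identifier, consumes it and returns it.
+ /// (The non-identifier case is not implemented yet.)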
+ pub fn force_id(&mut self) -> Option<Identifier> {
+ match self.lexer.token() {
+ Token::Id(id) => {
+ let id = id.clone();
+ self.lexer.get();
+ Some(id)
+ }
+ _ => {
+ // TODO: report an "expected identifier" diagnostic instead of panicking.
+ todo!()
+ }
+ }
+ }
}