work in progress: add Emacs lsp-mode client for pspp-lsp, drop commented-out semantic-tokens capability, pass &mut Context to command run fns, add Context::force_id and Lexer::at_end/match_ helpers
authorBen Pfaff <blp@cs.stanford.edu>
Wed, 21 Aug 2024 23:40:48 +0000 (16:40 -0700)
committerBen Pfaff <blp@cs.stanford.edu>
Wed, 21 Aug 2024 23:40:48 +0000 (16:40 -0700)
rust/pspp-lsp/lsp-pspp.el [new file with mode: 0644]
rust/pspp-lsp/src/main.rs
rust/pspp/src/command.rs
rust/pspp/src/lex/lexer.rs

diff --git a/rust/pspp-lsp/lsp-pspp.el b/rust/pspp-lsp/lsp-pspp.el
new file mode 100644 (file)
index 0000000..fb5d481
--- /dev/null
@@ -0,0 +1,8 @@
+(add-to-list 'lsp-language-id-configuration '(pspp-mode . "pspp"))
+
+(add-to-list 'lsp-language-id-configuration '(".*\\.sps$" . "pspp"))
+
+(lsp-register-client (make-lsp-client
+                      :new-connection (lsp-stdio-connection "pspp-lsp")
+                      :activation-fn (lsp-activate-on "pspp")
+                      :server-id 'pspp))
index 3876550ed2bba4a1d39a00394213b25239b7bcc8..b465a00b693bd044d12be1617edb1db1c3867487 100644 (file)
@@ -45,33 +45,6 @@ impl LanguageServer for Backend {
                     }),
                     file_operations: None,
                 }),
-/*
-                semantic_tokens_provider: Some(
-                    SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
-                        SemanticTokensRegistrationOptions {
-                            text_document_registration_options: {
-                                TextDocumentRegistrationOptions {
-                                    document_selector: Some(vec![DocumentFilter {
-                                        language: Some("pspp".to_string()),
-                                        scheme: Some("file".to_string()),
-                                        pattern: None,
-                                    }]),
-                                }
-                            },
-                            semantic_tokens_options: SemanticTokensOptions {
-                                work_done_progress_options: WorkDoneProgressOptions::default(),
-                                legend: SemanticTokensLegend {
-                                    token_types: LEGEND_TYPE.into(),
-                                    token_modifiers: vec![],
-                                },
-                                range: Some(true),
-                                full: Some(SemanticTokensFullOptions::Bool(true)),
-                            },
-                            static_registration_options: StaticRegistrationOptions::default(),
-                        },
-                    ),
-                ),
-*/
                 definition_provider: Some(OneOf::Left(true)),
                 references_provider: Some(OneOf::Left(true)),
                 rename_provider: Some(OneOf::Left(true)),
index d337d1823a0fa86ed4171466b412e20bde2f5c25..a40aae5f2a1aa64eb1b1176feed2836143e30bef 100644 (file)
@@ -3,6 +3,7 @@ use std::{fmt::Write, sync::OnceLock};
 use flagset::{flags, FlagSet};
 
 use crate::{
+    identifier::Identifier,
     integer::ToInteger,
     lex::{
         command_name::CommandMatcher,
@@ -40,21 +41,84 @@ struct Command {
     testing_only: bool,
     no_abbrev: bool,
     name: &'static str,
-    run: Box<dyn Fn(&Context) + Send + Sync>,
+    run: Box<dyn Fn(&mut Context) + Send + Sync>, //-> Box<dyn ParsedCommand> + Send + Sync>,
 }
 
+/*
+struct Subcommand {
+    name: Identifier,
+    tokens: Vec<Token>,
+}
+
+fn collect_subcommands(context: &mut Context) -> Vec<Subcommand> {
+    let mut subcommands = Vec::new();
+    while !context.lexer.at_end() {
+        let Some(name) = context.force_id() else {
+            todo!()
+        };
+        let mut tokens = Vec::new();
+        context.lexer.match_(&Token::Punct(Punct::Equals));
+        while !context.lexer.at_end() && !context.lexer.match_(&Token::Punct(Punct::Slash)) {
+            tokens.push(context.lexer.token().clone());
+            context.lexer.get();
+        }
+        subcommands.push(Subcommand { name, tokens });
+    }
+    subcommands
+}
+
+struct DescriptivesCommand {
+    variables: Variables,
+    missing: Option<Missing>,
+    save: bool,
+    statistics: Option<Statistics>,
+    sort: Option<Sort>,
+}
+
+struct Variables(Vec<Variable>);
+
+struct Variable {
+    var1: Identifier,
+    var2: Option<Identifier>,
+    zname: Option<Identifier>,
+}
+
+fn parse_descriptives(context: &mut Context) {
+    let subcommands = collect_subcommands(context);
+    for subcommand in subcommands {
+        
+    }
+}
+
+trait ParsedCommand {
+    fn format(&self) -> String;
+}
+ */
+
 fn commands() -> &'static [Command] {
     fn new_commands() -> Vec<Command> {
-        vec![Command {
-            allowed_states: State::Initial | State::Data,
-            enhanced_only: false,
-            testing_only: false,
-            no_abbrev: false,
-            name: "ECHO",
-            run: Box::new(|_context| {
-                println!("hi");
-            }),
-        }]
+        vec![
+            Command {
+                allowed_states: FlagSet::full(),
+                enhanced_only: false,
+                testing_only: false,
+                no_abbrev: false,
+                name: "ECHO",
+                run: Box::new(|_context| {
+                    println!("hi");
+                }),
+            },
+/*
+            Command {
+                allowed_states: State::Data.into(),
+                enhanced_only: false,
+                testing_only: false,
+                no_abbrev: false,
+                name: "DESCRIPTIVES",
+                run: Box::new(parse_descriptives),
+            },
+*/
+        ]
     }
 
     static COMMANDS: OnceLock<Vec<Command>> = OnceLock::new();
@@ -158,12 +222,12 @@ fn parse_in_state(lexer: &mut Lexer, error: &Box<dyn Fn(Diagnostic)>, _state: St
                 for _ in 0..n_tokens {
                     lexer.get();
                 }
-                let context = Context {
+                let mut context = Context {
                     error,
                     lexer,
                     command_name: Some(command.name),
                 };
-                (command.run)(&context);
+                (command.run)(&mut context);
                 end_of_command(&context);
             }
             lexer.interactive_reset();
@@ -189,4 +253,17 @@ impl<'a> Context<'a> {
     pub fn error(&self, diagnostic: Diagnostic) {
         (self.error)(diagnostic);
     }
+
+    pub fn force_id(&mut self) -> Option<Identifier> {
+        match self.lexer.token() {
+            Token::Id(id) => {
+                let id = id.clone();
+                self.lexer.get();
+                Some(id)
+            }
+            _ => {
+                todo!()
+            }
+        }
+    }
 }
index 82ef008aefd5e2de020e7a91d38704caec1598c2..f592da1144184332bf9002a8ecf48c0a6297ddd8 100644 (file)
@@ -828,6 +828,22 @@ impl Lexer {
             self.get();
         }
     }
+
+    pub fn at_end(&self) -> bool {
+        match self.source.token() {
+            Token::End | Token::EndCommand => true,
+            _ => false,
+        }
+    }
+
+    pub fn match_(&mut self, token: &Token) -> bool {
+        if self.token() == token {
+            self.get();
+            true
+        } else {
+            false
+        }
+    }
 }
 
 #[derive(ThisError, Clone, Debug, PartialEq, Eq)]