cargo fmt
author Ben Pfaff <blp@cs.stanford.edu>
Wed, 15 Jan 2025 01:02:09 +0000 (17:02 -0800)
committer Ben Pfaff <blp@cs.stanford.edu>
Wed, 15 Jan 2025 01:02:09 +0000 (17:02 -0800)
rust/Cargo.lock
rust/pspp-derive/src/lib.rs
rust/pspp-lsp/src/main.rs
rust/pspp/Cargo.toml
rust/pspp/src/lex/mod.rs
rust/pspp/src/lex/scan/mod.rs
rust/pspp/src/lex/segment/mod.rs
rust/pspp/src/lex/segment/test.rs
rust/pspp/src/output/csv.rs
rust/pspp/src/output/render.rs

index 5e691e5ca20803306eb3e9ef4bfd85a422622215..33edf04474d225ed33ff4b97dc36bf83f9d93ac3 100644 (file)
@@ -607,6 +607,15 @@ version = "1.70.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
 
+[[package]]
+name = "itertools"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
+dependencies = [
+ "either",
+]
+
 [[package]]
 name = "itoa"
 version = "1.0.11"
@@ -900,6 +909,7 @@ dependencies = [
  "float_next_after",
  "hexplay",
  "indexmap",
+ "itertools",
  "lazy_static",
  "libc",
  "libm",
index 28438ad9bff6330e8552329cdbf272839ea5ae1e..d3bbd943ea209d046b8a62621e1f7c8de0b2e229 100644 (file)
@@ -35,14 +35,15 @@ fn derive_enum(ast: &DeriveInput, e: &DataEnum) -> Result<TokenStream2, Error> {
         let ident = &variant.ident;
         let field_attrs = FieldAttrs::parse(&variant.attrs)?;
         let selector = field_attrs.selector.unwrap_or(struct_attrs.selector);
-        let construction = construct_fields(&variant.fields, quote! { #name::#ident }, selector, None);
+        let construction =
+            construct_fields(&variant.fields, quote! { #name::#ident }, selector, None);
         let fnname = format_ident!("construct_{ident}");
         body.extend(quote! {
             fn #fnname #impl_generics(input: &TokenSlice) -> ParseResult<#name #ty_generics> #where_clause { let input = input.clone();  #construction }
         });
     }
 
-    for  variant in &e.variants {
+    for variant in &e.variants {
         let ident = &variant.ident;
         let fnname = format_ident!("construct_{ident}");
         let field_attrs = FieldAttrs::parse(&variant.attrs)?;
index 349b1afee05fefdaa7d80888cb1a6ac8546fa432..db982a454c47be26161b0392ba5a428e54ec3ed5 100644 (file)
@@ -1,11 +1,7 @@
 use std::collections::HashMap;
 
 use tokio::sync::Mutex;
-use tower_lsp::{
-    jsonrpc::Result,
-    lsp_types::*,
-    Client, LanguageServer, LspService, Server,
-};
+use tower_lsp::{jsonrpc::Result, lsp_types::*, Client, LanguageServer, LspService, Server};
 
 #[tokio::main]
 async fn main() {
index 027a8440ff4e3e9c2a48327cfb5346d72ccc45cf..0536ee4ec05669917d9ab70858656ed6e0042895 100644 (file)
@@ -34,6 +34,7 @@ enum-iterator = "2.1.0"
 smallvec = { version = "1.13.2", features = ["const_generics", "write"] }
 libm = "0.2.11"
 smallstr = "0.3.0"
+itertools = "0.14.0"
 
 [target.'cfg(windows)'.dependencies]
 windows-sys = { version = "0.48.0", features = ["Win32_Globalization"] }
index e87b088cf443b74b0d0cde3e93e2424fab363902..97b347ff2e24bad25e9b672b106e62ef79a71bd5 100644 (file)
@@ -10,8 +10,8 @@
 //! are the same as the tokens used by the PSPP parser with a few additional
 //! types.
 
-pub mod segment;
-pub mod scan;
 pub mod command_name;
-pub mod token;
 pub mod lexer;
+pub mod scan;
+pub mod segment;
+pub mod token;
index 1de961be7f63157c609b269b9f476d744ccfeef5..398c8827a63fdc175fd53101922e0d10cd0a8e11 100644 (file)
@@ -13,7 +13,7 @@
 use crate::identifier::{Identifier, ReservedWord};
 
 use super::{
-    segment::{Syntax, Segment, Segmenter},
+    segment::{Segment, Segmenter, Syntax},
     token::{Punct, Token},
 };
 use std::collections::VecDeque;
@@ -403,8 +403,8 @@ impl<'a> Iterator for StringScanner<'a> {
                         return Some(token);
                     }
                     self.input = rest;
-                    return Some(ScanToken::Error(error))
-                },
+                    return Some(ScanToken::Error(error));
+                }
                 Some(ScanToken::Token(token)) => {
                     self.tokens.push_back(token);
                 }
index 5448aa81ae7e19f27d365ff9ef11dee1b788a186..ffa6de31370011a266832b6893bc1389f5d01750 100644 (file)
@@ -1097,7 +1097,10 @@ impl Segmenter {
         }
         return Ok(Some((rest, Segment::DoRepeatCommand)));
     }
-    fn parse_do_repeat_4<'a>(&mut self, input: &'a str) -> Result<Option<(&'a str, Segment)>, Incomplete> {
+    fn parse_do_repeat_4<'a>(
+        &mut self,
+        input: &'a str,
+    ) -> Result<Option<(&'a str, Segment)>, Incomplete> {
         self.state.0 = State::DoRepeat3;
         Ok(Some((input, Segment::DoRepeatOverflow)))
     }
index 79f92fed3637c0fd586cbe6b5d527b0bc62fbe96..3c36186a14dff36298ff0ad05850cc41b9592646 100644 (file)
@@ -1,6 +1,6 @@
 use crate::prompt::PromptStyle;
 
-use super::{Syntax, Segment, Segmenter};
+use super::{Segment, Segmenter, Syntax};
 
 fn push_segment<'a>(
     segmenter: &mut Segmenter,
@@ -1517,7 +1517,7 @@ end repeat
 
 mod define {
     use crate::{
-        lex::segment::{Syntax, Segment},
+        lex::segment::{Segment, Syntax},
         prompt::PromptStyle,
     };
 
index df0b158db910222f849a5d6b982faa4c6a61b58d..a8edb849e6e35596ba671cbffa21a1dd501b0d8a 100644 (file)
@@ -1,4 +1,10 @@
-use std::{borrow::Cow, fmt::Display, fs::File, io::{Error, Write}, sync::Arc};
+use std::{
+    borrow::Cow,
+    fmt::Display,
+    fs::File,
+    io::{Error, Write},
+    sync::Arc,
+};
 
 use crate::output::pivot::Coord2;
 
index 5057a6bb364fbcf9ec0ed8491c266cb7f3f767af..7fdb5d53a456fe9dbeda48718ef3e20e99dfd450 100644 (file)
@@ -1,9 +1,11 @@
 use std::cmp::max;
 use std::collections::HashMap;
+use std::iter::once;
 use std::ops::Range;
 use std::sync::Arc;
 
 use enum_map::EnumMap;
+use itertools::interleave;
 use smallvec::SmallVec;
 
 use super::pivot::{Axis2, BorderStyle, Coord2, Look, PivotTable, Rect2, Stroke};
@@ -491,17 +493,7 @@ impl Page {
     }
 
     fn use_row_widths(rows: &[usize], rules: &[usize]) -> Vec<usize> {
-        debug_assert_eq!(rows.len() + 1, rules.len());
-        let mut cp = Vec::with_capacity(2 * (rows.len()) + 1);
-
-        cp.push(0);
-        for (rule, row) in rules.iter().zip(rows.iter()) {
-            cp.push(*rule);
-            cp.push(*row);
-        }
-        cp.push(*rules.last().unwrap());
-
-        Self::accumulate_vec(cp)
+        once(0).chain(interleave(rules, rows).copied()).collect()
     }
 
     fn interpolate_row_widths(