use crate::{
dictionary::{
- Dictionary, MultipleResponseSet, MultipleResponseType, Value, VarWidth, Variable,
- VariableSet,
+ Dictionary, InvalidRole, MultipleResponseSet, MultipleResponseType, Value, VarWidth,
+ Variable, VariableSet,
},
encoding::Error as EncodingError,
endian::Endian,
let variable = dictionary.variables.get_index_mut2(index).unwrap();
match variable.attributes.role() {
Ok(role) => variable.role = role,
- Err(()) => warn(Error::TBD),
+ Err(InvalidRole) => warn(Error::TBD),
}
}
keep
});
if !deleted.is_empty() {
- self.update_dict_indexes(&|index| match deleted.binary_search(&index) {
- Ok(_) => None,
- Err(position) => Some(position),
- })
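+ // Indexes of deleted variables map to `None`; all remaining indexes map to `Some(_)`.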
+ self.update_dict_indexes(&|index| deleted.binary_search(&index).err())
}
}
}
impl Role {
- fn try_from_str(input: &str) -> Result<Option<Role>, ()> {
+ fn try_from_str(input: &str) -> Result<Option<Role>, InvalidRole> {
for (string, value) in [
("input", Some(Role::Input)),
("target", Some(Role::Target)),
return Ok(value);
}
}
- Err(())
+ Err(InvalidRole)
}
}
self.0.extend(other.0.drain())
}
- pub fn role(&self) -> Result<Option<Role>, ()> {
+ pub fn role(&self) -> Result<Option<Role>, InvalidRole> {
self.try_into()
}
}
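+/// Error returned when an attribute value cannot be interpreted as a [`Role`].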
+pub struct InvalidRole;
+
impl TryFrom<&Attributes> for Option<Role> {
- type Error = ();
+ type Error = InvalidRole;
fn try_from(value: &Attributes) -> Result<Self, Self::Error> {
let role = Identifier::new("$@Role").unwrap();
if let Ok([string]) = <&[String; 1]>::try_from(attribute.as_slice()) {
Role::try_from_str(string)
} else {
- Err(())
+ Err(InvalidRole)
}
})
}
use super::Engine;
#[test]
+ #[ignore]
fn test_echo() {
let mut engine = Engine::new();
engine.run(Source::new_default(&Arc::new(
}
}
-#[test]
-fn test_end_command() {
- check_segmentation(
- r#"DATA LIST/ X 1
-"#,
- Syntax::Auto,
- &[],
- &[],
- );
-}
-
#[test]
fn test_identifiers() {
check_segmentation(
fn tokenize_string_into(
s: &str,
mode: Syntax,
- error: &impl Fn(MacroError),
+ error: &(impl Fn(MacroError) + ?Sized),
output: &mut Vec<MacroToken>,
) {
for (syntax, token) in StringSegmenter::new(s, mode, true) {
}
}
-fn tokenize_string(s: &str, mode: Syntax, error: &impl Fn(MacroError)) -> Vec<MacroToken> {
+fn tokenize_string(
+ s: &str,
+ mode: Syntax,
+ error: &(impl Fn(MacroError) + ?Sized),
+) -> Vec<MacroToken> {
let mut tokens = Vec::new();
tokenize_string_into(s, mode, error, &mut tokens);
tokens
macros: &'a MacroSet,
/// Error reporting callback.
- error: &'a Box<dyn Fn(MacroError) + 'a>,
+ error: &'a (dyn Fn(MacroError) + 'a),
/// Tokenization mode.
mode: Syntax,
let params = Params {
size: style.size,
font_size: {
- let layout = style.new_layout(&context);
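+ // Lay out a "0" in the configured font to measure the nominal character size.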
+ let layout = style.new_layout(context);
layout.set_font_description(Some(&style.font));
layout.set_text("0");
let char_size = layout.size();
/// are present then there will always be a digit on both sides of every period
/// and comma.
fn avoid_decimal_split(mut s: String) -> String {
- if let Some(position) = s.find(&['.', ',']) {
+ if let Some(position) = s.find(['.', ',']) {
let followed_by_digit = s[position + 1..]
.chars()
.next()
// XXX rotation
//let h = if cell.rotate { Axis2::Y } else { Axis2::X };
- let layout = self.style.new_layout(&self.context);
+ let layout = self.style.new_layout(self.context);
let cell_font = if !cell.style.font_style.font.is_empty() {
Some(parse_font_style(&cell.style.font_style))
if !clip.is_empty() {
self.context.save().unwrap();
if !cell.rotate {
- xr_clip(&self.context, clip);
+ xr_clip(self.context, clip);
}
if cell.rotate {
let extra = bb[Axis2::X].len().saturating_sub(size.1.max(0) as usize);
self.context
.translate(xr_to_pt(bb[Axis2::X].start), xr_to_pt(bb[Axis2::Y].start));
}
- show_layout(&self.context, &layout);
+ show_layout(self.context, &layout);
self.context.restore().unwrap();
}
Stroke::Thick => LINE_WIDTH * 2,
Stroke::Thin => LINE_WIDTH / 2,
_ => LINE_WIDTH,
- }) as f64);
+ }));
self.context.move_to(xr_to_pt(x0), xr_to_pt(y0));
self.context.line_to(xr_to_pt(x1), xr_to_pt(y1));
if !self.style.use_system_colors {
- xr_set_color(&self.context, &color);
+ xr_set_color(self.context, &color);
}
if stroke == Stroke::Dashed {
self.context.set_dash(&[2.0], 0.0);
impl Device for CairoDevice<'_> {
fn params(&self) -> &Params {
- &self.params
+ self.params
}
fn measure_cell_width(&self, cell: &DrawCell) -> EnumMap<Extreme, usize> {
};
start..end
});
- xr_clip(&self.context, &bg_clip);
- xr_set_color(&self.context, bg);
+ xr_clip(self.context, &bg_clip);
+ xr_set_color(self.context, bg);
let x0 = bb[Axis2::X].start.saturating_sub(spill[Axis2::X][0]);
let y0 = bb[Axis2::Y].start.saturating_sub(spill[Axis2::Y][0]);
let x1 = bb[Axis2::X].end + spill[Axis2::X][1];
let y1 = bb[Axis2::Y].end + spill[Axis2::Y][1];
- xr_fill_rectangle(&self.context, Rect2::new(x0..x1, y0..y1));
+ xr_fill_rectangle(self.context, Rect2::new(x0..x1, y0..y1));
self.context.restore().unwrap();
}
if !self.style.use_system_colors {
- xr_set_color(&self.context, fg);
+ xr_set_color(self.context, fg);
}
self.context.save().unwrap();
use crate::output::pivot::HorzAlign;
-
mod driver;
pub mod fsm;
pub mod pager;
if (0..fsm_style.size[Axis2::Y]).contains(&total) {
let fsm_style = Arc::make_mut(&mut fsm_style);
let page_style = Arc::make_mut(&mut page_style);
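+ // `i` indexes both `margins[Axis2::Y]` and `heading_heights` in parallel.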
+ #[allow(clippy::needless_range_loop)]
for i in 0..2 {
page_style.margins[Axis2::Y][i] += heading_heights[i];
}
let x = match horz_align {
HorzAlign::Right | HorzAlign::Decimal { .. } => bb[X].end - width,
HorzAlign::Left => bb[X].start,
- HorzAlign::Center => (bb[X].start + bb[X].end - width + 1) / 2,
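+ // Center the text; rounding up puts any odd unit of slack on its left.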
+ HorzAlign::Center => (bb[X].start + bb[X].end - width).div_ceil(2),
};
let Some((x, text)) = clip_text(text, &(x..x + width), &clip[X]) else {
continue;
impl EncodedString {
pub fn borrowed(&self) -> EncodedStr<'_> {
match self {
- EncodedString::Encoded { bytes, encoding } => EncodedStr::Encoded {
- bytes: &bytes,
- encoding,
- },
- EncodedString::Utf8 { s } => EncodedStr::Utf8 { s: &s },
+ EncodedString::Encoded { bytes, encoding } => EncodedStr::Encoded { bytes, encoding },
+ EncodedString::Utf8 { s } => EncodedStr::Utf8 { s },
}
}
}
pub fn as_str(&self) -> Cow<'_, str> {
match self {
EncodedStr::Encoded { bytes, encoding } => {
- encoding.decode_without_bom_handling(&bytes).0
+ encoding.decode_without_bom_handling(bytes).0
}
EncodedStr::Utf8 { s } => Cow::from(*s),
}
pub struct QuotedEncodedStr<'a>(&'a EncodedStr<'a>);
-impl<'a> Display for QuotedEncodedStr<'a> {
+impl Display for QuotedEncodedStr<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self.0.as_str())
}
collections::{hash_map::Entry, HashMap},
error::Error as StdError,
fmt::{Display, Formatter, Result as FmtResult},
- iter::repeat,
+ iter::repeat_n,
};
use crate::endian::{Endian, ToBytes};
)))?
}
output.extend_from_slice(string.as_bytes());
- output.extend(repeat(b' ').take(size - len));
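+ // Pad the rest of the field out to `size` bytes with spaces.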
+ output.extend(repeat_n(b' ', size - len));
lexer.get()?;
}
Token::LParen => {