Break out offsets from sysfile raw errors.
author: Ben Pfaff <blp@cs.stanford.edu>
Sun, 13 Jul 2025 19:31:13 +0000 (12:31 -0700)
committer: Ben Pfaff <blp@cs.stanford.edu>
Sun, 13 Jul 2025 19:31:13 +0000 (12:31 -0700)
29 files changed:
rust/pspp/src/endian.rs
rust/pspp/src/format/display/mod.rs
rust/pspp/src/settings.rs
rust/pspp/src/sys/raw.rs
rust/pspp/src/sys/raw/records.rs
rust/pspp/src/sys/test.rs
rust/pspp/src/sys/testdata/bad_record_type.expected
rust/pspp/src/sys/testdata/extension_too_large.expected
rust/pspp/src/sys/testdata/invalid_label_indicator.expected
rust/pspp/src/sys/testdata/invalid_missing_indicator.expected
rust/pspp/src/sys/testdata/invalid_missing_indicator2.expected
rust/pspp/src/sys/testdata/misplaced_type_4_record.expected
rust/pspp/src/sys/testdata/missing_type_4_record.expected
rust/pspp/src/sys/testdata/partial_compressed_data_record.expected
rust/pspp/src/sys/testdata/partial_data_record_between_variables.expected
rust/pspp/src/sys/testdata/too_many_value_labels.expected
rust/pspp/src/sys/testdata/zcompressed_data_bad_zheader_ofs.expected
rust/pspp/src/sys/testdata/zcompressed_data_bad_ztrailer_ofs.expected
rust/pspp/src/sys/testdata/zcompressed_data_compressed_sizes_don_t_add_up.expected
rust/pspp/src/sys/testdata/zcompressed_data_compressed_sizes_dont_add_up.expected
rust/pspp/src/sys/testdata/zcompressed_data_compression_expands_data_too_much.expected
rust/pspp/src/sys/testdata/zcompressed_data_invalid_ztrailer_len.expected
rust/pspp/src/sys/testdata/zcompressed_data_wrong_block_size.expected
rust/pspp/src/sys/testdata/zcompressed_data_wrong_compressed_ofs.expected
rust/pspp/src/sys/testdata/zcompressed_data_wrong_n_blocks.expected
rust/pspp/src/sys/testdata/zcompressed_data_wrong_uncompressed_ofs.expected
rust/pspp/src/sys/testdata/zcompressed_data_wrong_ztrailer_bias.expected
rust/pspp/src/sys/testdata/zcompressed_data_wrong_ztrailer_len.expected
rust/pspp/src/sys/testdata/zcompressed_data_wrong_ztrailer_zero.expected

index b4c17770445e20e0e8d968e8c57574fff5c1f0f3..07cfe886c8f70115eae09bf262ecbfbcbc20621e 100644 (file)
 // You should have received a copy of the GNU General Public License along with
 // this program.  If not, see <http://www.gnu.org/licenses/>.
 
-use enum_iterator::Sequence;
 use smallvec::SmallVec;
 
-/// The endianness for integer and floating-point numbers in SPSS system files.
-///
-/// SPSS system files can declare IBM 370 and DEC VAX floating-point
-/// representations, but no file that uses either of these has ever been found
-/// in the wild, so this code does not handle them.
-#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Sequence)]
-pub enum Endian {
-    /// Big-endian: MSB at lowest address.
-    #[cfg_attr(target_endian = "big", default)]
-    Big,
+pub use binrw::Endian;
 
-    /// Little-endian: LSB at lowest address.
-    #[cfg_attr(target_endian = "little", default)]
-    Little,
-}
-
-impl Endian {
-    pub fn identify_u32(expected_value: u32, bytes: [u8; 4]) -> Option<Self> {
-        let as_big: u32 = Endian::Big.parse(bytes);
-        let as_little: u32 = Endian::Little.parse(bytes);
-        match (as_big == expected_value, as_little == expected_value) {
-            (true, false) => Some(Endian::Big),
-            (false, true) => Some(Endian::Little),
-            _ => None,
-        }
-    }
-
-    pub fn identify_f64(expected_value: f64, bytes: [u8; 8]) -> Option<Self> {
-        let as_big: f64 = Endian::Big.parse(bytes);
-        let as_little: f64 = Endian::Little.parse(bytes);
-        match (as_big == expected_value, as_little == expected_value) {
-            (true, false) => Some(Endian::Big),
-            (false, true) => Some(Endian::Little),
-            _ => None,
-        }
-    }
-
-    pub fn to_smallvec<const N: usize>(self, mut value: u64, n: usize) -> SmallVec<[u8; N]> {
-        debug_assert!(n <= 8);
-        let mut vec = SmallVec::new();
-        value <<= 8 * (8 - n);
-        for _ in 0..n {
-            vec.push((value >> 56) as u8);
-            value <<= 8;
-        }
-        if self == Endian::Little {
-            vec.reverse();
-        }
-        vec
-    }
+pub fn endian_to_smallvec<const N: usize>(
+    endian: Endian,
+    mut value: u64,
+    n: usize,
+) -> SmallVec<[u8; N]> {
+    debug_assert!(n <= 8);
+    let mut vec = SmallVec::new();
+    value <<= 8 * (8 - n);
+    for _ in 0..n {
+        vec.push((value >> 56) as u8);
+        value <<= 8;
+    }
+    if endian == Endian::Little {
+        vec.reverse();
+    }
+    vec
 }
 
 pub trait ToBytes<T, const N: usize> {
index 51afc68be447e22600348920b7cad329642d0fa8..755b6bdfc0b09d9baad6f6ddac0c89b636c2abd8 100644 (file)
@@ -30,7 +30,7 @@ use smallvec::{Array, SmallVec};
 use crate::{
     calendar::{calendar_offset_to_gregorian, day_of_year, month_name, short_month_name},
     data::Datum,
-    endian::ToBytes,
+    endian::{endian_to_smallvec, ToBytes},
     format::{Category, DateTemplate, Decimal, Format, NumberStyle, Settings, TemplateItem, Type},
     settings::{EndianSettings, Settings as PsppSettings},
 };
@@ -777,7 +777,7 @@ impl<'a, 'b> DisplayDatum<'a, 'b> {
         } else {
             integer
         };
-        self.endian.output.to_smallvec(integer, self.format.w())
+        endian_to_smallvec(self.endian.output, integer, self.format.w())
     }
 
     fn pib(&self, number: Option<f64>) -> SmallVec<[u8; 16]> {
@@ -788,7 +788,7 @@ impl<'a, 'b> DisplayDatum<'a, 'b> {
             number
         };
         let integer = number.abs() as u64;
-        self.endian.output.to_smallvec(integer, self.format.w())
+        endian_to_smallvec(self.endian.output, integer, self.format.w())
     }
 
     fn rb(&self, number: Option<f64>, w: usize) -> SmallVec<[u8; 16]> {
index 490803bcd6d7f370f186e5332dba5c4c51fb3cb6..f4678b12363d28973c298f84a1cc7bb9876981a7 100644 (file)
@@ -54,7 +54,7 @@ impl Show {
     }
 }
 
-#[derive(Copy, Clone, Default, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
 pub struct EndianSettings {
     /// Endianness for reading IB, PIB, and RB formats.
     pub input: Endian,
@@ -63,6 +63,15 @@ pub struct EndianSettings {
     pub output: Endian,
 }
 
+impl Default for EndianSettings {
+    fn default() -> Self {
+        Self {
+            input: Endian::NATIVE,
+            output: Endian::NATIVE,
+        }
+    }
+}
+
 impl EndianSettings {
     pub const fn new(endian: Endian) -> Self {
         Self {
index d9de5c23041c2979efac5b585010c555f052fc25..57143033d5dde9768b9f50c97f60941205ad8192 100644 (file)
@@ -53,6 +53,7 @@ use std::{
     iter::repeat_n,
     mem::take,
     num::NonZeroU8,
+    ops::Range,
 };
 use thiserror::Error as ThisError;
 
@@ -61,8 +62,51 @@ pub mod records;
 /// An error encountered reading raw system file records.
 ///
 /// Any error prevents reading further data from the system file.
+#[derive(Debug)]
+pub struct Error {
+    /// Range of file offsets where the error occurred.
+    offsets: Option<Range<u64>>,
+
+    /// Details of the error.
+    details: ErrorDetails,
+}
+
+impl std::error::Error for Error {}
+
+impl Error {
+    pub fn new(offsets: Option<Range<u64>>, details: ErrorDetails) -> Self {
+        Self { offsets, details }
+    }
+}
+
+impl Display for Error {
+    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
+        if let Some(offsets) = &self.offsets
+            && !offsets.is_empty()
+        {
+            if offsets.end > offsets.start.wrapping_add(1) {
+                write!(
+                    f,
+                    "Error at file offsets {:#x} to {:#x}: ",
+                    offsets.start, offsets.end
+                )?;
+            } else {
+                write!(f, "Error at file offset {:#x}: ", offsets.start)?;
+            }
+        }
+        write!(f, "{}", &self.details)
+    }
+}
+
+impl From<IoError> for Error {
+    fn from(value: IoError) -> Self {
+        Self::new(None, value.into())
+    }
+}
+
+/// Details of an [Error].
 #[derive(ThisError, Debug)]
-pub enum Error {
+pub enum ErrorDetails {
     #[error("Not an SPSS system file")]
     NotASystemFile,
 
@@ -79,93 +123,62 @@ pub enum Error {
     InvalidZsavCompression(u32),
 
     #[error(
-        "Document record at offset {offset:#x} has document line count ({n}) greater than the maximum number {max}."
+        "Document record has document line count ({n}) greater than the maximum number {max}."
     )]
-    BadDocumentLength { offset: u64, n: usize, max: usize },
+    BadDocumentLength { n: usize, max: usize },
 
-    #[error("At offset {offset:#x}, unrecognized record type {rec_type}.")]
-    BadRecordType { offset: u64, rec_type: u32 },
+    #[error("Unrecognized record type {0}.")]
+    BadRecordType(u32),
 
-    #[error(
-        "In variable record starting at offset {start_offset:#x}, variable width is not in the valid range -1 to 255."
-    )]
-    BadVariableWidth { start_offset: u64, width: i32 },
+    #[error("Variable width {0} in variable record is not in the valid range -1 to 255.")]
+    BadVariableWidth(i32),
 
-    #[error(
-        "In variable record starting at offset {start_offset:#x}, variable label code {code} at offset {code_offset:#x} is not 0 or 1."
-    )]
-    BadVariableLabelCode {
-        start_offset: u64,
-        code_offset: u64,
-        code: u32,
-    },
+    #[error("In variable record, variable label code {0} is not 0 or 1.")]
+    BadVariableLabelCode(u32),
 
-    #[error("At offset {offset:#x}, missing value code ({code}) is not -3, -2, 0, 1, 2, or 3.")]
-    BadMissingValueCode { offset: u64, code: i32 },
+    #[error("Missing value code ({0}) is not -3, -2, 0, 1, 2, or 3.")]
+    BadMissingValueCode(i32),
 
-    #[error(
-        "At offset {offset:#x}, numeric missing value code ({code}) is not -3, -2, 0, 1, 2, or 3."
-    )]
-    BadNumericMissingValueCode { offset: u64, code: i32 },
+    #[error("Numeric missing value code ({0}) is not -3, -2, 0, 1, 2, or 3.")]
+    BadNumericMissingValueCode(i32),
 
-    #[error("At offset {offset:#x}, string missing value code ({code}) is not 0, 1, 2, or 3.")]
-    BadStringMissingValueCode { offset: u64, code: i32 },
+    #[error("String missing value code ({0}) is not 0, 1, 2, or 3.")]
+    BadStringMissingValueCode(i32),
 
-    #[error(
-        "At offset {offset:#x}, number of value labels ({n}) is greater than the maximum number {max}."
-    )]
-    BadNumberOfValueLabels { offset: u64, n: u32, max: u32 },
+    #[error("Number of value labels ({n}) is greater than the maximum number {max}.")]
+    BadNumberOfValueLabels { n: u32, max: u32 },
 
     #[error(
-        "At offset {offset:#x}, following value label record, found record type {rec_type} instead of expected type 4 for variable index record"
+        "Following value label record, found record type {0} instead of expected type 4 for variable index record"
     )]
-    ExpectedVarIndexRecord { offset: u64, rec_type: u32 },
+    ExpectedVarIndexRecord(u32),
 
     #[error(
-        "At offset {offset:#x}, number of variables indexes for value labels ({n}) is greater than the maximum number ({max})."
+        "Number of variables indexes for value labels ({n}) is greater than the maximum number ({max})."
     )]
-    TooManyVarIndexes { offset: u64, n: u32, max: u32 },
+    TooManyVarIndexes { n: u32, max: u32 },
 
     #[error(
-        "At offset {offset:#x}, record type 7 subtype {subtype} is too large with element size {size} and {count} elements."
+        "Record type 7 subtype {subtype} is too large with element size {size} and {count} elements."
     )]
-    ExtensionRecordTooLarge {
-        offset: u64,
-        subtype: u32,
-        size: u32,
-        count: u32,
-    },
+    ExtensionRecordTooLarge { subtype: u32, size: u32, count: u32 },
 
-    #[error(
-        "Unexpected end of file at offset {offset:#x}, {case_ofs} bytes into a {case_len}-byte case."
-    )]
-    EofInCase {
-        offset: u64,
-        case_ofs: u64,
-        case_len: usize,
-    },
+    #[error("Unexpected end of file {case_ofs} bytes into a {case_len}-byte case.")]
+    EofInCase { case_ofs: u64, case_len: usize },
 
     #[error(
-        "Unexpected end of file at offset {offset:#x}, {case_ofs} bytes and {n_chunks} compression chunks into a compressed case."
+        "Unexpected end of file {case_ofs} bytes and {n_chunks} compression chunks into a compressed case."
     )]
-    EofInCompressedCase {
-        offset: u64,
-        case_ofs: u64,
-        n_chunks: usize,
-    },
+    EofInCompressedCase { case_ofs: u64, n_chunks: usize },
 
-    #[error("Data ends at offset {offset:#x}, {case_ofs} bytes into a compressed case.")]
-    PartialCompressedCase { offset: u64, case_ofs: u64 },
+    #[error("Data ends {case_ofs} bytes into a compressed case.")]
+    PartialCompressedCase { case_ofs: u64 },
 
-    #[error(
-        "At {case_ofs} bytes into compressed case starting at offset {offset:#x}, a string was found where a number was expected."
-    )]
-    CompressedNumberExpected { offset: u64, case_ofs: u64 },
+    #[error("At {0} bytes into compressed case, a string was found where a number was expected.")]
+    CompressedNumberExpected(u64),
 
-    #[error(
-        "At {case_ofs} bytes into compressed case starting at offset {offset:#x}, a number was found where a string was expected."
-    )]
-    CompressedStringExpected { offset: u64, case_ofs: u64 },
+    #[error("At {0} bytes into compressed case, a number was found where a string was expected.")]
+    CompressedStringExpected(u64),
 
     #[error("Impossible ztrailer_offset {0:#x}.")]
     ImpossibleZTrailerOffset(u64),
@@ -189,10 +202,9 @@ pub enum Error {
     WrongZlibTrailerBlockSize(u32),
 
     #[error(
-        "Block count {n_blocks} in ZLIB trailer at offset {offset:#x} differs from expected block count {expected_n_blocks} calculated from trailer length {ztrailer_len}."
+        "Block count {n_blocks} in ZLIB trailer differs from expected block count {expected_n_blocks} calculated from trailer length {ztrailer_len}."
     )]
     BadZlibTrailerNBlocks {
-        offset: u64,
         n_blocks: u32,
         expected_n_blocks: u64,
         ztrailer_len: u64,
@@ -353,8 +365,8 @@ pub enum Warning {
     #[error("Missing value record with range not allowed for string variable")]
     MissingValueStringRange,
 
-    #[error("Missing value record at offset {0:#x} not allowed for long string continuation")]
-    MissingValueContinuation(u64),
+    #[error("Missing value not allowed for long string continuation")]
+    MissingValueContinuation,
 
     #[error("Invalid multiple dichotomy label type")]
     InvalidMultipleDichotomyLabelType,
@@ -623,10 +635,13 @@ impl Record {
             999 => Ok(Some(Record::EndOfHeaders(
                 endian.parse(read_bytes(reader)?),
             ))),
-            _ => Err(Error::BadRecordType {
-                offset: reader.stream_position()?,
-                rec_type,
-            }),
+            _ => Err(Error::new(
+                {
+                    let offset = reader.stream_position()?;
+                    Some(offset - 4..offset)
+                },
+                ErrorDetails::BadRecordType(rec_type),
+            )),
         }
     }
 
@@ -703,7 +718,7 @@ pub fn infer_encoding(
 
     match get_encoding(encoding, character_code) {
         Ok(encoding) => Ok(encoding),
-        Err(err @ EncodingError::Ebcdic) => Err(Error::EncodingError(err)),
+        Err(err @ EncodingError::Ebcdic) => Err(Error::new(None, ErrorDetails::EncodingError(err))),
         Err(err) => {
             warn(Warning::EncodingError(err));
             // Warn that we're using the default encoding.
@@ -817,14 +832,14 @@ impl Debug for Magic {
 }
 
 impl TryFrom<[u8; 4]> for Magic {
-    type Error = Error;
+    type Error = ErrorDetails;
 
     fn try_from(value: [u8; 4]) -> Result<Self, Self::Error> {
         match value {
             Magic::SAV => Ok(Magic::Sav),
             Magic::ZSAV => Ok(Magic::Zsav),
             Magic::EBCDIC => Ok(Magic::Ebcdic),
-            _ => Err(Error::BadMagic(value)),
+            _ => Err(ErrorDetails::BadMagic(value)),
         }
     }
 }
@@ -908,11 +923,13 @@ impl Datum {
             if offset == case_start {
                 Ok(None)
             } else {
-                Err(Error::EofInCase {
-                    offset,
-                    case_ofs: offset - case_start,
-                    case_len: case_vars.iter().map(CaseVar::bytes).sum(),
-                })
+                Err(Error::new(
+                    Some(case_start..offset),
+                    ErrorDetails::EofInCase {
+                        case_ofs: offset - case_start,
+                        case_len: case_vars.iter().map(CaseVar::bytes).sum(),
+                    },
+                ))
             }
         }
 
@@ -983,11 +1000,13 @@ impl Datum {
         ) -> Result<Option<Case>, Error> {
             let offset = reader.stream_position()?;
             if n_chunks > 0 {
-                Err(Error::EofInCompressedCase {
-                    case_ofs: offset - case_start,
-                    n_chunks,
-                    offset,
-                })
+                Err(Error::new(
+                    Some(case_start..offset),
+                    ErrorDetails::EofInCompressedCase {
+                        case_ofs: offset - case_start,
+                        n_chunks,
+                    },
+                ))
             } else {
                 Ok(None)
             }
@@ -1433,10 +1452,13 @@ impl Iterator for Cases {
                 if let Some(expected_cases) = self.expected_cases
                     && expected_cases != self.read_cases
                 {
-                    return Some(Err(Error::WrongNumberOfCases {
-                        expected: expected_cases,
-                        actual: self.read_cases,
-                    }));
+                    return Some(Err(Error::new(
+                        None,
+                        ErrorDetails::WrongNumberOfCases {
+                            expected: expected_cases,
+                            actual: self.read_cases,
+                        },
+                    )));
                 } else {
                     return None;
                 }
index cc1f2f2b5715af54fd050f0a3e1e284202147d5b..063912b89e57aa06816c04a94c0ada7fc42c8fb3 100644 (file)
@@ -6,7 +6,7 @@ use std::{
     borrow::Cow,
     collections::BTreeMap,
     fmt::{Debug, Formatter},
-    io::{Read, Seek, SeekFrom},
+    io::{Cursor, ErrorKind, Read, Seek, SeekFrom},
     ops::Range,
     str::from_utf8,
 };
@@ -20,11 +20,13 @@ use crate::{
     endian::{Endian, Parse},
     identifier::{Error as IdError, Identifier},
     sys::raw::{
-        read_bytes, read_string, read_vec, DecodedRecord, Decoder, Error, Magic, RawDatum,
-        RawStrArray, RawWidth, Record, VarTypes, Warning,
+        read_bytes, read_string, read_vec, DecodedRecord, Decoder, Error, ErrorDetails, Magic,
+        RawDatum, RawStrArray, RawWidth, Record, UntypedDatum, VarTypes, Warning,
     },
 };
 
+use binrw::BinRead;
+
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum Compression {
     Simple,
@@ -36,9 +38,6 @@ pub struct HeaderRecord<S>
 where
     S: Debug,
 {
-    /// Offset in file.
-    pub offsets: Range<u64>,
-
     /// Magic number.
     pub magic: Magic,
 
@@ -114,60 +113,90 @@ where
 
 impl HeaderRecord<RawString> {
     pub fn read<R: Read + Seek>(r: &mut R, warn: &mut dyn FnMut(Warning)) -> Result<Self, Error> {
-        let start = r.stream_position()?;
+        let header_bytes = read_vec(r, 176).map_err(|e| {
+            Error::new(
+                None,
+                if e.kind() == ErrorKind::UnexpectedEof {
+                    ErrorDetails::NotASystemFile
+                } else {
+                    e.into()
+                },
+            )
+        })?;
+        Self::read_inner(&header_bytes, warn).map_err(|details| Error::new(Some(0..176), details))
+    }
+
+    fn read_inner(
+        header_bytes: &[u8],
+        warn: &mut dyn FnMut(Warning),
+    ) -> Result<Self, ErrorDetails> {
+        #[derive(BinRead)]
+        struct RawHeader {
+            magic: [u8; 4],
+            eye_catcher: [u8; 60],
+            layout_code: u32,
+            nominal_case_size: u32,
+            compression_code: u32,
+            weight_index: u32,
+            n_cases: u32,
+            bias: f64,
+            creation_date: [u8; 9],
+            creation_time: [u8; 8],
+            file_label: [u8; 64],
+            _padding: [u8; 3],
+        }
+
+        let be_header = RawHeader::read_be(&mut Cursor::new(&header_bytes)).unwrap();
+        let le_header = RawHeader::read_le(&mut Cursor::new(&header_bytes)).unwrap();
 
-        let magic: [u8; 4] = read_bytes(r)?;
-        let magic: Magic = magic.try_into().map_err(|_| Error::NotASystemFile)?;
+        let magic: Magic = be_header
+            .magic
+            .try_into()
+            .map_err(|_| ErrorDetails::NotASystemFile)?;
 
-        let eye_catcher = RawString(read_vec(r, 60)?);
-        let layout_code: [u8; 4] = read_bytes(r)?;
-        let endian = Endian::identify_u32(2, layout_code)
-            .or_else(|| Endian::identify_u32(2, layout_code))
-            .ok_or(Error::NotASystemFile)?;
-        let layout_code = endian.parse(layout_code);
+        let (endian, header) = if be_header.layout_code == 2 {
+            (Endian::Big, &be_header)
+        } else if le_header.layout_code == 2 {
+            (Endian::Little, &le_header)
+        } else {
+            return Err(ErrorDetails::NotASystemFile);
+        };
 
-        let nominal_case_size: u32 = endian.parse(read_bytes(r)?);
-        let nominal_case_size = (1..i32::MAX as u32 / 16)
-            .contains(&nominal_case_size)
-            .then_some(nominal_case_size);
+        let nominal_case_size = (1..i32::MAX.cast_unsigned() / 16)
+            .contains(&header.nominal_case_size)
+            .then_some(header.nominal_case_size);
 
-        let compression_code: u32 = endian.parse(read_bytes(r)?);
-        let compression = match (magic, compression_code) {
+        let compression = match (magic, header.compression_code) {
             (Magic::Zsav, 2) => Some(Compression::ZLib),
-            (Magic::Zsav, code) => return Err(Error::InvalidZsavCompression(code)),
+            (Magic::Zsav, code) => return Err(ErrorDetails::InvalidZsavCompression(code)),
             (_, 0) => None,
             (_, 1) => Some(Compression::Simple),
-            (_, code) => return Err(Error::InvalidSavCompression(code)),
+            (_, code) => return Err(ErrorDetails::InvalidSavCompression(code)),
         };
 
-        let weight_index: u32 = endian.parse(read_bytes(r)?);
-        let weight_index = (weight_index > 0).then_some(weight_index);
+        let weight_index = (header.weight_index > 0).then_some(header.weight_index);
 
-        let n_cases: u32 = endian.parse(read_bytes(r)?);
-        let n_cases = (n_cases < i32::MAX as u32 / 2).then_some(n_cases);
+        let n_cases = (header.n_cases < i32::MAX as u32 / 2).then_some(header.n_cases);
 
-        let bias: f64 = endian.parse(read_bytes(r)?);
-        if bias != 100.0 && bias != 0.0 {
-            warn(Warning::UnexpectedBias(bias));
+        if header.bias != 100.0 && header.bias != 0.0 {
+            warn(Warning::UnexpectedBias(header.bias));
         }
 
-        let creation_date = RawString(read_vec(r, 9)?);
-        let creation_time = RawString(read_vec(r, 8)?);
-        let file_label = RawString(read_vec(r, 64)?);
-        let _: [u8; 3] = read_bytes(r)?;
+        let creation_date = RawString(header.creation_date.into());
+        let creation_time = RawString(header.creation_time.into());
+        let file_label = RawString(header.file_label.into());
 
         Ok(HeaderRecord {
-            offsets: start..r.stream_position()?,
             magic,
-            layout_code,
+            layout_code: header.layout_code,
             nominal_case_size,
             compression,
             weight_index,
             n_cases,
-            bias,
+            bias: header.bias,
             creation_date,
             creation_time,
-            eye_catcher,
+            eye_catcher: RawString(header.eye_catcher.into()),
             file_label,
             endian,
         })
@@ -183,7 +212,6 @@ impl HeaderRecord<RawString> {
             weight_index: self.weight_index,
             n_cases: self.n_cases,
             file_label,
-            offsets: self.offsets.clone(),
             magic: self.magic,
             layout_code: self.layout_code,
             nominal_case_size: self.nominal_case_size,
@@ -258,22 +286,54 @@ fn format_name(type_: u32) -> Cow<'static, str> {
 }
 
 impl MissingValues {
-    pub fn read<R: Read + Seek>(
+    pub fn read<R>(
         r: &mut R,
-        offset: u64,
+        offsets: Range<u64>,
         raw_width: RawWidth,
         code: i32,
         endian: Endian,
         warn: &mut dyn FnMut(Warning),
-    ) -> Result<Self, Error> {
+    ) -> Result<Self, Error>
+    where
+        R: Read + Seek,
+    {
         let (individual_values, has_range) = match code {
             0 => return Ok(Self::default()),
             1..=3 => (code as usize, false),
             -2 => (0, true),
             -3 => (1, true),
-            _ => return Err(Error::BadMissingValueCode { offset, code }),
+            _ => {
+                return Err(Error::new(
+                    Some(offsets),
+                    ErrorDetails::BadMissingValueCode(code),
+                ))
+            }
         };
 
+        Self::read_inner(r, raw_width, individual_values, has_range, endian, warn).map_err(
+            |details| {
+                Error::new(
+                    {
+                        let n = individual_values + if has_range { 2 } else { 0 };
+                        Some(offsets.start..offsets.end + 8 * n as u64)
+                    },
+                    details,
+                )
+            },
+        )
+    }
+
+    fn read_inner<R>(
+        r: &mut R,
+        raw_width: RawWidth,
+        individual_values: usize,
+        has_range: bool,
+        endian: Endian,
+        warn: &mut dyn FnMut(Warning),
+    ) -> Result<Self, ErrorDetails>
+    where
+        R: Read + Seek,
+    {
         let mut values = Vec::with_capacity(individual_values);
         let range = if has_range {
             let low = read_bytes::<8, _>(r)?;
@@ -307,7 +367,7 @@ impl MissingValues {
                     .collect();
                 return Ok(Self::new(values, None).unwrap());
             }
-            Err(()) => warn(Warning::MissingValueContinuation(offset)),
+            Err(()) => warn(Warning::MissingValueContinuation),
         }
         Ok(Self::default())
     }
@@ -360,20 +420,31 @@ impl VariableRecord<RawString> {
         endian: Endian,
         warn: &mut dyn FnMut(Warning),
     ) -> Result<Record, Error> {
+        #[derive(BinRead)]
+        struct RawVariableRecord {
+            width: i32,
+            has_variable_label: u32,
+            missing_value_code: i32,
+            print_format: u32,
+            write_format: u32,
+            name: [u8; 8],
+        }
+
         let start_offset = r.stream_position()?;
-        let width: i32 = endian.parse(read_bytes(r)?);
-        let width: RawWidth = width.try_into().map_err(|_| Error::BadVariableWidth {
-            start_offset,
-            width,
+        let offsets = start_offset..start_offset + 28;
+        let raw_record =
+            read_vec(r, 28).map_err(|e| Error::new(Some(offsets.clone()), e.into()))?;
+        let raw_record =
+            RawVariableRecord::read_options(&mut Cursor::new(&raw_record), endian, ()).unwrap();
+
+        let width: RawWidth = raw_record.width.try_into().map_err(|_| {
+            Error::new(
+                Some(offsets.clone()),
+                ErrorDetails::BadVariableWidth(raw_record.width),
+            )
         })?;
-        let code_offset = r.stream_position()?;
-        let has_variable_label: u32 = endian.parse(read_bytes(r)?);
-        let missing_value_code: i32 = endian.parse(read_bytes(r)?);
-        let print_format = RawFormat(endian.parse(read_bytes(r)?));
-        let write_format = RawFormat(endian.parse(read_bytes(r)?));
-        let name = RawString(read_vec(r, 8)?);
-
-        let label = match has_variable_label {
+
+        let label = match raw_record.has_variable_label {
             0 => None,
             1 => {
                 let len: u32 = endian.parse(read_bytes(r)?);
@@ -386,25 +457,30 @@ impl VariableRecord<RawString> {
                 Some(label)
             }
             _ => {
-                return Err(Error::BadVariableLabelCode {
-                    start_offset,
-                    code_offset,
-                    code: has_variable_label,
-                });
+                return Err(Error::new(
+                    Some(offsets),
+                    ErrorDetails::BadVariableLabelCode(raw_record.has_variable_label),
+                ));
             }
         };
 
-        let missing_values =
-            MissingValues::read(r, start_offset, width, missing_value_code, endian, warn)?;
+        let missing_values = MissingValues::read(
+            r,
+            offsets,
+            width,
+            raw_record.missing_value_code,
+            endian,
+            warn,
+        )?;
 
         let end_offset = r.stream_position()?;
 
         Ok(Record::Variable(VariableRecord {
             offsets: start_offset..end_offset,
             width,
-            name,
-            print_format,
-            write_format,
+            name: RawString(raw_record.name.into()),
+            print_format: RawFormat(raw_record.print_format),
+            write_format: RawFormat(raw_record.write_format),
             missing_values,
             label,
         }))
@@ -495,16 +571,18 @@ impl ValueLabelRecord<RawDatum, RawString> {
         let label_offset = r.stream_position()?;
         let n: u32 = endian.parse(read_bytes(r)?);
         if n > Self::MAX_LABELS {
-            return Err(Error::BadNumberOfValueLabels {
-                offset: label_offset,
-                n,
-                max: Self::MAX_LABELS,
-            });
+            return Err(Error::new(
+                Some(label_offset..label_offset + 4),
+                ErrorDetails::BadNumberOfValueLabels {
+                    n,
+                    max: Self::MAX_LABELS,
+                },
+            ));
         }
 
         let mut labels = Vec::new();
         for _ in 0..n {
-            let value = super::UntypedDatum(read_bytes(r)?);
+            let value = UntypedDatum(read_bytes(r)?);
             let label_len: u8 = endian.parse(read_bytes(r)?);
             let label_len = label_len as usize;
             let padded_len = (label_len + 1).next_multiple_of(8);
@@ -517,21 +595,22 @@ impl ValueLabelRecord<RawDatum, RawString> {
         let index_offset = r.stream_position()?;
         let rec_type: u32 = endian.parse(read_bytes(r)?);
         if rec_type != 4 {
-            return Err(Error::ExpectedVarIndexRecord {
-                offset: index_offset,
-                rec_type,
-            });
+            return Err(Error::new(
+                Some(index_offset..index_offset + 4),
+                ErrorDetails::ExpectedVarIndexRecord(rec_type),
+            ));
         }
 
         let n: u32 = endian.parse(read_bytes(r)?);
         if n > Self::MAX_INDEXES {
-            return Err(Error::TooManyVarIndexes {
-                offset: index_offset,
-                n,
-                max: Self::MAX_INDEXES,
-            });
+            return Err(Error::new(
+                Some(index_offset + 4..index_offset + 8),
+                ErrorDetails::TooManyVarIndexes {
+                    n,
+                    max: Self::MAX_INDEXES,
+                },
+            ));
         } else if n == 0 {
-            dbg!();
             warn(Warning::NoVarIndexes {
                 offset: index_offset,
             });
@@ -646,21 +725,22 @@ impl DocumentRecord<RawDocumentLine> {
         let n: u32 = endian.parse(read_bytes(r)?);
         let n = n as usize;
         if n > Self::MAX_LINES {
-            Err(Error::BadDocumentLength {
-                offset: start_offset,
-                n,
-                max: Self::MAX_LINES,
-            })
+            Err(Error::new(
+                Some(start_offset..start_offset + 4),
+                ErrorDetails::BadDocumentLength {
+                    n,
+                    max: Self::MAX_LINES,
+                },
+            ))
         } else {
+            let offsets = start_offset..start_offset.saturating_add((n * DOC_LINE_LEN) as u64);
             let mut lines = Vec::with_capacity(n);
             for _ in 0..n {
-                lines.push(RawStrArray(read_bytes(r)?));
+                lines.push(RawStrArray(
+                    read_bytes(r).map_err(|e| Error::new(Some(offsets.clone()), e.into()))?,
+                ));
             }
-            let end_offset = r.stream_position()?;
-            Ok(Record::Document(DocumentRecord {
-                offsets: start_offset..end_offset,
-                lines,
-            }))
+            Ok(Record::Document(DocumentRecord { offsets, lines }))
         }
     }
 
@@ -1167,7 +1247,6 @@ impl LongStringMissingValueRecord<RawString> {
         let mut missing_value_set = Vec::new();
         while !input.is_empty() {
             let var_name = read_string(&mut input, endian)?;
-            dbg!(&var_name);
             let n_missing_values: u8 = endian.parse(read_bytes(&mut input)?);
             let value_len: u32 = endian.parse(read_bytes(&mut input)?);
             if value_len != 8 {
@@ -1177,10 +1256,7 @@ impl LongStringMissingValueRecord<RawString> {
                     offset,
                     value_len,
                 });
-                read_vec(
-                    &mut input,
-                    dbg!(value_len as usize * n_missing_values as usize),
-                )?;
+                read_vec(&mut input, value_len as usize * n_missing_values as usize)?;
                 continue;
             }
             let mut missing_values = Vec::new();
@@ -1487,7 +1563,7 @@ pub struct LongName {
 impl LongName {
     fn parse(input: &str, decoder: &Decoder) -> Result<Self, Warning> {
         let Some((short_name, long_name)) = input.split_once('=') else {
-            return Err(dbg!(Warning::LongNameMissingEquals));
+            return Err(Warning::LongNameMissingEquals);
         };
         let short_name = decoder
             .new_identifier(short_name)
@@ -1615,12 +1691,14 @@ impl Extension {
         let size: u32 = endian.parse(read_bytes(r)?);
         let count = endian.parse(read_bytes(r)?);
         let Some(product) = size.checked_mul(count) else {
-            return Err(Error::ExtensionRecordTooLarge {
-                offset: header_offset,
-                subtype,
-                size,
-                count,
-            });
+            return Err(Error::new(
+                Some(header_offset..header_offset + 8),
+                ErrorDetails::ExtensionRecordTooLarge {
+                    subtype,
+                    size,
+                    count,
+                },
+            ));
         };
         let start_offset = r.stream_position()?;
         let data = read_vec(r, product as usize)?;
@@ -1770,26 +1848,23 @@ impl ZHeader {
         let ztrailer_len: u64 = endian.parse(read_bytes(r)?);
 
         if zheader_offset != offset {
-            return Err(Error::UnexpectedZHeaderOffset {
+            Err(ErrorDetails::UnexpectedZHeaderOffset {
                 actual: zheader_offset,
                 expected: offset,
-            });
-        }
-
-        if ztrailer_offset < offset {
-            return Err(Error::ImpossibleZTrailerOffset(ztrailer_offset));
-        }
-
-        if ztrailer_len < 24 || ztrailer_len % 24 != 0 {
-            return Err(Error::InvalidZTrailerLength(ztrailer_len));
+            })
+        } else if ztrailer_offset < offset {
+            Err(ErrorDetails::ImpossibleZTrailerOffset(ztrailer_offset))
+        } else if ztrailer_len < 24 || ztrailer_len % 24 != 0 {
+            Err(ErrorDetails::InvalidZTrailerLength(ztrailer_len))
+        } else {
+            Ok(ZHeader {
+                offset,
+                zheader_offset,
+                ztrailer_offset,
+                ztrailer_len,
+            })
         }
-
-        Ok(ZHeader {
-            offset,
-            zheader_offset,
-            ztrailer_offset,
-            ztrailer_len,
-        })
+        .map_err(|details| Error::new(Some(offset..offset + 12), details))
     }
 }
 
@@ -1838,6 +1913,19 @@ impl ZBlock {
             compressed_size: endian.parse(read_bytes(r)?),
         })
     }
+
+    /// Returns true if the uncompressed and compressed sizes are plausible.
+    ///
+    /// [zlib Technical Details] says that the maximum expansion from
+    /// compression, with worst-case parameters, is 13.5% plus 11 bytes.  This
+    /// code checks for an expansion of more than 14.3% plus 11 bytes.
+    ///
+    /// [zlib Technical Details]: http://www.zlib.net/zlib_tech.html
+    fn has_plausible_sizes(&self) -> bool {
+        self.uncompressed_size
+            .checked_add(self.uncompressed_size / 7 + 11)
+            .is_some_and(|max| self.compressed_size <= max)
+    }
 }
 
 impl ZTrailer {
@@ -1856,30 +1944,31 @@ impl ZTrailer {
             return Ok(None);
         }
         let int_bias = endian.parse(read_bytes(reader)?);
-        if int_bias as f64 != -bias {
-            return Err(Error::WrongZlibTrailerBias {
-                actual: int_bias,
-                expected: -bias,
-            });
-        }
         let zero = endian.parse(read_bytes(reader)?);
-        if zero != 0 {
-            return Err(Error::WrongZlibTrailerZero(zero));
-        }
         let block_size = endian.parse(read_bytes(reader)?);
-        if block_size != 0x3ff000 {
-            return Err(Error::WrongZlibTrailerBlockSize(block_size));
-        }
         let n_blocks: u32 = endian.parse(read_bytes(reader)?);
-        let expected_n_blocks = (zheader.ztrailer_len - 24) / 24;
-        if n_blocks as u64 != expected_n_blocks {
-            return Err(Error::BadZlibTrailerNBlocks {
-                offset: zheader.ztrailer_offset,
+        if int_bias as f64 != -bias {
+            Err(ErrorDetails::WrongZlibTrailerBias {
+                actual: int_bias,
+                expected: -bias,
+            })
+        } else if zero != 0 {
+            Err(ErrorDetails::WrongZlibTrailerZero(zero))
+        } else if block_size != 0x3ff000 {
+            Err(ErrorDetails::WrongZlibTrailerBlockSize(block_size))
+        } else if let expected_n_blocks = (zheader.ztrailer_len - 24) / 24
+            && n_blocks as u64 != expected_n_blocks
+        {
+            Err(ErrorDetails::BadZlibTrailerNBlocks {
                 n_blocks,
                 expected_n_blocks,
                 ztrailer_len: zheader.ztrailer_len,
-            });
+            })
+        } else {
+            Ok(())
         }
+        .map_err(|details| Error::new(Some(start_offset..start_offset + 24), details))?;
+
         let blocks = (0..n_blocks)
             .map(|_| ZBlock::read(reader, endian))
             .collect::<Result<Vec<_>, _>>()?;
@@ -1888,19 +1977,36 @@ impl ZTrailer {
         let mut expected_cmp_ofs = zheader.zheader_offset + 24;
         for (index, block) in blocks.iter().enumerate() {
             if block.uncompressed_ofs != expected_uncmp_ofs {
-                return Err(Error::ZlibTrailerBlockWrongUncmpOfs {
+                Err(ErrorDetails::ZlibTrailerBlockWrongUncmpOfs {
                     index,
                     actual: block.uncompressed_ofs,
                     expected: expected_cmp_ofs,
-                });
-            }
-            if block.compressed_ofs != expected_cmp_ofs {
-                return Err(Error::ZlibTrailerBlockWrongCmpOfs {
+                })
+            } else if block.compressed_ofs != expected_cmp_ofs {
+                Err(ErrorDetails::ZlibTrailerBlockWrongCmpOfs {
                     index,
                     actual: block.compressed_ofs,
                     expected: expected_cmp_ofs,
-                });
+                })
+            } else if !block.has_plausible_sizes() {
+                Err(ErrorDetails::ZlibExpansion {
+                    index,
+                    compressed_size: block.compressed_size,
+                    uncompressed_size: block.uncompressed_size,
+                })
+            } else {
+                Ok(())
             }
+            .map_err(|details| {
+                Error::new(
+                    {
+                        let block_start = start_offset + 24 + 24 * index as u64;
+                        Some(block_start..block_start + 24)
+                    },
+                    details,
+                )
+            })?;
+
             if index < blocks.len() - 1 {
                 if block.uncompressed_size != block_size {
                     warn(Warning::ZlibTrailerBlockWrongSize {
@@ -1918,27 +2024,19 @@ impl ZTrailer {
                     });
                 }
             }
-            // http://www.zlib.net/zlib_tech.html says that the maximum
-            // expansion from compression, with worst-case parameters, is 13.5%
-            // plus 11 bytes.  This code checks for an expansion of more than
-            // 14.3% plus 11 bytes.
-            if block.compressed_size > block.uncompressed_size + block.uncompressed_size / 7 + 11 {
-                return Err(Error::ZlibExpansion {
-                    index,
-                    compressed_size: block.compressed_size,
-                    uncompressed_size: block.uncompressed_size,
-                });
-            }
 
             expected_cmp_ofs += block.compressed_size as u64;
             expected_uncmp_ofs += block.uncompressed_size as u64;
         }
 
         if expected_cmp_ofs != zheader.ztrailer_offset {
-            return Err(Error::ZlibTrailerOffsetInconsistency {
-                descriptors: expected_cmp_ofs,
-                zheader: zheader.ztrailer_offset,
-            });
+            return Err(Error::new(
+                Some(start_offset..start_offset + 24 + 24 * n_blocks as u64),
+                ErrorDetails::ZlibTrailerOffsetInconsistency {
+                    descriptors: expected_cmp_ofs,
+                    zheader: zheader.ztrailer_offset,
+                },
+            ));
         }
 
         reader.seek(SeekFrom::Start(start_offset))?;
index a4f336bb7f86d48aeb6005c98879b6e1de24ed32..6f55a23f76867973f5b2b588f22334f75b011707 100644 (file)
@@ -35,8 +35,6 @@ use crate::{
     },
 };
 
-use enum_iterator::all;
-
 #[test]
 fn variable_labels_and_missing_values() {
     test_sack_sysfile("variable_labels_and_missing_values");
@@ -586,7 +584,7 @@ fn test_sack_sysfile(name: &str) {
     let input = String::from_utf8(std::fs::read(&input_filename).unwrap()).unwrap();
     let expected_filename = input_filename.with_extension("expected");
     let expected = String::from_utf8(std::fs::read(&expected_filename).unwrap()).unwrap();
-    for endian in all::<Endian>() {
+    for endian in [Endian::Big, Endian::Little] {
         let expected = expected.replace(
             "{endian}",
             match endian {
index 93da523c47931a21164f797b7876094f99840b34..e71fe230683b0ed731b5658d472d50e9d6cd3bba 100644 (file)
@@ -1 +1 @@
-At offset 0xd4, unrecognized record type 8.
+Error at file offsets 0xd0 to 0xd4: Unrecognized record type 8.
index c48edff61c07c621a4dec27e145a9bfba916f65d..d727b3cbfa3589b07d4722e5cb5528b24c357b64 100644 (file)
@@ -1 +1 @@
-At offset 0xd8, record type 7 subtype 3 is too large with element size 4294963200 and 4294963200 elements.
+Error at file offsets 0xd8 to 0xe0: Record type 7 subtype 3 is too large with element size 4294963200 and 4294963200 elements.
index 7acd2b59890fe56747db91a8ae4c2049c24ed935..0780726be2e0e9d53757675c17862374353cc5f2 100644 (file)
@@ -1 +1 @@
-In variable record starting at offset 0xb4, variable label code 2 at offset 0xb8 is not 0 or 1.
+Error at file offsets 0xb4 to 0xd0: In variable record, variable label code 2 is not 0 or 1.
index 6e19fb2f8c14679958175f417d8806463597bcc8..d475360ead94ff3b9522eb004d665541656fab1f 100644 (file)
@@ -1 +1 @@
-At offset 0xb4, missing value code (-1) is not -3, -2, 0, 1, 2, or 3.
+Error at file offsets 0xb4 to 0xd0: Missing value code (-1) is not -3, -2, 0, 1, 2, or 3.
index a1f7d8e99deddbef0b19fd086b5565879032c700..94960b259aae073e1c8c26c6e78c7eb863c275bd 100644 (file)
@@ -1 +1 @@
-At offset 0xb4, missing value code (4) is not -3, -2, 0, 1, 2, or 3.
+Error at file offsets 0xb4 to 0xd0: Missing value code (4) is not -3, -2, 0, 1, 2, or 3.
index 742db791e307ad28b6ebea51d8056e592beceae0..699cf7ed181b6bf20f389ff6d28db7ecb778a801 100644 (file)
@@ -1 +1 @@
-At offset 0xd4, unrecognized record type 4.
+Error at file offsets 0xd0 to 0xd4: Unrecognized record type 4.
index 78fac887fa9f1781d71034e6839cb594b0f8fe02..f830cddba1937013e758b1ef10061cf910149497 100644 (file)
@@ -1 +1 @@
-At offset 0xe8, following value label record, found record type 7 instead of expected type 4 for variable index record
+Error at file offsets 0xe8 to 0xec: Following value label record, found record type 7 instead of expected type 4 for variable index record
index 64c01ec2da0d753545adde6db746e4fb07ad0d7d..d0cc32ccf947d14652197897d1074a315e448f18 100644 (file)
@@ -22,7 +22,7 @@
 │str15│       5│     │Nominal          │Input│   15│Left     │A15         │A15         │              │
 ╰─────┴────────┴─────┴─────────────────┴─────┴─────┴─────────┴────────────┴────────────┴──────────────╯
 
-Unexpected end of file at offset 0x1ac, 0 bytes and 2 compression chunks into a compressed case.
+Unexpected end of file 0 bytes and 2 compression chunks into a compressed case.
 
 ╭────┬──────┬────┬────┬────────┬───────────────╮
 │Case│ num1 │num2│str4│  str8  │     str15     │
index 1b2222eb0005b2d643e7e65afb2fe4f77b032c38..813c06e8347d1951fc637278ef4931cbadf24711 100644 (file)
@@ -18,7 +18,7 @@
 │num2│       2│     │                 │Input│    8│Right    │F8.0        │F8.0        │              │
 ╰────┴────────┴─────┴─────────────────┴─────┴─────┴─────────┴────────────┴────────────┴──────────────╯
 
-Unexpected end of file at offset 0x12c, 8 bytes into a 16-byte case.
+Error at file offsets 0x124 to 0x12c: Unexpected end of file 8 bytes into a 16-byte case.
 
 ╭────┬────┬────╮
 │Case│num1│num2│
index 08e21a713ea259a48f88bde2498f2ac5c4c15d40..2dfac61dcbd8b3d5ce58c3eaec1d8e266b254dd3 100644 (file)
@@ -1 +1 @@
-At offset 0xd4, number of value labels (2147483647) is greater than the maximum number 536870911.
+Error at file offsets 0xd4 to 0xd8: Number of value labels (2147483647) is greater than the maximum number 536870911.
index 99b16acc5a62a94079d00bbc1b1523dbcb526245..6c8ffda947cc1bc29e2246e1793f895fcf53ae6c 100644 (file)
@@ -1 +1 @@
-ZLIB header's zlib_offset is 0x0 instead of expected 0x194.
+Error at file offsets 0x194 to 0x1a0: ZLIB header's zlib_offset is 0x0 instead of expected 0x194.
index cced59a32d7077944500a41e5ed0d7d5efadbcd7..83cbe679c8780369d76a3a6ec25c8576ae910a0b 100644 (file)
@@ -1 +1 @@
-Impossible ztrailer_offset 0x0.
+Error at file offsets 0x194 to 0x1a0: Impossible ztrailer_offset 0x0.
index d8b17d3569c02c954ff3cdeee33916b2d90fd900..6b728bdfe4aa444eeaffa762598eff1404a412b6 100644 (file)
@@ -1 +1 @@
-ZLIB trailer is at offset 0x205 but 0x204 would be expected from block descriptors.
+Error at file offsets 0x1ac to 0x1dc: ZLIB trailer is at offset 0x205 but 0x204 would be expected from block descriptors.
index df98439368ba64351d9aa3de06a93206e1cd80f9..effe1768fb6e33747b81576ef98facc1b3a01584 100644 (file)
@@ -1 +1 @@
-ZLIB block descriptor 1 reported compressed data offset 0x12421, when 0x124f1 was expected.
+Error at file offsets 0x1dc to 0x1f4: ZLIB block descriptor 1 reported compressed data offset 0x12421, when 0x124f1 was expected.
index 29d7c228ab6fc33310283e02ee6b6d4d7c846e55..8196e26acadbbe25042ed7c519f58d0da5ddbdb0 100644 (file)
@@ -1 +1 @@
-ZLIB block descriptor 0 reports compressed size 100 and uncompressed size 50.
+Error at file offsets 0x1c4 to 0x1dc: ZLIB block descriptor 0 reports compressed size 100 and uncompressed size 50.
index f98e08b3f71baaad9d6d93296ad41141fce245a4..b70a425e02056f5539dddf8e135ebd4202d96a85 100644 (file)
@@ -1 +1 @@
-Invalid ZLIB trailer length 21.
+Error at file offsets 0x194 to 0x1a0: Invalid ZLIB trailer length 21.
index 4c3ed56a7c36ba0d7f68110446ec66ad7a8c0239..82c0c1b127396b8e4e83f8c386144f73b3b209ca 100644 (file)
@@ -1 +1 @@
-ZLIB trailer specifies unexpected 4096-byte block size.
+Error at file offsets 0x1ac to 0x1c4: ZLIB trailer specifies unexpected 4096-byte block size.
index 0f86bbcc44af0083310c2f24fbb758ffceb84d12..a7e4b4b93545de48da75d3dde9fba1174ebe891f 100644 (file)
@@ -1 +1 @@
-ZLIB block descriptor 0 reported compressed data offset 0x191, when 0x1ac was expected.
+Error at file offsets 0x1c4 to 0x1dc: ZLIB block descriptor 0 reported compressed data offset 0x191, when 0x1ac was expected.
index e2cd618f705d0694021f88dbae9d5ccb4320f283..37a9a2d4223593b0bc6e228b4fae84991bcffd98 100644 (file)
@@ -1 +1 @@
-Block count 2 in ZLIB trailer at offset 0x205 differs from expected block count 1 calculated from trailer length 48.
+Error at file offsets 0x1ac to 0x1c4: Block count 2 in ZLIB trailer differs from expected block count 1 calculated from trailer length 48.
index 0a2af92d87d0a87fb405c4abb05086a29cf29078..ef1bb440f7abd6c9218ca22736e7288b24cbe69e 100644 (file)
@@ -1 +1 @@
-ZLIB block descriptor 0 reported uncompressed data offset 0x177, when 0x1ac was expected.
+Error at file offsets 0x1c4 to 0x1dc: ZLIB block descriptor 0 reported uncompressed data offset 0x177, when 0x1ac was expected.
index dbfa4c1cb043167dc37ab08ae241dc115d884eff..d084ea52567683aa3b084c097071939f0cd8cf00 100644 (file)
@@ -1 +1 @@
-ZLIB trailer bias 0 is not -100 as expected from file header bias.
+Error at file offsets 0x1ac to 0x1c4: ZLIB trailer bias 0 is not -100 as expected from file header bias.
index 0b98a9701c54f83aa85101f73df25be64e19e7a7..a9d98d29b5d41a49522d7f025c465b1737fb1723 100644 (file)
@@ -1 +1 @@
-Block count 1 in ZLIB trailer at offset 0x205 differs from expected block count 2 calculated from trailer length 72.
+Error at file offsets 0x1ac to 0x1c4: Block count 1 in ZLIB trailer differs from expected block count 2 calculated from trailer length 72.
index 08d1d75614d899636c67f894d9ff2156be15c3a7..73f5cfd137509332ec8e094d5d1fd4540a2c844b 100644 (file)
@@ -1 +1 @@
-ZLIB trailer "zero" field has nonzero value 100.
+Error at file offsets 0x1ac to 0x1c4: ZLIB trailer "zero" field has nonzero value 100.