1 #![allow(unused_variables)]
2 use endian::{Endian, Parse, ToBytes};
3 //use flate2::bufread::ZlibDecoder;
5 use num_derive::FromPrimitive;
8 io::{BufReader, Error as IoError, Read, Seek, SeekFrom},
14 #[derive(Error, Debug)]
16 #[error("Not an SPSS system file")]
19 #[error("Invalid magic number {0:?}")]
22 #[error("I/O error ({0})")]
25 #[error("Invalid SAV compression code {0}")]
26 InvalidSavCompression(u32),
28 #[error("Invalid ZSAV compression code {0}")]
29 InvalidZsavCompression(u32),
31 #[error("Variable record at offset {offset:#x} specifies width {width} not in valid range [-1,255).")]
32 BadVariableWidth { offset: u64, width: i32 },
34 #[error("Misplaced type 4 record near offset {0:#x}.")]
35 MisplacedType4Record(u64),
37 #[error("Document record at offset {offset:#x} has document line count ({n}) greater than the maximum number {max}.")]
38 BadDocumentLength { offset: u64, n: u32, max: u32 },
40 #[error("At offset {offset:#x}, Unrecognized record type {rec_type}.")]
41 BadRecordType { offset: u64, rec_type: u32 },
43 #[error("At offset {offset:#x}, variable label code ({code}) is not 0 or 1.")]
44 BadVariableLabelCode { offset: u64, code: u32 },
47 "At offset {offset:#x}, numeric missing value code ({code}) is not -3, -2, 0, 1, 2, or 3."
49 BadNumericMissingValueCode { offset: u64, code: i32 },
51 #[error("At offset {offset:#x}, string missing value code ({code}) is not 0, 1, 2, or 3.")]
52 BadStringMissingValueCode { offset: u64, code: i32 },
54 #[error("At offset {offset:#x}, number of value labels ({n}) is greater than the maximum number {max}.")]
55 BadNumberOfValueLabels { offset: u64, n: u32, max: u32 },
57 #[error("At offset {offset:#x}, variable index record (type 4) does not immediately follow value label record (type 3) as it should.")]
58 MissingVariableIndexRecord { offset: u64 },
60 #[error("At offset {offset:#x}, number of variables indexes ({n}) is greater than the maximum number ({max}).")]
61 BadNumberOfVarIndexes { offset: u64, n: u32, max: u32 },
63 #[error("At offset {offset:#x}, record type 7 subtype {subtype} is too large with element size {size} and {count} elements.")]
64 ExtensionRecordTooLarge {
71 #[error("Wrong ZLIB data header offset {zheader_offset:#x} (expected {offset:#x}).")]
72 BadZlibHeaderOffset { offset: u64, zheader_offset: u64 },
74 #[error("At offset {offset:#x}, impossible ZLIB trailer offset {ztrailer_offset:#x}.")]
75 BadZlibTrailerOffset { offset: u64, ztrailer_offset: u64 },
77 #[error("At offset {offset:#x}, impossible ZLIB trailer length {ztrailer_len}.")]
78 BadZlibTrailerLen { offset: u64, ztrailer_len: u64 },
80 #[error("Unexpected end of file at offset {offset:#x}, {case_ofs} bytes into a {case_len}-byte case.")]
88 "Unexpected end of file at offset {offset:#x}, {case_ofs} bytes into a compressed case."
90 EofInCompressedCase { offset: u64, case_ofs: u64 },
92 #[error("Data ends at offset {offset:#x}, {case_ofs} bytes into a compressed case.")]
93 PartialCompressedCase { offset: u64, case_ofs: u64 },
95 #[error("At {case_ofs} bytes into compressed case starting at offset {offset:#x}, a string was found where a number was expected.")]
96 CompressedNumberExpected { offset: u64, case_ofs: u64 },
98 #[error("At {case_ofs} bytes into compressed case starting at offset {offset:#x}, a number was found where a string was expected.")]
99 CompressedStringExpected { offset: u64, case_ofs: u64 },
101 #[error("Block count {n_blocks} in ZLIB trailer at offset {offset:#x} differs from expected block count {expected_n_blocks} calculated from trailer length {ztrailer_len}.")]
102 BadZlibTrailerNBlocks { offset: u64, n_blocks: u32, expected_n_blocks: u64, ztrailer_len: u64 }
105 #[derive(Error, Debug)]
107 #[error("Unexpected floating-point bias {0} or unrecognized floating-point format.")]
110 #[error("Duplicate type 6 (document) record.")]
111 DuplicateDocumentRecord,
114 #[derive(Copy, Clone, Debug)]
115 pub enum Compression {
124 ValueLabel(ValueLabel),
125 VarIndexes(VarIndexes),
126 Extension(Extension),
137 /// Eye-catcher string, product name, in the file's encoding. Padded
138 /// on the right with spaces.
139 pub eye_catcher: [u8; 60],
141 /// Layout code, normally either 2 or 3.
142 pub layout_code: u32,
144 /// Number of variable positions, or `None` if the value in the file is
145 /// questionably trustworthy.
146 pub nominal_case_size: Option<u32>,
148 /// Compression type, if any,
149 pub compression: Option<Compression>,
151 /// 0-based variable index of the weight variable, or `None` if the file is
153 pub weight_index: Option<u32>,
155 /// Claimed number of cases, if known.
156 pub n_cases: Option<u32>,
158 /// Compression bias, usually 100.0.
161 /// `dd mmm yy` in the file's encoding.
162 pub creation_date: [u8; 9],
164 /// `HH:MM:SS` in the file's encoding.
165 pub creation_time: [u8; 8],
167 /// File label, in the file's encoding. Padded on the right with spaces.
168 pub file_label: [u8; 64],
170 /// Endianness of the data in the file header.
171 pub endianness: Endian,
174 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
175 pub struct Magic([u8; 4]);
178 /// Magic number for a regular system file.
179 pub const SAV: Magic = Magic(*b"$FL2");
181 /// Magic number for a system file that contains zlib-compressed data.
182 pub const ZSAV: Magic = Magic(*b"$FL3");
184 /// Magic number for an EBCDIC-encoded system file. This is `$FL2` encoded
186 pub const EBCDIC: Magic = Magic([0x5b, 0xc6, 0xd3, 0xf2]);
189 impl TryFrom<[u8; 4]> for Magic {
192 fn try_from(value: [u8; 4]) -> Result<Self, Self::Error> {
193 let magic = Magic(value);
195 Magic::SAV | Magic::ZSAV | Magic::EBCDIC => Ok(magic),
196 _ => Err(Error::BadMagic(value)),
201 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
208 fn from_width(width: i32) -> VarType {
210 0 => VarType::Number,
211 _ => VarType::String,
216 pub struct Reader<R: Read> {
218 var_types: Vec<VarType>,
224 Headers(Endian, Option<Compression>),
226 CompressedData(Endian, VecDeque<u8>),
228 ZTrailer { endian: Endian, ztrailer_ofs: u64, ztrailer_len: u64 },
233 #[derive(Copy, Clone)]
240 pub fn from_raw(var_type: VarType, raw: [u8; 8], endian: Endian) -> Value {
242 VarType::String => Value::String(raw),
244 let number: f64 = endian.parse(raw);
245 Value::Number((number != -f64::MAX).then_some(number))
251 impl<R: Read + Seek> Reader<R> {
252 pub fn new(r: R) -> Result<Reader<R>, Error> {
254 r: BufReader::new(r),
255 var_types: Vec::new(),
256 state: ReaderState::Start,
/// Advances the reader's state machine one step and returns the next record,
/// `Ok(None)` at end of input, or an error.
///
/// State flow: `Start` (file header) -> `Headers` (one typed record per call
/// until the type-999 dictionary terminator) -> one of `Data` (uncompressed
/// cases), `CompressedData` (bias-compressed cases), or `ZHeader`/`ZTrailer`/
/// `ZData` (zlib-compressed stream), depending on the header's compression.
259 fn _next(&mut self) -> Result<Option<Record>, Error> {
261 ReaderState::Start => {
262 let header = read_header(&mut self.r)?;
263 self.state = ReaderState::Headers(header.endianness, header.compression);
264 Ok(Some(Record::Header(header)))
266 ReaderState::Headers(endian, compression) => {
// Each dictionary record begins with a 4-byte record-type code.
267 let rec_type: u32 = endian.parse(read_bytes(&mut self.r)?);
268 let record = match rec_type {
// Variable records also feed `var_types`, which later drives how each
// 8-byte case element is interpreted (number vs. string).
270 let variable = read_variable_record(&mut self.r, endian)?;
271 self.var_types.push(VarType::from_width(variable.width));
272 Record::Variable(variable)
274 3 => Record::ValueLabel(read_value_label_record(&mut self.r, endian)?),
275 4 => Record::VarIndexes(read_var_indexes_record(&mut self.r, endian)?),
276 6 => Record::Document(read_document_record(&mut self.r, endian)?),
277 7 => Record::Extension(read_extension_record(&mut self.r, endian)?),
// Dictionary terminator: skip its 4-byte filler, then pick the data
// state from the header's compression setting.
279 let _: [u8; 4] = read_bytes(&mut self.r)?;
280 self.state = match compression {
281 None => ReaderState::Data(endian),
282 Some(Compression::Simple) => {
283 ReaderState::CompressedData(endian, VecDeque::new())
285 Some(Compression::ZLib) => ReaderState::ZHeader(endian),
287 return Ok(Some(Record::EndOfHeaders));
290 return Err(Error::BadRecordType {
291 offset: self.r.stream_position()?,
298 ReaderState::Data(endian) => {
// Uncompressed data: each case is var_types.len() raw 8-byte elements.
299 let case_start = self.r.stream_position()?;
300 let mut values = Vec::with_capacity(self.var_types.len());
301 for (i, &var_type) in self.var_types.iter().enumerate() {
302 let Some(raw) = try_read_bytes(&mut self.r)? else {
// EOF mid-case (after at least one element) is an error; a clean EOF
// at a case boundary ends iteration.
306 let offset = self.r.stream_position()?;
307 return Err(Error::EofInCase { offset, case_ofs: offset - case_start, case_len: self.var_types.len() * 8});
310 values.push(Value::from_raw(var_type, raw, endian));
312 Ok(Some(Record::Case(values)))
314 ReaderState::CompressedData(endian, ref mut codes) => {
315 let case_start = self.r.stream_position()?;
316 let mut values = Vec::with_capacity(self.var_types.len());
// NOTE(review): bias is hard-coded (as the XXX marks); the header record
// carries the actual compression bias — confirm it should be used here.
317 let bias = 100.0; // XXX
318 for (i, &var_type) in self.var_types.iter().enumerate() {
// Opcodes are consumed from `codes`; when the queue is empty, the next
// 8-byte block of opcodes is read from the file.
320 let Some(code) = codes.pop_front() else {
321 let Some(new_codes): Option<[u8; 8]> = try_read_bytes(&mut self.r)? else {
325 let offset = self.r.stream_position()?;
326 return Err(Error::EofInCompressedCase { offset, case_ofs: offset - case_start});
329 codes.extend(new_codes.into_iter());
// Opcodes 1..=251 encode a value directly as `code - bias`.
334 1..=251 => match var_type {
335 VarType::Number => break Value::Number(Some(code as f64 - bias)),
337 break Value::String(endian.to_bytes(code as f64 - bias))
344 let offset = self.r.stream_position()?;
345 return Err(Error::PartialCompressedCase {
347 case_ofs: offset - case_start,
// This arm reads a full 8-byte element verbatim from the stream.
352 break Value::from_raw(var_type, read_bytes(&mut self.r)?, endian)
// Opcode 254: a string element that is all spaces.
354 254 => match var_type {
355 VarType::String => break Value::String(*b" "), // XXX EBCDIC
357 return Err(Error::CompressedStringExpected {
359 case_ofs: self.r.stream_position()? - case_start,
// Opcode 255: the system-missing numeric value.
363 255 => match var_type {
364 VarType::Number => break Value::Number(None),
366 return Err(Error::CompressedNumberExpected {
368 case_ofs: self.r.stream_position()? - case_start,
376 Ok(Some(Record::Case(values)))
378 ReaderState::ZHeader(endian) => {
379 let zheader = read_zheader(&mut self.r, endian)?;
380 self.state = ReaderState::ZTrailer { endian, ztrailer_ofs: zheader.ztrailer_offset, ztrailer_len: zheader.ztrailer_len};
381 Ok(Some(Record::ZHeader(zheader)))
383 ReaderState::ZTrailer { endian, ztrailer_ofs, ztrailer_len } => {
384 //self.state = ReaderState::ZData;
385 match read_ztrailer(&mut self.r, endian, ztrailer_ofs, ztrailer_len)? {
387 Ok(Some(Record::ZTrailer(ztrailer)))
393 ReaderState::ZData(zlib_decoder) => {
394 let zlib_decoder = zlib_decoder.unwrap_or_else(
397 ReaderState::End => Ok(None),
402 impl<R: Read + Seek> Iterator for Reader<R> {
403 type Item = Result<Record, Error>;
405 fn next(&mut self) -> Option<Self::Item> {
406 let retval = self._next();
409 self.state = ReaderState::End;
412 Ok(Some(record)) => Some(Ok(record)),
414 self.state = ReaderState::End;
/// Reads and validates the fixed-size file header: magic number, eye-catcher,
/// layout code (used to detect endianness), nominal case size, compression
/// code, weight index, case count, bias, creation date/time, and file label.
421 fn read_header<R: Read>(r: &mut R) -> Result<Header, Error> {
422 let magic: [u8; 4] = read_bytes(r)?;
423 let magic: Magic = magic.try_into().map_err(|_| Error::NotASystemFile)?;
425 let eye_catcher: [u8; 60] = read_bytes(r)?;
426 let layout_code: [u8; 4] = read_bytes(r)?;
// Endianness is inferred by probing which byte order makes the layout code
// come out as a known value.
// NOTE(review): both probes pass 2 — the second call is identical to the
// first, so `.or_else` can never succeed where the first failed. Since the
// layout code is documented as "normally either 2 or 3", the second probe
// was presumably meant to pass 3. Confirm and fix.
427 let endianness = Endian::identify_u32(2, layout_code)
428 .or_else(|| Endian::identify_u32(2, layout_code))
429 .ok_or_else(|| Error::NotASystemFile)?;
430 let layout_code = endianness.parse(layout_code);
432 let nominal_case_size: u32 = endianness.parse(read_bytes(r)?);
// Treat implausibly large case sizes as untrustworthy (None).
433 let nominal_case_size =
434 (nominal_case_size <= i32::MAX as u32 / 16).then_some(nominal_case_size);
436 let compression_code: u32 = endianness.parse(read_bytes(r)?);
// ZSAV files must declare zlib compression (code 2); other magics accept
// simple compression (code 1). Unknown codes are errors.
437 let compression = match (magic, compression_code) {
438 (Magic::ZSAV, 2) => Some(Compression::ZLib),
439 (Magic::ZSAV, code) => return Err(Error::InvalidZsavCompression(code)),
441 (_, 1) => Some(Compression::Simple),
442 (_, code) => return Err(Error::InvalidSavCompression(code)),
// On disk the weight index is 1-based with 0 meaning "no weight"; convert
// to a 0-based Option.
445 let weight_index: u32 = endianness.parse(read_bytes(r)?);
446 let weight_index = (weight_index > 0).then_some(weight_index - 1);
448 let n_cases: u32 = endianness.parse(read_bytes(r)?);
449 let n_cases = (n_cases < i32::MAX as u32 / 2).then_some(n_cases);
451 let bias: f64 = endianness.parse(read_bytes(r)?);
453 let creation_date: [u8; 9] = read_bytes(r)?;
454 let creation_time: [u8; 8] = read_bytes(r)?;
455 let file_label: [u8; 64] = read_bytes(r)?;
// 3 bytes of padding complete the 176-byte fixed header.
456 let _: [u8; 3] = read_bytes(r)?;
474 pub struct Variable {
475 /// Offset from the start of the file to the start of the record.
478 /// Variable width, in the range -1..=255.
481 /// Variable name, padded on the right with spaces.
485 pub print_format: u32,
488 pub write_format: u32,
490 /// Missing value code, one of -3, -2, 0, 1, 2, or 3.
491 pub missing_value_code: i32,
493 /// Raw missing values, up to 3 of them.
494 pub missing: Vec<[u8; 8]>,
496 /// Optional variable label.
497 pub label: Option<Vec<u8>>,
/// Reads a type-2 (variable) record: width, formats, name, optional label,
/// and up to three raw missing values, validating the label code and the
/// missing-value code along the way.
500 fn read_variable_record<R: Read + Seek>(
501 r: &mut BufReader<R>,
503 ) -> Result<Variable, Error> {
504 let offset = r.stream_position()?;
505 let width: i32 = endian.parse(read_bytes(r)?);
506 let has_variable_label: u32 = endian.parse(read_bytes(r)?);
507 let missing_value_code: i32 = endian.parse(read_bytes(r)?);
508 let print_format: u32 = endian.parse(read_bytes(r)?);
509 let write_format: u32 = endian.parse(read_bytes(r)?);
510 let name: [u8; 8] = read_bytes(r)?;
// A label code of 1 means a length-prefixed label follows; other nonzero
// codes are rejected below.
512 let label = match has_variable_label {
515 let len: u32 = endian.parse(read_bytes(r)?);
// Cap the bytes actually read at 65535.
// NOTE(review): the padding below is computed from the uncapped `len`;
// confirm that any label bytes beyond the 65535 cap are consumed
// somewhere, otherwise the stream would desynchronize for oversized
// labels.
516 let read_len = len.min(65535) as usize;
517 let label = Some(read_vec(r, read_len)?);
// The label is padded on disk to a multiple of 4 bytes; skip the padding.
519 let padding_bytes = Integer::next_multiple_of(&len, &4) - len;
520 let _ = read_vec(r, padding_bytes as usize)?;
525 return Err(Error::BadVariableLabelCode {
527 code: has_variable_label,
532 let mut missing = Vec::new();
533 if missing_value_code != 0 {
// Width 0 marks a numeric variable: allowed codes are -3, -2, 1, 2, 3
// (negative codes denote ranges); string variables allow a different set.
534 match (width, missing_value_code) {
535 (0, -3 | -2 | 1 | 2 | 3) => (),
537 return Err(Error::BadNumericMissingValueCode {
539 code: missing_value_code,
544 return Err(Error::BadStringMissingValueCode {
546 code: missing_value_code,
// |code| raw 8-byte missing values follow the fixed part of the record.
551 for _ in 0..missing_value_code.abs() {
552 missing.push(read_bytes(r)?);
568 pub struct ValueLabel {
569 /// Offset from the start of the file to the start of the record.
573 pub labels: Vec<([u8; 8], Vec<u8>)>,
577 /// Maximum number of value labels in a record.
578 pub const MAX: u32 = u32::MAX / 8;
/// Reads a type-3 (value label) record: a count followed by that many
/// (8-byte raw value, length-prefixed label) pairs.
581 fn read_value_label_record<R: Read + Seek>(
582 r: &mut BufReader<R>,
584 ) -> Result<ValueLabel, Error> {
585 let offset = r.stream_position()?;
586 let n: u32 = endian.parse(read_bytes(r)?);
// Reject absurd counts before allocating anything proportional to them.
587 if n > ValueLabel::MAX {
588 return Err(Error::BadNumberOfValueLabels {
591 max: ValueLabel::MAX,
595 let mut labels = Vec::new();
597 let value: [u8; 8] = read_bytes(r)?;
598 let label_len: u8 = endian.parse(read_bytes(r)?);
599 let label_len = label_len as usize;
// On disk, the 1-byte length plus the label is padded to a multiple of 8.
// NOTE(review): the length byte was already consumed above; confirm the
// byte count read below accounts for that so each pair stays 8-aligned.
600 let padded_len = Integer::next_multiple_of(&(label_len + 1), &8);
602 let mut label = read_vec(r, padded_len)?;
603 label.truncate(label_len);
604 labels.push((value, label));
606 Ok(ValueLabel { offset, labels })
609 pub struct VarIndexes {
610 /// Offset from the start of the file to the start of the record.
613 /// The 0-based indexes of the variable indexes.
614 pub var_indexes: Vec<u32>,
618 /// Maximum number of variable indexes in a record.
619 pub const MAX: u32 = u32::MAX / 8;
622 fn read_var_indexes_record<R: Read + Seek>(
623 r: &mut BufReader<R>,
625 ) -> Result<VarIndexes, Error> {
626 let offset = r.stream_position()?;
627 let n: u32 = endian.parse(read_bytes(r)?);
628 if n > VarIndexes::MAX {
629 return Err(Error::BadNumberOfVarIndexes {
632 max: VarIndexes::MAX,
635 let mut var_indexes = Vec::with_capacity(n as usize);
637 var_indexes.push(endian.parse(read_bytes(r)?));
646 pub const DOC_LINE_LEN: u32 = 80;
647 pub const DOC_MAX_LINES: u32 = i32::MAX as u32 / DOC_LINE_LEN;
649 pub struct Document {
650 /// Offset from the start of the file to the start of the record.
653 /// The document, as an array of 80-byte lines.
654 pub lines: Vec<[u8; DOC_LINE_LEN as usize]>,
/// Reads a type-6 (document) record: a line count followed by that many
/// 80-byte document lines, rejecting counts above `DOC_MAX_LINES`.
657 fn read_document_record<R: Read + Seek>(
658 r: &mut BufReader<R>,
660 ) -> Result<Document, Error> {
// `offset` is the start of the record (before the line count) ...
661 let offset = r.stream_position()?;
662 let n: u32 = endian.parse(read_bytes(r)?);
664 0..=DOC_MAX_LINES => {
// ... while `pos` is the position after the count, i.e. the first line.
665 let pos = r.stream_position()?;
666 let mut lines = Vec::with_capacity(n as usize);
668 let line: [u8; 80] = read_bytes(r)?;
// NOTE(review): the struct field is documented as "offset from the start
// of the file to the start of the record", but `pos` (post-count) is what
// is returned here and `offset` goes unused — confirm which is intended.
671 Ok(Document { pos, lines })
673 _ => Err(Error::BadDocumentLength {
681 #[derive(FromPrimitive)]
683 /// Machine integer info.
685 /// Machine floating-point info.
691 /// Multiple response sets.
695 /// Extra product info text.
697 /// Variable display parameters.
699 /// Long variable names.
703 /// Extended number of cases.
705 /// Data file attributes.
707 /// Variable attributes.
709 /// Multiple response sets (extended).
711 /// Character encoding.
713 /// Value labels for long strings.
715 /// Missing values for long strings.
717 /// "Format properties in dataview table".
721 pub struct Extension {
722 /// Offset from the start of the file to the start of the record.
728 /// Size of each data element.
731 /// Number of data elements.
734 /// `size * count` bytes of data.
738 fn extension_record_size_requirements(extension: ExtensionType) -> (u32, u32) {
740 /* Implemented record types. */
741 ExtensionType::Integer => (4, 8),
742 ExtensionType::Float => (8, 3),
743 ExtensionType::VarSets => (1, 0),
744 ExtensionType::Mrsets => (1, 0),
745 ExtensionType::ProductInfo => (1, 0),
746 ExtensionType::Display => (4, 0),
747 ExtensionType::LongNames => (1, 0),
748 ExtensionType::LongStrings => (1, 0),
749 ExtensionType::Ncases => (8, 2),
750 ExtensionType::FileAttrs => (1, 0),
751 ExtensionType::VarAttrs => (1, 0),
752 ExtensionType::Mrsets2 => (1, 0),
753 ExtensionType::Encoding => (1, 0),
754 ExtensionType::LongLabels => (1, 0),
755 ExtensionType::LongMissing => (1, 0),
757 /* Ignored record types. */
758 ExtensionType::Date => (0, 0),
759 ExtensionType::DataEntry => (0, 0),
760 ExtensionType::Dataview => (0, 0),
/// Reads a type-7 (extension) record header — subtype, element size, element
/// count — then the `size * count` bytes of payload.
764 fn read_extension_record<R: Read + Seek>(
765 r: &mut BufReader<R>,
767 ) -> Result<Extension, Error> {
768 let subtype = endian.parse(read_bytes(r)?);
769 let offset = r.stream_position()?;
770 let size: u32 = endian.parse(read_bytes(r)?);
771 let count = endian.parse(read_bytes(r)?);
// Guard against u32 overflow of the total payload length before reading.
772 let Some(product) = size.checked_mul(count) else {
773 return Err(Error::ExtensionRecordTooLarge {
// Second `offset` shadows the first: this one is the payload start.
780 let offset = r.stream_position()?;
781 let data = read_vec(r, product as usize)?;
792 /// File offset to the start of the record.
795 /// File offset to the ZLIB data header.
796 pub zheader_offset: u64,
798 /// File offset to the ZLIB trailer.
799 pub ztrailer_offset: u64,
801 /// Length of the ZLIB trailer in bytes.
802 pub ztrailer_len: u64,
/// Reads the 24-byte ZLIB data header: the header's own offset, the trailer
/// offset, and the trailer length, all as little/big-endian u64 per `endian`.
805 fn read_zheader<R: Read + Seek>(r: &mut BufReader<R>, endian: Endian) -> Result<ZHeader, Error> {
806 let offset = r.stream_position()?;
807 let zheader_offset: u64 = endian.parse(read_bytes(r)?);
808 let ztrailer_offset: u64 = endian.parse(read_bytes(r)?);
809 let ztrailer_len: u64 = endian.parse(read_bytes(r)?);
819 pub struct ZTrailer {
820 /// File offset to the start of the record.
823 /// Compression bias as a negative integer, e.g. -100.
826 /// Always observed as zero.
829 /// Uncompressed size of each block, except possibly the last. Only
830 /// `0x3ff000` has been observed so far.
833 /// Block descriptors, always `(ztrailer_len - 24) / 24)` of them.
834 pub blocks: Vec<ZBlock>,
838 /// Offset of block of data if simple compression were used.
839 pub uncompressed_ofs: u64,
841 /// Actual offset within the file of the compressed data block.
842 pub compressed_ofs: u64,
844 /// The number of bytes in this data block after decompression. This is
845 /// `block_size` in every data block but the last, which may be smaller.
846 pub uncompressed_size: u32,
848 /// The number of bytes in this data block, as stored compressed in this
850 pub compressed_size: u32,
/// Seeks to the ZLIB trailer, reads its 24-byte fixed part (bias, zero,
/// block size, block count) and the per-block descriptors, then restores the
/// original stream position. Returns `Ok(None)` when the trailer offset is
/// not seekable.
853 fn read_ztrailer<R: Read + Seek>(r: &mut BufReader<R>, endian: Endian, ztrailer_ofs: u64, ztrailer_len: u64) -> Result<Option<ZTrailer>, Error> {
854 let start_offset = r.stream_position()?;
855 if r.seek(SeekFrom::Start(ztrailer_ofs)).is_err() {
858 let int_bias = endian.parse(read_bytes(r)?);
859 let zero = endian.parse(read_bytes(r)?);
860 let block_size = endian.parse(read_bytes(r)?);
861 let n_blocks: u32 = endian.parse(read_bytes(r)?);
// Each block descriptor is 24 bytes; the fixed part is also 24 bytes.
// NOTE(review): `ztrailer_len - 24` wraps on u64 underflow if
// ztrailer_len < 24 — confirm an earlier BadZlibTrailerLen check
// guarantees ztrailer_len >= 24 before this point.
862 let expected_n_blocks = (ztrailer_len - 24) / 24;
863 if n_blocks as u64 != expected_n_blocks {
864 return Err(Error::BadZlibTrailerNBlocks { offset: ztrailer_ofs, n_blocks, expected_n_blocks, ztrailer_len })
866 let mut blocks = Vec::with_capacity(n_blocks as usize);
867 for _ in 0..n_blocks {
868 let uncompressed_ofs = endian.parse(read_bytes(r)?);
869 let compressed_ofs = endian.parse(read_bytes(r)?);
870 let uncompressed_size = endian.parse(read_bytes(r)?);
871 let compressed_size = endian.parse(read_bytes(r)?);
872 blocks.push(ZBlock { uncompressed_ofs, compressed_ofs, uncompressed_size, compressed_size });
// Leave the stream where the caller had it.
874 r.seek(SeekFrom::Start(start_offset))?;
875 Ok(Some(ZTrailer { offset: ztrailer_ofs, int_bias, zero, block_size, blocks }))
/// Reads exactly `N` bytes, distinguishing a clean EOF before any byte was
/// read (returned as `Ok(None)`) from a short read, which is completed with
/// `read_exact` (and so errors on EOF mid-item).
878 fn try_read_bytes<const N: usize, R: Read>(r: &mut R) -> Result<Option<[u8; N]>, IoError> {
879 let mut buf = [0; N];
880 let n = r.read(&mut buf)?;
// Fill the remainder of the buffer after a partial first read.
883 r.read_exact(&mut buf[n..])?;
/// Reads exactly `N` bytes into a fixed-size array, failing on EOF.
891 fn read_bytes<const N: usize, R: Read>(r: &mut R) -> Result<[u8; N], IoError> {
892 let mut buf = [0; N];
893 r.read_exact(&mut buf)?;
/// Reads exactly `n` bytes into a freshly allocated `Vec`, failing on EOF.
897 fn read_vec<R: Read>(r: &mut BufReader<R>, n: usize) -> Result<Vec<u8>, IoError> {
898 let mut vec = vec![0; n];
899 r.read_exact(&mut vec)?;
/// Strips every trailing occurrence of byte `c` from `s` and returns it
/// (used to trim space padding from fixed-width fields).
904 fn trim_end(mut s: Vec<u8>, c: u8) -> Vec<u8> {
905 while s.last() == Some(&c) {
911 fn skip_bytes<R: Read>(r: &mut R, mut n: u64) -> Result<(), IoError> {
912 let mut buf = [0; 1024];
914 let chunk = u64::min(n, buf.len() as u64);
915 r.read_exact(&mut buf[0..chunk as usize])?;