From 1a63cef8854d6c9d57cf7ac7c5bcc5e91fe95423 Mon Sep 17 00:00:00 2001
From: wcampbell
Date: Fri, 11 Aug 2023 23:18:40 -0400
Subject: [PATCH 1/6] Update deku to use Reader

---
 backhand/src/compressor.rs |  4 +-
 backhand/src/metadata.rs   | 13 ++++---
 backhand/src/reader.rs     | 78 ++++++++++++++++++++------------------
 backhand/src/squashfs.rs   | 39 +++++++++----------
 4 files changed, 69 insertions(+), 65 deletions(-)

diff --git a/backhand/src/compressor.rs b/backhand/src/compressor.rs
index e7d96fa3..ce573bf8 100644
--- a/backhand/src/compressor.rs
+++ b/backhand/src/compressor.rs
@@ -83,9 +83,9 @@ pub struct Xz {
     // TODO: in openwrt, git-hash:f97ad870e11ebe5f3dcf833dda6c83b9165b37cb shows that before
     // offical squashfs-tools had xz support they had the dictionary_size field as the last field
     // in this struct. If we get test images, I guess we can support this in the future.
-    #[deku(cond = "!deku::rest.is_empty()")]
+    #[deku(cond = "!deku::reader.end()")]
     pub bit_opts: Option<u16>,
-    #[deku(cond = "!deku::rest.is_empty()")]
+    #[deku(cond = "!deku::reader.end()")]
     pub fb: Option<u16>,
 }
diff --git a/backhand/src/metadata.rs b/backhand/src/metadata.rs
index bb2a80cb..22022e2f 100644
--- a/backhand/src/metadata.rs
+++ b/backhand/src/metadata.rs
@@ -1,7 +1,7 @@
 use std::collections::VecDeque;
-use std::io::{self, Read, Seek, Write};
+use std::io::{self, Cursor, Read, Seek, Write};

-use deku::bitvec::{BitVec, BitView};
+use deku::bitvec::BitVec;
 use deku::prelude::*;
 use tracing::trace;
@@ -119,12 +119,13 @@ pub fn read_block(
     superblock: &SuperBlock,
     kind: &Kind,
 ) -> Result<Vec<u8>, BackhandError> {
-    let mut buf = [0u8; 2];
-    reader.read_exact(&mut buf)?;
+    let buf: &mut [u8] = &mut [0u8; 2];
+    reader.read_exact(buf)?;

-    let bv = buf.view_bits::<deku::bitvec::Msb0>();
     trace!("{:02x?}", buf);
-    let (_, metadata_len) = u16::read(bv, kind.inner.data_endian)?;
+    let mut cursor = Cursor::new(buf);
+    let mut deku_reader = Reader::new(&mut cursor);
+    let metadata_len = u16::from_reader_with_ctx(&mut deku_reader, kind.inner.data_endian)?;

     let byte_len = len(metadata_len);
     tracing::trace!("len: 0x{:02x?}", byte_len);
diff --git a/backhand/src/reader.rs b/backhand/src/reader.rs
index 67065b3b..2f5971bf 100644
--- a/backhand/src/reader.rs
+++ b/backhand/src/reader.rs
@@ -1,9 +1,8 @@
 //! Reader traits

 use std::collections::HashMap;
-use std::io::{BufRead, Read, Seek, SeekFrom, Write};
+use std::io::{BufRead, Cursor, Read, Seek, SeekFrom, Write};

-use deku::bitvec::{BitView, Msb0};
 use deku::prelude::*;
 use rustc_hash::FxHashMap;
 use tracing::{error, trace};
@@ -103,25 +102,28 @@ pub trait SquashFsReader: BufReadSeek {
         // Using this size, a SquashFS reader can determine if another header with further entries
         // should be following once it reaches the end of a run.
-        let mut ret_bytes = Vec::with_capacity(METADATA_MAXSIZE);
+        let mut next = vec![];
         let mut metadata_offsets = vec![];
         let mut ret_vec = HashMap::default();
         let start = self.stream_position()?;
         while self.stream_position()? < superblock.dir_table {
-            trace!("offset: {:02x?}", self.stream_position());
             metadata_offsets.push(self.stream_position()? - start);
             // parse into metadata
             let mut bytes = metadata::read_block(self, superblock, kind)?;

             // parse as many inodes as you can
-            ret_bytes.append(&mut bytes);
-
-            let mut input_bits = ret_bytes.view_bits::<Msb0>();
-            while !input_bits.is_empty() {
-                match Inode::read(
-                    input_bits,
+            let mut inode_bytes = next;
+            inode_bytes.append(&mut bytes);
+            let mut c_inode_bytes = Cursor::new(inode_bytes.clone());
+            let mut container = Reader::new(&mut c_inode_bytes);
+
+            // store last successful read position
+            let mut container_bits_read = container.bits_read;
+            loop {
+                match Inode::from_reader_with_ctx(
+                    &mut container,
                     (
                         superblock.bytes_used,
                         superblock.block_size,
@@ -129,14 +131,15 @@ pub trait SquashFsReader: BufReadSeek {
                         kind.inner.type_endian,
                     ),
                 ) {
-                    Ok((rest, inode)) => {
+                    Ok(inode) => {
                         // Push the new Inode to the return, with the position this was read from
                         ret_vec.insert(inode.header.inode_number, inode);
-                        input_bits = rest;
+                        container_bits_read = container.bits_read;
                     }
                     Err(e) => {
                         if let DekuError::Incomplete(_) = e {
                             // try next block, inodes can span multiple blocks!
+                            next = inode_bytes.clone()[(container_bits_read / 8)..].to_vec();
                             break;
                         } else {
                             error!("{e}");
@@ -145,10 +148,6 @@ pub trait SquashFsReader: BufReadSeek {
                     }
                 }
             }
-
-            // save leftover bits to new bits to leave for the next metadata block
-            // this is safe, input_bits is always byte aligned
-            ret_bytes.drain(..(ret_bytes.len() - (input_bits.len() / 8)));
         }

         Ok(ret_vec)
@@ -175,10 +174,10 @@ pub trait SquashFsReader: BufReadSeek {
             error!("root_inode_offset > bytes.len()");
             return Err(BackhandError::CorruptedOrInvalidSquashfs);
         }
-        let new_bytes = &bytes_01[root_inode_offset..];
-        let input_bits = new_bytes.view_bits::<::deku::bitvec::Msb0>();
-        if let Ok((_, inode)) = Inode::read(
-            input_bits,
+        let mut cursor = Cursor::new(&bytes_01[root_inode_offset..]);
+        let mut new_bytes = Reader::new(&mut cursor);
+        if let Ok(inode) = Inode::from_reader_with_ctx(
+            &mut new_bytes,
             (
                 superblock.bytes_used,
                 superblock.block_size,
@@ -196,11 +195,11 @@ pub trait SquashFsReader: BufReadSeek {
             error!("root_inode_offset > bytes.len()");
             return Err(BackhandError::CorruptedOrInvalidSquashfs);
         }
-        let new_bytes = &bytes_01[root_inode_offset..];
-        let input_bits = new_bytes.view_bits::<::deku::bitvec::Msb0>();
-        match Inode::read(
-            input_bits,
+        let mut cursor = Cursor::new(&bytes_01[root_inode_offset..]);
+        let mut new_bytes = Reader::new(&mut cursor);
+        match Inode::from_reader_with_ctx(
+            &mut new_bytes,
             (
                 superblock.bytes_used,
                 superblock.block_size,
@@ -208,7 +207,7 @@ pub trait SquashFsReader: BufReadSeek {
                 kind.inner.type_endian,
             ),
         ) {
-            Ok((_, inode)) => Ok(inode),
+            Ok(inode) => Ok(inode),
             Err(e) => Err(e.into()),
         }
     }
@@ -280,22 +279,26 @@ pub trait SquashFsReader: BufReadSeek {
     }

     /// Parse Lookup Table
-    fn lookup_table<T: for<'a> DekuRead<'a, deku::ctx::Endian>>(
+    fn lookup_table<T>(
         &mut self,
         superblock: &SuperBlock,
         seek: u64,
         size: u64,
         kind: &Kind,
-    ) -> Result<(u64, Vec<T>), BackhandError> {
+    ) -> Result<(u64, Vec<T>), BackhandError>
+    where
+        T: for<'a> DekuReader<'a, deku::ctx::Endian>,
+    {
         // find the pointer at the initial offset
         trace!("seek: {:02x?}", seek);
         self.seek(SeekFrom::Start(seek))?;
-        let mut buf = [0u8; 8];
-        self.read_exact(&mut buf)?;
+        let buf: &mut [u8] = &mut [0u8; 8];
+        self.read_exact(buf)?;
         trace!("{:02x?}", buf);
-        let bv = buf.view_bits::<Msb0>();
-        let (_, ptr) = u64::read(bv, kind.inner.type_endian)?;
+        let mut cursor = Cursor::new(buf);
+        let mut deku_reader = Reader::new(&mut cursor);
+        let ptr =
u64::from_reader_with_ctx(&mut deku_reader, kind.inner.type_endian)?; let block_count = (size as f32 / METADATA_MAXSIZE as f32).ceil() as u64; @@ -306,13 +309,16 @@ pub trait SquashFsReader: BufReadSeek { } /// Parse count of `Metadata` block at offset into `T` - fn metadata_with_count DekuRead<'a, deku::ctx::Endian>>( + fn metadata_with_count( &mut self, superblock: &SuperBlock, seek: u64, count: u64, kind: &Kind, - ) -> Result, BackhandError> { + ) -> Result, BackhandError> + where + T: for<'a> DekuReader<'a, deku::ctx::Endian>, + { trace!("seek: {:02x?}", seek); self.seek(SeekFrom::Start(seek))?; @@ -323,11 +329,11 @@ pub trait SquashFsReader: BufReadSeek { } let mut ret_vec = vec![]; - let mut all_bytes = all_bytes.view_bits::(); // Read until we fail to turn bytes into `T` - while let Ok((rest, t)) = T::read(all_bytes, kind.inner.type_endian) { + let mut cursor = Cursor::new(all_bytes); + let mut container = Reader::new(&mut cursor); + while let Ok(t) = T::from_reader_with_ctx(&mut container, kind.inner.type_endian) { ret_vec.push(t); - all_bytes = rest; } Ok(ret_vec) diff --git a/backhand/src/squashfs.rs b/backhand/src/squashfs.rs index 32e36811..61620659 100644 --- a/backhand/src/squashfs.rs +++ b/backhand/src/squashfs.rs @@ -1,13 +1,12 @@ //! Read from on-disk image use std::ffi::OsString; -use std::io::{Seek, SeekFrom}; +use std::io::{Cursor, Seek, SeekFrom}; use std::os::unix::prelude::OsStringExt; use std::path::PathBuf; use std::sync::Arc; use std::sync::Mutex; -use deku::bitvec::{BitVec, BitView, Msb0}; use deku::prelude::*; use rustc_hash::FxHashMap; use tracing::{error, info, trace}; @@ -220,19 +219,15 @@ impl<'b> Squashfs<'b> { /// Read Superblock and Compression Options at current `reader` offset without parsing inodes /// and dirs /// - /// Used for unsquashfs --stat + /// Used for unsquashfs (extraction and --stat) pub fn superblock_and_compression_options( reader: &mut Box, kind: &Kind, ) -> Result<(SuperBlock, Option), BackhandError> { - // Size of metadata + optional compression options metadata block - let mut superblock = [0u8; 96]; - reader.read_exact(&mut superblock)?; - // Parse SuperBlock - let bs = superblock.view_bits::(); - let (_, superblock) = SuperBlock::read( - bs, + let mut container = Reader::new(reader); + let superblock = SuperBlock::from_reader_with_ctx( + &mut container, ( kind.inner.magic, kind.inner.version_major, @@ -260,16 +255,18 @@ impl<'b> Squashfs<'b> { { let bytes = metadata::read_block(reader, &superblock, kind)?; // data -> compression options - let bv = BitVec::from_slice(&bytes); - match CompressionOptions::read(&bv, (kind.inner.type_endian, superblock.compressor)) { + match CompressionOptions::from_reader_with_ctx( + &mut Reader::new(&mut Cursor::new(bytes)), + (kind.inner.type_endian, superblock.compressor), + ) { Ok(co) => { - if !co.0.is_empty() { - error!("invalid compression options, bytes left over, using"); - } - Some(co.1) + //if !co.0.is_empty() { + // error!("invalid compression options, bytes left over, using"); + //} + Some(co) } Err(e) => { - error!("invalid compression options: {e:?}[{bytes:02x?}], not using"); + error!("invalid compression options: {e:?}, not using"); None } } @@ -357,7 +354,7 @@ impl<'b> Squashfs<'b> { } // Read all fields from filesystem to make a Squashfs - info!("Reading Inodes"); + info!("Reading Inodes @ {:02x?}", superblock.inode_table); let inodes = reader.inodes(&superblock, &kind)?; info!("Reading Root Inode"); @@ -474,11 +471,11 @@ impl<'b> Squashfs<'b> { let bytes = 
&block[block_offset..][..file_size as usize - 3]; let mut dirs = vec![]; - let mut all_bytes = bytes.view_bits::(); // Read until we fail to turn bytes into `T` - while let Ok((rest, t)) = Dir::read(all_bytes, self.kind.inner.type_endian) { + let mut cursor = Cursor::new(bytes); + let mut container = Reader::new(&mut cursor); + while let Ok(t) = Dir::from_reader_with_ctx(&mut container, self.kind.inner.type_endian) { dirs.push(t); - all_bytes = rest; } trace!("finish"); From afb4dd205111c2f3e5916ff9095789d8449685c5 Mon Sep 17 00:00:00 2001 From: wcampbell Date: Wed, 18 Oct 2023 00:26:42 -0400 Subject: [PATCH 2/6] Use new deku writer --- Cargo.lock | 3 +- backhand/src/filesystem/writer.rs | 60 ++++++++++++++++--------------- backhand/src/inode.rs | 10 +++--- backhand/src/metadata.rs | 20 +++++------ 4 files changed, 47 insertions(+), 46 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b19ab885..cede3cc5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -497,8 +497,7 @@ dependencies = [ [[package]] name = "deku_derive" version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e2ca12572239215a352a74ad7c776d7e8a914f8a23511c6cbedddd887e5009e" +source = "git+https://github.com/sharksforarms/deku?branch=impl-writer#8a3596b07e4cff286e7da9c12698b0e48b5b26b8" dependencies = [ "darling", "proc-macro-crate", diff --git a/backhand/src/filesystem/writer.rs b/backhand/src/filesystem/writer.rs index 4bde2845..72119157 100644 --- a/backhand/src/filesystem/writer.rs +++ b/backhand/src/filesystem/writer.rs @@ -6,8 +6,8 @@ use std::sync::Arc; use std::sync::Mutex; use std::time::{SystemTime, UNIX_EPOCH}; -use deku::bitvec::BitVec; -use deku::DekuWrite; +use deku::writer::Writer; +use deku::DekuWriter; use tracing::{error, info, trace}; use super::node::{InnerNode, Nodes}; @@ -540,12 +540,11 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { trace!("WRITING DIR: {block_offset:#02x?}"); let mut total_size: usize = 3; for dir in Entry::into_dir(entries) { - let mut bv = BitVec::new(); - dir.write(&mut bv, kind.inner.type_endian)?; - let bytes = bv.as_raw_slice(); - dir_writer.write_all(bv.as_raw_slice())?; - + let mut bytes = vec![]; + let mut writer = Writer::new(&mut bytes); + dir.to_writer(&mut writer, kind.inner.type_endian)?; total_size += bytes.len(); + dir_writer.write_all(&bytes)?; } let entry = Entry::path( filename, @@ -581,17 +580,24 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { // Write compression options, if any if let Some(options) = &self.fs_compressor.options { superblock.flags |= Flags::CompressorOptionsArePresent as u16; - let mut buf = BitVec::new(); + let mut compression_opt_buf_out = vec![]; + let mut writer = Writer::new(&mut compression_opt_buf_out); match options { CompressionOptions::Gzip(gzip) => { - gzip.write(&mut buf, self.kind.inner.type_endian)? + gzip.to_writer(&mut writer, self.kind.inner.type_endian)? + } + CompressionOptions::Lz4(lz4) => { + lz4.to_writer(&mut writer, self.kind.inner.type_endian)? } - CompressionOptions::Lz4(lz4) => lz4.write(&mut buf, self.kind.inner.type_endian)?, CompressionOptions::Zstd(zstd) => { - zstd.write(&mut buf, self.kind.inner.type_endian)? + zstd.to_writer(&mut writer, self.kind.inner.type_endian)? + } + CompressionOptions::Xz(xz) => { + xz.to_writer(&mut writer, self.kind.inner.type_endian)? + } + CompressionOptions::Lzo(lzo) => { + lzo.to_writer(&mut writer, self.kind.inner.type_endian)? 
} - CompressionOptions::Xz(xz) => xz.write(&mut buf, self.kind.inner.type_endian)?, - CompressionOptions::Lzo(lzo) => lzo.write(&mut buf, self.kind.inner.type_endian)?, CompressionOptions::Lzma => {} } let mut metadata = MetadataWriter::new( @@ -599,8 +605,8 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { self.block_size, Kind { inner: self.kind.inner.clone() }, ); - metadata.write_all(buf.as_raw_slice())?; - metadata.finalize(&mut w)?; + metadata.write_all(&compression_opt_buf_out)?; + metadata.finalize(w)?; } let mut data_writer = @@ -704,9 +710,9 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { // Seek back the beginning and write the superblock info!("Writing Superblock"); w.rewind()?; - let mut bv = BitVec::new(); - superblock.write( - &mut bv, + let mut writer = Writer::new(w); + superblock.to_writer( + &mut writer, ( self.kind.inner.magic, self.kind.inner.version_major, @@ -714,7 +720,6 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { self.kind.inner.type_endian, ), )?; - w.write_all(bv.as_raw_slice())?; info!("Writing Finished"); @@ -750,7 +755,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { /// │└────────────────────────────┘│ /// └──────────────────────────────┘ /// ``` - fn write_lookup_table, W: Write + Seek>( + fn write_lookup_table, W: Write + Seek>( &self, mut w: W, table: &Vec, @@ -760,20 +765,18 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { let mut table_bytes = Vec::with_capacity(table.len() * element_size); let mut iter = table.iter().peekable(); while let Some(t) = iter.next() { + let mut table_writer = Writer::new(&mut table_bytes); // convert fragment ptr to bytes - let mut bv = BitVec::new(); - t.write(&mut bv, self.kind.inner.type_endian)?; - table_bytes.write_all(bv.as_raw_slice())?; + t.to_writer(&mut table_writer, self.kind.inner.type_endian)?; // once table_bytes + next is over the maximum size of a metadata block, write if ((table_bytes.len() + element_size) > METADATA_MAXSIZE) || iter.peek().is_none() { ptrs.push(w.stream_position()?); - let mut bv = BitVec::new(); // write metadata len let len = metadata::set_if_uncompressed(table_bytes.len() as u16); - len.write(&mut bv, self.kind.inner.data_endian)?; - w.write_all(bv.as_raw_slice())?; + let mut writer = Writer::new(w); + len.to_writer(&mut writer, self.kind.inner.data_endian)?; // write metadata bytes w.write_all(&table_bytes)?; @@ -786,9 +789,8 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { // write ptr for ptr in ptrs { - let mut bv = BitVec::new(); - ptr.write(&mut bv, self.kind.inner.type_endian)?; - w.write_all(bv.as_raw_slice())?; + let mut writer = Writer::new(w); + ptr.to_writer(&mut writer, self.kind.inner.type_endian)?; } Ok((table_position, count)) diff --git a/backhand/src/inode.rs b/backhand/src/inode.rs index 172ffbe9..dc2d7808 100644 --- a/backhand/src/inode.rs +++ b/backhand/src/inode.rs @@ -3,7 +3,6 @@ use core::fmt; use std::io::Write; -use deku::bitvec::{BitVec, Msb0}; use deku::prelude::*; use crate::data::DataSize; @@ -36,9 +35,10 @@ impl Inode { superblock: &SuperBlock, kind: &Kind, ) -> Entry<'a> { - let mut bytes = BitVec::::new(); - self.write( - &mut bytes, + let mut inode_bytes = vec![]; + let mut writer = Writer::new(&mut inode_bytes); + self.to_writer( + &mut writer, ( 0xffff_ffff_ffff_ffff, // bytes_used is unused for ctx. 
set to max superblock.block_size, @@ -49,7 +49,7 @@ impl Inode { .unwrap(); let start = m_writer.metadata_start; let offset = m_writer.uncompressed_bytes.len() as u16; - m_writer.write_all(bytes.as_raw_slice()).unwrap(); + m_writer.write_all(&inode_bytes).unwrap(); Entry { start, diff --git a/backhand/src/metadata.rs b/backhand/src/metadata.rs index 22022e2f..09965e86 100644 --- a/backhand/src/metadata.rs +++ b/backhand/src/metadata.rs @@ -1,7 +1,6 @@ use std::collections::VecDeque; use std::io::{self, Cursor, Read, Seek, Write}; -use deku::bitvec::BitVec; use deku::prelude::*; use tracing::trace; @@ -39,6 +38,7 @@ impl MetadataWriter { } fn add_block(&mut self) -> io::Result<()> { + trace!("adding block"); // uncompress data that will create the metablock let uncompressed_len = self.uncompressed_bytes.len().min(METADATA_MAXSIZE); if uncompressed_len == 0 { @@ -58,9 +58,11 @@ impl MetadataWriter { // Remove the data consumed, if the uncompressed data is smalled, use it. let (compressed, metadata) = if compressed.len() > uncompressed_len { + trace!("using uncompressed"); let uncompressed = self.uncompressed_bytes.drain(0..uncompressed_len).collect(); (false, uncompressed) } else { + trace!("using compressed"); self.uncompressed_bytes.drain(0..uncompressed_len); (true, compressed) }; @@ -70,7 +72,6 @@ impl MetadataWriter { trace!("new metadata start: {:#02x?}", self.metadata_start); self.final_bytes.push((compressed, metadata)); - trace!("LEN: {:02x?}", self.uncompressed_bytes.len()); Ok(()) } @@ -81,15 +82,14 @@ impl MetadataWriter { } // write all the metadata blocks - for (compressed, cb) in &self.final_bytes { - trace!("len: {:02x?}", cb.len()); - //trace!("total: {:02x?}", out.len()); - let mut bv = BitVec::new(); + for (compressed, compressed_bytes) in &self.final_bytes { + trace!("len: {:02x?}", compressed_bytes.len()); // if uncompressed, set the highest bit of len - let len = cb.len() as u16 | if *compressed { 0 } else { 1 << (u16::BITS - 1) }; - len.write(&mut bv, self.kind.inner.data_endian)?; - out.write_all(bv.as_raw_slice())?; - out.write_all(cb)?; + let len = + compressed_bytes.len() as u16 | if *compressed { 0 } else { 1 << (u16::BITS - 1) }; + let mut writer = Writer::new(out); + len.to_writer(&mut writer, self.kind.inner.data_endian)?; + out.write_all(compressed_bytes)?; } Ok(()) From f97dca6653c06332b42cc63ed2fb5a54f8a6d466 Mon Sep 17 00:00:00 2001 From: wcampbell Date: Thu, 14 Dec 2023 23:46:46 -0500 Subject: [PATCH 3/6] Updates --- Cargo.lock | 121 +++++++++++++++--------------- backhand-cli/src/bin/add.rs | 4 +- backhand-cli/src/bin/replace.rs | 4 +- backhand/Cargo.toml | 2 +- backhand/src/data.rs | 6 +- backhand/src/filesystem/writer.rs | 27 ++++--- backhand/src/metadata.rs | 2 +- backhand/src/squashfs.rs | 1 + 8 files changed, 85 insertions(+), 82 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cede3cc5..6b1ad2c2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -199,9 +199,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c" +checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc" dependencies = [ "memchr", "regex-automata 0.4.3", @@ -265,9 +265,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.11" +version = "4.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2" +checksum = "dcfab8ba68f3668e89f6ff60f5b205cea56aa7b769451a59f34b8682f51c056d" dependencies = [ "clap_builder", "clap_derive", @@ -275,9 +275,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.4.11" +version = "4.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb" +checksum = "fb7fb5e4e979aec3be7791562fcba452f94ad85e954da024396433e0e25a79e9" dependencies = [ "anstream 0.6.5", "anstyle", @@ -288,9 +288,9 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.4.4" +version = "4.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bffe91f06a11b4b9420f62103854e90867812cd5d01557f853c5ee8e791b12ae" +checksum = "a51919c5608a32e34ea1d6be321ad070065e17613e168c5b6977024290f2630b" dependencies = [ "clap", ] @@ -304,7 +304,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.43", ] [[package]] @@ -420,21 +420,20 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.16" +version = "0.9.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d2fe95351b870527a5d09bf563ed3c97c0cffb87cf1c78a591bf48bb218d9aa" +checksum = "0e3681d554572a651dda4186cd47240627c3d0114d45a95f6ad27f2f22e7548d" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset", ] [[package]] name = "crossbeam-utils" -version = "0.8.17" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d96137f14f244c37f989d9fff8f95e6c18b918e71f36638f8c49112e4c78f" +checksum = "c3a430a770ebd84726f584a90ee7f020d28db52c6d02138900f22341f866d39c" dependencies = [ "cfg-if", ] @@ -451,9 +450,9 @@ dependencies = [ [[package]] name = "darling" -version = "0.14.4" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" +checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" dependencies = [ "darling_core", "darling_macro", @@ -461,49 +460,49 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.14.4" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" +checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 1.0.109", + "syn 2.0.43", ] [[package]] name = "darling_macro" -version = "0.14.4" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" +checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core", "quote", - "syn 1.0.109", + "syn 2.0.43", ] [[package]] name = "deku" version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819b87cc7a05b3abe3fc38e59b3980a5fd3162f25a247116441a9171d3e84481" +source = "git+https://github.com/sharksforarms/deku?branch=impl-writer#26c00d3b05f369f9699aae27bd53005d139a2d60" dependencies = [ "bitvec", "deku_derive", + "no_std_io", ] [[package]] name = "deku_derive" version = "0.16.0" -source = "git+https://github.com/sharksforarms/deku?branch=impl-writer#8a3596b07e4cff286e7da9c12698b0e48b5b26b8" +source = 
"git+https://github.com/sharksforarms/deku?branch=impl-writer#26c00d3b05f369f9699aae27bd53005d139a2d60" dependencies = [ "darling", "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.43", ] [[package]] @@ -724,13 +723,13 @@ dependencies = [ [[package]] name = "is-terminal" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +checksum = "0bad00257d07be169d870ab665980b06cdb366d792ad690bf2e76876dc503455" dependencies = [ "hermit-abi", "rustix", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -865,18 +864,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.6.4" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" - -[[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" -dependencies = [ - "autocfg", -] +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" [[package]] name = "minimal-lexical" @@ -904,6 +894,15 @@ dependencies = [ "libc", ] +[[package]] +name = "no_std_io" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "762f99e20001f0c1cedf5c85df1e96d0866f9739d752520d97bf562ee5f05341" +dependencies = [ + "memchr", +] + [[package]] name = "nom" version = "7.1.3" @@ -971,9 +970,9 @@ checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pkg-config" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +checksum = "69d3587f8a9e599cc7ec2c00e331f71c4e69a5f9a4b8a6efd5b07466b9736f9a" [[package]] name = "plotters" @@ -1049,9 +1048,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.70" +version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" +checksum = "75cb1540fadbd5b8fbccc4dddad2734eba435053f725621c070711a14bb5f4b8" dependencies = [ "unicode-ident", ] @@ -1250,7 +1249,7 @@ checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.43", ] [[package]] @@ -1315,9 +1314,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.41" +version = "2.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c8b28c477cc3bf0e7966561e3460130e1255f7a1cf71931075f1c5e7a7e269" +checksum = "ee659fb5f3d355364e1f3e5bc10fb82068efbf824a1e9d1c9504244a6469ad53" dependencies = [ "proc-macro2", "quote", @@ -1332,15 +1331,15 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.8.1" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" dependencies = [ "cfg-if", "fastrand", "redox_syscall", "rustix", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1396,7 +1395,7 @@ checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.43", ] [[package]] @@ -1419,22 +1418,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.51" +version = "1.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f11c217e1416d6f036b870f14e0413d480dbf28edbee1f877abaf0206af43bb7" +checksum = "83a48fd946b02c0a526b2e9481c8e2a17755e47039164a86c4070446e3a4614d" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.51" +version = "1.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01742297787513b79cf8e29d1056ede1313e2420b7b3b15d0a768b4921f549df" +checksum = "e7fbe9b594d6568a6a1443250a7e67d80b74e1e96f6d1715e1e21cc1888291d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.43", ] [[package]] @@ -1508,7 +1507,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.43", ] [[package]] @@ -1680,7 +1679,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.43", "wasm-bindgen-shared", ] @@ -1702,7 +1701,7 @@ checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.43", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -1960,9 +1959,9 @@ checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" [[package]] name = "winnow" -version = "0.5.28" +version = "0.5.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2" +checksum = "97a4882e6b134d6c28953a387571f1acdd3496830d5e36c5e3a1075580ea641c" dependencies = [ "memchr", ] diff --git a/backhand-cli/src/bin/add.rs b/backhand-cli/src/bin/add.rs index ae25e1c4..495ec315 100644 --- a/backhand-cli/src/bin/add.rs +++ b/backhand-cli/src/bin/add.rs @@ -104,8 +104,8 @@ fn main() -> ExitCode { } // write new file - let output = File::create(&args.out).unwrap(); - if let Err(e) = filesystem.write(output) { + let mut output = File::create(&args.out).unwrap(); + if let Err(e) = filesystem.write(&mut output) { println!("[!] 
{e}"); } println!("[-] added file and wrote to {}", args.out.display()); diff --git a/backhand-cli/src/bin/replace.rs b/backhand-cli/src/bin/replace.rs index dbbce0af..01f060f6 100644 --- a/backhand-cli/src/bin/replace.rs +++ b/backhand-cli/src/bin/replace.rs @@ -55,8 +55,8 @@ fn main() -> ExitCode { } // write new file - let output = File::create(&args.out).unwrap(); - filesystem.write(output).unwrap(); + let mut output = File::create(&args.out).unwrap(); + filesystem.write(&mut output).unwrap(); println!("replaced file and wrote to {}", args.out.display()); ExitCode::SUCCESS diff --git a/backhand/Cargo.toml b/backhand/Cargo.toml index eb707f50..7ff57ec1 100644 --- a/backhand/Cargo.toml +++ b/backhand/Cargo.toml @@ -12,7 +12,7 @@ description = "Library for the reading, creating, and modification of SquashFS f [dependencies] # for lib -deku = "0.16.0" +deku = { git = "https://github.com/sharksforarms/deku", branch = "impl-writer" } tracing = "0.1.40" thiserror = "1.0.50" flate2 = { version = "1.0.28", optional = true } diff --git a/backhand/src/data.rs b/backhand/src/data.rs index 5f05fc75..21191115 100644 --- a/backhand/src/data.rs +++ b/backhand/src/data.rs @@ -118,7 +118,7 @@ impl<'a> DataWriter<'a> { pub(crate) fn just_copy_it( &mut self, mut reader: SquashfsRawData, - mut writer: W, + writer: &mut W, ) -> Result<(usize, Added), BackhandError> { //just clone it, because block sizes where never modified, just copy it let mut block_sizes = reader.file.basic.block_sizes.clone(); @@ -181,7 +181,7 @@ impl<'a> DataWriter<'a> { pub(crate) fn add_bytes( &mut self, reader: impl Read, - mut writer: W, + writer: &mut W, ) -> Result<(usize, Added), BackhandError> { let mut chunk_reader = DataWriterChunkReader { chunk: vec![0u8; self.block_size as usize], @@ -230,7 +230,7 @@ impl<'a> DataWriter<'a> { /// Compress the fragments that were under length, write to data, add to fragment table, clear /// current fragment_bytes - pub fn finalize(&mut self, mut writer: W) -> Result<(), BackhandError> { + pub fn finalize(&mut self, writer: &mut W) -> Result<(), BackhandError> { let start = writer.stream_position()?; let cb = self.kind.compress(&self.fragment_bytes, self.fs_compressor, self.block_size)?; diff --git a/backhand/src/filesystem/writer.rs b/backhand/src/filesystem/writer.rs index 72119157..80eab7b1 100644 --- a/backhand/src/filesystem/writer.rs +++ b/backhand/src/filesystem/writer.rs @@ -391,8 +391,8 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { w: W, offset: u64, ) -> Result<(SuperBlock, u64), BackhandError> { - let writer = WriterWithOffset::new(w, offset)?; - self.write(writer) + let mut writer = WriterWithOffset::new(w, offset)?; + self.write(&mut writer) } fn write_data( @@ -568,7 +568,10 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { /// /// # Returns /// (written populated [`SuperBlock`], total amount of bytes written including padding) - pub fn write(&mut self, mut w: W) -> Result<(SuperBlock, u64), BackhandError> { + pub fn write( + &mut self, + mut w: &mut W, + ) -> Result<(SuperBlock, u64), BackhandError> { let mut superblock = SuperBlock::new(self.fs_compressor.id, Kind { inner: self.kind.inner.clone() }); @@ -628,7 +631,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { self.write_data(self.fs_compressor, self.block_size, &mut w, &mut data_writer)?; info!("Writing Data Fragments"); // Compress fragments and write - data_writer.finalize(&mut w)?; + data_writer.finalize(w)?; info!("Writing Other stuff"); let root = self.write_inode_dir( @@ -649,15 +652,15 @@ impl<'a, 'b, 'c> 
FilesystemWriter<'a, 'b, 'c> { info!("Writing Inodes"); superblock.inode_table = w.stream_position()?; - inode_writer.finalize(&mut w)?; + inode_writer.finalize(w)?; info!("Writing Dirs"); superblock.dir_table = w.stream_position()?; - dir_writer.finalize(&mut w)?; + dir_writer.finalize(w)?; info!("Writing Frag Lookup Table"); let (table_position, count) = - self.write_lookup_table(&mut w, &data_writer.fragment_table, fragment::SIZE)?; + self.write_lookup_table(w, &data_writer.fragment_table, fragment::SIZE)?; superblock.frag_table = table_position; superblock.frag_count = count; @@ -667,7 +670,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { superblock.id_count = count.try_into().unwrap(); info!("Finalize Superblock and End Bytes"); - let bytes_written = self.finalize(&mut w, &mut superblock)?; + let bytes_written = self.finalize(w, &mut superblock)?; info!("Success"); Ok((superblock, bytes_written)) @@ -710,7 +713,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { // Seek back the beginning and write the superblock info!("Writing Superblock"); w.rewind()?; - let mut writer = Writer::new(w); + let mut writer = Writer::new(&mut w); superblock.to_writer( &mut writer, ( @@ -757,7 +760,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { /// ``` fn write_lookup_table, W: Write + Seek>( &self, - mut w: W, + mut w: &mut W, table: &Vec, element_size: usize, ) -> Result<(u64, u32), BackhandError> { @@ -775,7 +778,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { // write metadata len let len = metadata::set_if_uncompressed(table_bytes.len() as u16); - let mut writer = Writer::new(w); + let mut writer = Writer::new(&mut w); len.to_writer(&mut writer, self.kind.inner.data_endian)?; // write metadata bytes w.write_all(&table_bytes)?; @@ -789,7 +792,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { // write ptr for ptr in ptrs { - let mut writer = Writer::new(w); + let mut writer = Writer::new(&mut w); ptr.to_writer(&mut writer, self.kind.inner.type_endian)?; } diff --git a/backhand/src/metadata.rs b/backhand/src/metadata.rs index 09965e86..6300dda4 100644 --- a/backhand/src/metadata.rs +++ b/backhand/src/metadata.rs @@ -75,7 +75,7 @@ impl MetadataWriter { Ok(()) } - pub fn finalize(&mut self, mut out: W) -> Result<(), BackhandError> { + pub fn finalize(&mut self, out: &mut W) -> Result<(), BackhandError> { //add any remaining data while !self.uncompressed_bytes.is_empty() { self.add_block()?; diff --git a/backhand/src/squashfs.rs b/backhand/src/squashfs.rs index 61620659..13b4f632 100644 --- a/backhand/src/squashfs.rs +++ b/backhand/src/squashfs.rs @@ -7,6 +7,7 @@ use std::path::PathBuf; use std::sync::Arc; use std::sync::Mutex; +use deku::prelude::Reader; use deku::prelude::*; use rustc_hash::FxHashMap; use tracing::{error, info, trace}; From 008b4aafd307ebeadf89b5f288572cf11ef821a9 Mon Sep 17 00:00:00 2001 From: wcampbell Date: Thu, 14 Dec 2023 23:50:54 -0500 Subject: [PATCH 4/6] Test default features of backhand in backhand-tests --- backhand-test/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backhand-test/Cargo.toml b/backhand-test/Cargo.toml index 74af1195..0cef20fc 100644 --- a/backhand-test/Cargo.toml +++ b/backhand-test/Cargo.toml @@ -24,7 +24,7 @@ bench = false [features] # testing only feature for testing vs squashfs-tools/unsquashfs __test_unsquashfs = [] -default = ["backhand-cli/xz", "backhand-cli/gzip", "backhand-cli/zstd"] +default = ["xz", "gzip", "zstd"] xz = ["backhand-cli/xz"] xz-static = ["backhand-cli/xz-static"] gzip = 
["backhand-cli/gzip"] From de45add25717afe36ec2cf2716058e6ca7ddb63f Mon Sep 17 00:00:00 2001 From: Rubens Brandao Date: Sun, 17 Dec 2023 07:37:38 -0300 Subject: [PATCH 5/6] remove the writer reference requirement --- backhand-cli/src/bin/add.rs | 4 ++-- backhand/Cargo.toml | 2 +- backhand/src/data.rs | 2 +- backhand/src/filesystem/writer.rs | 17 +++++++---------- backhand/src/metadata.rs | 4 ++-- 5 files changed, 13 insertions(+), 16 deletions(-) diff --git a/backhand-cli/src/bin/add.rs b/backhand-cli/src/bin/add.rs index 495ec315..ae25e1c4 100644 --- a/backhand-cli/src/bin/add.rs +++ b/backhand-cli/src/bin/add.rs @@ -104,8 +104,8 @@ fn main() -> ExitCode { } // write new file - let mut output = File::create(&args.out).unwrap(); - if let Err(e) = filesystem.write(&mut output) { + let output = File::create(&args.out).unwrap(); + if let Err(e) = filesystem.write(output) { println!("[!] {e}"); } println!("[-] added file and wrote to {}", args.out.display()); diff --git a/backhand/Cargo.toml b/backhand/Cargo.toml index 7ff57ec1..4a1fbb35 100644 --- a/backhand/Cargo.toml +++ b/backhand/Cargo.toml @@ -53,4 +53,4 @@ all-features = true rustdoc-args = ["--cfg", "docsrs"] [lib] -bench = false \ No newline at end of file +bench = false diff --git a/backhand/src/data.rs b/backhand/src/data.rs index 21191115..b5a51ca0 100644 --- a/backhand/src/data.rs +++ b/backhand/src/data.rs @@ -230,7 +230,7 @@ impl<'a> DataWriter<'a> { /// Compress the fragments that were under length, write to data, add to fragment table, clear /// current fragment_bytes - pub fn finalize(&mut self, writer: &mut W) -> Result<(), BackhandError> { + pub fn finalize(&mut self, mut writer: W) -> Result<(), BackhandError> { let start = writer.stream_position()?; let cb = self.kind.compress(&self.fragment_bytes, self.fs_compressor, self.block_size)?; diff --git a/backhand/src/filesystem/writer.rs b/backhand/src/filesystem/writer.rs index 80eab7b1..919c1d4c 100644 --- a/backhand/src/filesystem/writer.rs +++ b/backhand/src/filesystem/writer.rs @@ -568,10 +568,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { /// /// # Returns /// (written populated [`SuperBlock`], total amount of bytes written including padding) - pub fn write( - &mut self, - mut w: &mut W, - ) -> Result<(SuperBlock, u64), BackhandError> { + pub fn write(&mut self, mut w: W) -> Result<(SuperBlock, u64), BackhandError> { let mut superblock = SuperBlock::new(self.fs_compressor.id, Kind { inner: self.kind.inner.clone() }); @@ -609,7 +606,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { Kind { inner: self.kind.inner.clone() }, ); metadata.write_all(&compression_opt_buf_out)?; - metadata.finalize(w)?; + metadata.finalize(&mut w)?; } let mut data_writer = @@ -631,7 +628,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { self.write_data(self.fs_compressor, self.block_size, &mut w, &mut data_writer)?; info!("Writing Data Fragments"); // Compress fragments and write - data_writer.finalize(w)?; + data_writer.finalize(&mut w)?; info!("Writing Other stuff"); let root = self.write_inode_dir( @@ -652,15 +649,15 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { info!("Writing Inodes"); superblock.inode_table = w.stream_position()?; - inode_writer.finalize(w)?; + inode_writer.finalize(&mut w)?; info!("Writing Dirs"); superblock.dir_table = w.stream_position()?; - dir_writer.finalize(w)?; + dir_writer.finalize(&mut w)?; info!("Writing Frag Lookup Table"); let (table_position, count) = - self.write_lookup_table(w, &data_writer.fragment_table, fragment::SIZE)?; + 
self.write_lookup_table(&mut w, &data_writer.fragment_table, fragment::SIZE)?; superblock.frag_table = table_position; superblock.frag_count = count; @@ -760,7 +757,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> { /// ``` fn write_lookup_table, W: Write + Seek>( &self, - mut w: &mut W, + mut w: W, table: &Vec, element_size: usize, ) -> Result<(u64, u32), BackhandError> { diff --git a/backhand/src/metadata.rs b/backhand/src/metadata.rs index 6300dda4..107e5c27 100644 --- a/backhand/src/metadata.rs +++ b/backhand/src/metadata.rs @@ -75,7 +75,7 @@ impl MetadataWriter { Ok(()) } - pub fn finalize(&mut self, out: &mut W) -> Result<(), BackhandError> { + pub fn finalize(&mut self, mut out: W) -> Result<(), BackhandError> { //add any remaining data while !self.uncompressed_bytes.is_empty() { self.add_block()?; @@ -87,7 +87,7 @@ impl MetadataWriter { // if uncompressed, set the highest bit of len let len = compressed_bytes.len() as u16 | if *compressed { 0 } else { 1 << (u16::BITS - 1) }; - let mut writer = Writer::new(out); + let mut writer = Writer::new(&mut out); len.to_writer(&mut writer, self.kind.inner.data_endian)?; out.write_all(compressed_bytes)?; } From 1dea596390f56c304e93770ed9e2c9aa44133a72 Mon Sep 17 00:00:00 2001 From: wcampbell Date: Thu, 28 Dec 2023 11:28:14 -0500 Subject: [PATCH 6/6] Update to latest --- backhand/src/compressor.rs | 2 +- backhand/src/inode.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backhand/src/compressor.rs b/backhand/src/compressor.rs index ce573bf8..d52c47e6 100644 --- a/backhand/src/compressor.rs +++ b/backhand/src/compressor.rs @@ -17,7 +17,7 @@ use crate::filesystem::writer::{CompressionExtra, FilesystemCompressor}; #[derive(Copy, Clone, Debug, PartialEq, Eq, DekuRead, DekuWrite, Default)] #[deku(endian = "endian", ctx = "endian: deku::ctx::Endian")] -#[deku(type = "u16")] +#[deku(id_type = "u16")] #[rustfmt::skip] pub enum Compressor { None = 0, diff --git a/backhand/src/inode.rs b/backhand/src/inode.rs index dc2d7808..ddf02a50 100644 --- a/backhand/src/inode.rs +++ b/backhand/src/inode.rs @@ -63,7 +63,7 @@ impl Inode { } #[derive(Debug, DekuRead, DekuWrite, Clone, Copy, PartialEq, Eq)] -#[deku(type = "u16")] +#[deku(id_type = "u16")] #[deku(endian = "endian", ctx = "endian: deku::ctx::Endian")] #[rustfmt::skip] pub enum InodeId {
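
The sketch below is not part of any patch above; it is a minimal, self-contained illustration of the deku read/write pattern this series migrates backhand to, assuming the `Reader`/`Writer` API from the `impl-writer` branch exactly as it appears in the diffs (`Reader::new`, `from_reader_with_ctx`, `deku::writer::Writer`, `to_writer`). The `Header` struct and the example bytes are invented for illustration only.

```rust
use std::io::Cursor;

use deku::prelude::*;
// Patch 2 imports Writer from this path explicitly, so the same is done here.
use deku::writer::Writer;

// Hypothetical type for the example; the endian ctx setup mirrors structs in
// the patches above (e.g. compressor::Xz), but this struct is not from backhand.
#[derive(Debug, PartialEq, DekuRead, DekuWrite)]
#[deku(endian = "endian", ctx = "endian: deku::ctx::Endian")]
struct Header {
    len: u16,
    flags: u16,
}

fn main() -> Result<(), DekuError> {
    let bytes = [0x02, 0x00, 0x01, 0x00];

    // Reading: instead of `view_bits()` + `T::read(bitslice, ctx)`, wrap any
    // io::Read in a deku Reader and call `from_reader_with_ctx` with the ctx.
    let mut cursor = Cursor::new(&bytes[..]);
    let mut reader = Reader::new(&mut cursor);
    let header = Header::from_reader_with_ctx(&mut reader, deku::ctx::Endian::Little)?;
    assert_eq!(header, Header { len: 2, flags: 1 });

    // Writing: instead of serializing into a BitVec, wrap an io::Write in a
    // deku Writer and call `to_writer` with the same ctx.
    let mut out = vec![];
    let mut writer = Writer::new(&mut out);
    header.to_writer(&mut writer, deku::ctx::Endian::Little)?;
    assert_eq!(&out[..], &bytes[..]);

    Ok(())
}
```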