// Fail the build outright on big-endian targets: the on-disk format structs
// are (de)serialized assuming little-endian byte order. `compile_error!` is
// the idiomatic compile-time failure and yields a clear diagnostic, unlike
// `assert!` in item position (which only fails as a confusing syntax error
// once the macro expands).
#[cfg(target_endian = "big")]
compile_error!("This library assumes little-endian!");
pub mod builder;
mod format;
pub mod reader;
use crate::format::{ChunkHeader, SparseHeader};
use anyhow::{bail, ensure, Context, Result};
use core::fmt;
use serde::de::DeserializeOwned;
use std::fs::File;
use std::io::{Cursor, Read, Seek, SeekFrom, Write};
use std::path::Path;
use tempfile::{NamedTempFile, TempPath};
/// Block size (in bytes) used throughout: chunk output sizes must be a
/// multiple of this (see `Chunk::valid`).
const BLK_SIZE: usize = 0x1000;
/// Reads exactly `size_of::<T>()` bytes from `source` and bincode-decodes
/// them into a `T`.
///
/// NOTE(review): this assumes `T`'s bincode-encoded size equals its in-memory
/// size, which holds for the fixed-layout header structs in this crate —
/// confirm before using with other types.
fn deserialize_from<T: DeserializeOwned, R: Read + ?Sized>(source: &mut R) -> Result<T> {
    // The unused lifetime parameter `'a` from the original signature has been
    // removed; it served no purpose.
    let mut buf = vec![0u8; std::mem::size_of::<T>()];
    source.read_exact(&mut buf[..]).context("Failed to read bytes")?;
    Ok(bincode::deserialize(&buf[..])?)
}
/// A source of sparse-image bytes: anything readable and seekable.
pub trait Reader: Read + Seek {}
/// Blanket impl: every `Read + Seek` type is automatically a `Reader`.
impl<T: Read + Seek> Reader for T {}
/// A destination for unsparsed output: writable, seekable, and resizable.
pub trait Writer: Write + Seek {
    /// Sets the length of the output to `size` bytes (truncating or extending
    /// as needed).
    fn set_len(&mut self, size: u64) -> Result<()>;
}
impl Writer for File {
    /// Delegates to the inherent `File::set_len` (fully qualified to make the
    /// delegation explicit), truncating or extending the on-disk file.
    fn set_len(&mut self, size: u64) -> Result<()> {
        Ok(File::set_len(self, size)?)
    }
}
impl Writer for Cursor<Vec<u8>> {
    /// Resizes the backing `Vec` to `size` bytes, zero-filling any growth.
    fn set_len(&mut self, size: u64) -> Result<()> {
        // Checked conversion instead of `as`, which would silently truncate
        // `size` on 32-bit targets.
        let size = usize::try_from(size).context("size too large for in-memory writer")?;
        self.get_mut().resize(size, 0u8);
        Ok(())
    }
}
/// Wraps a `Reader` and refuses to read past an absolute stream offset.
/// Field `.1` is the limit, in bytes from the start of the underlying stream.
struct LimitedReader<'a>(pub &'a mut dyn Reader, pub usize);

impl<'a> Read for LimitedReader<'a> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        // Clamp the request so no read crosses the configured limit; once the
        // position reaches the limit this returns Ok(0), i.e. EOF.
        let position = self.0.stream_position()?;
        let remaining = self.1.saturating_sub(position as usize);
        let len = remaining.min(buf.len());
        self.0.read(&mut buf[..len])
    }
}

impl<'a> Seek for LimitedReader<'a> {
    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
        // Seeking is unrestricted; only reads are limited.
        self.0.seek(pos)
    }
}
/// Returns true if `reader` starts with a sparse-image header. The stream is
/// rewound to the start on success; any I/O or decode failure yields `false`.
pub fn is_sparse_image<R: Reader>(reader: &mut R) -> bool {
    let probed = match deserialize_from::<SparseHeader, _>(reader) {
        Ok(header) => {
            let is_sparse = header.magic == format::SPARSE_HEADER_MAGIC;
            // Leave the reader positioned at the start for the caller.
            match reader.seek(SeekFrom::Start(0)) {
                Ok(_) => Some(is_sparse),
                Err(_) => None,
            }
        }
        Err(_) => None,
    };
    probed.unwrap_or(false)
}
/// One chunk of a sparse image, mirroring the chunk types of the sparse
/// format. `start` is the chunk's offset in the *output* (unsparsed) image,
/// and `size` its length in output bytes.
#[derive(Clone, PartialEq, Debug)]
enum Chunk {
    /// `size` bytes of literal data, copied from the input at `start`.
    Raw { start: u64, size: usize },
    /// `size` bytes produced by repeating the 4-byte `value`.
    Fill { start: u64, size: usize, value: u32 },
    /// `size` bytes whose contents are unspecified (skipped on expansion).
    DontCare { start: u64, size: usize },
    /// A CRC32 record; carries a checksum and contributes no output bytes.
    #[allow(dead_code)]
    Crc32 { checksum: u32 },
}
impl Chunk {
pub fn read_metadata<R: Reader>(reader: &mut R, offset: u64, block_size: u32) -> Result<Self> {
let header: ChunkHeader =
deserialize_from(reader).context("Failed to read chunk header")?;
ensure!(header.valid(), "Invalid chunk header");
let size = (header.chunk_sz * block_size) as usize;
match header.chunk_type {
format::CHUNK_TYPE_RAW => Ok(Self::Raw { start: offset, size }),
format::CHUNK_TYPE_FILL => {
let value: u32 =
deserialize_from(reader).context("Failed to deserialize fill value")?;
Ok(Self::Fill { start: offset, size, value })
}
format::CHUNK_TYPE_DONT_CARE => Ok(Self::DontCare { start: offset, size }),
format::CHUNK_TYPE_CRC32 => {
let checksum: u32 =
deserialize_from(reader).context("Failed to deserialize checksum")?;
Ok(Self::Crc32 { checksum })
}
_ => unreachable!(),
}
}
fn valid(&self, block_size: usize) -> bool {
self.output_size() % block_size == 0
}
fn output_offset(&self) -> Option<u64> {
match self {
Self::Raw { start, .. } => Some(*start),
Self::Fill { start, .. } => Some(*start),
Self::DontCare { start, .. } => Some(*start),
Self::Crc32 { .. } => None,
}
}
fn output_size(&self) -> usize {
match self {
Self::Raw { size, .. } => *size,
Self::Fill { size, .. } => *size,
Self::DontCare { size, .. } => *size,
Self::Crc32 { .. } => 0,
}
}
fn output_blocks(&self, block_size: usize) -> u32 {
let size_bytes = self.output_size();
((size_bytes + block_size - 1) / block_size) as u32
}
fn chunk_type(&self) -> u16 {
match self {
Self::Raw { .. } => format::CHUNK_TYPE_RAW,
Self::Fill { .. } => format::CHUNK_TYPE_FILL,
Self::DontCare { .. } => format::CHUNK_TYPE_DONT_CARE,
Self::Crc32 { .. } => format::CHUNK_TYPE_CRC32,
}
}
fn chunk_data_len(&self) -> usize {
let header_size = format::CHUNK_HEADER_SIZE;
let data_size = match self {
Self::Raw { size, .. } => *size,
Self::Fill { .. } => std::mem::size_of::<u32>(),
Self::DontCare { .. } => 0,
Self::Crc32 { .. } => std::mem::size_of::<u32>(),
};
header_size + data_size
}
fn write<W: Write + Seek, R: Read + Seek>(
&self,
source: Option<&mut R>,
dest: &mut W,
) -> Result<()> {
ensure!(self.valid(BLK_SIZE), "Not writing invalid chunk");
let header = ChunkHeader::new(
self.chunk_type(),
0x0,
self.output_blocks(BLK_SIZE),
self.chunk_data_len() as u32,
);
let header_bytes: Vec<u8> = bincode::serialize(&header)?;
std::io::copy(&mut Cursor::new(header_bytes), dest)?;
match self {
Self::Raw { size, .. } => {
ensure!(source.is_some(), "No source for Raw chunk");
let n = std::io::copy(source.unwrap(), dest)? as usize;
if n < *size {
let zeroes = vec![0u8; *size - n];
std::io::copy(&mut Cursor::new(zeroes), dest)?;
}
}
Self::Fill { value, .. } => {
bincode::serialize_into(dest, value)?;
}
Self::DontCare { .. } => {
}
Self::Crc32 { checksum } => {
bincode::serialize_into(dest, checksum)?;
}
}
Ok(())
}
}
impl fmt::Display for Chunk {
    /// Single-line human-readable summaries, used by trace logging.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let message = match self {
            Self::Raw { start, size } => {
                format!("RawChunk: start: {}, total bytes: {}", start, size)
            }
            Self::Fill { start, size, value } => {
                // `size` is a byte count, not a block count — the previous
                // label "n_blocks" was misleading.
                format!("FillChunk: start: {}, value: {}, bytes: {}", start, value, size)
            }
            Self::DontCare { start, size } => {
                format!("DontCareChunk: start: {}, bytes: {}", start, size)
            }
            Self::Crc32 { checksum } => format!("Crc32Chunk: checksum: {:?}", checksum),
        };
        write!(f, "{}", message)
    }
}
/// An ordered list of chunks describing one sparse file to be written.
#[derive(Clone, Debug, PartialEq)]
struct SparseFileWriter {
    // Chunks in output order; offsets are expected to be contiguous.
    chunks: Vec<Chunk>,
}
impl SparseFileWriter {
    /// Builds a writer over the given ordered chunks.
    fn new(chunks: Vec<Chunk>) -> SparseFileWriter {
        SparseFileWriter { chunks }
    }

    /// Total number of output blocks across all chunks.
    fn total_blocks(&self) -> u32 {
        self.chunks.iter().fold(0u32, |acc, chunk| acc + chunk.output_blocks(BLK_SIZE))
    }

    /// Total number of output bytes across all chunks.
    fn total_bytes(&self) -> usize {
        self.chunks.iter().fold(0usize, |acc, chunk| acc + chunk.output_size())
    }

    /// Serializes the sparse header and every chunk to `writer`, pulling raw
    /// chunk data from `reader`.
    #[tracing::instrument(skip(self, reader, writer))]
    fn write<W: Write + Seek, R: Read + Seek>(&self, reader: &mut R, writer: &mut W) -> Result<()> {
        let header = SparseHeader::new(
            BLK_SIZE.try_into().unwrap(),
            self.total_blocks(),
            self.chunks.len().try_into().unwrap(),
        );
        writer.write_all(&bincode::serialize(&header)?)?;
        for chunk in &self.chunks {
            // Raw chunks need a bounded window into `reader` positioned at
            // their data; every other chunk type carries its payload inline.
            let mut source = match chunk {
                &Chunk::Raw { start, size } => {
                    reader.seek(SeekFrom::Start(start))?;
                    // The limit is an absolute offset: the end of this
                    // chunk's data in the input stream.
                    Some(LimitedReader(reader, start as usize + size))
                }
                _ => None,
            };
            chunk.write(source.as_mut(), writer)?;
        }
        Ok(())
    }
}
impl fmt::Display for SparseFileWriter {
    /// Brief summary only; individual chunks implement `Display` themselves.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "SparseFileWriter: {} Chunks:", self.chunks.len())
    }
}
/// Appends `chunk` to `r`, coalescing it into the previous chunk when both
/// are the same kind (and, for `Fill`, carry the same value). Merging simply
/// sums sizes, so chunks are assumed to arrive in contiguous output order.
fn add_sparse_chunk(r: &mut Vec<Chunk>, chunk: Chunk) -> Result<()> {
    if let Some(last) = r.last_mut() {
        match (&*last, &chunk) {
            (Chunk::Raw { start, size }, Chunk::Raw { size: extra, .. }) => {
                *last = Chunk::Raw { start: *start, size: size + extra };
                return Ok(());
            }
            (Chunk::Fill { start, size, value }, Chunk::Fill { size: extra, value: v, .. })
                if value == v =>
            {
                *last = Chunk::Fill { start: *start, size: size + extra, value: *value };
                return Ok(());
            }
            (Chunk::DontCare { start, size }, Chunk::DontCare { size: extra, .. }) => {
                *last = Chunk::DontCare { start: *start, size: size + extra };
                return Ok(());
            }
            _ => {}
        }
    }
    // Nothing to merge with: append as a new chunk.
    r.push(chunk);
    Ok(())
}
/// Reads a sparse image from `source` and writes the fully expanded (raw)
/// image to `dest`, then truncates `dest` to the exact output length.
#[tracing::instrument(skip(source, dest))]
pub fn unsparse<W: Writer, R: Reader>(source: &mut R, dest: &mut W) -> Result<()> {
    let header: SparseHeader = deserialize_from(source).context("Failed to read header")?;
    ensure!(header.valid(), "Invalid sparse image header {:?}", header);
    for _ in 0..header.total_chunks {
        expand_chunk(source, dest, header.blk_sz).context("Failed to expand chunk")?;
    }
    // Trailing DontCare chunks only seek past their range; set the length
    // explicitly so the output file ends exactly at the final position.
    let end = dest.stream_position()?;
    dest.set_len(end).context("Failed to truncate output")?;
    dest.flush()?;
    Ok(())
}
/// Reads one chunk from `source` and writes its expanded contents to `dest`.
///
/// Raw chunks are copied verbatim, fill chunks repeat their 4-byte value,
/// don't-care chunks seek forward, and CRC32 records are read and discarded.
fn expand_chunk<R: Read + Seek, W: Write + Seek>(
    source: &mut R,
    dest: &mut W,
    block_size: u32,
) -> Result<()> {
    let header: ChunkHeader =
        deserialize_from(source).context("Failed to deserialize chunk header")?;
    ensure!(header.valid(), "Invalid chunk header {:x?}", header);
    // Widen before multiplying: the u32 product `chunk_sz * block_size` can
    // overflow for chunks covering 4 GiB or more.
    let size = header.chunk_sz as usize * block_size as usize;
    match header.chunk_type {
        format::CHUNK_TYPE_RAW => {
            // The limit is an absolute offset: current position plus payload.
            let limit = source.stream_position()? as usize + size;
            std::io::copy(&mut LimitedReader(source, limit), dest)
                .context("Failed to copy contents")?;
        }
        format::CHUNK_TYPE_FILL => {
            let value: [u8; 4] =
                deserialize_from(source).context("Failed to deserialize fill value")?;
            // Malformed input must surface as an error, not a panic.
            ensure!(size % 4 == 0, "Fill chunk size {} is not a multiple of 4", size);
            let repeated = value.repeat(size / 4);
            std::io::copy(&mut Cursor::new(repeated), dest).context("Failed to fill contents")?;
        }
        format::CHUNK_TYPE_DONT_CARE => {
            // Skip the range; `unsparse` fixes up the final file length.
            dest.seek(SeekFrom::Current(size as i64)).context("Failed to skip contents")?;
        }
        format::CHUNK_TYPE_CRC32 => {
            // Read and discard the checksum record.
            let _: u32 = deserialize_from(source).context("Failed to deserialize checksum")?;
        }
        _ => bail!("Invalid type {}", header.chunk_type),
    };
    Ok(())
}
/// Splits `sparse_file` into multiple sparse files, each no larger on the
/// wire than `max_download_size` bytes. Every output file still describes
/// the full image: later files start with a DontCare covering bytes already
/// emitted, and files cut short end with a DontCare covering the remainder.
#[tracing::instrument]
fn resparse(
    sparse_file: SparseFileWriter,
    max_download_size: u64,
) -> Result<Vec<SparseFileWriter>> {
    if max_download_size as usize <= BLK_SIZE {
        anyhow::bail!(
            "Given maximum download size ({}) is less than the block size ({})",
            max_download_size,
            BLK_SIZE
        );
    }
    let mut ret = Vec::<SparseFileWriter>::new();
    // Fixed overhead budgeted into every output file: the sparse header plus
    // room for a trailing DontCare chunk and one Crc32 record. The checksum
    // value 2345 is a dummy, used only to measure the record's wire size.
    let sunk_file_length = format::SPARSE_HEADER_SIZE
        + (Chunk::DontCare { start: 0, size: BLK_SIZE }.chunk_data_len()
            + Chunk::Crc32 { checksum: 2345 }.chunk_data_len());
    // `chunk_pos` walks the input chunk list; `output_offset` tracks how many
    // output bytes the chunks consumed so far cover.
    let mut chunk_pos = 0;
    let mut output_offset = 0;
    while chunk_pos < sparse_file.chunks.len() {
        tracing::trace!("Starting a new file at chunk position: {}", chunk_pos);
        let mut file_len = 0;
        file_len += sunk_file_length;
        let mut chunks = Vec::<Chunk>::new();
        if chunk_pos > 0 {
            // Not the first file: lead with a DontCare spanning everything
            // already covered by earlier files.
            tracing::trace!("Adding a DontCare chunk offset: {}", chunk_pos);
            let dont_care = Chunk::DontCare { start: 0, size: output_offset };
            chunks.push(dont_care);
        }
        loop {
            match sparse_file.chunks.get(chunk_pos) {
                Some(chunk) => {
                    let curr_chunk_data_len = chunk.chunk_data_len();
                    if (file_len + curr_chunk_data_len) as u64 > max_download_size {
                        // This file is full: close it with a DontCare covering
                        // the rest of the image so its logical size is right.
                        tracing::trace!("Current file size is: {} and adding another chunk of len: {} would put us over our max: {}", file_len, curr_chunk_data_len, max_download_size);
                        let remainder_size = sparse_file.total_bytes() - output_offset;
                        let dont_care =
                            Chunk::DontCare { start: output_offset as u64, size: remainder_size };
                        chunks.push(dont_care);
                        break;
                    }
                    tracing::trace!("chunk: {} curr_chunk_data_len: {} current file size: {} max_download_size: {} diff: {}", chunk_pos, curr_chunk_data_len, file_len, max_download_size, (max_download_size as usize - file_len - curr_chunk_data_len) );
                    // Merge into the previous chunk where possible to keep
                    // the chunk count down.
                    add_sparse_chunk(&mut chunks, chunk.clone())?;
                    file_len += curr_chunk_data_len;
                    chunk_pos = chunk_pos + 1;
                    output_offset += chunk.output_size();
                }
                None => {
                    tracing::trace!("Finished iterating chunks");
                    break;
                }
            }
        }
        let resparsed = SparseFileWriter::new(chunks);
        tracing::trace!("resparse: Adding new SparseFile: {}", resparsed);
        ret.push(resparsed);
    }
    Ok(ret)
}
/// Reads `file_to_upload`, classifies each block as fill or raw, and writes
/// one or more sparse files (each at most `max_download_size` bytes) into
/// `dir`. Returns the temp paths of the files produced. `name` is used only
/// for logging.
#[tracing::instrument()]
pub fn build_sparse_files(
    name: &str,
    file_to_upload: &str,
    dir: &Path,
    max_download_size: u64,
) -> Result<Vec<TempPath>> {
    if max_download_size as usize <= BLK_SIZE {
        anyhow::bail!(
            "Given maximum download size ({}) is less than the block size ({})",
            max_download_size,
            BLK_SIZE
        );
    }
    tracing::debug!("Building sparse files for: {}. File: {}", name, file_to_upload);
    let mut in_file = File::open(file_to_upload)?;
    let mut total_read: usize = 0;
    // One chunk per block, plus one for a possible trailing partial block.
    let mut chunks =
        Vec::<Chunk>::with_capacity((in_file.metadata()?.len() as usize / BLK_SIZE) + 1);
    let mut buf = [0u8; BLK_SIZE];
    loop {
        let read = in_file.read(&mut buf)?;
        if read == 0 {
            break;
        }
        // Zero the tail on a short read so stale bytes from the previous
        // block cannot leak into fill detection or the emitted chunk: a final
        // partial block is logically zero-padded to BLK_SIZE.
        buf[read..].fill(0);
        // A block is a fill block when it is one 4-byte word repeated; this
        // avoids the intermediate Vec the windows(2) formulation required.
        let first_word = &buf[0..4];
        let is_fill = buf.chunks(4).all(|word| word == first_word);
        if is_fill {
            let value: u32 = bincode::deserialize(&buf[0..4])?;
            let fill = Chunk::Fill { start: total_read as u64, size: buf.len(), value };
            tracing::trace!("Sparsing file: {}. Created: {}", file_to_upload, fill);
            chunks.push(fill);
        } else {
            let raw = Chunk::Raw { start: total_read as u64, size: buf.len() };
            tracing::trace!("Sparsing file: {}. Created: {}", file_to_upload, raw);
            chunks.push(raw);
        }
        total_read += read;
    }
    tracing::trace!("Creating sparse file from: {} chunks", chunks.len());
    let sparse_file = SparseFileWriter::new(chunks);
    tracing::trace!("Created sparse file: {}", sparse_file);
    let mut ret = Vec::<TempPath>::new();
    tracing::trace!("Resparsing sparse file");
    for re_sparsed_file in resparse(sparse_file, max_download_size)? {
        let (file, temp_path) = NamedTempFile::new_in(dir)?.into_parts();
        let mut file_create = File::from(file);
        tracing::trace!("Writing resparsed {} to disk", re_sparsed_file);
        re_sparsed_file.write(&mut in_file, &mut file_create)?;
        ret.push(temp_path);
    }
    tracing::debug!("Finished building sparse files");
    Ok(ret)
}
#[cfg(test)]
mod test {
    use super::builder::{DataSource, SparseImageBuilder};
    use super::{add_sparse_chunk, resparse, unsparse, Chunk, SparseFileWriter, BLK_SIZE};
    use rand::rngs::SmallRng;
    use rand::{RngCore, SeedableRng};
    use std::io::{Cursor, Read as _, Seek as _, SeekFrom, Write as _};
    use tempfile::{NamedTempFile, TempDir};

    // A Fill chunk serializes to a 12-byte chunk header followed by its
    // 4-byte little-endian fill value (365 = 0x16d -> [109, 1, 0, 0]).
    #[test]
    fn test_fill_into_bytes() {
        let mut dest = Cursor::new(Vec::<u8>::new());
        let fill_chunk = Chunk::Fill { start: 0, size: 5 * BLK_SIZE, value: 365 };
        fill_chunk.write(None::<&mut Cursor<Vec<u8>>>, &mut dest).unwrap();
        assert_eq!(dest.into_inner(), [194, 202, 0, 0, 5, 0, 0, 0, 16, 0, 0, 0, 109, 1, 0, 0]);
    }

    // A Raw chunk with a short source is zero-padded: header (12 bytes) +
    // "12345" + zeroes up to a full 4096-byte block = 4108 bytes total.
    #[test]
    fn test_raw_into_bytes() {
        const EXPECTED_RAW_BYTES: [u8; 22] =
            [193, 202, 0, 0, 1, 0, 0, 0, 12, 16, 0, 0, 49, 50, 51, 52, 53, 0, 0, 0, 0, 0];
        let mut source = Cursor::new(Vec::<u8>::from(&b"12345"[..]));
        let mut sparse = Cursor::new(Vec::<u8>::new());
        let chunk = Chunk::Raw { start: 0, size: BLK_SIZE };
        chunk.write(Some(&mut source), &mut sparse).unwrap();
        let buf = sparse.into_inner();
        assert_eq!(buf.len(), 4108);
        assert_eq!(&buf[..EXPECTED_RAW_BYTES.len()], EXPECTED_RAW_BYTES);
        assert_eq!(&buf[EXPECTED_RAW_BYTES.len()..], &[0u8; 4108 - EXPECTED_RAW_BYTES.len()]);
    }

    // A DontCare chunk serializes to just its 12-byte chunk header.
    #[test]
    fn test_dont_care_into_bytes() {
        let mut dest = Cursor::new(Vec::<u8>::new());
        let chunk = Chunk::DontCare { start: 0, size: 5 * BLK_SIZE };
        chunk.write(None::<&mut Cursor<Vec<u8>>>, &mut dest).unwrap();
        assert_eq!(dest.into_inner(), [195, 202, 0, 0, 5, 0, 0, 0, 12, 0, 0, 0]);
    }

    // Round-trips a Fill + Raw + DontCare sequence through write + unsparse
    // and checks the expanded image byte-for-byte (including Raw zero-pad).
    #[test]
    fn test_sparse_file_into_bytes() {
        let mut source = Cursor::new(Vec::<u8>::from(&b"123"[..]));
        let mut sparse = Cursor::new(Vec::<u8>::new());
        let mut chunks = Vec::<Chunk>::new();
        let fill = Chunk::Fill { start: 0, size: 4096, value: 5 };
        chunks.push(fill);
        let raw = Chunk::Raw { start: 0, size: 12288 };
        chunks.push(raw);
        let dontcare = Chunk::DontCare { start: 0, size: 4096 };
        chunks.push(dontcare);
        let sparsefile = SparseFileWriter::new(chunks);
        sparsefile.write(&mut source, &mut sparse).unwrap();
        sparse.seek(SeekFrom::Start(0)).unwrap();
        let mut unsparsed = Cursor::new(Vec::<u8>::new());
        unsparse(&mut sparse, &mut unsparsed).unwrap();
        let buf = unsparsed.into_inner();
        assert_eq!(buf.len(), 4096 + 12288 + 4096);
        {
            // The fill region repeats the little-endian value 5.
            let chunks = buf[..4096].chunks(4);
            for chunk in chunks {
                assert_eq!(chunk, &[5u8, 0, 0, 0]);
            }
        }
        assert_eq!(&buf[4096..4099], b"123");
        assert_eq!(&buf[4099..16384], &[0u8; 12285]);
        assert_eq!(&buf[16384..], &[0u8; 4096]);
    }

    // resparse rejects a maximum download size at or below one block.
    #[test]
    fn test_resparse_bails_on_too_small_size() {
        let sparse = SparseFileWriter::new(Vec::<Chunk>::new());
        assert!(resparse(sparse, 4095).is_err());
    }

    // resparse splits three blocks across two files, padding each file with
    // DontCare chunks so both still describe the full image.
    #[test]
    fn test_resparse_splits() {
        let max_download_size = 4096 * 2;
        let mut chunks = Vec::<Chunk>::new();
        chunks.push(Chunk::Raw { start: 0, size: 4096 });
        chunks.push(Chunk::Fill { start: 4096, size: 4096, value: 2 });
        chunks.push(Chunk::Raw { start: 8192, size: 4096 });
        let input_sparse_file = SparseFileWriter::new(chunks);
        let resparsed_files = resparse(input_sparse_file, max_download_size).unwrap();
        assert_eq!(2, resparsed_files.len());
        assert_eq!(3, resparsed_files[0].chunks.len());
        assert_eq!(Chunk::Raw { start: 0, size: 4096 }, resparsed_files[0].chunks[0]);
        assert_eq!(Chunk::Fill { start: 4096, size: 4096, value: 2 }, resparsed_files[0].chunks[1]);
        assert_eq!(Chunk::DontCare { start: 8192, size: 4096 }, resparsed_files[0].chunks[2]);
        assert_eq!(2, resparsed_files[1].chunks.len());
        assert_eq!(Chunk::DontCare { start: 0, size: 8192 }, resparsed_files[1].chunks[0]);
        assert_eq!(Chunk::Raw { start: 8192, size: 4096 }, resparsed_files[1].chunks[1]);
    }

    // Adding to an empty vec simply appends the chunk.
    #[test]
    fn test_add_sparse_chunk_adds_empty() {
        let init_vec = Vec::<Chunk>::new();
        let mut res = init_vec.clone();
        add_sparse_chunk(&mut res, Chunk::Fill { start: 0, size: 4096, value: 1 }).unwrap();
        assert_eq!(0, init_vec.len());
        assert_ne!(init_vec, res);
        assert_eq!(Chunk::Fill { start: 0, size: 4096, value: 1 }, res[0]);
    }

    // Fill chunks merge only when their fill values match; mismatched values
    // or different chunk kinds are appended instead.
    #[test]
    fn test_add_sparse_chunk_fill() {
        {
            let mut init_vec = Vec::<Chunk>::new();
            init_vec.push(Chunk::Fill { start: 0, size: 8192, value: 1 });
            let mut res = init_vec.clone();
            add_sparse_chunk(&mut res, Chunk::Fill { start: 0, size: 8192, value: 1 }).unwrap();
            assert_eq!(1, res.len());
            assert_eq!(Chunk::Fill { start: 0, size: 16384, value: 1 }, res[0]);
        }
        {
            let mut init_vec = Vec::<Chunk>::new();
            init_vec.push(Chunk::Fill { start: 0, size: 4096, value: 1 });
            let mut res = init_vec.clone();
            add_sparse_chunk(&mut res, Chunk::Fill { start: 0, size: 4096, value: 2 }).unwrap();
            assert_ne!(res, init_vec);
            assert_eq!(2, res.len());
            assert_eq!(
                res,
                [
                    Chunk::Fill { start: 0, size: 4096, value: 1 },
                    Chunk::Fill { start: 0, size: 4096, value: 2 }
                ]
            );
        }
        {
            let mut init_vec = Vec::<Chunk>::new();
            init_vec.push(Chunk::Fill { start: 0, size: 4096, value: 2 });
            let mut res = init_vec.clone();
            add_sparse_chunk(&mut res, Chunk::DontCare { start: 0, size: 4096 }).unwrap();
            assert_ne!(res, init_vec);
            assert_eq!(2, res.len());
            assert_eq!(
                res,
                [
                    Chunk::Fill { start: 0, size: 4096, value: 2 },
                    Chunk::DontCare { start: 0, size: 4096 }
                ]
            );
        }
    }

    // Adjacent DontCare chunks merge; a DontCare followed by a different
    // chunk kind appends.
    #[test]
    fn test_add_sparse_chunk_dont_care() {
        {
            let mut init_vec = Vec::<Chunk>::new();
            init_vec.push(Chunk::DontCare { start: 0, size: 4096 });
            let mut res = init_vec.clone();
            add_sparse_chunk(&mut res, Chunk::DontCare { start: 0, size: 4096 }).unwrap();
            assert_eq!(1, res.len());
            assert_eq!(Chunk::DontCare { start: 0, size: 8192 }, res[0]);
        }
        {
            let mut init_vec = Vec::<Chunk>::new();
            init_vec.push(Chunk::DontCare { start: 0, size: 4096 });
            let mut res = init_vec.clone();
            add_sparse_chunk(&mut res, Chunk::Fill { start: 0, size: 4096, value: 1 }).unwrap();
            assert_eq!(2, res.len());
            assert_eq!(
                res,
                [
                    Chunk::DontCare { start: 0, size: 4096 },
                    Chunk::Fill { start: 0, size: 4096, value: 1 }
                ]
            );
        }
    }

    // Adjacent Raw chunks merge by summing sizes; a Raw followed by a
    // different chunk kind appends.
    #[test]
    fn test_add_sparse_chunk_raw() {
        {
            let mut init_vec = Vec::<Chunk>::new();
            init_vec.push(Chunk::Raw { start: 0, size: 12288 });
            let mut res = init_vec.clone();
            add_sparse_chunk(&mut res, Chunk::Raw { start: 0, size: 16384 }).unwrap();
            assert_eq!(1, res.len());
            assert_eq!(Chunk::Raw { start: 0, size: 28672 }, res[0]);
        }
        {
            let mut init_vec = Vec::<Chunk>::new();
            init_vec.push(Chunk::Raw { start: 0, size: 12288 });
            let mut res = init_vec.clone();
            add_sparse_chunk(&mut res, Chunk::Fill { start: 3, size: 8192, value: 1 }).unwrap();
            assert_eq!(2, res.len());
            assert_eq!(
                res,
                [
                    Chunk::Raw { start: 0, size: 12288 },
                    Chunk::Fill { start: 3, size: 8192, value: 1 }
                ]
            );
        }
    }

    // Crc32 chunks never merge with anything.
    #[test]
    fn test_add_sparse_chunk_crc32() {
        {
            let mut init_vec = Vec::<Chunk>::new();
            init_vec.push(Chunk::Crc32 { checksum: 1234 });
            let mut res = init_vec.clone();
            add_sparse_chunk(&mut res, Chunk::Crc32 { checksum: 2345 }).unwrap();
            assert_eq!(2, res.len());
            assert_eq!(res, [Chunk::Crc32 { checksum: 1234 }, Chunk::Crc32 { checksum: 2345 }]);
        }
        {
            let mut init_vec = Vec::<Chunk>::new();
            init_vec.push(Chunk::Crc32 { checksum: 1234 });
            let mut res = init_vec.clone();
            add_sparse_chunk(&mut res, Chunk::Fill { start: 0, size: 4096, value: 1 }).unwrap();
            assert_eq!(2, res.len());
            assert_eq!(
                res,
                [Chunk::Crc32 { checksum: 1234 }, Chunk::Fill { start: 0, size: 4096, value: 1 }]
            );
        }
    }

    // End-to-end: build a sparse image from mixed data sources with the
    // builder, unsparse it, and verify every region of the expanded image.
    #[test]
    fn test_roundtrip() {
        let tmpdir = TempDir::new().unwrap();
        // One block of random data serves as the Reader-backed chunk.
        let (mut file, _temp_path) = NamedTempFile::new_in(&tmpdir).unwrap().into_parts();
        let mut rng = SmallRng::from_entropy();
        let mut buf = Vec::<u8>::new();
        buf.resize(1 * 4096, 0);
        rng.fill_bytes(&mut buf);
        file.write_all(&buf).unwrap();
        file.flush().unwrap();
        file.seek(SeekFrom::Start(0)).unwrap();
        let content_size = buf.len();
        let mut sparse_file = NamedTempFile::new_in(&tmpdir).unwrap().into_file();
        SparseImageBuilder::new()
            .add_chunk(DataSource::Buffer(Box::new([0xffu8; 8192])))
            .add_chunk(DataSource::Reader(Box::new(file)))
            .add_chunk(DataSource::Fill(0xaaaa_aaaau32, 1024))
            .add_chunk(DataSource::Skip(16384))
            .build(&mut sparse_file)
            .expect("Build sparse image failed");
        sparse_file.seek(SeekFrom::Start(0)).unwrap();
        let mut orig_file = NamedTempFile::new_in(&tmpdir).unwrap().into_file();
        unsparse(&mut sparse_file, &mut orig_file).expect("unsparse failed");
        orig_file.seek(SeekFrom::Start(0)).unwrap();
        let mut unsparsed_bytes = vec![];
        orig_file.read_to_end(&mut unsparsed_bytes).expect("Failed to read unsparsed image");
        // Layout: 8192 buffer bytes, content_size reader bytes, 4096 fill
        // bytes (1024 repetitions of 0xaaaaaaaa), 16384 skipped (zero) bytes.
        assert_eq!(unsparsed_bytes.len(), 8192 + 20480 + content_size);
        assert_eq!(&unsparsed_bytes[..8192], &[0xffu8; 8192]);
        assert_eq!(&unsparsed_bytes[8192..8192 + content_size], &buf[..]);
        assert_eq!(&unsparsed_bytes[8192 + content_size..12288 + content_size], &[0xaau8; 4096]);
        assert_eq!(&unsparsed_bytes[12288 + content_size..], &[0u8; 16384]);
    }
}