Skip to content

Commit

Permalink
download_sysext: split out hash_on_disk_sha256 into download
Browse files Browse the repository at this point in the history
To expose the hash_on_disk core logic to other parts, such as test/update_crau
or download_sysext, move it into a common module, src/download.rs.
  • Loading branch information
dongsupark committed Nov 29, 2023
1 parent b9c3c46 commit 9470586
Show file tree
Hide file tree
Showing 3 changed files with 49 additions and 47 deletions.
48 changes: 2 additions & 46 deletions src/bin/download_sysext.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ use std::path::{Path, PathBuf};
use std::fs::File;
use std::fs;
use std::io;
use std::io::{Read, Seek, SeekFrom};
use std::io::BufReader;

#[macro_use]
Expand All @@ -18,6 +17,7 @@ use reqwest::redirect::Policy;
use url::Url;

use update_format_crau::delta_update;
use ue_rs::hash_on_disk_sha256;

#[derive(Debug)]
enum PackageStatus {
Expand Down Expand Up @@ -45,51 +45,7 @@ impl<'a> Package<'a> {
// If maxlen is None, a simple read to the end of the file.
// If maxlen is Some, read only until the given length.
fn hash_on_disk(&mut self, path: &Path, maxlen: Option<usize>) -> Result<omaha::Hash<omaha::Sha256>> {
use sha2::{Sha256, Digest};

let file = File::open(path).context({
format!("failed to open path({:?})", path.display())
})?;
let mut hasher = Sha256::new();

let filelen = file.metadata().unwrap().len() as usize;

let mut maxlen_to_read: usize = match maxlen {
Some(len) => {
if filelen < len {
filelen
} else {
len
}
}
None => filelen,
};

const CHUNKLEN: usize = 10485760; // 10M

let mut freader = BufReader::new(file);
let mut chunklen: usize;

freader.seek(SeekFrom::Start(0)).context("failed to seek(0)".to_string())?;
while maxlen_to_read > 0 {
if maxlen_to_read < CHUNKLEN {
chunklen = maxlen_to_read;
} else {
chunklen = CHUNKLEN;
}

let mut databuf = vec![0u8; chunklen];

freader.read_exact(&mut databuf).context(format!("failed to read_exact(chunklen {:?})", chunklen))?;

maxlen_to_read -= chunklen;

hasher.update(&databuf);
}

Ok(omaha::Hash::from_bytes(
hasher.finalize().into()
))
hash_on_disk_sha256(path, maxlen)
}

#[rustfmt::skip]
Expand Down
46 changes: 45 additions & 1 deletion src/download.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
use anyhow::{Context, Result, bail};
use std::io::Write;
use std::io::{BufReader, Read, Seek, SeekFrom, Write};
use std::io;
use std::fs::File;
use std::path::Path;
use log::info;
use url::Url;

Expand All @@ -13,6 +15,48 @@ pub struct DownloadResult<W: std::io::Write> {
pub data: W,
}

pub fn hash_on_disk_sha256(path: &Path, maxlen: Option<usize>) -> Result<omaha::Hash<omaha::Sha256>> {
let file = File::open(path).context(format!("failed to open path({:?})", path.display()))?;
let mut hasher = Sha256::new();

let filelen = file.metadata().context(format!("failed to get metadata of {:?}", path.display()))?.len() as usize;

let mut maxlen_to_read: usize = match maxlen {
Some(len) => {
if filelen < len {
filelen
} else {
len
}
}
None => filelen,
};

const CHUNKLEN: usize = 10485760; // 10M

let mut freader = BufReader::new(file);
let mut chunklen: usize;

freader.seek(SeekFrom::Start(0)).context("failed to seek(0)".to_string())?;
while maxlen_to_read > 0 {
if maxlen_to_read < CHUNKLEN {
chunklen = maxlen_to_read;
} else {
chunklen = CHUNKLEN;
}

let mut databuf = vec![0u8; chunklen];

freader.read_exact(&mut databuf).context(format!("failed to read_exact(chunklen {:?})", chunklen))?;

maxlen_to_read -= chunklen;

hasher.update(&databuf);
}

Ok(omaha::Hash::from_bytes(hasher.finalize().into()))
}

pub async fn download_and_hash<U, W>(client: &reqwest::Client, url: U, mut data: W) -> Result<DownloadResult<W>>
where
U: reqwest::IntoUrl + Clone,
Expand Down
2 changes: 2 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
mod download;

// Re-export the download API at the crate root so consumers can write
// e.g. `ue_rs::hash_on_disk_sha256` without naming the private module.
pub use download::{download_and_hash, hash_on_disk_sha256, DownloadResult};

pub mod request;

0 comments on commit 9470586

Please sign in to comment.