diff --git a/Cargo.lock b/Cargo.lock
index df2673f..96d22e8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -26,6 +26,12 @@ dependencies = [
  "memchr",
 ]

+[[package]]
+name = "anyhow"
+version = "1.0.75"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
+
 [[package]]
 name = "argh"
 version = "0.1.12"
@@ -745,6 +751,7 @@ dependencies = [
 name = "omaha"
 version = "0.1.0"
 dependencies = [
+ "anyhow",
  "ct-codecs",
  "hard-xml",
  "url",
@@ -1377,6 +1384,7 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
 name = "ue-rs"
 version = "0.1.0"
 dependencies = [
+ "anyhow",
  "argh",
  "bzip2",
  "env_logger",
@@ -1419,6 +1427,7 @@ dependencies = [
 name = "update-format-crau"
 version = "0.1.0"
 dependencies = [
+ "anyhow",
  "bzip2",
  "log",
  "protobuf",
diff --git a/Cargo.toml b/Cargo.toml
index 2a80494..0240a92 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -6,19 +6,19 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-reqwest = "0.11"
-tokio = { version = "1", features = ["macros", "net", "rt-multi-thread"] }
-uuid = "1.2"
-sha2 = "0.10"
-url = "2"
-
-env_logger = "0.10"
-log = "0.4"
+anyhow = "1.0.75"
 argh = "0.1"
+bzip2 = "0.4.4"
+env_logger = "0.10"
 globset = "0.4"
+log = "0.4"
 protobuf = "3.2.0"
-bzip2 = "0.4.4"
+reqwest = "0.11"
+sha2 = "0.10"
 tempfile = "3.8.1"
+tokio = { version = "1", features = ["macros", "net", "rt-multi-thread"] }
+url = "2"
+uuid = "1.2"

 [dependencies.hard-xml]
 path = "vendor/hard-xml"
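The code changes below repeat one pattern: signatures move from `Result<_, Box<dyn Error>>` to `anyhow::Result<_>`, and bare `?` propagation gains `.context(...)` messages. A minimal sketch of that pattern outside this codebase (`read_config` and its path are hypothetical, not part of this change):

```rust
use anyhow::{Context, Result};

// anyhow::Result<T> is an alias for Result<T, anyhow::Error>.
// .context(...) wraps the underlying error with a readable message;
// the whole chain is printed when the error reaches main.
fn read_config(path: &str) -> Result<String> {
    std::fs::read_to_string(path).context(format!("failed to read config ({path})"))
}

fn main() -> Result<()> {
    let cfg = read_config("/etc/example.conf")?;
    println!("{cfg}");
    Ok(())
}
```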
diff --git a/examples/full_test.rs b/examples/full_test.rs
index 011aa79..36c56cc 100644
--- a/examples/full_test.rs
+++ b/examples/full_test.rs
@@ -1,10 +1,11 @@
 use std::error::Error;
 use std::borrow::Cow;

+use anyhow::{Context, Result};
 use hard_xml::XmlRead;
 use url::Url;

-fn get_pkgs_to_download(resp: &omaha::Response) -> Result<Vec<(Url, omaha::Hash<omaha::Sha256>)>, Box<dyn Error>> {
+fn get_pkgs_to_download(resp: &omaha::Response) -> Result<Vec<(Url, omaha::Hash<omaha::Sha256>)>> {
     let mut to_download: Vec<(Url, omaha::Hash<_>)> = Vec::new();

     for app in &resp.apps {
@@ -44,17 +45,23 @@ fn get_pkgs_to_download(resp: &omaha::Response) -> Result<Vec<(Url, omaha::Hash
 #[tokio::main]
 async fn main() -> Result<(), Box<dyn Error>> {
     let client = reqwest::Client::new();

+    const APP_VERSION_DEFAULT: &str = "3340.0.0+nightly-20220823-2100";
+    const MACHINE_ID_DEFAULT: &str = "abce671d61774703ac7be60715220bfe";
+    const TRACK_DEFAULT: &str = "stable";
+
     ////
     // request
     ////
     let parameters = ue_rs::request::Parameters {
-        app_version: Cow::Borrowed("3340.0.0+nightly-20220823-2100"),
-        machine_id: Cow::Borrowed("abce671d61774703ac7be60715220bfe"),
+        app_version: Cow::Borrowed(APP_VERSION_DEFAULT),
+        machine_id: Cow::Borrowed(MACHINE_ID_DEFAULT),

-        track: Cow::Borrowed("stable"),
+        track: Cow::Borrowed(TRACK_DEFAULT),
     };

-    let response_text = ue_rs::request::perform(&client, parameters).await?;
+    let response_text = ue_rs::request::perform(&client, parameters).await.context(format!(
+        "perform({APP_VERSION_DEFAULT}, {MACHINE_ID_DEFAULT}, {TRACK_DEFAULT}) failed"
+    ))?;

     println!("response:\n\t{:#?}", response_text);
     println!();
@@ -62,9 +69,9 @@ async fn main() -> Result<(), Box<dyn Error>> {
     ////
     // parse response
     ////
-    let resp = omaha::Response::from_str(&response_text)?;
+    let resp = omaha::Response::from_str(&response_text).context("failed to parse response")?;

-    let pkgs_to_dl = get_pkgs_to_download(&resp)?;
+    let pkgs_to_dl = get_pkgs_to_download(&resp).context("failed to get packages to download")?;

     ////
     // download
@@ -76,7 +83,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
         // std::io::BufWriter wrapping an std::fs::File is probably the right choice.
         // std::io::sink() is basically just /dev/null
         let data = std::io::sink();
-        let res = ue_rs::download_and_hash(&client, url, data).await?;
+        let res = ue_rs::download_and_hash(&client, url.clone(), data).await.context(format!("download_and_hash({url:?}) failed"))?;

         println!("\texpected sha256:   {}", expected_sha256);
         println!("\tcalculated sha256: {}", res.hash);
diff --git a/examples/request.rs b/examples/request.rs
index e7bf4e8..9468e46 100644
--- a/examples/request.rs
+++ b/examples/request.rs
@@ -1,20 +1,28 @@
 use std::error::Error;
 use std::borrow::Cow;

+use anyhow::Context;
+
 use ue_rs::request;

 #[tokio::main]
 async fn main() -> Result<(), Box<dyn Error>> {
     let client = reqwest::Client::new();

+    const APP_VERSION_DEFAULT: &str = "3340.0.0+nightly-20220823-2100";
+    const MACHINE_ID_DEFAULT: &str = "abce671d61774703ac7be60715220bfe";
+    const TRACK_DEFAULT: &str = "stable";
+
     let parameters = request::Parameters {
-        app_version: Cow::Borrowed("3340.0.0+nightly-20220823-2100"),
-        machine_id: Cow::Borrowed("abce671d61774703ac7be60715220bfe"),
+        app_version: Cow::Borrowed(APP_VERSION_DEFAULT),
+        machine_id: Cow::Borrowed(MACHINE_ID_DEFAULT),

-        track: Cow::Borrowed("stable"),
+        track: Cow::Borrowed(TRACK_DEFAULT),
     };

-    let response = request::perform(&client, parameters).await?;
+    let response = request::perform(&client, parameters).await.context(format!(
+        "perform({APP_VERSION_DEFAULT}, {MACHINE_ID_DEFAULT}, {TRACK_DEFAULT}) failed"
+    ))?;

     println!("response:\n\t{:#?}", response);
diff --git a/examples/response.rs b/examples/response.rs
index b86115b..50194dc 100644
--- a/examples/response.rs
+++ b/examples/response.rs
@@ -1,5 +1,6 @@
 use std::error::Error;

+use anyhow::Context;
 use hard_xml::XmlRead;
 use omaha;

@@ -29,7 +30,7 @@ fn main() -> Result<(), Box<dyn Error>> {
     println!("{}", RESPONSE_XML);
     println!();

-    let resp = omaha::Response::from_str(RESPONSE_XML)?;
+    let resp = omaha::Response::from_str(RESPONSE_XML).context("failed to create response")?;

     println!("{:#?}", resp);
     println!();
@@ -66,7 +67,10 @@ fn main() -> Result<(), Box<dyn Error>> {

         println!("  urls:");
         for url in &app.update_check.urls {
-            println!("    {}", url.join(&pkg.name)?);
+            println!(
+                "    {}",
+                url.join(&pkg.name).context(format!("failed to join URL with {:?}", pkg.name))?
+            );
         }

         println!();
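One note on the examples above: `.context(format!(...))` builds its message eagerly, even on the success path. anyhow also provides `.with_context(|| ...)`, which defers building the message to the error path. A small sketch with a hypothetical `load` helper:

```rust
use std::path::Path;
use anyhow::{Context, Result};

fn load(path: &Path) -> Result<String> {
    // Eager: the format! call runs even when the read succeeds.
    let _eager = std::fs::read_to_string(path).context(format!("failed to read {:?}", path.display()));

    // Lazy: the closure only runs if the read fails.
    std::fs::read_to_string(path).with_context(|| format!("failed to read {:?}", path.display()))
}
```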
diff --git a/omaha/Cargo.toml b/omaha/Cargo.toml
index c4ccb59..16d7316 100644
--- a/omaha/Cargo.toml
+++ b/omaha/Cargo.toml
@@ -9,6 +9,7 @@ edition = "2021"
 uuid = "1.2"
 ct-codecs = "1"
 url = "2"
+anyhow = "1.0.75"

 [dependencies.hard-xml]
 path = "../vendor/hard-xml"
diff --git a/omaha/src/hash_types.rs b/omaha/src/hash_types.rs
index a0bdcc4..308f472 100644
--- a/omaha/src/hash_types.rs
+++ b/omaha/src/hash_types.rs
@@ -1,10 +1,10 @@
 use std::fmt;
 use std::str;

+use anyhow::{Error as CodecError, anyhow};
+
 #[rustfmt::skip]
 use ct_codecs::{
-    Error as CodecError,
-
     Base64,
     Hex,
@@ -82,9 +82,9 @@ impl<T: HashAlgo> Into<Vec<u8>> for Hash<T> {

 impl<T: HashAlgo> Hash<T> {
     #[inline]
-    fn decode<D: Decoder>(hash: &str) -> Result<Self, CodecError> {
+    fn decode<D: Decoder>(hash: &str) -> anyhow::Result<Self> {
         let mut digest = T::Output::default();
-        D::decode(digest.as_mut(), hash, None)?;
+        D::decode(digest.as_mut(), hash, None).map_err(|_| anyhow!("decode ({}) failed", hash))?;

         Ok(Self(digest))
     }
diff --git a/omaha/src/response.rs b/omaha/src/response.rs
index 7094bb7..0e21503 100644
--- a/omaha/src/response.rs
+++ b/omaha/src/response.rs
@@ -11,10 +11,10 @@ use self::omaha::{Sha1, Sha256};
 mod sha256_hex {
     use crate as omaha;
     use self::omaha::Sha256;
-    use ct_codecs::Error;
+    use anyhow::Error as CodecError;

     #[inline]
-    pub(crate) fn from_str(s: &str) -> Result<omaha::Hash<Sha256>, Error> {
+    pub(crate) fn from_str(s: &str) -> Result<omaha::Hash<Sha256>, CodecError> {
         <omaha::Hash<Sha256>>::from_hex(s)
     }
 }
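`Hash::decode` above uses `map_err(|_| anyhow!(...))` rather than plain `?`, presumably because `ct_codecs::Error` does not implement `std::error::Error` and so cannot convert into `anyhow::Error` automatically. A self-contained sketch of the same bridge, with a stand-in error type and a hypothetical `hex_decode` helper:

```rust
use anyhow::{anyhow, Result};

// Stand-in for a foreign error type that does not implement
// std::error::Error; `?` alone cannot turn it into anyhow::Error.
#[derive(Debug)]
struct CodecError;

// Hypothetical decoder, present only to make the sketch runnable.
fn hex_decode(s: &str) -> Result<Vec<u8>, CodecError> {
    (0..s.len())
        .step_by(2)
        .map(|i| {
            let pair = s.get(i..i + 2).ok_or(CodecError)?;
            u8::from_str_radix(pair, 16).map_err(|_| CodecError)
        })
        .collect()
}

// The bridge: map_err converts the foreign error into anyhow::Error.
fn decode(hash: &str) -> Result<Vec<u8>> {
    hex_decode(hash).map_err(|_| anyhow!("decode ({}) failed", hash))
}
```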
diff --git a/src/bin/download_sysext.rs b/src/bin/download_sysext.rs
index 4fa97dc..d4a8c70 100644
--- a/src/bin/download_sysext.rs
+++ b/src/bin/download_sysext.rs
@@ -10,6 +10,7 @@ use std::io::BufReader;
 #[macro_use]
 extern crate log;

+use anyhow::{Context, Result, bail};
 use globset::{Glob, GlobSet, GlobSetBuilder};
 use hard_xml::XmlRead;
 use argh::FromArgs;
@@ -42,10 +43,12 @@ impl<'a> Package<'a> {
     // Return Sha256 hash of data in the given path.
     // If maxlen is None, a simple read to the end of the file.
     // If maxlen is Some, read only until the given length.
-    fn hash_on_disk(&mut self, path: &Path, maxlen: Option<usize>) -> Result<omaha::Hash<omaha::Sha256>, Box<dyn Error>> {
+    fn hash_on_disk(&mut self, path: &Path, maxlen: Option<usize>) -> Result<omaha::Hash<omaha::Sha256>> {
         use sha2::{Sha256, Digest};

-        let file = File::open(path)?;
+        let file = File::open(path).context({
+            format!("failed to open path({:?})", path.display())
+        })?;
         let mut hasher = Sha256::new();

         let filelen = file.metadata().unwrap().len() as usize;
@@ -66,7 +69,7 @@ impl<'a> Package<'a> {
         let mut freader = BufReader::new(file);
         let mut chunklen: usize;

-        freader.seek(SeekFrom::Start(0))?;
+        freader.seek(SeekFrom::Start(0)).context("failed to seek(0)".to_string())?;
         while maxlen_to_read > 0 {
             if maxlen_to_read < CHUNKLEN {
                 chunklen = maxlen_to_read;
@@ -76,7 +79,7 @@ impl<'a> Package<'a> {

             let mut databuf = vec![0u8; chunklen];

-            freader.read_exact(&mut databuf)?;
+            freader.read_exact(&mut databuf).context(format!("failed to read_exact(chunklen {:?})", chunklen))?;

             maxlen_to_read -= chunklen;

@@ -89,7 +92,7 @@ impl<'a> Package<'a> {
     }

     #[rustfmt::skip]
-    fn check_download(&mut self, in_dir: &Path) -> Result<(), Box<dyn Error>> {
+    fn check_download(&mut self, in_dir: &Path) -> Result<()> {
         let path = in_dir.join(&*self.name);

         if !path.exists() {
@@ -98,7 +101,9 @@ impl<'a> Package<'a> {
             return Ok(());
         }

-        let md = fs::metadata(&path)?;
+        let md = fs::metadata(&path).context({
+            format!("failed to get metadata, path ({:?})", path.display())
+        })?;

         let size_on_disk = md.len() as usize;
         let expected_size = self.size.bytes();
@@ -114,7 +119,9 @@ impl<'a> Package<'a> {

         if size_on_disk == expected_size {
             info!("{}: download complete, checking hash...", path.display());
-            let hash = self.hash_on_disk(&path, None)?;
+            let hash = self.hash_on_disk(&path, None).context({
+                format!("failed to hash_on_disk, path ({:?})", path.display())
+            })?;
             if self.verify_checksum(hash) {
                 info!("{}: good hash, will continue without re-download", path.display());
             } else {
@@ -126,7 +133,7 @@ impl<'a> Package<'a> {
         Ok(())
     }

-    async fn download(&mut self, into_dir: &Path, client: &reqwest::Client) -> Result<(), Box<dyn Error>> {
+    async fn download(&mut self, into_dir: &Path, client: &reqwest::Client) -> Result<()> {
         // FIXME: use _range_start for completing downloads
         let _range_start = match self.status {
             PackageStatus::ToDownload => 0,
@@ -137,14 +144,14 @@ impl<'a> Package<'a> {
         info!("downloading {}...", self.url);

         let path = into_dir.join(&*self.name);
-        let mut file = File::create(path)?;
+        let mut file = File::create(path.clone()).context(format!("failed to create path ({:?})", path.display()))?;

         let res = match ue_rs::download_and_hash(&client, self.url.clone(), &mut file).await {
             Ok(ok) => ok,
             Err(err) => {
                 error!("Downloading failed with error {}", err);
                 self.status = PackageStatus::DownloadFailed;
-                return Err("unable to download data".into());
+                bail!("unable to download data(url {})", self.url);
             }
         };
@@ -166,19 +173,19 @@ impl<'a> Package<'a> {
         }
     }

-    fn verify_signature_on_disk(&mut self, from_path: &Path, pubkey_path: &str) -> Result<omaha::Hash<omaha::Sha256>, Box<dyn Error>> {
-        let upfile = File::open(from_path)?;
+    fn verify_signature_on_disk(&mut self, from_path: &Path, pubkey_path: &str) -> Result<omaha::Hash<omaha::Sha256>> {
+        let upfile = File::open(from_path).context(format!("failed to open path ({:?})", from_path.display()))?;

         // create a BufReader to pass down to parsing functions.
         let upfreader = &mut BufReader::new(upfile);

         // Read update payload from file, read delta update header from the payload.
-        let header = delta_update::read_delta_update_header(upfreader)?;
+        let header = delta_update::read_delta_update_header(upfreader).context(format!("failed to read_delta_update_header path ({:?})", from_path.display()))?;

-        let mut delta_archive_manifest = delta_update::get_manifest_bytes(upfreader, &header)?;
+        let mut delta_archive_manifest = delta_update::get_manifest_bytes(upfreader, &header).context(format!("failed to get_manifest_bytes path ({:?})", from_path.display()))?;

         // Extract signature from header.
-        let sigbytes = delta_update::get_signatures_bytes(upfreader, &header, &mut delta_archive_manifest)?;
+        let sigbytes = delta_update::get_signatures_bytes(upfreader, &header, &mut delta_archive_manifest).context(format!("failed to get_signatures_bytes path ({:?})", from_path.display()))?;

         // tmp dir == "/var/tmp/outdir/.tmp"
         let tmpdirpathbuf = from_path.parent().unwrap().parent().unwrap().join(".tmp");
@@ -187,21 +194,25 @@ impl<'a> Package<'a> {

         // Get length of header and data, including header and manifest.
         let header_data_length = delta_update::get_header_data_length(&header, &delta_archive_manifest);
-        let hdhash = self.hash_on_disk(from_path, Some(header_data_length))?;
-        let hdhashvec: Vec<u8> = hdhash.into();
+        let hdhash = self.hash_on_disk(from_path, Some(header_data_length)).context(format!("failed to hash_on_disk path ({:?})", from_path.display()))?;
+        let hdhashvec: Vec<u8> = hdhash.clone().into();

         // Extract data blobs into a file, datablobspath.
-        delta_update::get_data_blobs(upfreader, &header, &delta_archive_manifest, datablobspath.as_path())?;
+        delta_update::get_data_blobs(upfreader, &header, &delta_archive_manifest, datablobspath.as_path()).context(format!("failed to get_data_blobs path ({:?})", datablobspath.display()))?;

         // Check for hash of data blobs with new_partition_info hash.
         let pinfo_hash = match &delta_archive_manifest.new_partition_info.hash {
             Some(hash) => hash,
-            None => return Err("unable to parse signature data".into()),
+            None => bail!("unable to get new_partition_info hash"),
         };

-        let datahash = self.hash_on_disk(datablobspath.as_path(), None)?;
+        let datahash = self.hash_on_disk(datablobspath.as_path(), None).context(format!("failed to hash_on_disk path ({:?})", datablobspath.display()))?;
         if datahash != omaha::Hash::from_bytes(pinfo_hash.as_slice()[..].try_into().unwrap_or_default()) {
-            return Err("data hash mismatch with new_partition_info hash".into());
+            bail!(
+                "mismatch of data hash ({:?}) with new_partition_info hash ({:?})",
+                datahash,
+                pinfo_hash
+            );
         }

         // Parse signature data from sig blobs, data blobs, public key, and verify.
@@ -209,7 +220,12 @@ impl<'a> Package<'a> {
             Some(_) => (),
             _ => {
                 self.status = PackageStatus::BadSignature;
-                return Err("unable to parse and verify signature data".into());
+                bail!(
+                    "unable to parse and verify signature, sigbytes ({:?}), hdhash ({:?}), pubkey_path ({:?})",
+                    sigbytes,
+                    hdhash,
+                    pubkey_path
+                );
             }
         };
@@ -222,7 +238,7 @@ impl<'a> Package<'a> {

 #[rustfmt::skip]
 fn get_pkgs_to_download<'a>(resp: &'a omaha::Response, glob_set: &GlobSet)
-        -> Result<Vec<Package<'a>>, Box<dyn Error>> {
+        -> Result<Vec<Package<'a>>> {
     let mut to_download: Vec<_> = Vec::new();

     for app in &resp.apps {
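For reference, the bounded, chunked hashing that `hash_on_disk` performs can be sketched as follows. This is not the function above: `sha256_prefix` is a hypothetical stand-in, and the `CHUNKLEN` value here is an assumption.

```rust
use std::fs::File;
use std::io::{BufReader, Read};
use std::path::Path;
use anyhow::{Context, Result};
use sha2::{Digest, Sha256};

const CHUNKLEN: usize = 10 * 1024 * 1024; // assumed chunk size

// Hash at most `maxlen` bytes of `path` in fixed-size chunks, so that
// arbitrarily large files never have to fit in memory at once.
fn sha256_prefix(path: &Path, maxlen: Option<usize>) -> Result<[u8; 32]> {
    let file = File::open(path).context(format!("failed to open path({:?})", path.display()))?;
    let filelen = file.metadata().context("failed to get metadata")?.len() as usize;
    let mut remaining = maxlen.unwrap_or(filelen).min(filelen);

    let mut freader = BufReader::new(file);
    let mut hasher = Sha256::new();
    while remaining > 0 {
        let chunklen = remaining.min(CHUNKLEN);
        let mut databuf = vec![0u8; chunklen];
        freader.read_exact(&mut databuf).context(format!("failed to read_exact(chunklen {:?})", chunklen))?;
        hasher.update(&databuf);
        remaining -= chunklen;
    }
    Ok(hasher.finalize().into())
}
```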
diff --git a/src/download.rs b/src/download.rs
index 2df893c..d2f76bf 100644
--- a/src/download.rs
+++ b/src/download.rs
@@ -1,4 +1,4 @@
-use std::error::Error;
+use anyhow::{Context, Result, bail};
 use std::io::Write;
 use std::io;
 use log::warn;
@@ -12,15 +12,18 @@ pub struct DownloadResult<W: std::io::Write> {
     pub data: W,
 }

-pub async fn download_and_hash<U, W>(client: &reqwest::Client, url: U, mut data: W) -> Result<DownloadResult<W>, Box<dyn Error>>
+pub async fn download_and_hash<U, W>(client: &reqwest::Client, url: U, mut data: W) -> Result<DownloadResult<W>>
 where
-    U: reqwest::IntoUrl,
+    U: reqwest::IntoUrl + Clone,
     W: io::Write,
 {
+    let client_url = url.clone();
+
     #[rustfmt::skip]
     let mut res = client.get(url)
         .send()
-        .await?;
+        .await
+        .context(format!("client get and send({:?}) failed", client_url.as_str()))?;

     // Return immediately on download failure on the client side.
     let status = res.status();
@@ -33,9 +36,9 @@ where
     if !status.is_success() {
         match status {
             StatusCode::FORBIDDEN | StatusCode::NOT_FOUND => {
-                return Err(format!("cannnot fetch remotely with status code {:?}", status).into());
+                bail!("cannot fetch remotely with status code {:?}", status);
             }
-            _ => return Err(format!("general failure with status code {:?}", status).into()),
+            _ => bail!("general failure with status code {:?}", status),
         }
     }

@@ -44,11 +47,11 @@ where
     let mut bytes_read = 0usize;
     let bytes_to_read = res.content_length().unwrap_or(u64::MAX) as usize;

-    while let Some(chunk) = res.chunk().await? {
+    while let Some(chunk) = res.chunk().await.context("failed to get response chunk")? {
         bytes_read += chunk.len();

         hasher.update(&chunk);
-        data.write_all(&chunk)?;
+        data.write_all(&chunk).context("failed to write_all chunk")?;

         // TODO: better way to report progress?
         print!(
@@ -57,10 +60,10 @@ where
             bytes_to_read,
             ((bytes_read as f32 / bytes_to_read as f32) * 100.0f32).floor()
         );
-        io::stdout().flush()?;
+        io::stdout().flush().context("failed to flush stdout")?;
     }

-    data.flush()?;
+    data.flush().context("failed to flush data")?;
     println!();

     Ok(DownloadResult {
diff --git a/src/request.rs b/src/request.rs
index de9900e..3701aa8 100644
--- a/src/request.rs
+++ b/src/request.rs
@@ -1,6 +1,6 @@
-use std::error::Error;
 use std::borrow::Cow;

+use anyhow::{Context, Result};
 use hard_xml::XmlWrite;
 use omaha;

@@ -30,7 +30,7 @@ pub struct Parameters<'a> {
     pub machine_id: Cow<'a, str>,
 }

-pub async fn perform<'a>(client: &reqwest::Client, parameters: Parameters<'a>) -> Result<String, Box<dyn Error>> {
+pub async fn perform<'a>(client: &reqwest::Client, parameters: Parameters<'a>) -> Result<String> {
     let req_body = {
         let r = omaha::Request {
             protocol_version: Cow::Borrowed(PROTOCOL_VERSION),
@@ -69,7 +69,7 @@ pub async fn perform<'a>(client: &reqwest::Client, parameters: Parameters<'a>) -
             ],
         };

-        r.to_string()?
+        r.to_string().context("failed to convert to string")?
     };

     // TODO: remove
@@ -80,7 +80,8 @@ pub async fn perform<'a>(client: &reqwest::Client, parameters: Parameters<'a>) -
     let resp = client.post(UPDATE_URL)
         .body(req_body)
         .send()
-        .await?;
+        .await
+        .context(format!("client post send({UPDATE_URL}) failed"))?;

-    Ok(resp.text().await?)
+    Ok(resp.text().await.context("failed to get response")?)
 }
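`bail!(msg, ...)` is shorthand for `return Err(anyhow!(msg, ...))`, which is why the `return Err(format!(...).into())` lines above collapse into single macro calls. A minimal sketch of the status-code handling style used in `download_and_hash`:

```rust
use anyhow::{bail, Result};
use reqwest::StatusCode;

// Early-return on HTTP failure; success falls through to Ok(()).
fn check_status(status: StatusCode) -> Result<()> {
    if status.is_success() {
        return Ok(());
    }
    match status {
        StatusCode::FORBIDDEN | StatusCode::NOT_FOUND => {
            bail!("cannot fetch remotely with status code {:?}", status)
        }
        _ => bail!("general failure with status code {:?}", status),
    }
}
```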
diff --git a/test/crau_verify.rs b/test/crau_verify.rs
index 2c4fbca..e2b9fee 100644
--- a/test/crau_verify.rs
+++ b/test/crau_verify.rs
@@ -8,6 +8,7 @@ use tempfile;

 use update_format_crau::{delta_update, proto};

+use anyhow::{Context, Result};
 use argh::FromArgs;

 const PUBKEY_FILE: &str = "../src/testdata/public_key_test_pkcs8.pem";
@@ -24,13 +25,13 @@ struct Args {
     sig_path: String,
 }

-fn hash_on_disk(path: &Path) -> Result<omaha::Hash<omaha::Sha256>, Box<dyn Error>> {
+fn hash_on_disk(path: &Path) -> Result<omaha::Hash<omaha::Sha256>> {
     use sha2::{Sha256, Digest};

-    let mut file = File::open(path)?;
+    let mut file = File::open(path).context(format!("failed to open path({:?})", path.display()))?;
     let mut hasher = Sha256::new();

-    io::copy(&mut file, &mut hasher)?;
+    io::copy(&mut file, &mut hasher).context(format!("failed to copy data path ({:?})", path.display()))?;

     Ok(omaha::Hash::from_bytes(hasher.finalize().into()))
 }
diff --git a/update-format-crau/Cargo.toml b/update-format-crau/Cargo.toml
index a6585e7..bb98898 100644
--- a/update-format-crau/Cargo.toml
+++ b/update-format-crau/Cargo.toml
@@ -6,6 +6,7 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
+anyhow = "1.0.75"
 bzip2 = "0.4.4"
 log = "0.4.19"
 protobuf = "3"
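The `io::copy` form of `hash_on_disk` in test/crau_verify.rs works because `sha2::Sha256` implements `std::io::Write`, so the whole file can be streamed through the hasher without an explicit read loop. Roughly:

```rust
use std::fs::File;
use std::io;
use std::path::Path;
use anyhow::{Context, Result};
use sha2::{Digest, Sha256};

// Stream a file through the hasher; io::copy does the buffering.
fn sha256_of(path: &Path) -> Result<[u8; 32]> {
    let mut file = File::open(path).context(format!("failed to open path({:?})", path.display()))?;
    let mut hasher = Sha256::new();
    io::copy(&mut file, &mut hasher).context(format!("failed to copy data path ({:?})", path.display()))?;
    Ok(hasher.finalize().into())
}
```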
diff --git a/update-format-crau/src/delta_update.rs b/update-format-crau/src/delta_update.rs
index 8fa5c37..6be6e44 100644
--- a/update-format-crau/src/delta_update.rs
+++ b/update-format-crau/src/delta_update.rs
@@ -1,10 +1,10 @@
 use std::io::{BufReader, Read, Seek, SeekFrom, Write};
-use std::error::Error;
 use std::fs;
 use std::fs::File;
 use std::path::Path;
 use log::{error, debug};
 use bzip2::read::BzDecoder;
+use anyhow::{Context, Result, bail};

 use protobuf::Message;
@@ -32,26 +32,26 @@ impl DeltaUpdateFileHeader {
 }

 // Read delta update header from the given file, return DeltaUpdateFileHeader.
-pub fn read_delta_update_header(f: &mut BufReader<File>) -> Result<DeltaUpdateFileHeader, Box<dyn Error>> {
+pub fn read_delta_update_header(f: &mut BufReader<File>) -> Result<DeltaUpdateFileHeader> {
     let mut header = DeltaUpdateFileHeader {
         magic: [0; 4],
         file_format_version: 0,
         manifest_size: 0,
     };

-    f.read_exact(&mut header.magic)?;
+    f.read_exact(&mut header.magic).context("failed to read header magic")?;
     if header.magic != DELTA_UPDATE_FILE_MAGIC {
-        return Err("bad file magic".into());
+        bail!("bad file magic");
     }

     let mut buf = [0u8; 8];
-    f.read_exact(&mut buf)?;
+    f.read_exact(&mut buf).context("failed to read file format version")?;
     header.file_format_version = u64::from_be_bytes(buf);
     if header.file_format_version != 1 {
-        return Err("unsupported file format version".into());
+        bail!("unsupported file format version");
     }

-    f.read_exact(&mut buf)?;
+    f.read_exact(&mut buf).context("failed to read manifest size")?;
     header.manifest_size = u64::from_be_bytes(buf);

     Ok(header)
@@ -59,21 +59,21 @@ pub fn read_delta_update_header(f: &mut BufReader<File>) -> Result<DeltaUpdateFi

 // Take a buffer reader, delta file header.
 // Return DeltaArchiveManifest that contains manifest.
-pub fn get_manifest_bytes(f: &mut BufReader<File>, header: &DeltaUpdateFileHeader) -> Result<proto::DeltaArchiveManifest, Box<dyn Error>> {
+pub fn get_manifest_bytes(f: &mut BufReader<File>, header: &DeltaUpdateFileHeader) -> Result<proto::DeltaArchiveManifest> {
     let manifest_bytes = {
         let mut buf = vec![0u8; header.manifest_size as usize];
-        f.read_exact(&mut buf)?;
+        f.read_exact(&mut buf).context("failed to read manifest bytes")?;
         buf.into_boxed_slice()
     };

-    let delta_archive_manifest = proto::DeltaArchiveManifest::parse_from_bytes(&manifest_bytes)?;
+    let delta_archive_manifest = proto::DeltaArchiveManifest::parse_from_bytes(&manifest_bytes).context("failed to parse manifest")?;

     Ok(delta_archive_manifest)
 }

 // Take a buffer stream and DeltaUpdateFileHeader,
 // return a bytes slice of the actual signature data as well as its length.
-pub fn get_signatures_bytes<'a>(f: &'a mut BufReader<File>, header: &'a DeltaUpdateFileHeader, manifest: &mut proto::DeltaArchiveManifest) -> Result<Option<Box<[u8]>>, Box<dyn Error>> {
+pub fn get_signatures_bytes<'a>(f: &'a mut BufReader<File>, header: &'a DeltaUpdateFileHeader, manifest: &mut proto::DeltaArchiveManifest) -> Result<Option<Box<[u8]>>> {
     // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
     // !!! signature offsets are from the END of the manifest !!!
     // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -82,10 +82,10 @@ pub fn get_signatures_bytes<'a>(f: &'a mut BufReader<File>, header: &'a DeltaUpd
     let signatures_bytes = match (manifest.signatures_offset, manifest.signatures_size) {
         (Some(sig_offset), Some(sig_size)) => {
-            f.seek(SeekFrom::Start(header.translate_offset(sig_offset)))?;
+            f.seek(SeekFrom::Start(header.translate_offset(sig_offset))).context("failed to seek to start of signature")?;

             let mut buf = vec![0u8; sig_size as usize];
-            f.read_exact(&mut buf)?;
+            f.read_exact(&mut buf).context("failed to read signature")?;

             Some(buf.into_boxed_slice())
         }
         _ => None,
@@ -108,9 +108,10 @@ pub fn get_header_data_length(header: &DeltaUpdateFileHeader, manifest: &proto::

 // Take a buffer reader, delta file header, manifest as input.
 // Return path to data blobs, without header, manifest, or signatures.
-pub fn get_data_blobs<'a>(f: &'a mut BufReader<File>, header: &'a DeltaUpdateFileHeader, manifest: &proto::DeltaArchiveManifest, tmppath: &Path) -> Result<File, Box<dyn Error>> {
-    fs::create_dir_all(tmppath.parent().unwrap())?;
-    let mut outfile = File::create(tmppath)?;
+pub fn get_data_blobs<'a>(f: &'a mut BufReader<File>, header: &'a DeltaUpdateFileHeader, manifest: &proto::DeltaArchiveManifest, tmpfile: &Path) -> Result<File> {
+    let tmpdir = tmpfile.parent().unwrap();
+    fs::create_dir_all(tmpdir).context(format!("failed to create directory {:?}", tmpdir))?;
+    let mut outfile = File::create(tmpfile).context(format!("failed to create file {:?}", tmpfile))?;

     // Read from the beginning of header, which means buffer including only data blobs.
     // It means it is necessary to call header.translate_offset(), in contrast to
@@ -122,20 +123,21 @@ pub fn get_data_blobs<'a>(f: &'a mut BufReader<File>, header: &'a DeltaUpdateFil

         let mut partdata = vec![0u8; data_length as usize];

-        f.seek(SeekFrom::Start(header.translate_offset(data_offset.into())))?;
-        f.read_exact(&mut partdata)?;
+        let translated_offset = header.translate_offset(data_offset.into());
+        f.seek(SeekFrom::Start(translated_offset)).context(format!("failed to seek at offset {:?}", translated_offset))?;
+        f.read_exact(&mut partdata).context(format!("failed to read data with length {:?}", data_length))?;

         // In case of bzip2-compressed chunks, extract.
         if pop.type_.unwrap() == proto::install_operation::Type::REPLACE_BZ.into() {
             let mut bzdecoder = BzDecoder::new(&partdata[..]);
             let mut partdata_unpacked = Vec::new();
-            bzdecoder.read_to_end(&mut partdata_unpacked)?;
+            bzdecoder.read_to_end(&mut partdata_unpacked).context(format!("failed to unpack bzip2ed data at offset {:?}", translated_offset))?;

-            outfile.write_all(&partdata_unpacked)?;
+            outfile.write_all(&partdata_unpacked).context(format!("failed to copy unpacked data at offset {:?}", translated_offset))?;
         } else {
-            outfile.write_all(&partdata)?;
+            outfile.write_all(&partdata).context(format!("failed to copy plain data at offset {:?}", translated_offset))?;
         }
-        outfile.flush()?;
+        outfile.flush().context(format!("failed to flush at offset {:?}", translated_offset))?;
     }

     Ok(outfile)
@@ -184,11 +186,19 @@ pub fn verify_sig_pubkey(digest: &[u8], sig: &Signature, pubkeyfile: &str) -> Op
     debug!("special_fields: {:?}", sig.special_fields());

     // verify signature with pubkey
-    let res_verify = verify_sig::verify_rsa_pkcs_prehash(&digest, sig.data(), get_public_key_pkcs_pem(pubkeyfile, KeyTypePkcs8));
+    let pkcspem_pubkey = match get_public_key_pkcs_pem(pubkeyfile, KeyTypePkcs8) {
+        Ok(key) => key,
+        Err(err) => {
+            error!("failed to get PKCS8 PEM public key ({:?}) with error {:?}", pubkeyfile, err);
+            return None;
+        }
+    };
+
+    let res_verify = verify_sig::verify_rsa_pkcs_prehash(digest, sig.data(), pkcspem_pubkey);
     match res_verify {
         Ok(res_verify) => res_verify,
         Err(err) => {
-            error!("verify_rsa_pkcs signature ({}) failed with {}", sig, err);
+            error!("verify_rsa_pkcs signature ({:?}) failed with error {:?}", sig, err);
             return None;
         }
     };
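`read_delta_update_header` above follows the usual binary-header pattern: fixed-size `read_exact` calls, `u64::from_be_bytes` for integer fields, and `bail!` on validation failure. A compact sketch over any `Read` (the `b"CrAU"` magic here is an assumption standing in for `DELTA_UPDATE_FILE_MAGIC`):

```rust
use std::io::Read;
use anyhow::{bail, Context, Result};

// Parse: 4 magic bytes, then two big-endian u64 fields
// (file format version and manifest size).
fn read_header<R: Read>(r: &mut R) -> Result<(u64, u64)> {
    let mut magic = [0u8; 4];
    r.read_exact(&mut magic).context("failed to read header magic")?;
    if &magic != b"CrAU" {
        bail!("bad file magic");
    }

    let mut buf = [0u8; 8];
    r.read_exact(&mut buf).context("failed to read file format version")?;
    let file_format_version = u64::from_be_bytes(buf);
    if file_format_version != 1 {
        bail!("unsupported file format version");
    }

    r.read_exact(&mut buf).context("failed to read manifest size")?;
    let manifest_size = u64::from_be_bytes(buf);

    Ok((file_format_version, manifest_size))
}
```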
diff --git a/update-format-crau/src/verify_sig.rs b/update-format-crau/src/verify_sig.rs
index 7a6ac16..fcfaff2 100644
--- a/update-format-crau/src/verify_sig.rs
+++ b/update-format-crau/src/verify_sig.rs
@@ -1,3 +1,4 @@
+use anyhow::{Context, Result, bail};
 use rsa::{RsaPrivateKey, RsaPublicKey};
 use rsa::pkcs1::{DecodeRsaPrivateKey, DecodeRsaPublicKey};
 use rsa::pkcs8::{DecodePrivateKey, DecodePublicKey};
@@ -6,7 +7,6 @@ use rsa::signature::{SignatureEncoding, Signer, Verifier};
 use rsa::signature::hazmat::PrehashVerifier;
 use rsa::sha2::Sha256;
 use std::{fs, str};
-use std::error::Error;

 #[derive(Debug)]
 pub enum KeyType {
@@ -20,7 +20,7 @@ pub enum KeyType {

 // Takes a data buffer and a private key, to sign the data
 // with the private key and verify the data with the public key.
-pub fn sign_rsa_pkcs(databuf: &[u8], private_key: RsaPrivateKey) -> Result<Vec<u8>, Box<dyn Error>> {
+pub fn sign_rsa_pkcs(databuf: &[u8], private_key: RsaPrivateKey) -> Result<Vec<u8>> {
     let signing_key = pkcs1v15::SigningKey::<Sha256>::new(private_key);
     let signature = signing_key.sign(databuf);
@@ -33,14 +34,14 @@ pub fn sign_rsa_pkcs(databuf: &[u8], private_key: RsaPrivateKey) -> Result<Vec<

 // Takes a data buffer, signature and a public key, to verify the data
 // with the public key.
-pub fn verify_rsa_pkcs_buf(databuf: &[u8], signature: &[u8], public_key: RsaPublicKey) -> Result<(), Box<dyn Error>> {
+pub fn verify_rsa_pkcs_buf(databuf: &[u8], signature: &[u8], public_key: RsaPublicKey) -> Result<()> {
     // Equivalent of:
     //   openssl rsautl -verify -pubin -key |public_key_path|
     //     -in |sig_data| -out |out_hash_data|
     let verifying_key = pkcs1v15::VerifyingKey::<Sha256>::new(public_key);

-    Ok(verifying_key.verify(databuf, &pkcs1v15::Signature::try_from(signature).unwrap())?)
+    verifying_key.verify(databuf, &pkcs1v15::Signature::try_from(signature).unwrap()).context(format!("failed to verify signature ({:?})", signature))
 }

 // Takes a data buffer, signature and a public key, to verify the data
@@ -49,40 +50,40 @@ pub fn verify_rsa_pkcs_buf(databuf: &[u8], signature: &[u8], public_key: RsaPubl
 // buffer, so it does not have a limitation of max size of input data.
 // It relies on RSA PrehashVerifier.
 // TODO: consider migrating to RSA DigestVerifier.
-pub fn verify_rsa_pkcs_prehash(digestbuf: &[u8], signature: &[u8], public_key: RsaPublicKey) -> Result<(), Box<dyn Error>> {
+pub fn verify_rsa_pkcs_prehash(digestbuf: &[u8], signature: &[u8], public_key: RsaPublicKey) -> Result<()> {
     let verifying_key = pkcs1v15::VerifyingKey::<Sha256>::new(public_key);

-    Ok(verifying_key.verify_prehash(digestbuf, &pkcs1v15::Signature::try_from(signature).unwrap())?)
+    verifying_key.verify_prehash(digestbuf, &pkcs1v15::Signature::try_from(signature).unwrap()).context(format!("failed to verify_prehash signature ({:?})", signature))
 }

-pub fn get_private_key_pkcs_pem(private_key_path: &str, key_type: KeyType) -> RsaPrivateKey {
+pub fn get_private_key_pkcs_pem(private_key_path: &str, key_type: KeyType) -> Result<RsaPrivateKey> {
     let private_key_buf = fs::read_to_string(private_key_path).unwrap();
     let out_key = match key_type {
-        KeyType::KeyTypePkcs1 => RsaPrivateKey::from_pkcs1_pem(private_key_buf.as_str()).unwrap_or_else(|error| {
-            panic!("failed to parse PKCS1 PEM message: {:?}", error);
+        KeyType::KeyTypePkcs1 => RsaPrivateKey::from_pkcs1_pem(private_key_buf.as_str()).or_else(|error| {
+            bail!("failed to parse PKCS1 PEM message: {:?}", error);
         }),
-        KeyType::KeyTypePkcs8 => RsaPrivateKey::from_pkcs8_pem(private_key_buf.as_str()).unwrap_or_else(|error| {
-            panic!("failed to parse PKCS8 PEM message: {:?}", error);
+        KeyType::KeyTypePkcs8 => RsaPrivateKey::from_pkcs8_pem(private_key_buf.as_str()).or_else(|error| {
+            bail!("failed to parse PKCS8 PEM message: {:?}", error);
         }),
         KeyType::KeyTypeNone => {
-            panic!("invalid key type: {:?}", key_type);
+            bail!("invalid key type: {:?}", key_type);
         }
     };

     out_key
 }

-pub fn get_public_key_pkcs_pem(public_key_path: &str, key_type: KeyType) -> RsaPublicKey {
+pub fn get_public_key_pkcs_pem(public_key_path: &str, key_type: KeyType) -> Result<RsaPublicKey> {
     let public_key_buf = fs::read_to_string(public_key_path).unwrap();
     let out_key = match key_type {
-        KeyType::KeyTypePkcs1 => RsaPublicKey::from_pkcs1_pem(public_key_buf.as_str()).unwrap_or_else(|error| {
-            panic!("failed to parse PKCS1 PEM message: {:?}", error);
+        KeyType::KeyTypePkcs1 => RsaPublicKey::from_pkcs1_pem(public_key_buf.as_str()).or_else(|error| {
+            bail!("failed to parse PKCS1 PEM message: {:?}", error);
         }),
-        KeyType::KeyTypePkcs8 => RsaPublicKey::from_public_key_pem(public_key_buf.as_str()).unwrap_or_else(|error| {
-            panic!("failed to parse PKCS8 PEM message: {:?}", error);
+        KeyType::KeyTypePkcs8 => RsaPublicKey::from_public_key_pem(public_key_buf.as_str()).or_else(|error| {
+            bail!("failed to parse PKCS8 PEM message: {:?}", error);
         }),
         KeyType::KeyTypeNone => {
-            panic!("invalid key type: {:?}", key_type);
+            bail!("invalid key type: {:?}", key_type);
         }
     };

     out_key
 }
@@ -103,28 +103,36 @@ mod tests {
     #[test]
     fn test_verify_sig() {
         // PKCS1
-        let signature = sign_rsa_pkcs(TESTDATA.as_bytes(), get_private_key_pkcs_pem(PRIVKEY_PKCS1_PATH, KeyTypePkcs1)).unwrap_or_else(|error| {
+        let signature = sign_rsa_pkcs(
+            TESTDATA.as_bytes(),
+            get_private_key_pkcs_pem(PRIVKEY_PKCS1_PATH, KeyTypePkcs1).unwrap(),
+        )
+        .unwrap_or_else(|error| {
             panic!("failed to sign data: {:?}", error);
         });
         _ = verify_rsa_pkcs_buf(
             TESTDATA.as_bytes(),
             signature.as_slice(),
-            get_public_key_pkcs_pem(PUBKEY_PKCS1_PATH, KeyTypePkcs1),
+            get_public_key_pkcs_pem(PUBKEY_PKCS1_PATH, KeyTypePkcs1).unwrap(),
         )
         .unwrap_or_else(|error| {
             panic!("failed to verify data: {:?}", error);
         });

         // PKCS8
-        let signature = sign_rsa_pkcs(TESTDATA.as_bytes(), get_private_key_pkcs_pem(PRIVKEY_PKCS8_PATH, KeyTypePkcs8)).unwrap_or_else(|error| {
+        let signature = sign_rsa_pkcs(
+            TESTDATA.as_bytes(),
+            get_private_key_pkcs_pem(PRIVKEY_PKCS8_PATH, KeyTypePkcs8).unwrap(),
+        )
+        .unwrap_or_else(|error| {
             panic!("failed to sign data: {:?}", error);
         });
         _ = verify_rsa_pkcs_buf(
             TESTDATA.as_bytes(),
             signature.as_slice(),
-            get_public_key_pkcs_pem(PUBKEY_PKCS8_PATH, KeyTypePkcs8),
+            get_public_key_pkcs_pem(PUBKEY_PKCS8_PATH, KeyTypePkcs8).unwrap(),
         )
         .unwrap_or_else(|error| {
             panic!("failed to verify data: {:?}", error);
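The verify_sig changes swap `panic!` for `Result` in the key loaders, which moves the failure decision to callers; a caller that itself returns `Option` (like `verify_sig_pubkey` above) logs the error and maps it to `None`. A minimal sketch of that shape, with hypothetical `parse_key`/`load_key_or_none` helpers:

```rust
use anyhow::{bail, Result};

// A loader that used to panic now surfaces failure as Result.
fn parse_key(pem: &str) -> Result<Vec<u8>> {
    if !pem.starts_with("-----BEGIN") {
        bail!("failed to parse PEM message");
    }
    Ok(pem.as_bytes().to_vec())
}

// A caller that cannot propagate errors logs and returns None instead.
fn load_key_or_none(pem: &str) -> Option<Vec<u8>> {
    match parse_key(pem) {
        Ok(key) => Some(key),
        Err(err) => {
            eprintln!("failed to get public key with error {err:?}");
            None
        }
    }
}
```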