From 0d2a158ead9f4ad3481dab849ce0004b0fe80e7b Mon Sep 17 00:00:00 2001 From: Andrew Gunnerson Date: Wed, 6 Sep 2023 23:04:18 -0400 Subject: [PATCH] Remove unnecessary use of anyhow macro Signed-off-by: Andrew Gunnerson --- avbroot/src/cli/avb.rs | 22 +++---- avbroot/src/cli/boot.rs | 14 ++--- avbroot/src/cli/key.rs | 16 +++--- avbroot/src/cli/ota.rs | 123 ++++++++++++++++++++-------------------- e2e/src/config.rs | 6 +- e2e/src/download.rs | 12 ++-- e2e/src/main.rs | 34 +++++------ 7 files changed, 112 insertions(+), 115 deletions(-) diff --git a/avbroot/src/cli/avb.rs b/avbroot/src/cli/avb.rs index f0e0dfb..4d5008c 100644 --- a/avbroot/src/cli/avb.rs +++ b/avbroot/src/cli/avb.rs @@ -50,14 +50,14 @@ pub fn verify_headers( let path = directory.join(format!("{name}.img")); let raw_reader = - File::open(&path).with_context(|| anyhow!("Failed to open for reading: {path:?}"))?; + File::open(&path).with_context(|| format!("Failed to open for reading: {path:?}"))?; let (header, _, _) = avb::load_image(BufReader::new(raw_reader)) - .with_context(|| anyhow!("Failed to load vbmeta structures: {path:?}"))?; + .with_context(|| format!("Failed to load vbmeta structures: {path:?}"))?; // Verify the header's signature. let public_key = header .verify() - .with_context(|| anyhow!("Failed to verify header signature: {path:?}"))?; + .with_context(|| format!("Failed to verify header signature: {path:?}"))?; if let Some(k) = &public_key { let prefix = format!("{name} has a signed vbmeta header"); @@ -92,7 +92,7 @@ pub fn verify_headers( } avb::Descriptor::ChainPartition(d) => { let target_key = avb::decode_public_key(&d.public_key).with_context(|| { - anyhow!("Failed to decode chained public key for: {target_name}") + format!("Failed to decode chained public key for: {target_name}") })?; verify_headers(directory, target_name, Some(&target_key), seen, descriptors)?; @@ -134,12 +134,12 @@ pub fn verify_descriptors( || Ok(Box::new(BufReader::new(reader.clone()))), cancel_signal, ) - .with_context(|| anyhow!("Failed to verify hashtree descriptor for: {name}"))?; + .with_context(|| format!("Failed to verify hashtree descriptor for: {name}"))?; } Descriptor::Hash(d) => { status!("Verifying hash descriptor for: {name}"); d.verify(BufReader::new(reader), cancel_signal) - .with_context(|| anyhow!("Failed to verify hash descriptor for: {name}"))?; + .with_context(|| format!("Failed to verify hash descriptor for: {name}"))?; } _ => unreachable!("Non-verifiable descriptor: {descriptor:?}"), } @@ -153,10 +153,10 @@ pub fn avb_main(cli: &AvbCli, cancel_signal: &Arc) -> Result<()> { match &cli.command { AvbCommand::Dump(c) => { let raw_reader = File::open(&c.input) - .with_context(|| anyhow!("Failed to open for reading: {:?}", c.input))?; + .with_context(|| format!("Failed to open for reading: {:?}", c.input))?; let reader = BufReader::new(raw_reader); let (header, footer, image_size) = avb::load_image(reader) - .with_context(|| anyhow!("Failed to load vbmeta structures: {:?}", c.input))?; + .with_context(|| format!("Failed to load vbmeta structures: {:?}", c.input))?; println!("Image size: {image_size}"); println!("Header: {header:#?}"); @@ -164,9 +164,9 @@ pub fn avb_main(cli: &AvbCli, cancel_signal: &Arc) -> Result<()> { } AvbCommand::Verify(c) => { let public_key = if let Some(p) = &c.public_key { - let data = fs::read(p).with_context(|| anyhow!("Failed to read file: {p:?}"))?; + let data = fs::read(p).with_context(|| format!("Failed to read file: {p:?}"))?; let key = avb::decode_public_key(&data) - .with_context(|| 
anyhow!("Failed to decode public key: {p:?}"))?; + .with_context(|| format!("Failed to decode public key: {p:?}"))?; Some(key) } else { @@ -177,7 +177,7 @@ pub fn avb_main(cli: &AvbCli, cancel_signal: &Arc) -> Result<()> { let name = c .input .file_stem() - .with_context(|| anyhow!("Path is not a file: {:?}", c.input))? + .with_context(|| format!("Path is not a file: {:?}", c.input))? .to_str() .ok_or_else(|| anyhow!("Invalid UTF-8: {:?}", c.input))?; diff --git a/avbroot/src/cli/boot.rs b/avbroot/src/cli/boot.rs index 68ebd19..04117d5 100644 --- a/avbroot/src/cli/boot.rs +++ b/avbroot/src/cli/boot.rs @@ -9,7 +9,7 @@ use std::{ path::{Path, PathBuf}, }; -use anyhow::{anyhow, bail, Context, Result}; +use anyhow::{bail, Context, Result}; use clap::{Parser, Subcommand}; use crate::{ @@ -82,7 +82,7 @@ fn read_avb_header_if_exists(path: &Path) -> Result> { Err(e) => Err(e).with_context(|| format!("Failed to open for reading: {path:?}"))?, }; let header = Header::from_reader(BufReader::new(file)) - .with_context(|| anyhow!("Failed to read vbmeta header: {path:?}"))?; + .with_context(|| format!("Failed to read vbmeta header: {path:?}"))?; Ok(Some(header)) } @@ -106,7 +106,7 @@ fn write_text_if_not_empty(path: &Path, text: &str) -> Result<()> { fn write_avb_header(path: &Path, header: &Header) -> Result<()> { let file = - File::create(path).with_context(|| anyhow!("Failed to open for writing: {path:?}"))?; + File::create(path).with_context(|| format!("Failed to open for writing: {path:?}"))?; header.to_writer(BufWriter::new(file))?; Ok(()) @@ -272,9 +272,9 @@ fn info_subcommand(boot_cli: &BootCli, cli: &InfoCli) -> Result<()> { pub fn magisk_info_subcommand(cli: &MagiskInfoCli) -> Result<()> { let raw_reader = File::open(&cli.image) - .with_context(|| anyhow!("Failed to open for reading: {:?}", cli.image))?; + .with_context(|| format!("Failed to open for reading: {:?}", cli.image))?; let boot_image = BootImage::from_reader(BufReader::new(raw_reader)) - .with_context(|| anyhow!("Failed to load boot image: {:?}", cli.image))?; + .with_context(|| format!("Failed to load boot image: {:?}", cli.image))?; let mut ramdisks = vec![]; @@ -297,9 +297,9 @@ pub fn magisk_info_subcommand(cli: &MagiskInfoCli) -> Result<()> { for (i, ramdisk) in ramdisks.iter().enumerate() { let reader = Cursor::new(ramdisk); let reader = CompressedReader::new(reader, true) - .with_context(|| anyhow!("Failed to load ramdisk #{i}"))?; + .with_context(|| format!("Failed to load ramdisk #{i}"))?; let entries = cpio::load(reader, false) - .with_context(|| anyhow!("Failed to load ramdisk #{i} cpio"))?; + .with_context(|| format!("Failed to load ramdisk #{i} cpio"))?; if let Some(e) = entries.iter().find(|e| e.name == b".backup/.magisk") { io::stdout().write_all(&e.content)?; diff --git a/avbroot/src/cli/key.rs b/avbroot/src/cli/key.rs index 0667b7a..b8680bd 100644 --- a/avbroot/src/cli/key.rs +++ b/avbroot/src/cli/key.rs @@ -10,7 +10,7 @@ use std::{ time::Duration, }; -use anyhow::{anyhow, Context, Result}; +use anyhow::{Context, Result}; use clap::{Args, Parser, Subcommand}; use crate::{ @@ -36,30 +36,30 @@ pub fn key_main(cli: &KeyCli) -> Result<()> { crypto::generate_rsa_key_pair().context("Failed to generate RSA keypair")?; crypto::write_pem_key_file(&c.output, &private_key, &passphrase) - .with_context(|| anyhow!("Failed to write private key: {:?}", c.output))?; + .with_context(|| format!("Failed to write private key: {:?}", c.output))?; } KeyCommand::GenerateCert(c) => { let passphrase = get_passphrase(&c.passphrase, &c.key); 
let private_key = crypto::read_pem_key_file(&c.key, &passphrase) - .with_context(|| anyhow!("Failed to load key: {:?}", c.key))?; + .with_context(|| format!("Failed to load key: {:?}", c.key))?; let validity = Duration::from_secs(c.validity * 24 * 60 * 60); let cert = crypto::generate_cert(&private_key, rand::random(), validity, &c.subject) .context("Failed to generate certificate")?; crypto::write_pem_cert_file(&c.output, &cert) - .with_context(|| anyhow!("Failed to write certificate: {:?}", c.output))?; + .with_context(|| format!("Failed to write certificate: {:?}", c.output))?; } KeyCommand::ExtractAvb(c) => { let public_key = if let Some(p) = &c.input.key { let passphrase = get_passphrase(&c.passphrase, p); let private_key = crypto::read_pem_key_file(p, &passphrase) - .with_context(|| anyhow!("Failed to load key: {p:?}"))?; + .with_context(|| format!("Failed to load key: {p:?}"))?; private_key.to_public_key() } else if let Some(p) = &c.input.cert { let certificate = crypto::read_pem_cert_file(p) - .with_context(|| anyhow!("Failed to load certificate: {p:?}"))?; + .with_context(|| format!("Failed to load certificate: {p:?}"))?; crypto::get_public_key(&certificate)? } else { @@ -67,10 +67,10 @@ pub fn key_main(cli: &KeyCli) -> Result<()> { }; let encoded = avb::encode_public_key(&public_key) - .with_context(|| anyhow!("Failed to encode public key in AVB format"))?; + .context("Failed to encode public key in AVB format")?; fs::write(&c.output, encoded) - .with_context(|| anyhow!("Failed to write public key: {:?}", c.output))?; + .with_context(|| format!("Failed to write public key: {:?}", c.output))?; } } diff --git a/avbroot/src/cli/ota.rs b/avbroot/src/cli/ota.rs index 5e00d9c..8bdbda3 100644 --- a/avbroot/src/cli/ota.rs +++ b/avbroot/src/cli/ota.rs @@ -159,14 +159,14 @@ fn open_input_streams( status!("Opening external image: {name}: {path:?}"); let file = File::open(path) - .with_context(|| anyhow!("Failed to open external image: {path:?}"))?; + .with_context(|| format!("Failed to open external image: {path:?}"))?; input_streams.insert(name.clone(), Box::new(file)); } else { status!("Extracting from original payload: {name}"); let stream = payload::extract_image_to_memory(&open_payload, header, name, cancel_signal) - .with_context(|| anyhow!("Failed to extract from original payload: {name}"))?; + .with_context(|| format!("Failed to extract from original payload: {name}"))?; input_streams.insert(name.clone(), Box::new(stream)); } } @@ -219,7 +219,7 @@ fn patch_boot_images( let mut writer = Cursor::new(Vec::new()); boot::patch_boot(s, &mut writer, key_avb, &p, cancel_signal) - .with_context(|| anyhow!("Failed to patch boot image: {n}"))?; + .with_context(|| format!("Failed to patch boot image: {n}"))?; Ok((n, writer)) }) @@ -247,7 +247,7 @@ fn get_vbmeta_patch_order( for name in vbmeta_images { let reader = images.get_mut(name).unwrap(); let (header, footer, _) = avb::load_image(reader) - .with_context(|| anyhow!("Failed to load vbmeta image: {name}"))?; + .with_context(|| format!("Failed to load vbmeta image: {name}"))?; if let Some(f) = footer { warning!("{name} is a vbmeta partition, but has a footer: {f:?}"); @@ -366,7 +366,7 @@ fn update_vbmeta_descriptors( let reader = images.get_mut(dep).unwrap(); let (header, _, _) = avb::load_image(reader) - .with_context(|| anyhow!("Failed to load vbmeta footer from image: {dep}"))?; + .with_context(|| format!("Failed to load vbmeta footer from image: {dep}"))?; if header.public_key.is_empty() { // vbmeta is unsigned. 
Use the existing descriptor. @@ -404,15 +404,15 @@ fn update_vbmeta_descriptors( parent_header .sign(key) - .with_context(|| anyhow!("Failed to sign vbmeta header for image: {name}"))?; + .with_context(|| format!("Failed to sign vbmeta header for image: {name}"))?; let mut writer = Cursor::new(Vec::new()); parent_header .to_writer(&mut writer) - .with_context(|| anyhow!("Failed to write vbmeta image: {name}"))?; + .with_context(|| format!("Failed to write vbmeta image: {name}"))?; padding::write_zeros(&mut writer, block_size) - .with_context(|| anyhow!("Failed to write vbmeta padding: {name}"))?; + .with_context(|| format!("Failed to write vbmeta padding: {name}"))?; *images.get_mut(name).unwrap() = Box::new(writer); } @@ -462,8 +462,8 @@ fn patch_ota_payload( cert_ota: &Certificate, cancel_signal: &Arc, ) -> Result<(String, u64)> { - let header = PayloadHeader::from_reader(open_payload()?) - .with_context(|| anyhow!("Failed to load OTA payload header"))?; + let header = + PayloadHeader::from_reader(open_payload()?).context("Failed to load OTA payload header")?; if !header.is_full_ota() { bail!("Payload is a delta OTA, not a full OTA"); } @@ -556,7 +556,7 @@ fn patch_ota_payload( .par_iter_mut() .map(|(name, stream)| -> Result<()> { compress_image(name, stream, &header, block_size, cancel_signal) - .with_context(|| anyhow!("Failed to compress image: {name}")) + .with_context(|| format!("Failed to compress image: {name}")) }) .collect::>()?; @@ -564,12 +564,12 @@ fn patch_ota_payload( let header_locked = header.lock().unwrap(); let mut payload_writer = PayloadWriter::new(writer, header_locked.clone(), key_ota.clone()) - .with_context(|| anyhow!("Failed to write payload header"))?; + .context("Failed to write payload header")?; let mut orig_payload_reader = open_payload()?; while payload_writer .begin_next_operation() - .with_context(|| anyhow!("Failed to begin next payload blob entry"))? + .context("Failed to begin next payload blob entry")? { let name = payload_writer.partition().unwrap().partition_name.clone(); let operation = payload_writer.operation().unwrap(); @@ -584,7 +584,7 @@ fn patch_ota_payload( reader.rewind()?; stream::copy_n(&mut reader, &mut payload_writer, data_length, cancel_signal) - .with_context(|| anyhow!("Failed to copy from replacement image: {name}"))?; + .with_context(|| format!("Failed to copy from replacement image: {name}"))?; } else { // Copy from the original payload. 
let pi = payload_writer.partition_index().unwrap(); @@ -599,7 +599,7 @@ fn patch_ota_payload( orig_payload_reader .seek(SeekFrom::Start(data_offset)) - .with_context(|| anyhow!("Failed to seek original payload to {data_offset}"))?; + .with_context(|| format!("Failed to seek original payload to {data_offset}"))?; stream::copy_n( &mut orig_payload_reader, @@ -607,13 +607,13 @@ fn patch_ota_payload( data_length, cancel_signal, ) - .with_context(|| anyhow!("Failed to copy from original payload: {name}"))?; + .with_context(|| format!("Failed to copy from original payload: {name}"))?; } } let (_, properties, metadata_size) = payload_writer .finish() - .with_context(|| anyhow!("Failed to finalize payload"))?; + .context("Failed to finalize payload")?; Ok((properties, metadata_size)) } @@ -663,7 +663,7 @@ fn patch_ota_zip( for path in &paths { let mut reader = zip_reader .by_name(path) - .with_context(|| anyhow!("Failed to open zip entry: {path}"))?; + .with_context(|| format!("Failed to open zip entry: {path}"))?; // Android's libarchive parser is broken and only reads data descriptor // size fields as 64-bit integers if the central directory says the file @@ -686,7 +686,7 @@ fn patch_ota_zip( let mut buf = vec![]; reader .read_to_end(&mut buf) - .with_context(|| anyhow!("Failed to read OTA metadata: {path}"))?; + .with_context(|| format!("Failed to read OTA metadata: {path}"))?; metadata_pb_raw = Some(buf); continue; } @@ -696,10 +696,10 @@ fn patch_ota_zip( // All remaining entries are written immediately. zip_writer .start_file_with_extra_data(path, options) - .with_context(|| anyhow!("Failed to begin new zip entry: {path}"))?; + .with_context(|| format!("Failed to begin new zip entry: {path}"))?; let offset = zip_writer .end_extra_data() - .with_context(|| anyhow!("Failed to end new zip entry: {path}"))?; + .with_context(|| format!("Failed to end new zip entry: {path}"))?; let mut writer = CountingWriter::new(&mut zip_writer); match path.as_str() { @@ -708,7 +708,7 @@ fn patch_ota_zip( status!("Replacing zip entry: {path}"); crypto::write_pem_cert(&mut writer, cert_ota) - .with_context(|| anyhow!("Failed to write entry: {path}"))?; + .with_context(|| format!("Failed to write entry: {path}"))?; } ota::PATH_PAYLOAD => { status!("Patching zip entry: {path}"); @@ -741,7 +741,7 @@ fn patch_ota_zip( cert_ota, cancel_signal, ) - .with_context(|| anyhow!("Failed to patch payload: {path}"))?; + .with_context(|| format!("Failed to patch payload: {path}"))?; properties = Some(p); payload_metadata_size = Some(m); @@ -752,13 +752,13 @@ fn patch_ota_zip( // payload.bin is guaranteed to be patched first. 
writer .write_all(properties.as_ref().unwrap().as_bytes()) - .with_context(|| anyhow!("Failed to write payload properties: {path}"))?; + .with_context(|| format!("Failed to write payload properties: {path}"))?; } _ => { status!("Copying zip entry: {path}"); stream::copy(&mut reader, &mut writer, cancel_signal) - .with_context(|| anyhow!("Failed to copy zip entry: {path}"))?; + .with_context(|| format!("Failed to copy zip entry: {path}"))?; } } @@ -785,7 +785,7 @@ fn patch_ota_zip( &metadata_pb_raw.unwrap(), payload_metadata_size.unwrap(), ) - .with_context(|| anyhow!("Failed to write new OTA metadata"))?; + .context("Failed to write new OTA metadata")?; Ok((metadata, payload_metadata_size.unwrap())) } @@ -806,7 +806,7 @@ fn extract_ota_zip( } fs::create_dir_all(directory) - .with_context(|| anyhow!("Failed to create directory: {directory:?}"))?; + .with_context(|| format!("Failed to create directory: {directory:?}"))?; status!("Extracting from the payload: {}", joined(images)); @@ -817,7 +817,7 @@ fn extract_ota_zip( let path = directory.join(format!("{name}.img")); let file = File::create(&path) .map(PSeekFile::new) - .with_context(|| anyhow!("Failed to open for writing: {path:?}"))?; + .with_context(|| format!("Failed to open for writing: {path:?}"))?; Ok((name.as_str(), file)) }) .collect::>>()?; @@ -838,7 +838,7 @@ fn extract_ota_zip( images.iter().map(|n| n.as_str()), cancel_signal, ) - .with_context(|| anyhow!("Failed to extract images from payload"))?; + .context("Failed to extract images from payload")?; Ok(()) } @@ -869,11 +869,11 @@ pub fn patch_subcommand(cli: &PatchCli, cancel_signal: &Arc) -> Resu }; let key_avb = crypto::read_pem_key_file(&cli.key_avb, &passphrase_avb) - .with_context(|| anyhow!("Failed to load key: {:?}", cli.key_avb))?; + .with_context(|| format!("Failed to load key: {:?}", cli.key_avb))?; let key_ota = crypto::read_pem_key_file(&cli.key_ota, &passphrase_ota) - .with_context(|| anyhow!("Failed to load key: {:?}", cli.key_ota))?; + .with_context(|| format!("Failed to load key: {:?}", cli.key_ota))?; let cert_ota = crypto::read_pem_cert_file(&cli.cert_ota) - .with_context(|| anyhow!("Failed to load certificate: {:?}", cli.cert_ota))?; + .with_context(|| format!("Failed to load certificate: {:?}", cli.cert_ota))?; if !crypto::cert_matches_key(&cert_ota, &key_ota)? { bail!( @@ -904,7 +904,7 @@ pub fn patch_subcommand(cli: &PatchCli, cancel_signal: &Arc) -> Resu cli.ignore_magisk_warnings, move |s| warning!("{s}"), ) - .with_context(|| anyhow!("Failed to create Magisk boot image patcher"))?; + .context("Failed to create Magisk boot image patcher")?; Some(Box::new(patcher)) } else if let Some(prepatched) = &cli.root.prepatched { @@ -922,9 +922,9 @@ pub fn patch_subcommand(cli: &PatchCli, cancel_signal: &Arc) -> Resu let raw_reader = File::open(&cli.input) .map(PSeekFile::new) - .with_context(|| anyhow!("Failed to open for reading: {:?}", cli.input))?; + .with_context(|| format!("Failed to open for reading: {:?}", cli.input))?; let mut zip_reader = ZipArchive::new(BufReader::new(raw_reader.clone())) - .with_context(|| anyhow!("Failed to read zip: {:?}", cli.input))?; + .with_context(|| format!("Failed to read zip: {:?}", cli.input))?; // Open the output file for reading too, so we can verify offsets later. 
let temp_writer = NamedTempFile::with_prefix_in( @@ -933,7 +933,7 @@ pub fn patch_subcommand(cli: &PatchCli, cancel_signal: &Arc) -> Resu .unwrap_or_else(|| OsStr::new("avbroot.tmp")), output.parent().unwrap_or_else(|| Path::new(".")), ) - .with_context(|| anyhow!("Failed to open temporary output file"))?; + .context("Failed to open temporary output file")?; let temp_path = temp_writer.path().to_owned(); let hole_punching_writer = HolePunchingWriter::new(temp_writer); let buffered_writer = BufWriter::new(hole_punching_writer); @@ -953,21 +953,19 @@ pub fn patch_subcommand(cli: &PatchCli, cancel_signal: &Arc) -> Resu &cert_ota, cancel_signal, ) - .with_context(|| anyhow!("Failed to patch OTA zip"))?; + .context("Failed to patch OTA zip")?; - let sign_writer = zip_writer + let signing_writer = zip_writer .finish() - .with_context(|| anyhow!("Failed to finalize output zip"))?; - let buffered_writer = sign_writer + .context("Failed to finalize output zip")?; + let buffered_writer = signing_writer .finish(&key_ota, &cert_ota) - .with_context(|| anyhow!("Failed to sign output zip"))?; + .context("Failed to sign output zip")?; let hole_punching_writer = buffered_writer .into_inner() - .with_context(|| anyhow!("Failed to flush output zip"))?; + .context("Failed to flush output zip")?; let mut temp_writer = hole_punching_writer.into_inner(); - temp_writer - .flush() - .with_context(|| anyhow!("Failed to flush output zip"))?; + temp_writer.flush().context("Failed to flush output zip")?; // We do a lot of low-level hackery. Reopen and verify offsets. status!("Verifying metadata offsets"); @@ -977,7 +975,7 @@ pub fn patch_subcommand(cli: &PatchCli, cancel_signal: &Arc) -> Resu &metadata, payload_metadata_size, ) - .with_context(|| anyhow!("Failed to verify OTA metadata offsets"))?; + .context("Failed to verify OTA metadata offsets")?; status!("Completed after {:.1}s", start.elapsed().as_secs_f64()); @@ -1000,11 +998,11 @@ pub fn patch_subcommand(cli: &PatchCli, cancel_signal: &Arc) -> Resu temp_writer .as_file() .set_permissions(Permissions::from_mode(mode)) - .with_context(|| anyhow!("Failed to set permissions to {mode:o}: {temp_path:?}"))?; + .with_context(|| format!("Failed to set permissions to {mode:o}: {temp_path:?}"))?; } temp_writer.persist(output.as_ref()).with_context(|| { - anyhow!("Failed to move temporary file to output path: {temp_path:?} -> {output:?}") + format!("Failed to move temporary file to output path: {temp_path:?} -> {output:?}") })?; Ok(()) @@ -1013,12 +1011,12 @@ pub fn patch_subcommand(cli: &PatchCli, cancel_signal: &Arc) -> Resu pub fn extract_subcommand(cli: &ExtractCli, cancel_signal: &Arc) -> Result<()> { let raw_reader = File::open(&cli.input) .map(PSeekFile::new) - .with_context(|| anyhow!("Failed to open for reading: {:?}", cli.input))?; + .with_context(|| format!("Failed to open for reading: {:?}", cli.input))?; let mut zip = ZipArchive::new(BufReader::new(raw_reader.clone())) - .with_context(|| anyhow!("Failed to read zip: {:?}", cli.input))?; + .with_context(|| format!("Failed to read zip: {:?}", cli.input))?; let payload_entry = zip .by_name(ota::PATH_PAYLOAD) - .with_context(|| anyhow!("Failed to open zip entry: {:?}", ota::PATH_PAYLOAD))?; + .with_context(|| format!("Failed to open zip entry: {:?}", ota::PATH_PAYLOAD))?; let payload_offset = payload_entry.data_start(); let payload_size = payload_entry.size(); @@ -1030,7 +1028,7 @@ pub fn extract_subcommand(cli: &ExtractCli, cancel_signal: &Arc) -> )?; let header = PayloadHeader::from_reader(&mut payload_reader) 
- .with_context(|| anyhow!("Failed to load OTA payload header"))?; + .context("Failed to load OTA payload header")?; if !header.is_full_ota() { bail!("Payload is a delta OTA, not a full OTA"); } @@ -1072,7 +1070,7 @@ pub fn extract_subcommand(cli: &ExtractCli, cancel_signal: &Arc) -> pub fn verify_subcommand(cli: &VerifyCli, cancel_signal: &Arc) -> Result<()> { let raw_reader = File::open(&cli.input) .map(PSeekFile::new) - .with_context(|| anyhow!("Failed to open for reading: {:?}", cli.input))?; + .with_context(|| format!("Failed to open for reading: {:?}", cli.input))?; let mut reader = BufReader::new(raw_reader); status!("Verifying whole-file signature"); @@ -1087,7 +1085,7 @@ pub fn verify_subcommand(cli: &VerifyCli, cancel_signal: &Arc) -> Re ); } else if let Some(p) = &cli.cert_ota { let verify_cert = crypto::read_pem_cert_file(p) - .with_context(|| anyhow!("Failed to load certificate: {:?}", p))?; + .with_context(|| format!("Failed to load certificate: {:?}", p))?; if embedded_cert != verify_cert { bail!("OTA has a valid signature, but was not signed with: {p:?}"); @@ -1097,7 +1095,7 @@ pub fn verify_subcommand(cli: &VerifyCli, cancel_signal: &Arc) -> Re } ota::verify_metadata(&mut reader, &metadata, header.blob_offset) - .with_context(|| anyhow!("Failed to verify OTA metadata offsets"))?; + .context("Failed to verify OTA metadata offsets")?; status!("Verifying payload"); @@ -1106,7 +1104,7 @@ pub fn verify_subcommand(cli: &VerifyCli, cancel_signal: &Arc) -> Re .get(ota::PF_NAME) .ok_or_else(|| anyhow!("Missing property files: {}", ota::PF_NAME))?; let pfs = ota::parse_property_files(pfs_raw) - .with_context(|| anyhow!("Failed to parse property files: {}", ota::PF_NAME))?; + .with_context(|| format!("Failed to parse property files: {}", ota::PF_NAME))?; let pf_payload = pfs .iter() .find(|pf| pf.name == ota::PATH_PAYLOAD) @@ -1118,8 +1116,7 @@ pub fn verify_subcommand(cli: &VerifyCli, cancel_signal: &Arc) -> Re status!("Extracting partition images to temporary directory"); - let temp_dir = - TempDir::new().with_context(|| anyhow!("Failed to create temporary directory"))?; + let temp_dir = TempDir::new().context("Failed to create temporary directory")?; let raw_reader = reader.into_inner(); let unique_images = header .manifest @@ -1147,13 +1144,13 @@ pub fn verify_subcommand(cli: &VerifyCli, cancel_signal: &Arc) -> Re .path() .join(format!("{}.img", partitions_by_type["@otacerts"])); let file = - File::open(&path).with_context(|| anyhow!("Failed to open for reading: {path:?}"))?; + File::open(&path).with_context(|| format!("Failed to open for reading: {path:?}"))?; BootImage::from_reader(BufReader::new(file)) - .with_context(|| anyhow!("Failed to read boot image: {path:?}"))? + .with_context(|| format!("Failed to read boot image: {path:?}"))? 
}; let ramdisk_certs = OtaCertPatcher::get_certificates(&boot_image) - .with_context(|| anyhow!("Failed to read ramdisk's otacerts.zip"))?; + .context("Failed to read ramdisk's otacerts.zip")?; if !ramdisk_certs.contains(&ota_cert) { bail!("Ramdisk's otacerts.zip does not contain OTA certificate"); } @@ -1161,9 +1158,9 @@ pub fn verify_subcommand(cli: &VerifyCli, cancel_signal: &Arc) -> Re status!("Verifying AVB signatures"); let public_key = if let Some(p) = &cli.public_key_avb { - let data = fs::read(p).with_context(|| anyhow!("Failed to read file: {p:?}"))?; + let data = fs::read(p).with_context(|| format!("Failed to read file: {p:?}"))?; let key = avb::decode_public_key(&data) - .with_context(|| anyhow!("Failed to decode public key: {p:?}"))?; + .with_context(|| format!("Failed to decode public key: {p:?}"))?; Some(key) } else { diff --git a/e2e/src/config.rs b/e2e/src/config.rs index 0969eb9..6b79823 100644 --- a/e2e/src/config.rs +++ b/e2e/src/config.rs @@ -5,7 +5,7 @@ use std::{collections::BTreeMap, fs, ops::Range, path::Path}; -use anyhow::{anyhow, Context, Result}; +use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; use toml_edit::{ ser::ValueSerializer, @@ -131,9 +131,9 @@ pub fn add_device(document: &mut Document, name: &str, device: &Device) -> Resul pub fn load_config(path: &Path) -> Result<(Config, Document)> { let contents = - fs::read_to_string(path).with_context(|| anyhow!("Failed to read config: {path:?}"))?; + fs::read_to_string(path).with_context(|| format!("Failed to read config: {path:?}"))?; let config: Config = toml_edit::de::from_str(&contents) - .with_context(|| anyhow!("Failed to parse config: {path:?}"))?; + .with_context(|| format!("Failed to parse config: {path:?}"))?; let document: Document = contents.parse().unwrap(); Ok((config, document)) diff --git a/e2e/src/download.rs b/e2e/src/download.rs index 9a0dad1..d6f9c28 100644 --- a/e2e/src/download.rs +++ b/e2e/src/download.rs @@ -145,7 +145,7 @@ async fn download_range( .send() .await .and_then(|r| r.error_for_status()) - .with_context(|| anyhow!("Failed to start download for range: {initial_range:?}"))?; + .with_context(|| format!("Failed to start download for range: {initial_range:?}"))?; let mut stream = response.bytes_stream(); let mut range = initial_range.clone(); @@ -246,11 +246,11 @@ async fn download_ranges( .create(true) .open(output) .map(PSeekFile::new) - .with_context(|| anyhow!("Failed to open for writing: {output:?}")) + .with_context(|| format!("Failed to open for writing: {output:?}")) })?; task::block_in_place(|| file.set_len(file_size)) - .with_context(|| anyhow!("Failed to set file size: {output:?}"))?; + .with_context(|| format!("Failed to set file size: {output:?}"))?; // Queue of ranges that need to be downloaded. 
let mut remaining = VecDeque::from(match initial_ranges { @@ -381,11 +381,11 @@ fn read_state(path: &Path) -> Result> { let data = match fs::read_to_string(path) { Ok(f) => f, Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(None), - Err(e) => Err(e).with_context(|| anyhow!("Failed to read download state: {path:?}"))?, + Err(e) => Err(e).with_context(|| format!("Failed to read download state: {path:?}"))?, }; let state = toml_edit::de::from_str(&data) - .with_context(|| anyhow!("Failed to parse download state: {path:?}"))?; + .with_context(|| format!("Failed to parse download state: {path:?}"))?; Ok(Some(state)) } @@ -393,7 +393,7 @@ fn read_state(path: &Path) -> Result> { fn write_state(path: &Path, state: &State) -> Result<()> { let data = toml_edit::ser::to_string(state).unwrap(); - fs::write(path, data).with_context(|| anyhow!("Failed to write download state: {path:?}"))?; + fs::write(path, data).with_context(|| format!("Failed to write download state: {path:?}"))?; Ok(()) } diff --git a/e2e/src/main.rs b/e2e/src/main.rs index 0b3bca3..48e1fb9 100644 --- a/e2e/src/main.rs +++ b/e2e/src/main.rs @@ -96,12 +96,12 @@ fn strip_image( let mut raw_reader = File::open(input) .map(PSeekFile::new) - .with_context(|| anyhow!("Failed to open for reading: {input:?}"))?; + .with_context(|| format!("Failed to open for reading: {input:?}"))?; let mut zip_reader = ZipArchive::new(BufReader::new(raw_reader.clone())) - .with_context(|| anyhow!("Failed to read zip: {input:?}"))?; + .with_context(|| format!("Failed to read zip: {input:?}"))?; let payload_entry = zip_reader .by_name(ota::PATH_PAYLOAD) - .with_context(|| anyhow!("Failed to open zip entry: {:?}", ota::PATH_PAYLOAD))?; + .with_context(|| format!("Failed to open zip entry: {:?}", ota::PATH_PAYLOAD))?; let payload_offset = payload_entry.data_start(); let payload_size = payload_entry.size(); @@ -113,7 +113,7 @@ fn strip_image( )?; let header = PayloadHeader::from_reader(&mut payload_reader) - .with_context(|| anyhow!("Failed to load OTA payload header"))?; + .context("Failed to load OTA payload header")?; let required_images = avbroot::cli::ota::get_required_images(&header.manifest, "@gki_ramdisk", true)? 
@@ -147,10 +147,10 @@ fn strip_image( let mut context = ring::digest::Context::new(&ring::digest::SHA256); let raw_writer = - File::create(output).with_context(|| anyhow!("Failed to open for writing: {output:?}"))?; + File::create(output).with_context(|| format!("Failed to open for writing: {output:?}"))?; raw_writer .set_len(file_size) - .with_context(|| anyhow!("Failed to set file size: {output:?}"))?; + .with_context(|| format!("Failed to set file size: {output:?}"))?; let mut buf_writer = BufWriter::new(raw_writer); let mut buf_reader = BufReader::new(raw_reader); @@ -198,7 +198,7 @@ fn hash_file(path: &Path, cancel_signal: &Arc) -> Result<[u8; 32]> { println!("Calculating hash of {path:?}"); let raw_reader = - File::open(path).with_context(|| anyhow!("Failed to open for reading: {path:?}"))?; + File::open(path).with_context(|| format!("Failed to open for reading: {path:?}"))?; let buf_reader = BufReader::new(raw_reader); let context = ring::digest::Context::new(&ring::digest::SHA256); let mut hashing_reader = HashingReader::new(buf_reader, context); @@ -249,7 +249,7 @@ fn download_file( if let Some(parent) = path.parent() { fs::create_dir_all(parent) - .with_context(|| anyhow!("Failed to create directory: {parent:?}"))?; + .with_context(|| format!("Failed to create directory: {parent:?}"))?; } let mut do_validate = validate != Validate::Never; @@ -376,7 +376,7 @@ fn test_keys() -> Result<(TempDir, Vec, Vec)> { ("ota.crt", &ota_cert[..], Some("--cert-ota"), Some("--cert-ota")), ] { let path = temp_dir.path().join(name); - fs::write(&path, data).with_context(|| anyhow!("Failed to write test key: {path:?}"))?; + fs::write(&path, data).with_context(|| format!("Failed to write test key: {path:?}"))?; if let Some(arg) = patch_arg { patch_args.push(arg.into()); @@ -464,12 +464,12 @@ fn verify_image(input_file: &Path, cancel_signal: &Arc) -> Result<() fn get_magisk_partition(path: &Path) -> Result { let raw_reader = - File::open(path).with_context(|| anyhow!("Failed to open for reading: {path:?}"))?; + File::open(path).with_context(|| format!("Failed to open for reading: {path:?}"))?; let mut zip = ZipArchive::new(BufReader::new(raw_reader)) - .with_context(|| anyhow!("Failed to read zip: {path:?}"))?; + .with_context(|| format!("Failed to read zip: {path:?}"))?; let payload_entry = zip .by_name(ota::PATH_PAYLOAD) - .with_context(|| anyhow!("Failed to open zip entry: {:?}", ota::PATH_PAYLOAD))?; + .with_context(|| format!("Failed to open zip entry: {:?}", ota::PATH_PAYLOAD))?; let payload_offset = payload_entry.data_start(); let payload_size = payload_entry.size(); @@ -480,7 +480,7 @@ fn get_magisk_partition(path: &Path) -> Result { let mut payload_reader = SectionReader::new(buf_reader, payload_offset, payload_size)?; let header = PayloadHeader::from_reader(&mut payload_reader) - .with_context(|| anyhow!("Failed to load OTA payload header"))?; + .context("Failed to load OTA payload header")?; let images = avbroot::cli::ota::get_partitions_by_type(&header.manifest)?; Ok(images["@gki_ramdisk"].clone()) @@ -625,11 +625,11 @@ fn add_subcommand(cli: &AddCli, cancel_signal: &Arc) -> Result<()> { let config_serialized = document.to_string(); fs::write(&cli.config.config, config_serialized) - .with_context(|| anyhow!("Failed to write config: {:?}", cli.config.config))?; + .with_context(|| format!("Failed to write config: {:?}", cli.config.config))?; if cli.patch.delete_on_success { for path in [full_ota_patched, stripped_ota_patched] { - fs::remove_file(&path).with_context(|| anyhow!("Failed to 
delete file: {path:?}"))?; + fs::remove_file(&path).with_context(|| format!("Failed to delete file: {path:?}"))?; } } @@ -745,7 +745,7 @@ fn test_subcommand(cli: &TestCli, cancel_signal: &Arc) -> Result<()> ]; fs::remove_file(&patched_file) - .with_context(|| anyhow!("Failed to delete file: {patched_file:?}"))?; + .with_context(|| format!("Failed to delete file: {patched_file:?}"))?; patch_image(&image_file, &patched_file, &prepatched_args, cancel_signal)?; @@ -753,7 +753,7 @@ fn test_subcommand(cli: &TestCli, cancel_signal: &Arc) -> Result<()> if cli.patch.delete_on_success { fs::remove_file(&patched_file) - .with_context(|| anyhow!("Failed to delete file: {patched_file:?}"))?; + .with_context(|| format!("Failed to delete file: {patched_file:?}"))?; } }
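The change works because anyhow's Context::with_context accepts any closure returning a value that is Display + Send + Sync + 'static, so the String produced by format! is sufficient; wrapping it in anyhow! only builds a throwaway anyhow::Error to serve as the context message. When the message is a constant, .context("...") drops the closure as well. A minimal sketch of the two patterns this commit converges on (the first_line helper is hypothetical and not part of the avbroot codebase):

    use std::{fs, path::Path};

    use anyhow::{Context, Result};

    // Hypothetical helper, only to illustrate the two context patterns.
    fn first_line(path: &Path) -> Result<String> {
        // Dynamic message: format! runs lazily, only on the error path, and the
        // String it returns satisfies with_context's
        // Display + Send + Sync + 'static bound, so no anyhow! is needed.
        let data = fs::read_to_string(path)
            .with_context(|| format!("Failed to read file: {path:?}"))?;

        // Static message: .context() takes the &str directly, no closure required.
        let line = data.lines().next().context("File is empty")?;

        Ok(line.to_string())
    }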