Give good error messages when archive extensions are invalid (#817)

+ simplify unit tests, don't require them to start the logger

+ unrelated TODOs
João Marcos 2025-05-06 00:08:38 -03:00 committed by GitHub
parent 07967927dd
commit c8f97197c3
10 changed files with 342 additions and 54 deletions

View File

@ -37,6 +37,8 @@ jobs:
strategy:
fail-fast: false
matrix:
# TODO: avoid exploding the matrix by removing unrar and bzip3 from the all combinations runs
# I can add a monthly run with all combinations
feature-unrar: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
feature-bzip3: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
feature-use-zlib: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}

View File

@ -26,6 +26,9 @@ Categories Used:
- Add `--no-smart-unpack` flag to decompression command to disable smart unpack [\#809](https://github.com/ouch-org/ouch/pull/809) ([talis-fb](https://github.com/talis-fb))
### Improvements
- Give better error messages when archive extensions are invalid [\#817](https://github.com/ouch-org/ouch/pull/817) ([marcospb19](https://github.com/marcospb19))
### Bug Fixes
### Tweaks

Cargo.lock generated
View File

@ -519,6 +519,12 @@ dependencies = [
"powerfmt", "powerfmt",
] ]
[[package]]
name = "diff"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
[[package]] [[package]]
name = "difflib" name = "difflib"
version = "0.4.0" version = "0.4.0"
@ -818,6 +824,15 @@ version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]]
name = "itertools"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
dependencies = [
"either",
]
[[package]] [[package]]
name = "jiff" name = "jiff"
version = "0.2.8" version = "0.2.8"
@ -1081,17 +1096,21 @@ dependencies = [
"filetime_creation", "filetime_creation",
"flate2", "flate2",
"fs-err", "fs-err",
"glob",
"gzp", "gzp",
"ignore", "ignore",
"infer", "infer",
"insta", "insta",
"is_executable", "is_executable",
"itertools",
"libc", "libc",
"linked-hash-map", "linked-hash-map",
"lz4_flex", "lz4_flex",
"memchr",
"num_cpus", "num_cpus",
"once_cell", "once_cell",
"parse-display", "parse-display",
"pretty_assertions",
"proptest", "proptest",
"rand 0.8.5", "rand 0.8.5",
"rayon", "rayon",
@ -1246,6 +1265,16 @@ dependencies = [
"termtree", "termtree",
] ]
[[package]]
name = "pretty_assertions"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d"
dependencies = [
"diff",
"yansi",
]
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.93" version = "1.0.93"
@ -2047,6 +2076,12 @@ dependencies = [
"lzma-sys", "lzma-sys",
] ]
[[package]]
name = "yansi"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
[[package]] [[package]]
name = "zerocopy" name = "zerocopy"
version = "0.7.35" version = "0.7.35"

View File

@ -19,7 +19,7 @@ brotli = "7.0.0"
bstr = { version = "1.10.0", default-features = false, features = ["std"] }
bytesize = "1.3.0"
bzip2 = "0.4.4"
-bzip3 = { version = "0.9.0", features = ["bundled"] , optional = true }
+bzip3 = { version = "0.9.0", features = ["bundled"], optional = true }
clap = { version = "4.5.20", features = ["derive", "env"] }
filetime_creation = "0.2"
flate2 = { version = "1.0.30", default-features = false }
@ -58,9 +58,13 @@ clap_mangen = "0.2.24"
[dev-dependencies]
assert_cmd = "2.0.14"
glob = "0.3.2"
infer = "0.16.0"
insta = { version = "1.40.0", features = ["filters"] }
itertools = "0.14.0"
memchr = "2.7.4"
parse-display = "0.9.1"
pretty_assertions = "1.4.1"
proptest = "1.5.0"
rand = { version = "0.8.5", default-features = false, features = [
"small_rng",
@ -88,4 +92,5 @@ inherits = "release"
lto = false
opt-level = 2
incremental = true
-codegen-units = 16
+codegen-units = 32
+strip = false
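pretty_assertions is one of the new test-only dependencies above; a hedged sketch of what it changes in practice (illustrative test, not code from this commit):

    // Importing pretty_assertions::assert_eq shadows the std macro, so a failing
    // comparison prints a colored, line-by-line diff instead of two raw Debug dumps.
    use pretty_assertions::assert_eq;

    #[test]
    fn readable_failure_output() {
        assert_eq!(vec!["tar", "gz"], vec!["tar", "gz"]);
    }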

View File

@ -132,7 +132,7 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
-Tar | Zip | Rar | SevenZip => unreachable!(),
+Tar | Zip | Rar | SevenZip => decoder,
};
Ok(decoder)
};
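For context, the arm change above means archive formats simply pass the reader through the decoder chain, since the archive itself is unpacked later by its own reader. A minimal standalone sketch of that idea with simplified names (it assumes the flate2 crate, already in the dependency tree, and is not ouch's exact API):

    use std::io::Read;

    enum Format {
        Gzip,
        Tar,
    }

    // Compression formats wrap the previous reader in a new decoder;
    // archive formats hand the reader back untouched.
    fn chain_decoder(format: Format, reader: Box<dyn Read>) -> std::io::Result<Box<dyn Read>> {
        Ok(match format {
            Format::Gzip => Box::new(flate2::read::GzDecoder::new(reader)), // wrap
            Format::Tar => reader,                                          // pass through
        })
    }

    fn main() -> std::io::Result<()> {
        let data: &[u8] = b"plain bytes standing in for an archive stream";
        let mut out = Vec::new();
        // Tar-like formats leave the reader untouched, so this just copies bytes.
        chain_decoder(Format::Tar, Box::new(data))?.read_to_end(&mut out)?;
        assert_eq!(out, data);
        Ok(())
    }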

View File

@ -36,7 +36,6 @@ pub fn list_archive_contents(
let zip_archive = zip::ZipArchive::new(reader)?;
let files = crate::archive::zip::list_archive(zip_archive, password);
list::list_files(archive_path, files, list_options)?;
return Ok(());
}
@ -46,7 +45,7 @@ pub fn list_archive_contents(
// Grab previous decoder and wrap it inside of a new one
let chain_reader_decoder =
-|format: &CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
+|format: CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
let decoder: Box<dyn Read + Send> = match format {
Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
@ -62,16 +61,22 @@ pub fn list_archive_contents(
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
-Tar | Zip | Rar | SevenZip => unreachable!(),
+Tar | Zip | Rar | SevenZip => unreachable!("should be treated by caller"),
};
Ok(decoder)
};
-for format in formats.iter().skip(1).rev() {
+let mut misplaced_archive_format = None;
+for &format in formats.iter().skip(1).rev() {
+if format.archive_format() {
+misplaced_archive_format = Some(format);
+break;
+}
reader = chain_reader_decoder(format, reader)?;
}
-let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match formats[0] {
+let archive_format = misplaced_archive_format.unwrap_or(formats[0]);
+let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match archive_format {
Tar => Box::new(crate::archive::tar::list_archive(tar::Archive::new(reader))),
Zip => {
if formats.len() > 1 {
@ -120,7 +125,7 @@ pub fn list_archive_contents(
Box::new(archive::sevenz::list_archive(archive_path, password)?)
}
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
-panic!("Not an archive! This should never happen, if it does, something is wrong with `CompressionFormat::is_archive()`. Please report this error!");
+unreachable!("Not an archive, should be validated before calling this function.");
}
};
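A small illustration of the iteration order relied on above (the format list is made up for the example): formats[0] is the archive candidate, and skip(1).rev() applies the remaining decoders from the outermost compression layer inward.

    // For "file.tar.zst.lz4" the parsed formats are ["tar", "zst", "lz4"]; the loop
    // skips the archive at index 0 and walks the rest in reverse, so the outermost
    // layer ("lz4") is decoded first, then "zst", leaving "tar" for the archive reader.
    fn main() {
        let formats = ["tar", "zst", "lz4"];
        let order: Vec<&&str> = formats.iter().skip(1).rev().collect();
        assert_eq!(order, [&"lz4", &"zst"]);
    }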

View File

@ -81,7 +81,7 @@ pub fn run(
let parsed_formats = parse_format_flag(&formats)?;
(Some(formats), parsed_formats)
}
-None => (None, extension::extensions_from_path(&output_path)),
+None => (None, extension::extensions_from_path(&output_path)?),
};
check::check_invalid_compression_with_non_archive_format(
@ -158,6 +158,7 @@ pub fn run(
if let Some(format) = args.format {
let format = parse_format_flag(&format)?;
for path in files.iter() {
// TODO: use Error::Custom
let file_name = path.file_name().ok_or_else(|| Error::NotFound {
error_title: format!("{} does not have a file name", EscapedPathDisplay::new(path)),
})?;
@ -166,7 +167,7 @@ pub fn run(
}
} else {
for path in files.iter() {
-let (pathbase, mut file_formats) = extension::separate_known_extensions_from_name(path);
+let (pathbase, mut file_formats) = extension::separate_known_extensions_from_name(path)?;
if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
return Ok(());
@ -228,7 +229,7 @@ pub fn run(
}
} else {
for path in files.iter() {
-let mut file_formats = extension::extensions_from_path(path);
+let mut file_formats = extension::extensions_from_path(path)?;
if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
return Ok(());

View File

@ -5,7 +5,10 @@ use std::{ffi::OsStr, fmt, path::Path};
use bstr::ByteSlice;
use CompressionFormat::*;
-use crate::{error::Error, utils::logger::warning};
+use crate::{
+error::{Error, FinalError, Result},
+utils::logger::warning,
+};
pub const SUPPORTED_EXTENSIONS: &[&str] = &[
"tar",
@ -60,8 +63,8 @@ impl Extension {
/// Checks if the first format in `compression_formats` is an archive
pub fn is_archive(&self) -> bool {
-// Safety: we check that `compression_formats` is not empty in `Self::new`
-self.compression_formats[0].is_archive_format()
+// Index Safety: we check that `compression_formats` is not empty in `Self::new`
+self.compression_formats[0].archive_format()
}
}
@ -103,7 +106,7 @@ pub enum CompressionFormat {
impl CompressionFormat {
/// Currently supported archive formats are .tar (and aliases to it) and .zip
-fn is_archive_format(&self) -> bool {
+pub fn archive_format(&self) -> bool {
// Keep this match like that without a wildcard `_` so we don't forget to update it
match self {
Tar | Zip | Rar | SevenZip => true,
@ -147,14 +150,13 @@ fn to_extension(ext: &[u8]) -> Option<Extension> {
))
}
-fn split_extension(name: &mut &[u8]) -> Option<Extension> {
+fn split_extension_at_end(name: &[u8]) -> Option<(&[u8], Extension)> {
let (new_name, ext) = name.rsplit_once_str(b".")?;
if matches!(new_name, b"" | b"." | b"..") {
return None;
}
let ext = to_extension(ext)?;
-*name = new_name;
-Some(ext)
+Some((new_name, ext))
}
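For readers unfamiliar with bstr, a quick behavior sketch of the splitting primitive used by split_extension_at_end (illustrative only, not project code):

    // rsplit_once_str splits at the LAST occurrence of the separator and
    // returns the two halves without it; None ends the extension-stripping loop.
    use bstr::ByteSlice;

    fn main() {
        let name: &[u8] = b"file.tar.gz";
        let (rest, ext) = name.rsplit_once_str(b".").unwrap();
        assert_eq!(rest, b"file.tar");
        assert_eq!(ext, b"gz");

        let no_dot: &[u8] = b"file";
        assert_eq!(no_dot.rsplit_once_str(b"."), None);
    }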
pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
@ -188,17 +190,41 @@ pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
/// Extracts extensions from a path.
///
-/// Returns both the remaining path and the list of extension objects
-pub fn separate_known_extensions_from_name(path: &Path) -> (&Path, Vec<Extension>) {
+/// Returns both the remaining path and the list of extension objects.
+pub fn separate_known_extensions_from_name(path: &Path) -> Result<(&Path, Vec<Extension>)> {
let mut extensions = vec![];
let Some(mut name) = path.file_name().and_then(<[u8] as ByteSlice>::from_os_str) else {
-return (path, extensions);
+return Ok((path, extensions));
};
-// While there is known extensions at the tail, grab them
-while let Some(extension) = split_extension(&mut name) {
+while let Some((new_name, extension)) = split_extension_at_end(name) {
+name = new_name;
extensions.insert(0, extension);
if extensions[0].is_archive() {
if let Some((_, misplaced_extension)) = split_extension_at_end(name) {
return Err(FinalError::with_title("File extensions are invalid for operation")
.detail(format!(
"The archive extension '.{}' must come before any non-archive extensions, like '.{}'",
extensions[0].display_text, misplaced_extension.display_text
))
.detail(format!(
"File: '{path:?}' contains '.{}' and '.{}'",
misplaced_extension.display_text, extensions[0].display_text,
))
.detail(format!("'.{}' is an archive format", extensions[0].display_text))
.detail(format!(
"'.{}' isn't an archive format",
misplaced_extension.display_text
))
.hint("You can use `--format` to specify what format to use, examples:")
.hint(" ouch compress 1 2 file --format zip")
.hint(" ouch decompress file --format gz")
.hint(" ouch list archive --format zip")
.into());
}
break;
}
}
if let Ok(name) = name.to_str() {
@ -210,13 +236,12 @@ pub fn separate_known_extensions_from_name(path: &Path) -> (&Path, Vec<Extension
}
}
-(name.to_path().unwrap(), extensions)
+Ok((name.to_path().unwrap(), extensions))
}
/// Extracts extensions from a path, return only the list of extension objects
-pub fn extensions_from_path(path: &Path) -> Vec<Extension> {
-let (_, extensions) = separate_known_extensions_from_name(path);
-extensions
+pub fn extensions_from_path(path: &Path) -> Result<Vec<Extension>> {
+separate_known_extensions_from_name(path).map(|(_, extensions)| extensions)
}
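As a rough mental model of the rule the loop above enforces, here is a simplified, string-based sketch (ouch's real code works on bytes and Extension values and builds a FinalError instead of returning a tuple): extensions are consumed right-to-left, and once an archive extension is reached, any known extension still to its left is misplaced.

    fn first_misplaced_extension(name: &str) -> Option<(&str, &str)> {
        // Illustrative subsets of the supported formats.
        const ARCHIVES: &[&str] = &["tar", "zip", "7z"];
        const COMPRESSION: &[&str] = &["gz", "zst", "lz4", "xz", "bz2"];
        let known = |ext: &str| ARCHIVES.contains(&ext) || COMPRESSION.contains(&ext);

        let mut rest = name;
        while let Some((before, ext)) = rest.rsplit_once('.') {
            if !known(ext) {
                break;
            }
            if ARCHIVES.contains(&ext) {
                // An archive extension was reached: a known extension still to
                // its left is "misplaced" and triggers the new error.
                return before
                    .rsplit_once('.')
                    .and_then(|(_, prev)| known(prev).then_some((ext, prev)));
            }
            rest = before;
        }
        None
    }

    fn main() {
        assert_eq!(first_misplaced_extension("file.tar.gz"), None); // archive first: fine
        assert_eq!(first_misplaced_extension("file.gz.tar"), Some(("tar", "gz")));
        assert_eq!(first_misplaced_extension("file.tar.zip"), Some(("zip", "tar")));
    }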
/// Panics if formats has an empty list of compression formats /// Panics if formats has an empty list of compression formats
@ -271,14 +296,13 @@ pub fn build_archive_file_suggestion(path: &Path, suggested_extension: &str) ->
#[cfg(test)]
mod tests {
use super::*;
-use crate::utils::logger::spawn_logger_thread;
#[test]
fn test_extensions_from_path() {
let path = Path::new("bolovo.tar.gz");
-let extensions: Vec<Extension> = extensions_from_path(path);
-let formats: Vec<CompressionFormat> = flatten_compression_formats(&extensions);
+let extensions = extensions_from_path(path).unwrap();
+let formats = flatten_compression_formats(&extensions);
assert_eq!(formats, vec![Tar, Gzip]);
}
@ -286,32 +310,31 @@ mod tests {
#[test]
/// Test extension parsing for input/output files
fn test_separate_known_extensions_from_name() {
-spawn_logger_thread();
assert_eq!(
-separate_known_extensions_from_name("file".as_ref()),
+separate_known_extensions_from_name("file".as_ref()).unwrap(),
("file".as_ref(), vec![])
);
assert_eq!(
-separate_known_extensions_from_name("tar".as_ref()),
+separate_known_extensions_from_name("tar".as_ref()).unwrap(),
("tar".as_ref(), vec![])
);
assert_eq!(
-separate_known_extensions_from_name(".tar".as_ref()),
+separate_known_extensions_from_name(".tar".as_ref()).unwrap(),
(".tar".as_ref(), vec![])
);
assert_eq!(
-separate_known_extensions_from_name("file.tar".as_ref()),
+separate_known_extensions_from_name("file.tar".as_ref()).unwrap(),
("file".as_ref(), vec![Extension::new(&[Tar], "tar")])
);
assert_eq!(
-separate_known_extensions_from_name("file.tar.gz".as_ref()),
+separate_known_extensions_from_name("file.tar.gz".as_ref()).unwrap(),
(
"file".as_ref(),
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
)
);
assert_eq!(
-separate_known_extensions_from_name(".tar.gz".as_ref()),
+separate_known_extensions_from_name(".tar.gz".as_ref()).unwrap(),
(".tar".as_ref(), vec![Extension::new(&[Gzip], "gz")])
);
}
@ -367,4 +390,10 @@ mod tests {
"linux.pkg.info.tar.zst" "linux.pkg.info.tar.zst"
); );
} }
#[test]
fn test_extension_parsing_with_multiple_archive_formats() {
assert!(separate_known_extensions_from_name("file.tar.zip".as_ref()).is_err());
assert!(separate_known_extensions_from_name("file.7z.zst.zip.lz4".as_ref()).is_err());
}
}

View File

@ -125,10 +125,14 @@ mod logger_thread {
static SENDER: OnceLock<LogSender> = OnceLock::new();
#[track_caller]
-fn setup_channel() -> LogReceiver {
-let (tx, rx) = mpsc::channel();
-SENDER.set(tx).expect("`setup_channel` should only be called once");
-rx
+fn setup_channel() -> Option<LogReceiver> {
+let mut optional = None;
+SENDER.get_or_init(|| {
+let (tx, rx) = mpsc::channel();
+optional = Some(rx);
+tx
+});
+optional
}
#[track_caller]
@ -138,6 +142,9 @@ mod logger_thread {
#[track_caller]
pub(super) fn send_print_command(msg: PrintMessage) {
if cfg!(test) {
spawn_logger_thread();
}
get_sender()
.send(LoggerCommand::Print(msg))
.expect("Failed to send print command");
@ -170,8 +177,9 @@ mod logger_thread {
}
pub fn spawn_logger_thread() {
-let log_receiver = setup_channel();
+if let Some(log_receiver) = setup_channel() {
thread::spawn(move || run_logger(log_receiver));
+}
}
fn run_logger(log_receiver: LogReceiver) {
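A standalone sketch of the setup_channel pattern introduced above (String stands in for the real message types): the first caller initializes the OnceLock and receives the channel's receiving end, every later call is a no-op, so tests can trigger spawn_logger_thread repeatedly without panicking.

    use std::sync::{mpsc, OnceLock};

    static SENDER: OnceLock<mpsc::Sender<String>> = OnceLock::new();

    fn setup_channel() -> Option<mpsc::Receiver<String>> {
        let mut receiver = None;
        SENDER.get_or_init(|| {
            // Runs at most once; the receiver escapes only on that first call.
            let (tx, rx) = mpsc::channel();
            receiver = Some(rx);
            tx
        });
        receiver
    }

    fn main() {
        assert!(setup_channel().is_some()); // first call creates the channel
        assert!(setup_channel().is_none()); // later calls are no-ops
    }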

View File

@ -7,8 +7,12 @@ use std::{
path::{Path, PathBuf},
};
use bstr::ByteSlice;
use fs_err as fs;
use itertools::Itertools;
use memchr::memmem;
use parse_display::Display;
use pretty_assertions::assert_eq;
use proptest::sample::size_range;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use tempfile::tempdir;
@ -17,7 +21,7 @@ use test_strategy::{proptest, Arbitrary};
use crate::utils::{assert_same_directory, write_random_content};
/// tar and zip extensions
-#[derive(Arbitrary, Debug, Display)]
+#[derive(Arbitrary, Clone, Copy, Debug, Display)]
#[display(style = "lowercase")]
enum DirectoryExtension {
#[display("7z")]
@ -211,7 +215,7 @@ fn multiple_files(
let before = &dir.join("before");
let before_dir = &before.join("dir");
fs::create_dir_all(before_dir).unwrap();
-let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+let archive = &dir.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
let after = &dir.join("after");
create_random_files(before_dir, depth, &mut SmallRng::from_entropy());
ouch!("-A", "c", before_dir, archive);
@ -238,7 +242,7 @@ fn multiple_files_with_conflict_and_choice_to_overwrite(
fs::create_dir_all(after_dir).unwrap();
create_random_files(after_dir, depth, &mut SmallRng::from_entropy());
-let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+let archive = &dir.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
ouch!("-A", "c", before_dir, archive);
crate::utils::cargo_bin()
@ -279,7 +283,7 @@ fn multiple_files_with_conflict_and_choice_to_not_overwrite(
fs::write(after_dir.join("something.txt"), "Some content").unwrap();
fs::copy(after_dir.join("something.txt"), after_backup_dir.join("something.txt")).unwrap();
-let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+let archive = &dir.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
ouch!("-A", "c", before_dir, archive);
crate::utils::cargo_bin()
@ -390,7 +394,7 @@ fn smart_unpack_with_single_file(
})
.collect::<Vec<_>>();
-let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
@ -441,7 +445,7 @@ fn smart_unpack_with_multiple_files(
.map(|entry| entry.unwrap().path())
.collect::<Vec<PathBuf>>();
-let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
let output_path = root_path.join("archive");
assert!(!output_path.exists());
@ -490,7 +494,7 @@ fn no_smart_unpack_with_single_file(
.map(|entry| entry.unwrap().path())
.collect::<Vec<PathBuf>>();
-let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
let output_path = root_path.join("archive");
assert!(!output_path.exists());
@ -540,7 +544,7 @@ fn no_smart_unpack_with_multiple_files(
.map(|entry| entry.unwrap().path())
.collect::<Vec<PathBuf>>();
-let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
let output_path = root_path.join("archive");
assert!(!output_path.exists());
@ -588,7 +592,7 @@ fn multiple_files_with_disabled_smart_unpack_by_dir(
let dest_files_path = root_path.join("dest_files");
fs::create_dir_all(&dest_files_path).unwrap();
-let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
@ -705,7 +709,7 @@ fn symlink_pack_and_unpack(
files_path.push(symlink_path);
-let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
@ -865,3 +869,199 @@ fn unpack_multiple_sources_into_the_same_destination_with_merge(
assert_eq!(5, out_path.as_path().read_dir()?.count());
}
#[test]
fn reading_nested_archives_with_two_archive_extensions_adjacent() {
let archive_formats = ["tar", "zip", "7z"].into_iter();
for (first_archive, second_archive) in archive_formats.clone().cartesian_product(archive_formats.rev()) {
let temp_dir = tempdir().unwrap();
let in_dir = |path: &str| format!("{}/{}", temp_dir.path().display(), path);
fs::write(in_dir("a.txt"), "contents").unwrap();
let files = [
"a.txt",
&format!("b.{first_archive}"),
&format!("c.{first_archive}.{second_archive}"),
];
let transformations = [first_archive, second_archive];
let compressed_path = in_dir(files.last().unwrap());
for (window, format) in files.windows(2).zip(transformations.iter()) {
let [a, b] = [window[0], window[1]].map(in_dir);
crate::utils::cargo_bin()
.args(["compress", &a, &b, "--format", format])
.assert()
.success();
}
let output = crate::utils::cargo_bin()
.args(["list", &compressed_path, "--yes"])
.assert()
.failure()
.get_output()
.clone();
let stderr = output.stderr.to_str().unwrap();
assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
let output = crate::utils::cargo_bin()
.args(["decompress", &compressed_path, "--dir", &in_dir("out"), "--yes"])
.assert()
.failure()
.get_output()
.clone();
let stderr = output.stderr.to_str().unwrap();
assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
}
}
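A quick sketch of what the cartesian_product call in these tests iterates over (illustrative, using the same three format names): every ordered (first, second) combination, including pairs where both formats are the same, so all nesting orders get exercised.

    use itertools::Itertools;

    fn main() {
        let formats = ["tar", "zip", "7z"].into_iter();
        let pairs: Vec<_> = formats.clone().cartesian_product(formats.rev()).collect();
        assert_eq!(pairs.len(), 9);
        assert!(pairs.contains(&("tar", "zip")));
        assert!(pairs.contains(&("zip", "zip")));
    }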
#[test]
fn reading_nested_archives_with_two_archive_extensions_interleaved() {
let archive_formats = ["tar", "zip", "7z"].into_iter();
for (first_archive, second_archive) in archive_formats.clone().cartesian_product(archive_formats.rev()) {
let temp_dir = tempdir().unwrap();
let in_dir = |path: &str| format!("{}/{}", temp_dir.path().display(), path);
fs::write(in_dir("a.txt"), "contents").unwrap();
let files = [
"a.txt",
&format!("c.{first_archive}"),
&format!("d.{first_archive}.zst"),
&format!("e.{first_archive}.zst.{second_archive}"),
&format!("f.{first_archive}.zst.{second_archive}.lz4"),
];
let transformations = [first_archive, "zst", second_archive, "lz4"];
let compressed_path = in_dir(files.last().unwrap());
for (window, format) in files.windows(2).zip(transformations.iter()) {
let [a, b] = [window[0], window[1]].map(in_dir);
crate::utils::cargo_bin()
.args(["compress", &a, &b, "--format", format])
.assert()
.success();
}
let output = crate::utils::cargo_bin()
.args(["list", &compressed_path, "--yes"])
.assert()
.failure()
.get_output()
.clone();
let stderr = output.stderr.to_str().unwrap();
assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
let output = crate::utils::cargo_bin()
.args(["decompress", &compressed_path, "--dir", &in_dir("out"), "--yes"])
.assert()
.failure()
.get_output()
.clone();
let stderr = output.stderr.to_str().unwrap();
assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
}
}
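A hedged note on the assertion style used throughout these tests: memmem::find from the memchr crate is a plain substring search over bytes, so the tests only pin the hint text, not the full error output (the stderr value below is made up).

    use memchr::memmem;

    fn main() {
        // The needle matches the hint wording built in src/extension.rs above.
        let stderr = b"hint: You can use `--format` to specify what format to use, examples:";
        assert!(memmem::find(stderr, b"use `--format` to specify what format to use").is_some());
        assert_eq!(memmem::find(b"abc", b"zzz"), None);
    }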
#[test]
fn compressing_archive_with_two_archive_formats() {
let archive_formats = ["tar", "zip", "7z"].into_iter();
for (first_archive, second_archive) in archive_formats.clone().cartesian_product(archive_formats.rev()) {
let temp_dir = tempdir().unwrap();
let dir = temp_dir.path().display().to_string();
let output = crate::utils::cargo_bin()
.args([
"compress",
"README.md",
&format!("{dir}/out.{first_archive}.{second_archive}"),
"--yes",
])
.assert()
.failure()
.get_output()
.clone();
let stderr = output.stderr.to_str().unwrap();
assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
let output = crate::utils::cargo_bin()
.args([
"compress",
"README.md",
&format!("{dir}/out.{first_archive}.{second_archive}"),
"--yes",
"--format",
&format!("{first_archive}.{second_archive}"),
])
.assert()
.failure()
.get_output()
.clone();
let stderr = output.stderr.to_str().unwrap();
assert!(memmem::find(
stderr.as_bytes(),
b"can only be used at the start of the file extension",
)
.is_some());
crate::utils::cargo_bin()
.args([
"compress",
"README.md",
&format!("{dir}/out.{first_archive}.{second_archive}"),
"--yes",
"--format",
first_archive,
])
.assert()
.success();
}
}
#[test]
fn fail_when_compressing_archive_as_the_second_extension() {
for archive_format in ["tar", "zip", "7z"] {
let temp_dir = tempdir().unwrap();
let dir = temp_dir.path().display().to_string();
let output = crate::utils::cargo_bin()
.args([
"compress",
"README.md",
&format!("{dir}/out.zst.{archive_format}"),
"--yes",
])
.assert()
.failure()
.get_output()
.clone();
let stderr = output.stderr.to_str().unwrap();
assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
let output = crate::utils::cargo_bin()
.args([
"compress",
"README.md",
&format!("{dir}/out_file"),
"--yes",
"--format",
&format!("zst.{archive_format}"),
])
.assert()
.failure()
.get_output()
.clone();
let stderr = output.stderr.to_str().unwrap();
assert!(memmem::find(
stderr.as_bytes(),
format!("'{archive_format}' can only be used at the start of the file extension").as_bytes(),
)
.is_some());
}
}