Compare commits

...

11 Commits
0.6.0 ... main

Author SHA1 Message Date
João Marcos
11344a6ffd
Fix tar extraction count when --quiet (#824) 2025-05-17 13:02:38 -03:00
tommady
da9b32a366
Fix 7z BadSignature error when compressing and then listing (#819) 2025-05-17 13:00:25 -03:00
João Marcos
c3b89b038d
improve misleading error message (#818)
Follow up to #817
2025-05-06 00:27:43 -03:00
João Marcos
c8f97197c3
Give good error messages when archive extensions are invalid (#817)
+ simplify unit tests, don't require them to start the logger

+ unrelated TODOs
2025-05-06 00:08:38 -03:00
Amyspark
07967927dd
feat: Make bzip3 optout (#814)
Co-authored-by: João Marcos <marcospb19@hotmail.com>
2025-05-03 20:43:59 -03:00
tommady
1ff1932e3d
Merge folders in decompression (#798)
Signed-off-by: tommady <tommady@users.noreply.github.com>
2025-05-01 04:20:33 -03:00
Talison Fabio
c97bb6a2d6
feat: Add flag '--no-smart-unpack' to disable smart unpack (#809) 2025-04-28 11:03:50 -03:00
João Marcos
2b9da1e441 Bump version to 0.6.1 2025-04-20 23:05:08 -03:00
João Marcos
add1793d75 Update draft-release-automatic-trigger.yml 2025-04-20 17:19:32 -03:00
João Marcos
c3ff0e963f
Releases: restore previous directory structure (#805) 2025-04-20 13:12:34 -03:00
João Marcos
0b122fa05c
Fix .zip crash when file mode isn't present (#804) 2025-04-20 13:09:48 -03:00
24 changed files with 824 additions and 156 deletions

View File

@ -25,7 +25,7 @@ on:
type: boolean
required: true
artifact_upload_mode:
description: "Control which artifacts to upload: 'none' for no uploads, 'with_default_features' to upload only artifacts with default features (use_zlib+use_zstd_thin+unrar), or 'all' to upload all feature combinations."
description: "Control which artifacts to upload: 'none' for no uploads, 'with_default_features' to upload only artifacts with default features (use_zlib+use_zstd_thin+unrar+bzip3), or 'all' to upload all feature combinations."
type: string
required: true
@ -37,7 +37,10 @@ jobs:
strategy:
fail-fast: false
matrix:
feature-unrar: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
# TODO: avoid exploding the matrix by removing unrar and bzip3 from the all combinations runs
# I can add a monthly run with all combinations
feature-unrar: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
feature-bzip3: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
feature-use-zlib: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
feature-use-zstd-thin: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
target:
@ -76,12 +79,14 @@ jobs:
- target: armv7-unknown-linux-musleabihf
use-cross: true
# features (unless `matrix_all_combinations` is true, we only run these on linux-gnu)
- feature-unrar: true
- feature-unrar: false
target: x86_64-unknown-linux-gnu
- feature-use-zlib: true
target: x86_64-unknown-linux-gnu
- feature-use-zstd-thin: true
target: x86_64-unknown-linux-gnu
- feature-bzip3: false
target: x86_64-unknown-linux-gnu
steps:
- name: Checkout
@ -105,6 +110,7 @@ jobs:
if [[ "${{ matrix.feature-unrar }}" == true ]]; then FEATURES+=(unrar); fi
if [[ "${{ matrix.feature-use-zlib }}" == true ]]; then FEATURES+=(use_zlib); fi
if [[ "${{ matrix.feature-use-zstd-thin }}" == true ]]; then FEATURES+=(use_zstd_thin); fi
if [[ "${{ matrix.feature-bzip3 }}" == true ]]; then FEATURES+=(bzip3); fi
# Output plus-separated list for artifact names
IFS='+'
echo "FEATURES_PLUS=${FEATURES[*]}" >> $GITHUB_OUTPUT
@ -127,7 +133,7 @@ jobs:
- uses: Swatinem/rust-cache@v2
with:
key: "${{ matrix.target }}-${{ matrix.feature-unrar }}-${{ matrix.feature-use-zlib }}-${{ matrix.feature-use-zstd-thin }}"
key: "${{ matrix.target }}-${{ matrix.feature-unrar }}-${{ matrix.feature-use-zlib }}-${{ matrix.feature-use-zstd-thin }}-${{ matrix.feature-bzip3 }}"
- name: Test on stable
# there's no way to run tests for ARM64 Windows for now
@ -146,7 +152,7 @@ jobs:
if: |
${{ inputs.artifact_upload_mode != 'none' &&
(inputs.artifact_upload_mode == 'all' ||
(matrix.feature-unrar && matrix.feature-use-zlib && matrix.feature-use-zstd-thin)) }}
(matrix.feature-unrar && matrix.feature-use-zlib && matrix.feature-use-zstd-thin && matrix.feature-bzip3)) }}
uses: actions/upload-artifact@v4
with:
name: ouch-${{ matrix.target }}${{ steps.concat-features.outputs.FEATURES_PLUS != '' && format('-{0}', steps.concat-features.outputs.FEATURES_PLUS) || '' }}

View File

@ -3,7 +3,7 @@ name: Automatic trigger draft release
on:
push:
tags:
- "[0-9]+.[0-9]+.[0-9]+"
- "[0-9]+.[0-9]+.[0-9]+-rc[0-9]+"
jobs:
call-workflow-build-artifacts-and-run-tests:

View File

@ -18,13 +18,30 @@ Categories Used:
**Bullet points in chronological order by PR**
## [Unreleased](https://github.com/ouch-org/ouch/compare/0.6.0...HEAD)
## [Unreleased](https://github.com/ouch-org/ouch/compare/0.6.1...HEAD)
### New Features
- Merge folders in decompression [\#798](https://github.com/ouch-org/ouch/pull/798) ([tommady](https://github.com/tommady))
- Add `--no-smart-unpack` flag to decompression command to disable smart unpack [\#809](https://github.com/ouch-org/ouch/pull/809) ([talis-fb](https://github.com/talis-fb))
### Improvements
- Give better error messages when archive extensions are invalid [\#817](https://github.com/ouch-org/ouch/pull/817) ([marcospb19](https://github.com/marcospb19))
### Bug Fixes
- Fix tar extraction count when --quiet [\#824](https://github.com/ouch-org/ouch/pull/824) ([marcospb19](https://github.com/marcospb19))
- Fix 7z BadSignature error when compressing and then listing [\#819](https://github.com/ouch-org/ouch/pull/819) ([tommady](https://github.com/tommady))
### Tweaks
- Make `.bz3` opt-out [\#814](https://github.com/ouch-org/ouch/pull/814) ([amyspark](https://github.com/amyspark))
## [0.6.1](https://github.com/ouch-org/ouch/compare/0.6.0...0.6.1)
- Fix .zip crash when file mode isn't present [\#804](https://github.com/ouch-org/ouch/pull/804) ([marcospb19](https://github.com/marcospb19))
## [0.6.0](https://github.com/ouch-org/ouch/compare/0.5.1...0.6.0)
### New Features
@ -51,7 +68,7 @@ Categories Used:
### Improvements
- Fix logging IO bottleneck [\#642](https://github.com/ouch-org/ouch/pull/642) ([AntoniosBarotsis](https://github.com/AntoniosBarotsis))
- Support decompression over stdin [\#692](https://github.com/ouch-org/ouch/pull/692) ([rcorre](https://github.com/rcorre))
- Support decompression over stdin [\#692](https://github.com/ouch-org/ouch/pull/692) ([rcorre](https://github.com/rcorre))
- Make `--format` more forgiving with the formatting of the provided format [\#519](https://github.com/ouch-org/ouch/pull/519) ([marcospb19](https://github.com/marcospb19))
- Use buffered writer for list output [\#764](https://github.com/ouch-org/ouch/pull/764) ([killercup](https://github.com/killercup))
- Disable smart unpack when `--dir` flag is provided in decompress command [\#782](https://github.com/ouch-org/ouch/pull/782) ([talis-fb](https://github.com/talis-fb))

37
Cargo.lock generated
View File

@ -519,6 +519,12 @@ dependencies = [
"powerfmt",
]
[[package]]
name = "diff"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
[[package]]
name = "difflib"
version = "0.4.0"
@ -818,6 +824,15 @@ version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]]
name = "itertools"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
dependencies = [
"either",
]
[[package]]
name = "jiff"
version = "0.2.8"
@ -1066,7 +1081,7 @@ checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e"
[[package]]
name = "ouch"
version = "0.6.0"
version = "0.6.1"
dependencies = [
"assert_cmd",
"atty",
@ -1081,17 +1096,21 @@ dependencies = [
"filetime_creation",
"flate2",
"fs-err",
"glob",
"gzp",
"ignore",
"infer",
"insta",
"is_executable",
"itertools",
"libc",
"linked-hash-map",
"lz4_flex",
"memchr",
"num_cpus",
"once_cell",
"parse-display",
"pretty_assertions",
"proptest",
"rand 0.8.5",
"rayon",
@ -1246,6 +1265,16 @@ dependencies = [
"termtree",
]
[[package]]
name = "pretty_assertions"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d"
dependencies = [
"diff",
"yansi",
]
[[package]]
name = "proc-macro2"
version = "1.0.93"
@ -2047,6 +2076,12 @@ dependencies = [
"lzma-sys",
]
[[package]]
name = "yansi"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
[[package]]
name = "zerocopy"
version = "0.7.35"

View File

@ -1,6 +1,6 @@
[package]
name = "ouch"
version = "0.6.0"
version = "0.6.1"
authors = [
"João Marcos <marcospb19@hotmail.com>",
"Vinícius Rodrigues Miguel <vrmiguel99@gmail.com>",
@ -19,7 +19,7 @@ brotli = "7.0.0"
bstr = { version = "1.10.0", default-features = false, features = ["std"] }
bytesize = "1.3.0"
bzip2 = "0.4.4"
bzip3 = { version = "0.9.0", features = ["bundled"] }
bzip3 = { version = "0.9.0", features = ["bundled"], optional = true }
clap = { version = "4.5.20", features = ["derive", "env"] }
filetime_creation = "0.2"
flate2 = { version = "1.0.30", default-features = false }
@ -58,9 +58,13 @@ clap_mangen = "0.2.24"
[dev-dependencies]
assert_cmd = "2.0.14"
glob = "0.3.2"
infer = "0.16.0"
insta = { version = "1.40.0", features = ["filters"] }
itertools = "0.14.0"
memchr = "2.7.4"
parse-display = "0.9.1"
pretty_assertions = "1.4.1"
proptest = "1.5.0"
rand = { version = "0.8.5", default-features = false, features = [
"small_rng",
@ -70,7 +74,7 @@ regex = "1.10.4"
test-strategy = "0.4.0"
[features]
default = ["unrar", "use_zlib", "use_zstd_thin"]
default = ["unrar", "use_zlib", "use_zstd_thin", "bzip3"]
use_zlib = ["flate2/zlib", "gzp/deflate_zlib", "zip/deflate-zlib"]
use_zstd_thin = ["zstd/thin"]
allow_piped_choice = []
@ -88,4 +92,5 @@ inherits = "release"
lto = false
opt-level = 2
incremental = true
codegen-units = 16
codegen-units = 32
strip = false

View File

@ -8,7 +8,7 @@ cd downloaded_artifacts
echo "entered 'downloaded_artifacts/'"
ls -lA -w 1
TARGETS=(
PLATFORMS=(
"aarch64-pc-windows-msvc"
"aarch64-unknown-linux-gnu"
"aarch64-unknown-linux-musl"
@ -20,42 +20,40 @@ TARGETS=(
"x86_64-unknown-linux-gnu"
"x86_64-unknown-linux-musl"
)
# Temporary, we'll remove allow_piped_choice later
DEFAULT_FEATURES="allow_piped_choice+unrar+use_zlib+use_zstd_thin"
# TODO: remove allow_piped_choice later
DEFAULT_FEATURES="allow_piped_choice+unrar+use_zlib+use_zstd_thin+bzip3"
for target in "${TARGETS[@]}"; do
input_dir="ouch-${target}-${DEFAULT_FEATURES}"
for platform in "${PLATFORMS[@]}"; do
path="ouch-${platform}"
echo "Processing $path"
if [ ! -d "$input_dir" ]; then
echo "ERROR: Could not find artifact directory for $target with default features ($input_dir)"
if [ ! -d "${path}-${DEFAULT_FEATURES}" ]; then
echo "ERROR: Could not find artifact directory for $platform with default features ($path)"
exit 1
fi
mv "${path}-${DEFAULT_FEATURES}" "$path" # remove the annoying suffix
echo "Processing $input_dir"
cp ../{README.md,LICENSE,CHANGELOG.md} "$path"
mkdir -p "$path/man"
mkdir -p "$path/completions"
cp ../{README.md,LICENSE,CHANGELOG.md} "$input_dir"
mkdir -p "$input_dir/man"
mkdir -p "$input_dir/completions"
mv "$path"/man-page-and-completions-artifacts/*.1 "$path/man"
mv "$path"/man-page-and-completions-artifacts/* "$path/completions"
rm -r "$path/man-page-and-completions-artifacts"
mv "$input_dir"/man-page-and-completions-artifacts/*.1 "$input_dir/man"
mv "$input_dir"/man-page-and-completions-artifacts/* "$input_dir/completions"
rm -r "$input_dir/man-page-and-completions-artifacts"
if [[ "$platform" == *"-windows-"* ]]; then
mv "$path/target/$platform/release/ouch.exe" "$path"
rm -rf "$path/target"
output_name="ouch-${target}"
if [[ "$target" == *"-windows-"* ]]; then
mv "$input_dir/target/$target/release/ouch.exe" "$input_dir"
rm -rf "$input_dir/target"
zip -r "../output_assets/${output_name}.zip" "$input_dir"
echo "Created output_assets/${output_name}.zip"
zip -r "../output_assets/${path}.zip" "$path"
echo "Created output_assets/${path}.zip"
else
mv "$input_dir/target/$target/release/ouch" "$input_dir"
rm -rf "$input_dir/target"
chmod +x "$input_dir/ouch"
mv "$path/target/$platform/release/ouch" "$path"
rm -rf "$path/target"
chmod +x "$path/ouch"
tar czf "../output_assets/${output_name}.tar.gz" "$input_dir"
echo "Created output_assets/${output_name}.tar.gz"
tar czf "../output_assets/${path}.tar.gz" "$path"
echo "Created output_assets/${path}.tar.gz"
fi
done

View File

@ -0,0 +1,7 @@
use crate::Error;
pub fn no_support() -> Error {
Error::UnsupportedFormat {
reason: "BZip3 support is disabled for this build, possibly due to missing bindgen-cli dependency.".into(),
}
}

View File

@ -1,5 +1,7 @@
//! Archive compression algorithms
#[cfg(not(feature = "bzip3"))]
pub mod bzip3_stub;
#[cfg(feature = "unrar")]
pub mod rar;
#[cfg(not(feature = "unrar"))]

View File

@ -18,8 +18,6 @@ pub fn unpack_archive(
password: Option<&[u8]>,
quiet: bool,
) -> crate::Result<usize> {
assert!(output_folder.read_dir().expect("dir exists").next().is_none());
let archive = match password {
Some(password) => Archive::with_password(archive_path, password),
None => Archive::new(archive_path),

View File

@ -171,12 +171,10 @@ where
}
/// List contents of `archive_path`, returning a vector of archive entries
pub fn list_archive(
archive_path: &Path,
password: Option<&[u8]>,
) -> Result<impl Iterator<Item = crate::Result<FileInArchive>>> {
let reader = fs::File::open(archive_path)?;
pub fn list_archive<R>(reader: R, password: Option<&[u8]>) -> Result<impl Iterator<Item = crate::Result<FileInArchive>>>
where
R: Read + Seek,
{
let mut files = Vec::new();
let entry_extract_fn = |entry: &SevenZArchiveEntry, _: &mut dyn Read, _: &PathBuf| {

View File

@ -24,7 +24,6 @@ use crate::{
/// Unpacks the archive given by `archive` into the folder given by `into`.
/// Assumes that output_folder is empty
pub fn unpack_archive(reader: Box<dyn Read>, output_folder: &Path, quiet: bool) -> crate::Result<usize> {
assert!(output_folder.read_dir().expect("dir exists").next().is_none());
let mut archive = tar::Archive::new(reader);
let mut files_unpacked = 0;
@ -60,9 +59,8 @@ pub fn unpack_archive(reader: Box<dyn Read>, output_folder: &Path, quiet: bool)
Bytes::new(file.size()),
utils::strip_cur_dir(&output_folder.join(file.path()?)),
));
files_unpacked += 1;
}
files_unpacked += 1;
}
Ok(files_unpacked)

View File

@ -37,8 +37,6 @@ pub fn unpack_archive<R>(
where
R: Read + Seek,
{
assert!(output_folder.read_dir().expect("dir exists").next().is_none());
let mut unpacked_files = 0;
for idx in 0..archive.len() {
@ -85,10 +83,8 @@ where
));
}
let mode = file.unix_mode().ok_or_else(|| {
std::io::Error::new(std::io::ErrorKind::InvalidData, "Cannot extract file's mode")
})?;
let is_symlink = (mode & 0o170000) == 0o120000;
let mode = file.unix_mode();
let is_symlink = mode.is_some_and(|mode| mode & 0o170000 == 0o120000);
if is_symlink {
let mut target = String::new();

View File

@ -100,6 +100,10 @@ pub enum Subcommand {
/// Remove the source file after successful decompression
#[arg(short = 'r', long)]
remove: bool,
/// Disable Smart Unpack
#[arg(long)]
no_smart_unpack: bool,
},
/// List contents of an archive
#[command(visible_aliases = ["l", "ls"])]
@ -156,6 +160,7 @@ mod tests {
files: vec!["\x00\x11\x22".into()],
output_dir: None,
remove: false,
no_smart_unpack: false,
},
}
}
@ -169,6 +174,7 @@ mod tests {
files: to_paths(["file.tar.gz"]),
output_dir: None,
remove: false,
no_smart_unpack: false,
},
..mock_cli_args()
}
@ -180,6 +186,7 @@ mod tests {
files: to_paths(["file.tar.gz"]),
output_dir: None,
remove: false,
no_smart_unpack: false,
},
..mock_cli_args()
}
@ -191,6 +198,7 @@ mod tests {
files: to_paths(["a", "b", "c"]),
output_dir: None,
remove: false,
no_smart_unpack: false,
},
..mock_cli_args()
}

View File

@ -57,10 +57,16 @@ pub fn compress_files(
encoder,
level.map_or_else(Default::default, |l| bzip2::Compression::new((l as u32).clamp(1, 9))),
)),
Bzip3 => Box::new(
// Use block size of 16 MiB
bzip3::write::Bz3Encoder::new(encoder, 16 * 2_usize.pow(20))?,
),
Bzip3 => {
#[cfg(not(feature = "bzip3"))]
return Err(archive::bzip3_stub::no_support());
#[cfg(feature = "bzip3")]
Box::new(
// Use block size of 16 MiB
bzip3::write::Bz3Encoder::new(encoder, 16 * 2_usize.pow(20))?,
)
}
Lz4 => Box::new(lz4_flex::frame::FrameEncoder::new(encoder).auto_finish()),
Lzma => Box::new(xz2::write::XzEncoder::new(
encoder,

View File

@ -6,6 +6,8 @@ use std::{
use fs_err as fs;
#[cfg(not(feature = "bzip3"))]
use crate::archive;
use crate::{
commands::{warn_user_about_loading_sevenz_in_memory, warn_user_about_loading_zip_in_memory},
extension::{
@ -32,6 +34,7 @@ pub struct DecompressOptions<'a> {
pub output_dir: &'a Path,
pub output_file_path: PathBuf,
pub is_output_dir_provided: bool,
pub is_smart_unpack: bool,
pub question_policy: QuestionPolicy,
pub quiet: bool,
pub password: Option<&'a [u8]>,
@ -75,6 +78,7 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
@ -116,13 +120,19 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
let decoder: Box<dyn Read> = match format {
Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
Bzip3 => Box::new(bzip3::read::Bz3Decoder::new(decoder)?),
Bzip3 => {
#[cfg(not(feature = "bzip3"))]
return Err(archive::bzip3_stub::no_support());
#[cfg(feature = "bzip3")]
Box::new(bzip3::read::Bz3Decoder::new(decoder)?)
}
Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
Tar | Zip | Rar | SevenZip => unreachable!(),
Tar | Zip | Rar | SevenZip => decoder,
};
Ok(decoder)
};
@ -137,7 +147,11 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
reader = chain_reader_decoder(&first_extension, reader)?;
let mut writer = match utils::ask_to_create_file(&options.output_file_path, options.question_policy)? {
let mut writer = match utils::ask_to_create_file(
&options.output_file_path,
options.question_policy,
QuestionAction::Decompression,
)? {
Some(file) => file,
None => return Ok(()),
};
@ -153,6 +167,7 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
@ -187,6 +202,7 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
@ -219,6 +235,7 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
@ -261,6 +278,7 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
@ -296,12 +314,19 @@ fn execute_decompression(
output_file_path: &Path,
question_policy: QuestionPolicy,
is_output_dir_provided: bool,
is_smart_unpack: bool,
) -> crate::Result<ControlFlow<(), usize>> {
if is_output_dir_provided {
unpack(unpack_fn, output_dir, question_policy)
} else {
smart_unpack(unpack_fn, output_dir, output_file_path, question_policy)
if is_smart_unpack {
return smart_unpack(unpack_fn, output_dir, output_file_path, question_policy);
}
let target_output_dir = if is_output_dir_provided {
output_dir
} else {
output_file_path
};
unpack(unpack_fn, target_output_dir, question_policy)
}
/// Unpacks an archive creating the output directory, this function will create the output_dir
@ -318,7 +343,7 @@ fn unpack(
let output_dir_cleaned = if is_valid_output_dir {
output_dir.to_owned()
} else {
match utils::resolve_path_conflict(output_dir, question_policy)? {
match utils::resolve_path_conflict(output_dir, question_policy, QuestionAction::Decompression)? {
Some(path) => path,
None => return Ok(ControlFlow::Break(())),
}
@ -374,7 +399,7 @@ fn smart_unpack(
// Before moving, need to check if a file with the same name already exists
// If it does, need to ask the user what to do
new_path = match utils::resolve_path_conflict(&new_path, question_policy)? {
new_path = match utils::resolve_path_conflict(&new_path, question_policy, QuestionAction::Decompression)? {
Some(path) => path,
None => return Ok(ControlFlow::Break(())),
};

View File

@ -6,7 +6,7 @@ use std::{
use fs_err as fs;
use crate::{
archive::sevenz,
archive,
commands::warn_user_about_loading_zip_in_memory,
extension::CompressionFormat::{self, *},
list::{self, FileInArchive, ListOptions},
@ -36,7 +36,6 @@ pub fn list_archive_contents(
let zip_archive = zip::ZipArchive::new(reader)?;
let files = crate::archive::zip::list_archive(zip_archive, password);
list::list_files(archive_path, files, list_options)?;
return Ok(());
}
@ -46,26 +45,38 @@ pub fn list_archive_contents(
// Grab previous decoder and wrap it inside of a new one
let chain_reader_decoder =
|format: &CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
|format: CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
let decoder: Box<dyn Read + Send> = match format {
Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
Bzip3 => Box::new(bzip3::read::Bz3Decoder::new(decoder).unwrap()),
Bzip3 => {
#[cfg(not(feature = "bzip3"))]
return Err(archive::bzip3_stub::no_support());
#[cfg(feature = "bzip3")]
Box::new(bzip3::read::Bz3Decoder::new(decoder).unwrap())
}
Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
Tar | Zip | Rar | SevenZip => unreachable!(),
Tar | Zip | Rar | SevenZip => unreachable!("should be treated by caller"),
};
Ok(decoder)
};
for format in formats.iter().skip(1).rev() {
let mut misplaced_archive_format = None;
for &format in formats.iter().skip(1).rev() {
if format.archive_format() {
misplaced_archive_format = Some(format);
break;
}
reader = chain_reader_decoder(format, reader)?;
}
let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match formats[0] {
let archive_format = misplaced_archive_format.unwrap_or(formats[0]);
let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match archive_format {
Tar => Box::new(crate::archive::tar::list_archive(tar::Archive::new(reader))),
Zip => {
if formats.len() > 1 {
@ -111,10 +122,13 @@ pub fn list_archive_contents(
}
}
Box::new(sevenz::list_archive(archive_path, password)?)
let mut vec = vec![];
io::copy(&mut reader, &mut vec)?;
Box::new(archive::sevenz::list_archive(io::Cursor::new(vec), password)?)
}
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
panic!("Not an archive! This should never happen, if it does, something is wrong with `CompressionFormat::is_archive()`. Please report this error!");
unreachable!("Not an archive, should be validated before calling this function.");
}
};

View File

@ -20,6 +20,7 @@ use crate::{
list::ListOptions,
utils::{
self, colors::*, is_path_stdin, logger::info_accessible, path_to_str, EscapedPathDisplay, FileVisibilityPolicy,
QuestionAction,
},
CliArgs, QuestionPolicy,
};
@ -80,7 +81,7 @@ pub fn run(
let parsed_formats = parse_format_flag(&formats)?;
(Some(formats), parsed_formats)
}
None => (None, extension::extensions_from_path(&output_path)),
None => (None, extension::extensions_from_path(&output_path)?),
};
check::check_invalid_compression_with_non_archive_format(
@ -91,10 +92,11 @@ pub fn run(
)?;
check::check_archive_formats_position(&formats, &output_path)?;
let output_file = match utils::ask_to_create_file(&output_path, question_policy)? {
Some(writer) => writer,
None => return Ok(()),
};
let output_file =
match utils::ask_to_create_file(&output_path, question_policy, QuestionAction::Compression)? {
Some(writer) => writer,
None => return Ok(()),
};
let level = if fast {
Some(1) // Lowest level of compression
@ -148,6 +150,7 @@ pub fn run(
files,
output_dir,
remove,
no_smart_unpack,
} => {
let mut output_paths = vec![];
let mut formats = vec![];
@ -155,6 +158,7 @@ pub fn run(
if let Some(format) = args.format {
let format = parse_format_flag(&format)?;
for path in files.iter() {
// TODO: use Error::Custom
let file_name = path.file_name().ok_or_else(|| Error::NotFound {
error_title: format!("{} does not have a file name", EscapedPathDisplay::new(path)),
})?;
@ -163,7 +167,7 @@ pub fn run(
}
} else {
for path in files.iter() {
let (pathbase, mut file_formats) = extension::separate_known_extensions_from_name(path);
let (pathbase, mut file_formats) = extension::separate_known_extensions_from_name(path)?;
if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
return Ok(());
@ -176,9 +180,11 @@ pub fn run(
check::check_missing_formats_when_decompressing(&files, &formats)?;
let is_output_dir_provided = output_dir.is_some();
let is_smart_unpack = !is_output_dir_provided && !no_smart_unpack;
// The directory that will contain the output files
// We default to the current directory if the user didn't specify an output directory with --dir
let is_output_dir_provided = output_dir.is_some();
let output_dir = if let Some(dir) = output_dir {
utils::create_dir_if_non_existent(&dir)?;
dir
@ -200,9 +206,10 @@ pub fn run(
decompress_file(DecompressOptions {
input_file_path: input_path,
formats,
is_output_dir_provided,
output_dir: &output_dir,
output_file_path,
is_output_dir_provided,
is_smart_unpack,
question_policy,
quiet: args.quiet,
password: args.password.as_deref().map(|str| {
@ -222,7 +229,7 @@ pub fn run(
}
} else {
for path in files.iter() {
let mut file_formats = extension::extensions_from_path(path);
let mut file_formats = extension::extensions_from_path(path)?;
if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
return Ok(());

View File

@ -200,6 +200,7 @@ impl From<std::io::Error> for Error {
}
}
#[cfg(feature = "bzip3")]
impl From<bzip3::Error> for Error {
fn from(err: bzip3::Error) -> Self {
use bzip3::Error as Bz3Error;

View File

@ -5,7 +5,10 @@ use std::{ffi::OsStr, fmt, path::Path};
use bstr::ByteSlice;
use CompressionFormat::*;
use crate::{error::Error, utils::logger::warning};
use crate::{
error::{Error, FinalError, Result},
utils::logger::warning,
};
pub const SUPPORTED_EXTENSIONS: &[&str] = &[
"tar",
@ -60,8 +63,8 @@ impl Extension {
/// Checks if the first format in `compression_formats` is an archive
pub fn is_archive(&self) -> bool {
// Safety: we check that `compression_formats` is not empty in `Self::new`
self.compression_formats[0].is_archive_format()
// Index Safety: we check that `compression_formats` is not empty in `Self::new`
self.compression_formats[0].archive_format()
}
}
@ -103,7 +106,7 @@ pub enum CompressionFormat {
impl CompressionFormat {
/// Currently supported archive formats are .tar (and aliases to it) and .zip
fn is_archive_format(&self) -> bool {
pub fn archive_format(&self) -> bool {
// Keep this match like that without a wildcard `_` so we don't forget to update it
match self {
Tar | Zip | Rar | SevenZip => true,
@ -147,14 +150,13 @@ fn to_extension(ext: &[u8]) -> Option<Extension> {
))
}
fn split_extension(name: &mut &[u8]) -> Option<Extension> {
fn split_extension_at_end(name: &[u8]) -> Option<(&[u8], Extension)> {
let (new_name, ext) = name.rsplit_once_str(b".")?;
if matches!(new_name, b"" | b"." | b"..") {
return None;
}
let ext = to_extension(ext)?;
*name = new_name;
Some(ext)
Some((new_name, ext))
}
pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
@ -188,17 +190,40 @@ pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
/// Extracts extensions from a path.
///
/// Returns both the remaining path and the list of extension objects
pub fn separate_known_extensions_from_name(path: &Path) -> (&Path, Vec<Extension>) {
/// Returns both the remaining path and the list of extension objects.
pub fn separate_known_extensions_from_name(path: &Path) -> Result<(&Path, Vec<Extension>)> {
let mut extensions = vec![];
let Some(mut name) = path.file_name().and_then(<[u8] as ByteSlice>::from_os_str) else {
return (path, extensions);
return Ok((path, extensions));
};
// While there is known extensions at the tail, grab them
while let Some(extension) = split_extension(&mut name) {
while let Some((new_name, extension)) = split_extension_at_end(name) {
name = new_name;
extensions.insert(0, extension);
if extensions[0].is_archive() {
if let Some((_, misplaced_extension)) = split_extension_at_end(name) {
let mut error = FinalError::with_title("File extensions are invalid for operation").detail(format!(
"The archive extension '.{}' can only be placed at the start of the extension list",
extensions[0].display_text,
));
if misplaced_extension.compression_formats == extensions[0].compression_formats {
error = error.detail(format!(
"File: '{path:?}' contains '.{}' and '.{}'",
misplaced_extension.display_text, extensions[0].display_text,
));
}
return Err(error
.hint("You can use `--format` to specify what format to use, examples:")
.hint(" ouch compress file.zip.zip file --format zip")
.hint(" ouch decompress file --format zst")
.hint(" ouch list archive --format tar.gz")
.into());
}
break;
}
}
if let Ok(name) = name.to_str() {
@ -210,13 +235,12 @@ pub fn separate_known_extensions_from_name(path: &Path) -> (&Path, Vec<Extension
}
}
(name.to_path().unwrap(), extensions)
Ok((name.to_path().unwrap(), extensions))
}
/// Extracts extensions from a path, return only the list of extension objects
pub fn extensions_from_path(path: &Path) -> Vec<Extension> {
let (_, extensions) = separate_known_extensions_from_name(path);
extensions
pub fn extensions_from_path(path: &Path) -> Result<Vec<Extension>> {
separate_known_extensions_from_name(path).map(|(_, extensions)| extensions)
}
/// Panics if formats has an empty list of compression formats
@ -271,14 +295,13 @@ pub fn build_archive_file_suggestion(path: &Path, suggested_extension: &str) ->
#[cfg(test)]
mod tests {
use super::*;
use crate::utils::logger::spawn_logger_thread;
#[test]
fn test_extensions_from_path() {
let path = Path::new("bolovo.tar.gz");
let extensions: Vec<Extension> = extensions_from_path(path);
let formats: Vec<CompressionFormat> = flatten_compression_formats(&extensions);
let extensions = extensions_from_path(path).unwrap();
let formats = flatten_compression_formats(&extensions);
assert_eq!(formats, vec![Tar, Gzip]);
}
@ -286,32 +309,31 @@ mod tests {
#[test]
/// Test extension parsing for input/output files
fn test_separate_known_extensions_from_name() {
let _handler = spawn_logger_thread();
assert_eq!(
separate_known_extensions_from_name("file".as_ref()),
separate_known_extensions_from_name("file".as_ref()).unwrap(),
("file".as_ref(), vec![])
);
assert_eq!(
separate_known_extensions_from_name("tar".as_ref()),
separate_known_extensions_from_name("tar".as_ref()).unwrap(),
("tar".as_ref(), vec![])
);
assert_eq!(
separate_known_extensions_from_name(".tar".as_ref()),
separate_known_extensions_from_name(".tar".as_ref()).unwrap(),
(".tar".as_ref(), vec![])
);
assert_eq!(
separate_known_extensions_from_name("file.tar".as_ref()),
separate_known_extensions_from_name("file.tar".as_ref()).unwrap(),
("file".as_ref(), vec![Extension::new(&[Tar], "tar")])
);
assert_eq!(
separate_known_extensions_from_name("file.tar.gz".as_ref()),
separate_known_extensions_from_name("file.tar.gz".as_ref()).unwrap(),
(
"file".as_ref(),
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
)
);
assert_eq!(
separate_known_extensions_from_name(".tar.gz".as_ref()),
separate_known_extensions_from_name(".tar.gz".as_ref()).unwrap(),
(".tar".as_ref(), vec![Extension::new(&[Gzip], "gz")])
);
}
@ -367,4 +389,10 @@ mod tests {
"linux.pkg.info.tar.zst"
);
}
#[test]
fn test_extension_parsing_with_multiple_archive_formats() {
    // A file name carrying more than one archive format is ambiguous and must
    // be rejected by the extension parser.
    for name in ["file.tar.zip", "file.7z.zst.zip.lz4"] {
        assert!(separate_known_extensions_from_name(name.as_ref()).is_err());
    }
}
}

View File

@ -11,7 +11,7 @@ use fs_err as fs;
use super::{question::FileConflitOperation, user_wants_to_overwrite};
use crate::{
extension::Extension,
utils::{logger::info_accessible, EscapedPathDisplay},
utils::{logger::info_accessible, EscapedPathDisplay, QuestionAction},
QuestionPolicy,
};
@ -26,9 +26,13 @@ pub fn is_path_stdin(path: &Path) -> bool {
/// * `Ok(None)` means the user wants to cancel the operation
/// * `Ok(Some(path))` returns a valid PathBuf without any another file or directory with the same name
/// * `Err(_)` is an error
pub fn resolve_path_conflict(path: &Path, question_policy: QuestionPolicy) -> crate::Result<Option<PathBuf>> {
pub fn resolve_path_conflict(
path: &Path,
question_policy: QuestionPolicy,
question_action: QuestionAction,
) -> crate::Result<Option<PathBuf>> {
if path.exists() {
match user_wants_to_overwrite(path, question_policy)? {
match user_wants_to_overwrite(path, question_policy, question_action)? {
FileConflitOperation::Cancel => Ok(None),
FileConflitOperation::Overwrite => {
remove_file_or_dir(path)?;
@ -38,6 +42,7 @@ pub fn resolve_path_conflict(path: &Path, question_policy: QuestionPolicy) -> cr
let renamed_path = rename_for_available_filename(path);
Ok(Some(renamed_path))
}
FileConflitOperation::Merge => Ok(Some(path.to_path_buf())),
}
} else {
Ok(Some(path.to_path_buf()))
@ -128,7 +133,7 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
buf.starts_with(&[0x42, 0x5A, 0x68])
}
fn is_bz3(buf: &[u8]) -> bool {
buf.starts_with(bzip3::MAGIC_NUMBER)
buf.starts_with(b"BZ3v1")
}
fn is_xz(buf: &[u8]) -> bool {
buf.starts_with(&[0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])

View File

@ -125,10 +125,14 @@ mod logger_thread {
static SENDER: OnceLock<LogSender> = OnceLock::new();
#[track_caller]
fn setup_channel() -> LogReceiver {
let (tx, rx) = mpsc::channel();
SENDER.set(tx).expect("`setup_channel` should only be called once");
rx
fn setup_channel() -> Option<LogReceiver> {
let mut optional = None;
SENDER.get_or_init(|| {
let (tx, rx) = mpsc::channel();
optional = Some(rx);
tx
});
optional
}
#[track_caller]
@ -138,6 +142,9 @@ mod logger_thread {
#[track_caller]
pub(super) fn send_print_command(msg: PrintMessage) {
if cfg!(test) {
spawn_logger_thread();
}
get_sender()
.send(LoggerCommand::Print(msg))
.expect("Failed to send print command");
@ -170,8 +177,9 @@ mod logger_thread {
}
pub fn spawn_logger_thread() {
let log_receiver = setup_channel();
thread::spawn(move || run_logger(log_receiver));
if let Some(log_receiver) = setup_channel() {
thread::spawn(move || run_logger(log_receiver));
}
}
fn run_logger(log_receiver: LogReceiver) {

View File

@ -48,49 +48,71 @@ pub enum FileConflitOperation {
/// Rename the file
/// It'll be put "_1" at the end of the filename or "_2","_3","_4".. if already exists
Rename,
/// Merge conflicting folders
Merge,
}
/// Check if QuestionPolicy flags were set, otherwise, ask user if they want to overwrite.
pub fn user_wants_to_overwrite(path: &Path, question_policy: QuestionPolicy) -> crate::Result<FileConflitOperation> {
pub fn user_wants_to_overwrite(
path: &Path,
question_policy: QuestionPolicy,
question_action: QuestionAction,
) -> crate::Result<FileConflitOperation> {
use FileConflitOperation as Op;
match question_policy {
QuestionPolicy::AlwaysYes => Ok(Op::Overwrite),
QuestionPolicy::AlwaysNo => Ok(Op::Cancel),
QuestionPolicy::Ask => ask_file_conflict_operation(path),
QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action),
}
}
/// Ask the user if they want to overwrite or rename the &Path
pub fn ask_file_conflict_operation(path: &Path) -> Result<FileConflitOperation> {
pub fn ask_file_conflict_operation(path: &Path, question_action: QuestionAction) -> Result<FileConflitOperation> {
use FileConflitOperation as Op;
let path = path_to_str(strip_cur_dir(path));
ChoicePrompt::new(
format!("Do you want to overwrite {path}?"),
[
("yes", Op::Overwrite, *colors::GREEN),
("no", Op::Cancel, *colors::RED),
("rename", Op::Rename, *colors::BLUE),
],
)
.ask()
match question_action {
QuestionAction::Compression => ChoicePrompt::new(
format!("Do you want to overwrite {path}?"),
[
("yes", Op::Overwrite, *colors::GREEN),
("no", Op::Cancel, *colors::RED),
("rename", Op::Rename, *colors::BLUE),
],
)
.ask(),
QuestionAction::Decompression => ChoicePrompt::new(
format!("Do you want to overwrite {path}?"),
[
("yes", Op::Overwrite, *colors::GREEN),
("no", Op::Cancel, *colors::RED),
("rename", Op::Rename, *colors::BLUE),
("merge", Op::Merge, *colors::ORANGE),
],
)
.ask(),
}
}
/// Create the file if it doesn't exist and if it does then ask to overwrite it.
/// If the user doesn't want to overwrite then we return [`Ok(None)`]
pub fn ask_to_create_file(path: &Path, question_policy: QuestionPolicy) -> Result<Option<fs::File>> {
pub fn ask_to_create_file(
path: &Path,
question_policy: QuestionPolicy,
question_action: QuestionAction,
) -> Result<Option<fs::File>> {
match fs::OpenOptions::new().write(true).create_new(true).open(path) {
Ok(w) => Ok(Some(w)),
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {
let action = match question_policy {
QuestionPolicy::AlwaysYes => FileConflitOperation::Overwrite,
QuestionPolicy::AlwaysNo => FileConflitOperation::Cancel,
QuestionPolicy::Ask => ask_file_conflict_operation(path)?,
QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action)?,
};
match action {
FileConflitOperation::Merge => Ok(Some(fs::File::create(path)?)),
FileConflitOperation::Overwrite => {
utils::remove_file_or_dir(path)?;
Ok(Some(fs::File::create(path)?))

View File

@ -7,8 +7,12 @@ use std::{
path::{Path, PathBuf},
};
use bstr::ByteSlice;
use fs_err as fs;
use itertools::Itertools;
use memchr::memmem;
use parse_display::Display;
use pretty_assertions::assert_eq;
use proptest::sample::size_range;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use tempfile::tempdir;
@ -17,7 +21,7 @@ use test_strategy::{proptest, Arbitrary};
use crate::utils::{assert_same_directory, write_random_content};
/// tar and zip extensions
#[derive(Arbitrary, Debug, Display)]
#[derive(Arbitrary, Clone, Copy, Debug, Display)]
#[display(style = "lowercase")]
enum DirectoryExtension {
#[display("7z")]
@ -25,6 +29,7 @@ enum DirectoryExtension {
Tar,
Tbz,
Tbz2,
#[cfg(feature = "bzip3")]
Tbz3,
Tgz,
Tlz4,
@ -41,6 +46,7 @@ enum DirectoryExtension {
enum FileExtension {
Bz,
Bz2,
#[cfg(feature = "bzip3")]
Bz3,
Gz,
Lz4,
@ -59,9 +65,9 @@ enum Extension {
}
/// Converts a list of extension structs to string
fn merge_extensions(ext: impl ToString, exts: Vec<FileExtension>) -> String {
fn merge_extensions(ext: impl ToString, exts: &[FileExtension]) -> String {
once(ext.to_string())
.chain(exts.into_iter().map(|x| x.to_string()))
.chain(exts.iter().map(|x| x.to_string()))
.collect::<Vec<_>>()
.join(".")
}
@ -89,6 +95,7 @@ fn create_random_files(dir: impl Into<PathBuf>, depth: u8, rng: &mut SmallRng) {
}
/// Create n random files on directory dir
#[cfg_attr(not(feature = "allow_piped_choice"), allow(dead_code))]
fn create_n_random_files(n: usize, dir: impl Into<PathBuf>, rng: &mut SmallRng) {
let dir: &PathBuf = &dir.into();
@ -114,7 +121,7 @@ fn single_empty_file(ext: Extension, #[any(size_range(0..8).lift())] exts: Vec<F
let before = &dir.join("before");
fs::create_dir(before).unwrap();
let before_file = &before.join("file");
let archive = &dir.join(format!("file.{}", merge_extensions(ext, exts)));
let archive = &dir.join(format!("file.{}", merge_extensions(ext, &exts)));
let after = &dir.join("after");
fs::write(before_file, []).unwrap();
ouch!("-A", "c", before_file, archive);
@ -137,7 +144,7 @@ fn single_file(
let before = &dir.join("before");
fs::create_dir(before).unwrap();
let before_file = &before.join("file");
let archive = &dir.join(format!("file.{}", merge_extensions(ext, exts)));
let archive = &dir.join(format!("file.{}", merge_extensions(ext, &exts)));
let after = &dir.join("after");
write_random_content(
&mut fs::File::create(before_file).unwrap(),
@ -167,7 +174,7 @@ fn single_file_stdin(
let before = &dir.join("before");
fs::create_dir(before).unwrap();
let before_file = &before.join("file");
let format = merge_extensions(&ext, exts);
let format = merge_extensions(&ext, &exts);
let archive = &dir.join(format!("file.{}", format));
let after = &dir.join("after");
write_random_content(
@ -208,7 +215,7 @@ fn multiple_files(
let before = &dir.join("before");
let before_dir = &before.join("dir");
fs::create_dir_all(before_dir).unwrap();
let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
let archive = &dir.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
let after = &dir.join("after");
create_random_files(before_dir, depth, &mut SmallRng::from_entropy());
ouch!("-A", "c", before_dir, archive);
@ -235,7 +242,7 @@ fn multiple_files_with_conflict_and_choice_to_overwrite(
fs::create_dir_all(after_dir).unwrap();
create_random_files(after_dir, depth, &mut SmallRng::from_entropy());
let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
let archive = &dir.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
ouch!("-A", "c", before_dir, archive);
crate::utils::cargo_bin()
@ -276,7 +283,7 @@ fn multiple_files_with_conflict_and_choice_to_not_overwrite(
fs::write(after_dir.join("something.txt"), "Some content").unwrap();
fs::copy(after_dir.join("something.txt"), after_backup_dir.join("something.txt")).unwrap();
let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
let archive = &dir.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
ouch!("-A", "c", before_dir, archive);
crate::utils::cargo_bin()
@ -309,7 +316,7 @@ fn multiple_files_with_conflict_and_choice_to_rename(
fs::create_dir_all(&dest_files_path).unwrap();
create_n_random_files(5, &dest_files_path, &mut SmallRng::from_entropy());
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
ouch!("-A", "c", &src_files_path, archive);
let dest_files_path_renamed = &root_path.join("dest_files_1");
@ -349,7 +356,7 @@ fn multiple_files_with_conflict_and_choice_to_rename_with_already_a_renamed(
fs::create_dir_all(&dest_files_path_1).unwrap();
create_n_random_files(5, &dest_files_path_1, &mut SmallRng::from_entropy());
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
ouch!("-A", "c", &src_files_path, archive);
let dest_files_path_renamed = &root_path.join("dest_files_2");
@ -367,6 +374,201 @@ fn multiple_files_with_conflict_and_choice_to_rename_with_already_a_renamed(
assert_same_directory(src_files_path, dest_files_path_renamed.join("src_files"), false);
}
#[proptest(cases = 25)]
fn smart_unpack_with_single_file(
    ext: DirectoryExtension,
    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
    let temp_dir = tempdir().unwrap();
    let root = temp_dir.path();

    // Source directory holding the single file that will be archived.
    let source_dir = root.join("src_files");
    fs::create_dir_all(&source_dir).unwrap();

    let mut input_paths = Vec::new();
    for name in ["file1.txt"] {
        let file_path = source_dir.join(name);
        fs::write(&file_path, "Some content").unwrap();
        input_paths.push(file_path);
    }

    let archive = &root.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));

    crate::utils::cargo_bin()
        .arg("compress")
        .args(input_paths)
        .arg(archive)
        .assert()
        .success();

    let unpacked_file = root.join("file1.txt");
    assert!(!unpacked_file.exists());

    // Smart unpack: a single-entry archive extracts directly into the current dir.
    crate::utils::cargo_bin()
        .current_dir(root)
        .arg("decompress")
        .arg(archive)
        .assert()
        .success();

    assert!(unpacked_file.exists());
    assert_eq!(fs::read_to_string(&unpacked_file).unwrap(), "Some content");
}
#[proptest(cases = 25)]
fn smart_unpack_with_multiple_files(
    ext: DirectoryExtension,
    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
    let temp_dir = tempdir().unwrap();
    let root = temp_dir.path();

    let source_dir = root.join("src_files");
    fs::create_dir_all(&source_dir).unwrap();

    // Populate the source directory with a handful of small files.
    for name in ["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"] {
        fs::write(source_dir.join(name), "Some content").unwrap();
    }

    let input_files: Vec<PathBuf> = source_dir
        .read_dir()
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .collect();

    let archive = &root.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));

    // Smart unpack of a multi-entry archive lands in a directory named after it.
    let output_dir = root.join("archive");
    assert!(!output_dir.exists());

    crate::utils::cargo_bin()
        .arg("compress")
        .args(input_files)
        .arg(archive)
        .assert()
        .success();

    crate::utils::cargo_bin()
        .current_dir(root)
        .arg("decompress")
        .arg(archive)
        .assert()
        .success();

    assert!(output_dir.exists(), "Output directory does not exist");
    assert_same_directory(source_dir, output_dir, false);
}
#[proptest(cases = 25)]
fn no_smart_unpack_with_single_file(
    ext: DirectoryExtension,
    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
    let temp_dir = tempdir().unwrap();
    let root = temp_dir.path();

    let source_dir = root.join("src_files");
    fs::create_dir_all(&source_dir).unwrap();

    // A single file: smart unpack would normally extract it in place, but
    // `--no-smart-unpack` must still force a wrapping output directory.
    for name in ["file1.txt"] {
        fs::write(source_dir.join(name), "Some content").unwrap();
    }

    let input_files: Vec<PathBuf> = source_dir
        .read_dir()
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .collect();

    let archive = &root.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));

    let output_dir = root.join("archive");
    assert!(!output_dir.exists());

    crate::utils::cargo_bin()
        .arg("compress")
        .args(input_files)
        .arg(archive)
        .assert()
        .success();

    crate::utils::cargo_bin()
        .current_dir(root)
        .arg("decompress")
        .arg("--no-smart-unpack")
        .arg(archive)
        .assert()
        .success();

    assert!(output_dir.exists(), "Output directory does not exist");
    assert_same_directory(source_dir, output_dir, false);
}
#[proptest(cases = 25)]
fn no_smart_unpack_with_multiple_files(
    ext: DirectoryExtension,
    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
    let temp_dir = tempdir().unwrap();
    let root = temp_dir.path();

    let source_dir = root.join("src_files");
    fs::create_dir_all(&source_dir).unwrap();

    for name in ["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"] {
        fs::write(source_dir.join(name), "Some content").unwrap();
    }

    let input_files: Vec<PathBuf> = source_dir
        .read_dir()
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .collect();

    let archive = &root.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));

    let output_dir = root.join("archive");
    assert!(!output_dir.exists());

    crate::utils::cargo_bin()
        .arg("compress")
        .args(input_files)
        .arg(archive)
        .assert()
        .success();

    // `--no-smart-unpack` always extracts into a directory named after the archive.
    crate::utils::cargo_bin()
        .current_dir(root)
        .arg("decompress")
        .arg("--no-smart-unpack")
        .arg(archive)
        .assert()
        .success();

    assert!(output_dir.exists(), "Output directory does not exist");
    assert_same_directory(source_dir, output_dir, false);
}
#[proptest(cases = 25)]
fn multiple_files_with_disabled_smart_unpack_by_dir(
ext: DirectoryExtension,
@ -390,7 +592,7 @@ fn multiple_files_with_disabled_smart_unpack_by_dir(
let dest_files_path = root_path.join("dest_files");
fs::create_dir_all(&dest_files_path).unwrap();
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
@ -490,10 +692,9 @@ fn symlink_pack_and_unpack(
let mut files_path = ["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"]
.into_iter()
.map(|f| src_files_path.join(f))
.map(|path| {
let mut file = fs::File::create(&path).unwrap();
.inspect(|path| {
let mut file = fs::File::create(path).unwrap();
file.write_all("Some content".as_bytes()).unwrap();
path
})
.collect::<Vec<_>>();
@ -508,7 +709,7 @@ fn symlink_pack_and_unpack(
files_path.push(symlink_path);
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
@ -604,3 +805,287 @@ fn no_git_folder_after_decompression_with_gitignore_flag_active() {
".git folder should not exist after decompression"
);
}
// Decompress two archives into the same `-d` destination; on the conflict
// prompt, answer "m" (merge) via stdin and verify the folder contents merge.
// Requires the `allow_piped_choice` feature so the prompt accepts piped input.
#[cfg(feature = "allow_piped_choice")]
#[proptest(cases = 25)]
fn unpack_multiple_sources_into_the_same_destination_with_merge(
    ext: DirectoryExtension,
    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
    let temp_dir = tempdir()?;
    let root_path = temp_dir.path();
    // Deeply nested source directory so the merge exercises nested paths.
    let source_path = root_path
        .join(format!("example_{}", merge_extensions(&ext, &extra_extensions)))
        .join("sub_a")
        .join("sub_b")
        .join("sub_c");
    fs::create_dir_all(&source_path)?;
    // First archive: file1, file2, file3 (created empty just for their paths).
    let archive = root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
    crate::utils::cargo_bin()
        .arg("compress")
        .args([
            fs::File::create(source_path.join("file1.txt"))?.path(),
            fs::File::create(source_path.join("file2.txt"))?.path(),
            fs::File::create(source_path.join("file3.txt"))?.path(),
        ])
        .arg(&archive)
        .assert()
        .success();
    // Recreate the source dir so the second archive overlaps only on file3.txt.
    fs::remove_dir_all(&source_path)?;
    fs::create_dir_all(&source_path)?;
    let archive1 = root_path.join(format!("archive1.{}", merge_extensions(&ext, &extra_extensions)));
    crate::utils::cargo_bin()
        .arg("compress")
        .args([
            fs::File::create(source_path.join("file3.txt"))?.path(),
            fs::File::create(source_path.join("file4.txt"))?.path(),
            fs::File::create(source_path.join("file5.txt"))?.path(),
        ])
        .arg(&archive1)
        .assert()
        .success();
    // Shared destination directory for both extractions.
    let out_path = root_path.join(format!("out_{}", merge_extensions(&ext, &extra_extensions)));
    fs::create_dir_all(&out_path)?;
    // First extraction: no conflict yet, no prompt expected.
    crate::utils::cargo_bin()
        .arg("decompress")
        .arg(archive)
        .arg("-d")
        .arg(&out_path)
        .assert()
        .success();
    // Second extraction conflicts with the first; "m" selects the merge option.
    crate::utils::cargo_bin()
        .arg("decompress")
        .arg(archive1)
        .arg("-d")
        .arg(&out_path)
        .write_stdin("m")
        .assert()
        .success();
    // 3 + 3 files with one overlapping name (file3.txt) => 5 distinct entries.
    assert_eq!(5, out_path.as_path().read_dir()?.count());
}
#[test]
fn reading_nested_archives_with_two_archive_extensions_adjacent() {
    // Exercise every ordered pair of archive formats, e.g. `c.tar.zip`.
    let archive_formats = ["tar", "zip", "7z"].into_iter();
    for (first_archive, second_archive) in archive_formats.clone().cartesian_product(archive_formats.rev()) {
        let temp_dir = tempdir().unwrap();
        let in_dir = |path: &str| format!("{}/{}", temp_dir.path().display(), path);
        fs::write(in_dir("a.txt"), "contents").unwrap();

        // Build the nested archive step by step: a.txt -> b.<first> -> c.<first>.<second>.
        let files = [
            "a.txt",
            &format!("b.{first_archive}"),
            &format!("c.{first_archive}.{second_archive}"),
        ];
        let transformations = [first_archive, second_archive];
        let compressed_path = in_dir(files.last().unwrap());

        for (window, format) in files.windows(2).zip(transformations.iter()) {
            let [a, b] = [window[0], window[1]].map(in_dir);
            crate::utils::cargo_bin()
                .args(["compress", &a, &b, "--format", format])
                .assert()
                .success();
        }

        // Both `list` and `decompress` must fail and hint at `--format`.
        let expect_format_hint = |args: &[&str]| {
            let output = crate::utils::cargo_bin().args(args).assert().failure().get_output().clone();
            let stderr = output.stderr.to_str().unwrap();
            assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
        };
        expect_format_hint(&["list", &compressed_path, "--yes"]);
        expect_format_hint(&["decompress", &compressed_path, "--dir", &in_dir("out"), "--yes"]);
    }
}
#[test]
fn reading_nested_archives_with_two_archive_extensions_interleaved() {
    // Same as the adjacent-extensions test, but with plain compression
    // formats (zst, lz4) interleaved between the two archive formats.
    let archive_formats = ["tar", "zip", "7z"].into_iter();
    for (first_archive, second_archive) in archive_formats.clone().cartesian_product(archive_formats.rev()) {
        let temp_dir = tempdir().unwrap();
        let in_dir = |path: &str| format!("{}/{}", temp_dir.path().display(), path);
        fs::write(in_dir("a.txt"), "contents").unwrap();

        let files = [
            "a.txt",
            &format!("c.{first_archive}"),
            &format!("d.{first_archive}.zst"),
            &format!("e.{first_archive}.zst.{second_archive}"),
            &format!("f.{first_archive}.zst.{second_archive}.lz4"),
        ];
        let transformations = [first_archive, "zst", second_archive, "lz4"];
        let compressed_path = in_dir(files.last().unwrap());

        for (window, format) in files.windows(2).zip(transformations.iter()) {
            let [a, b] = [window[0], window[1]].map(in_dir);
            crate::utils::cargo_bin()
                .args(["compress", &a, &b, "--format", format])
                .assert()
                .success();
        }

        // Both `list` and `decompress` must fail and hint at `--format`.
        let expect_format_hint = |args: &[&str]| {
            let output = crate::utils::cargo_bin().args(args).assert().failure().get_output().clone();
            let stderr = output.stderr.to_str().unwrap();
            assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
        };
        expect_format_hint(&["list", &compressed_path, "--yes"]);
        expect_format_hint(&["decompress", &compressed_path, "--dir", &in_dir("out"), "--yes"]);
    }
}
#[test]
fn compressing_archive_with_two_archive_formats() {
    let archive_formats = ["tar", "zip", "7z"].into_iter();
    for (first_archive, second_archive) in archive_formats.clone().cartesian_product(archive_formats.rev()) {
        let temp_dir = tempdir().unwrap();
        let dir = temp_dir.path().display().to_string();
        let out_arg = format!("{dir}/out.{first_archive}.{second_archive}");

        // Run a compression that is expected to fail; return its stderr.
        let failing_stderr = |args: &[&str]| {
            let output = crate::utils::cargo_bin().args(args).assert().failure().get_output().clone();
            output.stderr.to_str().unwrap().to_owned()
        };

        // Without `--format`, a double archive extension is rejected with a hint.
        let stderr = failing_stderr(&["compress", "README.md", &out_arg, "--yes"]);
        assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());

        // An explicit `--format <archive>.<archive>` value is also rejected.
        let stderr = failing_stderr(&[
            "compress",
            "README.md",
            &out_arg,
            "--yes",
            "--format",
            &format!("{first_archive}.{second_archive}"),
        ]);
        assert!(memmem::find(
            stderr.as_bytes(),
            b"can only be used at the start of the file extension",
        )
        .is_some());

        // Forcing a single archive format succeeds regardless of the file name.
        crate::utils::cargo_bin()
            .args(["compress", "README.md", &out_arg, "--yes", "--format", first_archive])
            .assert()
            .success();
    }
}
#[test]
fn fail_when_compressing_archive_as_the_second_extension() {
    for archive_format in ["tar", "zip", "7z"] {
        let temp_dir = tempdir().unwrap();
        let dir = temp_dir.path().display().to_string();

        // Run a compression that is expected to fail; return its stderr.
        let failing_stderr = |args: &[&str]| {
            let output = crate::utils::cargo_bin().args(args).assert().failure().get_output().clone();
            output.stderr.to_str().unwrap().to_owned()
        };

        // `out.zst.<archive>` puts the archive format last: rejected with a hint.
        let stderr = failing_stderr(&[
            "compress",
            "README.md",
            &format!("{dir}/out.zst.{archive_format}"),
            "--yes",
        ]);
        assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());

        // The same misordering via `--format zst.<archive>` names the offending format.
        let stderr = failing_stderr(&[
            "compress",
            "README.md",
            &format!("{dir}/out_file"),
            "--yes",
            "--format",
            &format!("zst.{archive_format}"),
        ]);
        assert!(memmem::find(
            stderr.as_bytes(),
            format!("'{archive_format}' can only be used at the start of the file extension").as_bytes(),
        )
        .is_some());
    }
}
#[test]
fn sevenz_list_should_not_failed() {
    // Compress into a `.7z.gz` archive and then list it; per the fix for
    // issue #819, listing after compression must not fail (BadSignature).
    let temp_dir = tempdir().unwrap();
    let root = temp_dir.path();

    let source_dir = root.join("src_files");
    fs::create_dir_all(&source_dir).unwrap();
    let input_file = fs::File::create(source_dir.join("README.md")).unwrap();

    let archive = root.join("archive.7z.gz");
    crate::utils::cargo_bin()
        .arg("compress")
        .arg("--yes")
        .arg(input_file.path())
        .arg(&archive)
        .assert()
        .success();

    crate::utils::cargo_bin()
        .arg("list")
        .arg("--yes")
        .arg(&archive)
        .assert()
        .success();
}

View File

@ -2,7 +2,6 @@
///
/// See CONTRIBUTING.md for a brief guide on how to use [`insta`] for these tests.
/// [`insta`]: https://docs.rs/insta
#[macro_use]
mod utils;