Mirror of https://github.com/ouch-org/ouch.git
synced 2025-06-07 20:15:27 +00:00
Compare commits
No commits in common. "main" and "0.6.1" have entirely different histories.
@@ -25,7 +25,7 @@ on:
 type: boolean
 required: true
 artifact_upload_mode:
-description: "Control which artifacts to upload: 'none' for no uploads, 'with_default_features' to upload only artifacts with default features (use_zlib+use_zstd_thin+unrar+bzip3), or 'all' to upload all feature combinations."
+description: "Control which artifacts to upload: 'none' for no uploads, 'with_default_features' to upload only artifacts with default features (use_zlib+use_zstd_thin+unrar), or 'all' to upload all feature combinations."
 type: string
 required: true

@@ -37,10 +37,7 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-# TODO: avoid exploding the matrix by removing unrar and bzip3 from the all combinations runs
-# I can add a monthly run with all combinations
-feature-unrar: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
-feature-bzip3: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
+feature-unrar: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
 feature-use-zlib: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
 feature-use-zstd-thin: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
 target:
@@ -79,14 +76,12 @@ jobs:
 - target: armv7-unknown-linux-musleabihf
 use-cross: true
 # features (unless `matrix_all_combinations` is true, we only run these on linux-gnu)
-- feature-unrar: false
+- feature-unrar: true
 target: x86_64-unknown-linux-gnu
 - feature-use-zlib: true
 target: x86_64-unknown-linux-gnu
 - feature-use-zstd-thin: true
 target: x86_64-unknown-linux-gnu
-- feature-bzip3: false
-target: x86_64-unknown-linux-gnu

 steps:
 - name: Checkout
@@ -110,7 +105,6 @@ jobs:
 if [[ "${{ matrix.feature-unrar }}" == true ]]; then FEATURES+=(unrar); fi
 if [[ "${{ matrix.feature-use-zlib }}" == true ]]; then FEATURES+=(use_zlib); fi
 if [[ "${{ matrix.feature-use-zstd-thin }}" == true ]]; then FEATURES+=(use_zstd_thin); fi
-if [[ "${{ matrix.feature-bzip3 }}" == true ]]; then FEATURES+=(bzip3); fi
 # Output plus-separated list for artifact names
 IFS='+'
 echo "FEATURES_PLUS=${FEATURES[*]}" >> $GITHUB_OUTPUT
@@ -133,7 +127,7 @@ jobs:

 - uses: Swatinem/rust-cache@v2
 with:
-key: "${{ matrix.target }}-${{ matrix.feature-unrar }}-${{ matrix.feature-use-zlib }}-${{ matrix.feature-use-zstd-thin }}-${{ matrix.feature-bzip3 }}"
+key: "${{ matrix.target }}-${{ matrix.feature-unrar }}-${{ matrix.feature-use-zlib }}-${{ matrix.feature-use-zstd-thin }}"

 - name: Test on stable
 # there's no way to run tests for ARM64 Windows for now
@@ -152,7 +146,7 @@ jobs:
 if: |
 ${{ inputs.artifact_upload_mode != 'none' &&
 (inputs.artifact_upload_mode == 'all' ||
-(matrix.feature-unrar && matrix.feature-use-zlib && matrix.feature-use-zstd-thin && matrix.feature-bzip3)) }}
+(matrix.feature-unrar && matrix.feature-use-zlib && matrix.feature-use-zstd-thin)) }}
 uses: actions/upload-artifact@v4
 with:
 name: ouch-${{ matrix.target }}${{ steps.concat-features.outputs.FEATURES_PLUS != '' && format('-{0}', steps.concat-features.outputs.FEATURES_PLUS) || '' }}
CHANGELOG.md (13 lines changed)
@@ -21,23 +21,10 @@ Categories Used:
 ## [Unreleased](https://github.com/ouch-org/ouch/compare/0.6.1...HEAD)

 ### New Features

-- Merge folders in decompression [\#798](https://github.com/ouch-org/ouch/pull/798) ([tommady](https://github.com/tommady))
-- Add `--no-smart-unpack` flag to decompression command to disable smart unpack [\#809](https://github.com/ouch-org/ouch/pull/809) ([talis-fb](https://github.com/talis-fb))

 ### Improvements

-- Give better error messages when archive extensions are invalid [\#817](https://github.com/ouch-org/ouch/pull/817) ([marcospb19](https://github.com/marcospb19))

 ### Bug Fixes

-- Fix tar extraction count when --quiet [\#824](https://github.com/ouch-org/ouch/pull/824) ([marcospb19](https://github.com/marcospb19))
-- Fix 7z BadSignature error when compressing and then listing [\#819](https://github.com/ouch-org/ouch/pull/819) ([tommady](https://github.com/tommady))

 ### Tweaks

-- Make `.bz3` opt-out [\#814](https://github.com/ouch-org/ouch/pull/814) ([amyspark](https://github.com/amyspark))

 ## [0.6.1](https://github.com/ouch-org/ouch/compare/0.6.0...0.6.1)

 - Fix .zip crash when file mode isn't present [\#804](https://github.com/ouch-org/ouch/pull/804) ([marcospb19](https://github.com/marcospb19))
Cargo.lock (generated, 35 lines changed)
@@ -519,12 +519,6 @@ dependencies = [
 "powerfmt",
 ]

-[[package]]
-name = "diff"
-version = "0.1.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
-
 [[package]]
 name = "difflib"
 version = "0.4.0"
@@ -824,15 +818,6 @@ version = "1.70.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"

-[[package]]
-name = "itertools"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
-dependencies = [
- "either",
-]
-
 [[package]]
 name = "jiff"
 version = "0.2.8"
@@ -1096,21 +1081,17 @@ dependencies = [
 "filetime_creation",
 "flate2",
 "fs-err",
-"glob",
 "gzp",
 "ignore",
 "infer",
 "insta",
 "is_executable",
-"itertools",
 "libc",
 "linked-hash-map",
 "lz4_flex",
-"memchr",
 "num_cpus",
 "once_cell",
 "parse-display",
-"pretty_assertions",
 "proptest",
 "rand 0.8.5",
 "rayon",
@@ -1265,16 +1246,6 @@ dependencies = [
 "termtree",
 ]

-[[package]]
-name = "pretty_assertions"
-version = "1.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d"
-dependencies = [
- "diff",
- "yansi",
-]
-
 [[package]]
 name = "proc-macro2"
 version = "1.0.93"
@@ -2076,12 +2047,6 @@ dependencies = [
 "lzma-sys",
 ]

-[[package]]
-name = "yansi"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
-
 [[package]]
 name = "zerocopy"
 version = "0.7.35"
Cargo.toml (11 lines changed)
@@ -19,7 +19,7 @@ brotli = "7.0.0"
 bstr = { version = "1.10.0", default-features = false, features = ["std"] }
 bytesize = "1.3.0"
 bzip2 = "0.4.4"
-bzip3 = { version = "0.9.0", features = ["bundled"], optional = true }
+bzip3 = { version = "0.9.0", features = ["bundled"] }
 clap = { version = "4.5.20", features = ["derive", "env"] }
 filetime_creation = "0.2"
 flate2 = { version = "1.0.30", default-features = false }
@@ -58,13 +58,9 @@ clap_mangen = "0.2.24"

 [dev-dependencies]
 assert_cmd = "2.0.14"
-glob = "0.3.2"
 infer = "0.16.0"
 insta = { version = "1.40.0", features = ["filters"] }
-itertools = "0.14.0"
-memchr = "2.7.4"
 parse-display = "0.9.1"
-pretty_assertions = "1.4.1"
 proptest = "1.5.0"
 rand = { version = "0.8.5", default-features = false, features = [
 "small_rng",
@@ -74,7 +70,7 @@ regex = "1.10.4"
 test-strategy = "0.4.0"

 [features]
-default = ["unrar", "use_zlib", "use_zstd_thin", "bzip3"]
+default = ["unrar", "use_zlib", "use_zstd_thin"]
 use_zlib = ["flate2/zlib", "gzp/deflate_zlib", "zip/deflate-zlib"]
 use_zstd_thin = ["zstd/thin"]
 allow_piped_choice = []
@@ -92,5 +88,4 @@ inherits = "release"
 lto = false
 opt-level = 2
 incremental = true
-codegen-units = 32
-strip = false
+codegen-units = 16
@@ -21,7 +21,7 @@ PLATFORMS=(
 "x86_64-unknown-linux-musl"
 )
 # TODO: remove allow_piped_choice later
-DEFAULT_FEATURES="allow_piped_choice+unrar+use_zlib+use_zstd_thin+bzip3"
+DEFAULT_FEATURES="allow_piped_choice+unrar+use_zlib+use_zstd_thin"

 for platform in "${PLATFORMS[@]}"; do
 path="ouch-${platform}"
@@ -1,7 +0,0 @@
-use crate::Error;
-
-pub fn no_support() -> Error {
-Error::UnsupportedFormat {
-reason: "BZip3 support is disabled for this build, possibly due to missing bindgen-cli dependency.".into(),
-}
-}
@ -1,7 +1,5 @@
|
|||||||
//! Archive compression algorithms
|
//! Archive compression algorithms
|
||||||
|
|
||||||
#[cfg(not(feature = "bzip3"))]
|
|
||||||
pub mod bzip3_stub;
|
|
||||||
#[cfg(feature = "unrar")]
|
#[cfg(feature = "unrar")]
|
||||||
pub mod rar;
|
pub mod rar;
|
||||||
#[cfg(not(feature = "unrar"))]
|
#[cfg(not(feature = "unrar"))]
|
||||||
@@ -18,6 +18,8 @@ pub fn unpack_archive(
 password: Option<&[u8]>,
 quiet: bool,
 ) -> crate::Result<usize> {
+assert!(output_folder.read_dir().expect("dir exists").next().is_none());
+
 let archive = match password {
 Some(password) => Archive::with_password(archive_path, password),
 None => Archive::new(archive_path),
@@ -171,10 +171,12 @@ where
 }

 /// List contents of `archive_path`, returning a vector of archive entries
-pub fn list_archive<R>(reader: R, password: Option<&[u8]>) -> Result<impl Iterator<Item = crate::Result<FileInArchive>>>
-where
-R: Read + Seek,
-{
+pub fn list_archive(
+archive_path: &Path,
+password: Option<&[u8]>,
+) -> Result<impl Iterator<Item = crate::Result<FileInArchive>>> {
+let reader = fs::File::open(archive_path)?;
+
 let mut files = Vec::new();

 let entry_extract_fn = |entry: &SevenZArchiveEntry, _: &mut dyn Read, _: &PathBuf| {
@@ -24,6 +24,7 @@ use crate::{
 /// Unpacks the archive given by `archive` into the folder given by `into`.
 /// Assumes that output_folder is empty
 pub fn unpack_archive(reader: Box<dyn Read>, output_folder: &Path, quiet: bool) -> crate::Result<usize> {
+assert!(output_folder.read_dir().expect("dir exists").next().is_none());
 let mut archive = tar::Archive::new(reader);

 let mut files_unpacked = 0;
@@ -59,9 +60,10 @@ pub fn unpack_archive(reader: Box<dyn Read>, output_folder: &Path, quiet: bool)
 Bytes::new(file.size()),
 utils::strip_cur_dir(&output_folder.join(file.path()?)),
 ));
-}
 files_unpacked += 1;
 }
+}

 Ok(files_unpacked)
 }
@@ -37,6 +37,8 @@ pub fn unpack_archive<R>(
 where
 R: Read + Seek,
 {
+assert!(output_folder.read_dir().expect("dir exists").next().is_none());
+
 let mut unpacked_files = 0;

 for idx in 0..archive.len() {
@ -100,10 +100,6 @@ pub enum Subcommand {
|
|||||||
/// Remove the source file after successful decompression
|
/// Remove the source file after successful decompression
|
||||||
#[arg(short = 'r', long)]
|
#[arg(short = 'r', long)]
|
||||||
remove: bool,
|
remove: bool,
|
||||||
|
|
||||||
/// Disable Smart Unpack
|
|
||||||
#[arg(long)]
|
|
||||||
no_smart_unpack: bool,
|
|
||||||
},
|
},
|
||||||
/// List contents of an archive
|
/// List contents of an archive
|
||||||
#[command(visible_aliases = ["l", "ls"])]
|
#[command(visible_aliases = ["l", "ls"])]
|
||||||
@ -160,7 +156,6 @@ mod tests {
|
|||||||
files: vec!["\x00\x11\x22".into()],
|
files: vec!["\x00\x11\x22".into()],
|
||||||
output_dir: None,
|
output_dir: None,
|
||||||
remove: false,
|
remove: false,
|
||||||
no_smart_unpack: false,
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -174,7 +169,6 @@ mod tests {
|
|||||||
files: to_paths(["file.tar.gz"]),
|
files: to_paths(["file.tar.gz"]),
|
||||||
output_dir: None,
|
output_dir: None,
|
||||||
remove: false,
|
remove: false,
|
||||||
no_smart_unpack: false,
|
|
||||||
},
|
},
|
||||||
..mock_cli_args()
|
..mock_cli_args()
|
||||||
}
|
}
|
||||||
@ -186,7 +180,6 @@ mod tests {
|
|||||||
files: to_paths(["file.tar.gz"]),
|
files: to_paths(["file.tar.gz"]),
|
||||||
output_dir: None,
|
output_dir: None,
|
||||||
remove: false,
|
remove: false,
|
||||||
no_smart_unpack: false,
|
|
||||||
},
|
},
|
||||||
..mock_cli_args()
|
..mock_cli_args()
|
||||||
}
|
}
|
||||||
@ -198,7 +191,6 @@ mod tests {
|
|||||||
files: to_paths(["a", "b", "c"]),
|
files: to_paths(["a", "b", "c"]),
|
||||||
output_dir: None,
|
output_dir: None,
|
||||||
remove: false,
|
remove: false,
|
||||||
no_smart_unpack: false,
|
|
||||||
},
|
},
|
||||||
..mock_cli_args()
|
..mock_cli_args()
|
||||||
}
|
}
|
||||||
@@ -57,16 +57,10 @@ pub fn compress_files(
 encoder,
 level.map_or_else(Default::default, |l| bzip2::Compression::new((l as u32).clamp(1, 9))),
 )),
-Bzip3 => {
-#[cfg(not(feature = "bzip3"))]
-return Err(archive::bzip3_stub::no_support());
-
-#[cfg(feature = "bzip3")]
-Box::new(
+Bzip3 => Box::new(
 // Use block size of 16 MiB
 bzip3::write::Bz3Encoder::new(encoder, 16 * 2_usize.pow(20))?,
-)
-}
+),
 Lz4 => Box::new(lz4_flex::frame::FrameEncoder::new(encoder).auto_finish()),
 Lzma => Box::new(xz2::write::XzEncoder::new(
 encoder,
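For readers following the bzip3 change: the main side guards the Bzip3 arm behind a Cargo feature and fails gracefully when it is compiled out. A minimal standalone sketch of that pattern, with illustrative names rather than the actual ouch API:

// A minimal sketch of the feature-gating pattern used above (illustrative
// names, not the actual ouch code): the Bzip3 arm either builds an encoder
// or returns an error when the `bzip3` feature is compiled out.
use std::io::{self, Write};

enum Format {
    Bzip3,
    Passthrough,
}

fn make_encoder(format: Format, sink: Box<dyn Write>) -> Result<Box<dyn Write>, String> {
    match format {
        Format::Bzip3 => {
            // With the feature disabled this arm diverges with an error,
            // mirroring the `bzip3_stub::no_support()` call above.
            #[cfg(not(feature = "bzip3"))]
            return Err("BZip3 support is disabled for this build".into());

            // With the feature enabled a real build would wrap `sink` in a
            // `bzip3::write::Bz3Encoder` here.
            #[cfg(feature = "bzip3")]
            Ok(sink)
        }
        Format::Passthrough => Ok(sink),
    }
}

fn main() {
    let plain = make_encoder(Format::Passthrough, Box::new(io::sink()));
    let gated = make_encoder(Format::Bzip3, Box::new(io::sink()));
    println!("passthrough ok: {}, bzip3 arm ok: {}", plain.is_ok(), gated.is_ok());
}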
@@ -6,8 +6,6 @@ use std::{

 use fs_err as fs;

-#[cfg(not(feature = "bzip3"))]
-use crate::archive;
 use crate::{
 commands::{warn_user_about_loading_sevenz_in_memory, warn_user_about_loading_zip_in_memory},
 extension::{
@@ -34,7 +32,6 @@ pub struct DecompressOptions<'a> {
 pub output_dir: &'a Path,
 pub output_file_path: PathBuf,
 pub is_output_dir_provided: bool,
-pub is_smart_unpack: bool,
 pub question_policy: QuestionPolicy,
 pub quiet: bool,
 pub password: Option<&'a [u8]>,
@@ -78,7 +75,6 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
 &options.output_file_path,
 options.question_policy,
 options.is_output_dir_provided,
-options.is_smart_unpack,
 )? {
 files
 } else {
@@ -120,19 +116,13 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
 let decoder: Box<dyn Read> = match format {
 Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
 Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
-Bzip3 => {
-#[cfg(not(feature = "bzip3"))]
-return Err(archive::bzip3_stub::no_support());
-
-#[cfg(feature = "bzip3")]
-Box::new(bzip3::read::Bz3Decoder::new(decoder)?)
-}
+Bzip3 => Box::new(bzip3::read::Bz3Decoder::new(decoder)?),
 Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
 Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
 Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
 Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
 Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
-Tar | Zip | Rar | SevenZip => decoder,
+Tar | Zip | Rar | SevenZip => unreachable!(),
 };
 Ok(decoder)
 };
@@ -147,11 +137,7 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
 Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
 reader = chain_reader_decoder(&first_extension, reader)?;

-let mut writer = match utils::ask_to_create_file(
-&options.output_file_path,
-options.question_policy,
-QuestionAction::Decompression,
-)? {
+let mut writer = match utils::ask_to_create_file(&options.output_file_path, options.question_policy)? {
 Some(file) => file,
 None => return Ok(()),
 };
@@ -167,7 +153,6 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
 &options.output_file_path,
 options.question_policy,
 options.is_output_dir_provided,
-options.is_smart_unpack,
 )? {
 files
 } else {
@@ -202,7 +187,6 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
 &options.output_file_path,
 options.question_policy,
 options.is_output_dir_provided,
-options.is_smart_unpack,
 )? {
 files
 } else {
@@ -235,7 +219,6 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
 &options.output_file_path,
 options.question_policy,
 options.is_output_dir_provided,
-options.is_smart_unpack,
 )? {
 files
 } else {
@@ -278,7 +261,6 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
 &options.output_file_path,
 options.question_policy,
 options.is_output_dir_provided,
-options.is_smart_unpack,
 )? {
 files
 } else {
@ -314,19 +296,12 @@ fn execute_decompression(
|
|||||||
output_file_path: &Path,
|
output_file_path: &Path,
|
||||||
question_policy: QuestionPolicy,
|
question_policy: QuestionPolicy,
|
||||||
is_output_dir_provided: bool,
|
is_output_dir_provided: bool,
|
||||||
is_smart_unpack: bool,
|
|
||||||
) -> crate::Result<ControlFlow<(), usize>> {
|
) -> crate::Result<ControlFlow<(), usize>> {
|
||||||
if is_smart_unpack {
|
if is_output_dir_provided {
|
||||||
return smart_unpack(unpack_fn, output_dir, output_file_path, question_policy);
|
unpack(unpack_fn, output_dir, question_policy)
|
||||||
}
|
|
||||||
|
|
||||||
let target_output_dir = if is_output_dir_provided {
|
|
||||||
output_dir
|
|
||||||
} else {
|
} else {
|
||||||
output_file_path
|
smart_unpack(unpack_fn, output_dir, output_file_path, question_policy)
|
||||||
};
|
}
|
||||||
|
|
||||||
unpack(unpack_fn, target_output_dir, question_policy)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Unpacks an archive creating the output directory, this function will create the output_dir
|
/// Unpacks an archive creating the output directory, this function will create the output_dir
|
||||||
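The hunk above is the core behaviour change behind the `--no-smart-unpack` flag: main picks the unpack target from two flags instead of deciding purely on whether `--dir` was given. A small self-contained sketch of main's selection logic, with illustrative function and variable names:

// Sketch of the directory-selection decision on the main side (names are
// illustrative, not the actual ouch functions).
use std::path::{Path, PathBuf};

fn pick_unpack_target(
    output_dir: &Path,
    output_file_path: &Path,
    is_output_dir_provided: bool,
    is_smart_unpack: bool,
) -> PathBuf {
    if is_smart_unpack {
        // Smart unpack decides the final folder itself, starting from output_dir.
        return output_dir.to_path_buf();
    }
    if is_output_dir_provided {
        output_dir.to_path_buf()
    } else {
        output_file_path.to_path_buf()
    }
}

fn main() {
    let dir = Path::new("out");
    let derived = Path::new("archive_contents");
    assert_eq!(pick_unpack_target(dir, derived, false, true), PathBuf::from("out"));
    assert_eq!(pick_unpack_target(dir, derived, true, false), PathBuf::from("out"));
    assert_eq!(pick_unpack_target(dir, derived, false, false), PathBuf::from("archive_contents"));
    println!("ok");
}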
@@ -343,7 +318,7 @@ fn unpack(
 let output_dir_cleaned = if is_valid_output_dir {
 output_dir.to_owned()
 } else {
-match utils::resolve_path_conflict(output_dir, question_policy, QuestionAction::Decompression)? {
+match utils::resolve_path_conflict(output_dir, question_policy)? {
 Some(path) => path,
 None => return Ok(ControlFlow::Break(())),
 }
@@ -399,7 +374,7 @@ fn smart_unpack(

 // Before moving, need to check if a file with the same name already exists
 // If it does, need to ask the user what to do
-new_path = match utils::resolve_path_conflict(&new_path, question_policy, QuestionAction::Decompression)? {
+new_path = match utils::resolve_path_conflict(&new_path, question_policy)? {
 Some(path) => path,
 None => return Ok(ControlFlow::Break(())),
 };
@@ -6,7 +6,7 @@ use std::{
 use fs_err as fs;

 use crate::{
-archive,
+archive::sevenz,
 commands::warn_user_about_loading_zip_in_memory,
 extension::CompressionFormat::{self, *},
 list::{self, FileInArchive, ListOptions},
@@ -36,6 +36,7 @@ pub fn list_archive_contents(
 let zip_archive = zip::ZipArchive::new(reader)?;
 let files = crate::archive::zip::list_archive(zip_archive, password);
 list::list_files(archive_path, files, list_options)?;

 return Ok(());
 }

@@ -45,38 +46,26 @@ pub fn list_archive_contents(

 // Grab previous decoder and wrap it inside of a new one
 let chain_reader_decoder =
-|format: CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
+|format: &CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
 let decoder: Box<dyn Read + Send> = match format {
 Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
 Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
-Bzip3 => {
-#[cfg(not(feature = "bzip3"))]
-return Err(archive::bzip3_stub::no_support());
-
-#[cfg(feature = "bzip3")]
-Box::new(bzip3::read::Bz3Decoder::new(decoder).unwrap())
-}
+Bzip3 => Box::new(bzip3::read::Bz3Decoder::new(decoder).unwrap()),
 Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
 Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
 Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
 Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
 Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
-Tar | Zip | Rar | SevenZip => unreachable!("should be treated by caller"),
+Tar | Zip | Rar | SevenZip => unreachable!(),
 };
 Ok(decoder)
 };

-let mut misplaced_archive_format = None;
-for &format in formats.iter().skip(1).rev() {
-if format.archive_format() {
-misplaced_archive_format = Some(format);
-break;
-}
+for format in formats.iter().skip(1).rev() {
 reader = chain_reader_decoder(format, reader)?;
 }

-let archive_format = misplaced_archive_format.unwrap_or(formats[0]);
-let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match archive_format {
+let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match formats[0] {
 Tar => Box::new(crate::archive::tar::list_archive(tar::Archive::new(reader))),
 Zip => {
 if formats.len() > 1 {
@@ -122,13 +111,10 @@ pub fn list_archive_contents(
 }
 }

-let mut vec = vec![];
-io::copy(&mut reader, &mut vec)?;
-
-Box::new(archive::sevenz::list_archive(io::Cursor::new(vec), password)?)
+Box::new(sevenz::list_archive(archive_path, password)?)
 }
 Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
-unreachable!("Not an archive, should be validated before calling this function.");
+panic!("Not an archive! This should never happen, if it does, something is wrong with `CompressionFormat::is_archive()`. Please report this error!");
 }
 };

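One detail worth noting in the 7z listing change above: the main side no longer re-opens the archive path but drains the already-chained reader into memory so the 7z lister receives something seekable. A hedged, standalone sketch of that buffering trick (names are illustrative):

// Sketch of the buffering trick on the main side: a non-seekable reader is
// copied into memory so a `Read + Seek` consumer can use it via `io::Cursor`.
use std::io::{self, Cursor, Read, Seek, SeekFrom};

fn make_seekable(mut reader: impl Read) -> io::Result<Cursor<Vec<u8>>> {
    let mut buf = Vec::new();
    io::copy(&mut reader, &mut buf)?;
    Ok(Cursor::new(buf))
}

fn main() -> io::Result<()> {
    // Stand-in for a chained decompression stream that cannot seek.
    let stream: &[u8] = b"7z payload bytes";
    let mut seekable = make_seekable(stream)?;

    // A seek-capable consumer (like a 7z lister) can now rewind at will.
    seekable.seek(SeekFrom::Start(3))?;
    let mut rest = String::new();
    seekable.read_to_string(&mut rest)?;
    assert_eq!(rest, "payload bytes");
    Ok(())
}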
@@ -20,7 +20,6 @@ use crate::{
 list::ListOptions,
 utils::{
 self, colors::*, is_path_stdin, logger::info_accessible, path_to_str, EscapedPathDisplay, FileVisibilityPolicy,
-QuestionAction,
 },
 CliArgs, QuestionPolicy,
 };
@@ -81,7 +80,7 @@ pub fn run(
 let parsed_formats = parse_format_flag(&formats)?;
 (Some(formats), parsed_formats)
 }
-None => (None, extension::extensions_from_path(&output_path)?),
+None => (None, extension::extensions_from_path(&output_path)),
 };

 check::check_invalid_compression_with_non_archive_format(
@@ -92,8 +91,7 @@ pub fn run(
 )?;
 check::check_archive_formats_position(&formats, &output_path)?;

-let output_file =
-match utils::ask_to_create_file(&output_path, question_policy, QuestionAction::Compression)? {
+let output_file = match utils::ask_to_create_file(&output_path, question_policy)? {
 Some(writer) => writer,
 None => return Ok(()),
 };
@@ -150,7 +148,6 @@ pub fn run(
 files,
 output_dir,
 remove,
-no_smart_unpack,
 } => {
 let mut output_paths = vec![];
 let mut formats = vec![];
@@ -158,7 +155,6 @@ pub fn run(
 if let Some(format) = args.format {
 let format = parse_format_flag(&format)?;
 for path in files.iter() {
-// TODO: use Error::Custom
 let file_name = path.file_name().ok_or_else(|| Error::NotFound {
 error_title: format!("{} does not have a file name", EscapedPathDisplay::new(path)),
 })?;
@@ -167,7 +163,7 @@ pub fn run(
 }
 } else {
 for path in files.iter() {
-let (pathbase, mut file_formats) = extension::separate_known_extensions_from_name(path)?;
+let (pathbase, mut file_formats) = extension::separate_known_extensions_from_name(path);

 if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
 return Ok(());
@@ -180,11 +176,9 @@ pub fn run(

 check::check_missing_formats_when_decompressing(&files, &formats)?;

-let is_output_dir_provided = output_dir.is_some();
-let is_smart_unpack = !is_output_dir_provided && !no_smart_unpack;
-
 // The directory that will contain the output files
 // We default to the current directory if the user didn't specify an output directory with --dir
+let is_output_dir_provided = output_dir.is_some();
 let output_dir = if let Some(dir) = output_dir {
 utils::create_dir_if_non_existent(&dir)?;
 dir
@@ -206,10 +200,9 @@ pub fn run(
 decompress_file(DecompressOptions {
 input_file_path: input_path,
 formats,
-is_output_dir_provided,
 output_dir: &output_dir,
 output_file_path,
-is_smart_unpack,
+is_output_dir_provided,
 question_policy,
 quiet: args.quiet,
 password: args.password.as_deref().map(|str| {
@@ -229,7 +222,7 @@ pub fn run(
 }
 } else {
 for path in files.iter() {
-let mut file_formats = extension::extensions_from_path(path)?;
+let mut file_formats = extension::extensions_from_path(path);

 if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
 return Ok(());
@@ -200,7 +200,6 @@ impl From<std::io::Error> for Error {
 }
 }

-#[cfg(feature = "bzip3")]
 impl From<bzip3::Error> for Error {
 fn from(err: bzip3::Error) -> Self {
 use bzip3::Error as Bz3Error;
@ -5,10 +5,7 @@ use std::{ffi::OsStr, fmt, path::Path};
|
|||||||
use bstr::ByteSlice;
|
use bstr::ByteSlice;
|
||||||
use CompressionFormat::*;
|
use CompressionFormat::*;
|
||||||
|
|
||||||
use crate::{
|
use crate::{error::Error, utils::logger::warning};
|
||||||
error::{Error, FinalError, Result},
|
|
||||||
utils::logger::warning,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const SUPPORTED_EXTENSIONS: &[&str] = &[
|
pub const SUPPORTED_EXTENSIONS: &[&str] = &[
|
||||||
"tar",
|
"tar",
|
||||||
@ -63,8 +60,8 @@ impl Extension {
|
|||||||
|
|
||||||
/// Checks if the first format in `compression_formats` is an archive
|
/// Checks if the first format in `compression_formats` is an archive
|
||||||
pub fn is_archive(&self) -> bool {
|
pub fn is_archive(&self) -> bool {
|
||||||
// Index Safety: we check that `compression_formats` is not empty in `Self::new`
|
// Safety: we check that `compression_formats` is not empty in `Self::new`
|
||||||
self.compression_formats[0].archive_format()
|
self.compression_formats[0].is_archive_format()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -106,7 +103,7 @@ pub enum CompressionFormat {
|
|||||||
|
|
||||||
impl CompressionFormat {
|
impl CompressionFormat {
|
||||||
/// Currently supported archive formats are .tar (and aliases to it) and .zip
|
/// Currently supported archive formats are .tar (and aliases to it) and .zip
|
||||||
pub fn archive_format(&self) -> bool {
|
fn is_archive_format(&self) -> bool {
|
||||||
// Keep this match like that without a wildcard `_` so we don't forget to update it
|
// Keep this match like that without a wildcard `_` so we don't forget to update it
|
||||||
match self {
|
match self {
|
||||||
Tar | Zip | Rar | SevenZip => true,
|
Tar | Zip | Rar | SevenZip => true,
|
||||||
@ -150,13 +147,14 @@ fn to_extension(ext: &[u8]) -> Option<Extension> {
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn split_extension_at_end(name: &[u8]) -> Option<(&[u8], Extension)> {
|
fn split_extension(name: &mut &[u8]) -> Option<Extension> {
|
||||||
let (new_name, ext) = name.rsplit_once_str(b".")?;
|
let (new_name, ext) = name.rsplit_once_str(b".")?;
|
||||||
if matches!(new_name, b"" | b"." | b"..") {
|
if matches!(new_name, b"" | b"." | b"..") {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let ext = to_extension(ext)?;
|
let ext = to_extension(ext)?;
|
||||||
Some((new_name, ext))
|
*name = new_name;
|
||||||
|
Some(ext)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
|
pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
|
||||||
@ -190,40 +188,17 @@ pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
|
|||||||
|
|
||||||
/// Extracts extensions from a path.
|
/// Extracts extensions from a path.
|
||||||
///
|
///
|
||||||
/// Returns both the remaining path and the list of extension objects.
|
/// Returns both the remaining path and the list of extension objects
|
||||||
pub fn separate_known_extensions_from_name(path: &Path) -> Result<(&Path, Vec<Extension>)> {
|
pub fn separate_known_extensions_from_name(path: &Path) -> (&Path, Vec<Extension>) {
|
||||||
let mut extensions = vec![];
|
let mut extensions = vec![];
|
||||||
|
|
||||||
let Some(mut name) = path.file_name().and_then(<[u8] as ByteSlice>::from_os_str) else {
|
let Some(mut name) = path.file_name().and_then(<[u8] as ByteSlice>::from_os_str) else {
|
||||||
return Ok((path, extensions));
|
return (path, extensions);
|
||||||
};
|
};
|
||||||
|
|
||||||
while let Some((new_name, extension)) = split_extension_at_end(name) {
|
// While there is known extensions at the tail, grab them
|
||||||
name = new_name;
|
while let Some(extension) = split_extension(&mut name) {
|
||||||
extensions.insert(0, extension);
|
extensions.insert(0, extension);
|
||||||
if extensions[0].is_archive() {
|
|
||||||
if let Some((_, misplaced_extension)) = split_extension_at_end(name) {
|
|
||||||
let mut error = FinalError::with_title("File extensions are invalid for operation").detail(format!(
|
|
||||||
"The archive extension '.{}' can only be placed at the start of the extension list",
|
|
||||||
extensions[0].display_text,
|
|
||||||
));
|
|
||||||
|
|
||||||
if misplaced_extension.compression_formats == extensions[0].compression_formats {
|
|
||||||
error = error.detail(format!(
|
|
||||||
"File: '{path:?}' contains '.{}' and '.{}'",
|
|
||||||
misplaced_extension.display_text, extensions[0].display_text,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
return Err(error
|
|
||||||
.hint("You can use `--format` to specify what format to use, examples:")
|
|
||||||
.hint(" ouch compress file.zip.zip file --format zip")
|
|
||||||
.hint(" ouch decompress file --format zst")
|
|
||||||
.hint(" ouch list archive --format tar.gz")
|
|
||||||
.into());
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Ok(name) = name.to_str() {
|
if let Ok(name) = name.to_str() {
|
||||||
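Both sides of this hunk walk the filename from the tail, peeling off known extensions; main additionally stops and errors when an archive extension shows up anywhere but first. A simplified, self-contained sketch of the tail-splitting loop, using plain strings instead of ouch's Extension type:

// Simplified sketch of the extension-splitting loop (strings stand in for
// ouch's `Extension` struct; the known-extension list here is illustrative).
fn split_extension_at_end(name: &str) -> Option<(&str, &str)> {
    let (rest, ext) = name.rsplit_once('.')?;
    if rest.is_empty() || rest == "." || rest == ".." {
        return None;
    }
    // Only recognise a fixed set of known extensions.
    const KNOWN: &[&str] = &["tar", "gz", "zst", "zip"];
    KNOWN.contains(&ext).then_some((rest, ext))
}

fn main() {
    let mut name = "file.tar.gz";
    let mut extensions = Vec::new();
    // Grab known extensions from the tail, stopping at the first unknown one.
    while let Some((rest, ext)) = split_extension_at_end(name) {
        name = rest;
        extensions.insert(0, ext);
    }
    assert_eq!(name, "file");
    assert_eq!(extensions, vec!["tar", "gz"]);
    println!("{name}: {extensions:?}");
}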
@@ -235,12 +210,13 @@ pub fn separate_known_extensions_from_name(path: &Path) -> Result<(&Path, Vec<Ex
 }
 }

-Ok((name.to_path().unwrap(), extensions))
+(name.to_path().unwrap(), extensions)
 }

 /// Extracts extensions from a path, return only the list of extension objects
-pub fn extensions_from_path(path: &Path) -> Result<Vec<Extension>> {
-separate_known_extensions_from_name(path).map(|(_, extensions)| extensions)
+pub fn extensions_from_path(path: &Path) -> Vec<Extension> {
+let (_, extensions) = separate_known_extensions_from_name(path);
+extensions
 }

 /// Panics if formats has an empty list of compression formats
@@ -295,13 +271,14 @@ pub fn build_archive_file_suggestion(path: &Path, suggested_extension: &str) ->
 #[cfg(test)]
 mod tests {
 use super::*;
+use crate::utils::logger::spawn_logger_thread;

 #[test]
 fn test_extensions_from_path() {
 let path = Path::new("bolovo.tar.gz");

-let extensions = extensions_from_path(path).unwrap();
-let formats = flatten_compression_formats(&extensions);
+let extensions: Vec<Extension> = extensions_from_path(path);
+let formats: Vec<CompressionFormat> = flatten_compression_formats(&extensions);

 assert_eq!(formats, vec![Tar, Gzip]);
 }
@@ -309,31 +286,32 @@ mod tests {
 #[test]
 /// Test extension parsing for input/output files
 fn test_separate_known_extensions_from_name() {
+let _handler = spawn_logger_thread();
 assert_eq!(
-separate_known_extensions_from_name("file".as_ref()).unwrap(),
+separate_known_extensions_from_name("file".as_ref()),
 ("file".as_ref(), vec![])
 );
 assert_eq!(
-separate_known_extensions_from_name("tar".as_ref()).unwrap(),
+separate_known_extensions_from_name("tar".as_ref()),
 ("tar".as_ref(), vec![])
 );
 assert_eq!(
-separate_known_extensions_from_name(".tar".as_ref()).unwrap(),
+separate_known_extensions_from_name(".tar".as_ref()),
 (".tar".as_ref(), vec![])
 );
 assert_eq!(
-separate_known_extensions_from_name("file.tar".as_ref()).unwrap(),
+separate_known_extensions_from_name("file.tar".as_ref()),
 ("file".as_ref(), vec![Extension::new(&[Tar], "tar")])
 );
 assert_eq!(
-separate_known_extensions_from_name("file.tar.gz".as_ref()).unwrap(),
+separate_known_extensions_from_name("file.tar.gz".as_ref()),
 (
 "file".as_ref(),
 vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
 )
 );
 assert_eq!(
-separate_known_extensions_from_name(".tar.gz".as_ref()).unwrap(),
+separate_known_extensions_from_name(".tar.gz".as_ref()),
 (".tar".as_ref(), vec![Extension::new(&[Gzip], "gz")])
 );
 }
@@ -389,10 +367,4 @@ mod tests {
 "linux.pkg.info.tar.zst"
 );
 }
-
-#[test]
-fn test_extension_parsing_with_multiple_archive_formats() {
-assert!(separate_known_extensions_from_name("file.tar.zip".as_ref()).is_err());
-assert!(separate_known_extensions_from_name("file.7z.zst.zip.lz4".as_ref()).is_err());
-}
 }
@@ -11,7 +11,7 @@ use fs_err as fs;
 use super::{question::FileConflitOperation, user_wants_to_overwrite};
 use crate::{
 extension::Extension,
-utils::{logger::info_accessible, EscapedPathDisplay, QuestionAction},
+utils::{logger::info_accessible, EscapedPathDisplay},
 QuestionPolicy,
 };

@@ -26,13 +26,9 @@ pub fn is_path_stdin(path: &Path) -> bool {
 /// * `Ok(None)` means the user wants to cancel the operation
 /// * `Ok(Some(path))` returns a valid PathBuf without any another file or directory with the same name
 /// * `Err(_)` is an error
-pub fn resolve_path_conflict(
-path: &Path,
-question_policy: QuestionPolicy,
-question_action: QuestionAction,
-) -> crate::Result<Option<PathBuf>> {
+pub fn resolve_path_conflict(path: &Path, question_policy: QuestionPolicy) -> crate::Result<Option<PathBuf>> {
 if path.exists() {
-match user_wants_to_overwrite(path, question_policy, question_action)? {
+match user_wants_to_overwrite(path, question_policy)? {
 FileConflitOperation::Cancel => Ok(None),
 FileConflitOperation::Overwrite => {
 remove_file_or_dir(path)?;
@@ -42,7 +38,6 @@ pub fn resolve_path_conflict(
 let renamed_path = rename_for_available_filename(path);
 Ok(Some(renamed_path))
 }
-FileConflitOperation::Merge => Ok(Some(path.to_path_buf())),
 }
 } else {
 Ok(Some(path.to_path_buf()))
@@ -133,7 +128,7 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
 buf.starts_with(&[0x42, 0x5A, 0x68])
 }
 fn is_bz3(buf: &[u8]) -> bool {
-buf.starts_with(b"BZ3v1")
+buf.starts_with(bzip3::MAGIC_NUMBER)
 }
 fn is_xz(buf: &[u8]) -> bool {
 buf.starts_with(&[0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])
@@ -125,14 +125,10 @@ mod logger_thread {
 static SENDER: OnceLock<LogSender> = OnceLock::new();

 #[track_caller]
-fn setup_channel() -> Option<LogReceiver> {
-let mut optional = None;
-SENDER.get_or_init(|| {
+fn setup_channel() -> LogReceiver {
 let (tx, rx) = mpsc::channel();
-optional = Some(rx);
-tx
-});
-optional
+SENDER.set(tx).expect("`setup_channel` should only be called once");
+rx
 }

 #[track_caller]
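The logger change above replaces a set-once SENDER with a get_or_init that also hands the receiver back to the first caller, which is what lets spawn_logger_thread be called more than once without panicking. A standalone sketch of that pattern, with the message type and names simplified:

// Sketch of the one-time channel setup used on the main side: the first
// caller initialises the global sender and gets the receiver back; later
// callers get `None`, so only one logger thread is ever spawned.
use std::sync::{mpsc, OnceLock};

static SENDER: OnceLock<mpsc::Sender<String>> = OnceLock::new();

fn setup_channel() -> Option<mpsc::Receiver<String>> {
    let mut receiver = None;
    SENDER.get_or_init(|| {
        let (tx, rx) = mpsc::channel();
        receiver = Some(rx);
        tx
    });
    receiver
}

fn main() {
    let rx = setup_channel().expect("first call returns the receiver");
    assert!(setup_channel().is_none(), "second call must not re-initialise");

    SENDER.get().unwrap().send("hello from the logger".to_string()).unwrap();
    println!("{}", rx.recv().unwrap());
}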
@@ -142,9 +138,6 @@ mod logger_thread {

 #[track_caller]
 pub(super) fn send_print_command(msg: PrintMessage) {
-if cfg!(test) {
-spawn_logger_thread();
-}
 get_sender()
 .send(LoggerCommand::Print(msg))
 .expect("Failed to send print command");
@@ -177,10 +170,9 @@ mod logger_thread {
 }

 pub fn spawn_logger_thread() {
-if let Some(log_receiver) = setup_channel() {
+let log_receiver = setup_channel();
 thread::spawn(move || run_logger(log_receiver));
 }
-}

 fn run_logger(log_receiver: LogReceiver) {
 const FLUSH_TIMEOUT: Duration = Duration::from_millis(200);
@@ -48,32 +48,26 @@ pub enum FileConflitOperation {
 /// Rename the file
 /// It'll be put "_1" at the end of the filename or "_2","_3","_4".. if already exists
 Rename,
-/// Merge conflicting folders
-Merge,
 }

 /// Check if QuestionPolicy flags were set, otherwise, ask user if they want to overwrite.
-pub fn user_wants_to_overwrite(
-path: &Path,
-question_policy: QuestionPolicy,
-question_action: QuestionAction,
-) -> crate::Result<FileConflitOperation> {
+pub fn user_wants_to_overwrite(path: &Path, question_policy: QuestionPolicy) -> crate::Result<FileConflitOperation> {
 use FileConflitOperation as Op;

 match question_policy {
 QuestionPolicy::AlwaysYes => Ok(Op::Overwrite),
 QuestionPolicy::AlwaysNo => Ok(Op::Cancel),
-QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action),
+QuestionPolicy::Ask => ask_file_conflict_operation(path),
 }
 }

 /// Ask the user if they want to overwrite or rename the &Path
-pub fn ask_file_conflict_operation(path: &Path, question_action: QuestionAction) -> Result<FileConflitOperation> {
+pub fn ask_file_conflict_operation(path: &Path) -> Result<FileConflitOperation> {
 use FileConflitOperation as Op;

 let path = path_to_str(strip_cur_dir(path));
-match question_action {
-QuestionAction::Compression => ChoicePrompt::new(
+ChoicePrompt::new(
 format!("Do you want to overwrite {path}?"),
 [
 ("yes", Op::Overwrite, *colors::GREEN),
@@ -81,38 +75,22 @@ pub fn ask_file_conflict_operation(path: &Path, question_action: QuestionAction)
 ("rename", Op::Rename, *colors::BLUE),
 ],
 )
-.ask(),
-QuestionAction::Decompression => ChoicePrompt::new(
-format!("Do you want to overwrite {path}?"),
-[
-("yes", Op::Overwrite, *colors::GREEN),
-("no", Op::Cancel, *colors::RED),
-("rename", Op::Rename, *colors::BLUE),
-("merge", Op::Merge, *colors::ORANGE),
-],
-)
-.ask(),
-}
+.ask()
 }

 /// Create the file if it doesn't exist and if it does then ask to overwrite it.
 /// If the user doesn't want to overwrite then we return [`Ok(None)`]
-pub fn ask_to_create_file(
-path: &Path,
-question_policy: QuestionPolicy,
-question_action: QuestionAction,
-) -> Result<Option<fs::File>> {
+pub fn ask_to_create_file(path: &Path, question_policy: QuestionPolicy) -> Result<Option<fs::File>> {
 match fs::OpenOptions::new().write(true).create_new(true).open(path) {
 Ok(w) => Ok(Some(w)),
 Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {
 let action = match question_policy {
 QuestionPolicy::AlwaysYes => FileConflitOperation::Overwrite,
 QuestionPolicy::AlwaysNo => FileConflitOperation::Cancel,
-QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action)?,
+QuestionPolicy::Ask => ask_file_conflict_operation(path)?,
 };

 match action {
-FileConflitOperation::Merge => Ok(Some(fs::File::create(path)?)),
 FileConflitOperation::Overwrite => {
 utils::remove_file_or_dir(path)?;
 Ok(Some(fs::File::create(path)?))
|
@@ -7,12 +7,8 @@ use std::{
     path::{Path, PathBuf},
 };
 
-use bstr::ByteSlice;
 use fs_err as fs;
-use itertools::Itertools;
-use memchr::memmem;
 use parse_display::Display;
-use pretty_assertions::assert_eq;
 use proptest::sample::size_range;
 use rand::{rngs::SmallRng, Rng, SeedableRng};
 use tempfile::tempdir;
@@ -21,7 +17,7 @@ use test_strategy::{proptest, Arbitrary};
 use crate::utils::{assert_same_directory, write_random_content};
 
 /// tar and zip extensions
-#[derive(Arbitrary, Clone, Copy, Debug, Display)]
+#[derive(Arbitrary, Debug, Display)]
 #[display(style = "lowercase")]
 enum DirectoryExtension {
     #[display("7z")]
@@ -29,7 +25,6 @@ enum DirectoryExtension {
     Tar,
     Tbz,
     Tbz2,
-    #[cfg(feature = "bzip3")]
     Tbz3,
     Tgz,
     Tlz4,
@@ -46,7 +41,6 @@ enum DirectoryExtension {
 enum FileExtension {
     Bz,
     Bz2,
-    #[cfg(feature = "bzip3")]
     Bz3,
     Gz,
     Lz4,
@@ -65,9 +59,9 @@ enum Extension {
 }
 
 /// Converts a list of extension structs to string
-fn merge_extensions(ext: impl ToString, exts: &[FileExtension]) -> String {
+fn merge_extensions(ext: impl ToString, exts: Vec<FileExtension>) -> String {
     once(ext.to_string())
-        .chain(exts.iter().map(|x| x.to_string()))
+        .chain(exts.into_iter().map(|x| x.to_string()))
         .collect::<Vec<_>>()
         .join(".")
 }
@@ -95,7 +89,6 @@ fn create_random_files(dir: impl Into<PathBuf>, depth: u8, rng: &mut SmallRng) {
 }
 
 /// Create n random files on directory dir
-#[cfg_attr(not(feature = "allow_piped_choice"), allow(dead_code))]
 fn create_n_random_files(n: usize, dir: impl Into<PathBuf>, rng: &mut SmallRng) {
     let dir: &PathBuf = &dir.into();
 
@@ -121,7 +114,7 @@ fn single_empty_file(ext: Extension, #[any(size_range(0..8).lift())] exts: Vec<F
     let before = &dir.join("before");
     fs::create_dir(before).unwrap();
     let before_file = &before.join("file");
-    let archive = &dir.join(format!("file.{}", merge_extensions(ext, &exts)));
+    let archive = &dir.join(format!("file.{}", merge_extensions(ext, exts)));
     let after = &dir.join("after");
     fs::write(before_file, []).unwrap();
     ouch!("-A", "c", before_file, archive);
@@ -144,7 +137,7 @@ fn single_file(
     let before = &dir.join("before");
     fs::create_dir(before).unwrap();
     let before_file = &before.join("file");
-    let archive = &dir.join(format!("file.{}", merge_extensions(ext, &exts)));
+    let archive = &dir.join(format!("file.{}", merge_extensions(ext, exts)));
     let after = &dir.join("after");
     write_random_content(
         &mut fs::File::create(before_file).unwrap(),
@@ -174,7 +167,7 @@ fn single_file_stdin(
     let before = &dir.join("before");
     fs::create_dir(before).unwrap();
     let before_file = &before.join("file");
-    let format = merge_extensions(&ext, &exts);
+    let format = merge_extensions(&ext, exts);
     let archive = &dir.join(format!("file.{}", format));
     let after = &dir.join("after");
     write_random_content(
@@ -215,7 +208,7 @@ fn multiple_files(
     let before = &dir.join("before");
     let before_dir = &before.join("dir");
     fs::create_dir_all(before_dir).unwrap();
-    let archive = &dir.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
+    let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
     let after = &dir.join("after");
     create_random_files(before_dir, depth, &mut SmallRng::from_entropy());
     ouch!("-A", "c", before_dir, archive);
@@ -242,7 +235,7 @@ fn multiple_files_with_conflict_and_choice_to_overwrite(
     fs::create_dir_all(after_dir).unwrap();
     create_random_files(after_dir, depth, &mut SmallRng::from_entropy());
 
-    let archive = &dir.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
+    let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
     ouch!("-A", "c", before_dir, archive);
 
     crate::utils::cargo_bin()
@@ -283,7 +276,7 @@ fn multiple_files_with_conflict_and_choice_to_not_overwrite(
     fs::write(after_dir.join("something.txt"), "Some content").unwrap();
     fs::copy(after_dir.join("something.txt"), after_backup_dir.join("something.txt")).unwrap();
 
-    let archive = &dir.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
+    let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
     ouch!("-A", "c", before_dir, archive);
 
     crate::utils::cargo_bin()
@@ -316,7 +309,7 @@ fn multiple_files_with_conflict_and_choice_to_rename(
     fs::create_dir_all(&dest_files_path).unwrap();
     create_n_random_files(5, &dest_files_path, &mut SmallRng::from_entropy());
 
-    let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+    let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
     ouch!("-A", "c", &src_files_path, archive);
 
     let dest_files_path_renamed = &root_path.join("dest_files_1");
@@ -356,7 +349,7 @@ fn multiple_files_with_conflict_and_choice_to_rename_with_already_a_renamed(
     fs::create_dir_all(&dest_files_path_1).unwrap();
     create_n_random_files(5, &dest_files_path_1, &mut SmallRng::from_entropy());
 
-    let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
+    let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
     ouch!("-A", "c", &src_files_path, archive);
 
     let dest_files_path_renamed = &root_path.join("dest_files_2");
@@ -374,201 +367,6 @@ fn multiple_files_with_conflict_and_choice_to_rename_with_already_a_renamed(
     assert_same_directory(src_files_path, dest_files_path_renamed.join("src_files"), false);
 }
 
-#[proptest(cases = 25)]
-fn smart_unpack_with_single_file(
-    ext: DirectoryExtension,
-    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
-) {
-    let temp_dir = tempdir().unwrap();
-    let root_path = temp_dir.path();
-
-    let src_files_path = root_path.join("src_files");
-    fs::create_dir_all(&src_files_path).unwrap();
-
-    let files_path = ["file1.txt"]
-        .into_iter()
-        .map(|f| src_files_path.join(f))
-        .inspect(|path| {
-            let mut file = fs::File::create(path).unwrap();
-            file.write_all("Some content".as_bytes()).unwrap();
-        })
-        .collect::<Vec<_>>();
-
-    let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
-
-    crate::utils::cargo_bin()
-        .arg("compress")
-        .args(files_path)
-        .arg(archive)
-        .assert()
-        .success();
-
-    let output_file = root_path.join("file1.txt");
-    assert!(!output_file.exists());
-
-    // Decompress the archive with Smart Unpack
-    crate::utils::cargo_bin()
-        .current_dir(root_path)
-        .arg("decompress")
-        .arg(archive)
-        .assert()
-        .success();
-
-    assert!(output_file.exists());
-
-    let output_content = fs::read_to_string(&output_file).unwrap();
-    assert_eq!(output_content, "Some content");
-}
-
-#[proptest(cases = 25)]
-fn smart_unpack_with_multiple_files(
-    ext: DirectoryExtension,
-    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
-) {
-    let temp_dir = tempdir().unwrap();
-    let root_path = temp_dir.path();
-
-    let src_files_path = root_path.join("src_files");
-    fs::create_dir_all(&src_files_path).unwrap();
-
-    ["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"]
-        .into_iter()
-        .map(|f| src_files_path.join(f))
-        .for_each(|path| {
-            let mut file = fs::File::create(&path).unwrap();
-            file.write_all("Some content".as_bytes()).unwrap();
-        });
-
-    let input_files = src_files_path
-        .read_dir()
-        .unwrap()
-        .map(|entry| entry.unwrap().path())
-        .collect::<Vec<PathBuf>>();
-
-    let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
-
-    let output_path = root_path.join("archive");
-    assert!(!output_path.exists());
-
-    crate::utils::cargo_bin()
-        .arg("compress")
-        .args(input_files)
-        .arg(archive)
-        .assert()
-        .success();
-
-    crate::utils::cargo_bin()
-        .current_dir(root_path)
-        .arg("decompress")
-        .arg(archive)
-        .assert()
-        .success();
-
-    assert!(output_path.exists(), "Output directory does not exist");
-
-    assert_same_directory(src_files_path, output_path, false);
-}
-
-#[proptest(cases = 25)]
-fn no_smart_unpack_with_single_file(
-    ext: DirectoryExtension,
-    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
-) {
-    let temp_dir = tempdir().unwrap();
-    let root_path = temp_dir.path();
-
-    let src_files_path = root_path.join("src_files");
-    fs::create_dir_all(&src_files_path).unwrap();
-
-    ["file1.txt"]
-        .into_iter()
-        .map(|f| src_files_path.join(f))
-        .for_each(|path| {
-            let mut file = fs::File::create(&path).unwrap();
-            file.write_all("Some content".as_bytes()).unwrap();
-        });
-
-    let input_files = src_files_path
-        .read_dir()
-        .unwrap()
-        .map(|entry| entry.unwrap().path())
-        .collect::<Vec<PathBuf>>();
-
-    let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
-
-    let output_path = root_path.join("archive");
-    assert!(!output_path.exists());
-
-    crate::utils::cargo_bin()
-        .arg("compress")
-        .args(input_files)
-        .arg(archive)
-        .assert()
-        .success();
-
-    crate::utils::cargo_bin()
-        .current_dir(root_path)
-        .arg("decompress")
-        .arg("--no-smart-unpack")
-        .arg(archive)
-        .assert()
-        .success();
-
-    assert!(output_path.exists(), "Output directory does not exist");
-
-    assert_same_directory(src_files_path, output_path, false);
-}
-
-#[proptest(cases = 25)]
-fn no_smart_unpack_with_multiple_files(
-    ext: DirectoryExtension,
-    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
-) {
-    let temp_dir = tempdir().unwrap();
-    let root_path = temp_dir.path();
-
-    let src_files_path = root_path.join("src_files");
-    fs::create_dir_all(&src_files_path).unwrap();
-
-    ["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"]
-        .into_iter()
-        .map(|f| src_files_path.join(f))
-        .for_each(|path| {
-            let mut file = fs::File::create(&path).unwrap();
-            file.write_all("Some content".as_bytes()).unwrap();
-        });
-
-    let input_files = src_files_path
-        .read_dir()
-        .unwrap()
-        .map(|entry| entry.unwrap().path())
-        .collect::<Vec<PathBuf>>();
-
-    let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
-
-    let output_path = root_path.join("archive");
-    assert!(!output_path.exists());
-
-    crate::utils::cargo_bin()
-        .arg("compress")
-        .args(input_files)
-        .arg(archive)
-        .assert()
-        .success();
-
-    crate::utils::cargo_bin()
-        .current_dir(root_path)
-        .arg("decompress")
-        .arg("--no-smart-unpack")
-        .arg(archive)
-        .assert()
-        .success();
-
-    assert!(output_path.exists(), "Output directory does not exist");
-
-    assert_same_directory(src_files_path, output_path, false);
-}
-
 #[proptest(cases = 25)]
 fn multiple_files_with_disabled_smart_unpack_by_dir(
     ext: DirectoryExtension,
@@ -592,7 +390,7 @@ fn multiple_files_with_disabled_smart_unpack_by_dir(
     let dest_files_path = root_path.join("dest_files");
     fs::create_dir_all(&dest_files_path).unwrap();
 
-    let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
+    let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
 
     crate::utils::cargo_bin()
         .arg("compress")
@@ -692,9 +490,10 @@ fn symlink_pack_and_unpack(
     let mut files_path = ["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"]
         .into_iter()
         .map(|f| src_files_path.join(f))
-        .inspect(|path| {
+        .map(|path| {
-            let mut file = fs::File::create(path).unwrap();
+            let mut file = fs::File::create(&path).unwrap();
             file.write_all("Some content".as_bytes()).unwrap();
+            path
         })
         .collect::<Vec<_>>();
 
@@ -709,7 +508,7 @@ fn symlink_pack_and_unpack(
 
     files_path.push(symlink_path);
 
-    let archive = &root_path.join(format!("archive.{}", merge_extensions(ext, &extra_extensions)));
+    let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, extra_extensions)));
 
     crate::utils::cargo_bin()
         .arg("compress")
@@ -805,287 +604,3 @@ fn no_git_folder_after_decompression_with_gitignore_flag_active() {
         ".git folder should not exist after decompression"
     );
 }
-
-#[cfg(feature = "allow_piped_choice")]
-#[proptest(cases = 25)]
-fn unpack_multiple_sources_into_the_same_destination_with_merge(
-    ext: DirectoryExtension,
-    #[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
-) {
-    let temp_dir = tempdir()?;
-    let root_path = temp_dir.path();
-    let source_path = root_path
-        .join(format!("example_{}", merge_extensions(&ext, &extra_extensions)))
-        .join("sub_a")
-        .join("sub_b")
-        .join("sub_c");
-
-    fs::create_dir_all(&source_path)?;
-    let archive = root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
-    crate::utils::cargo_bin()
-        .arg("compress")
-        .args([
-            fs::File::create(source_path.join("file1.txt"))?.path(),
-            fs::File::create(source_path.join("file2.txt"))?.path(),
-            fs::File::create(source_path.join("file3.txt"))?.path(),
-        ])
-        .arg(&archive)
-        .assert()
-        .success();
-
-    fs::remove_dir_all(&source_path)?;
-    fs::create_dir_all(&source_path)?;
-    let archive1 = root_path.join(format!("archive1.{}", merge_extensions(&ext, &extra_extensions)));
-    crate::utils::cargo_bin()
-        .arg("compress")
-        .args([
-            fs::File::create(source_path.join("file3.txt"))?.path(),
-            fs::File::create(source_path.join("file4.txt"))?.path(),
-            fs::File::create(source_path.join("file5.txt"))?.path(),
-        ])
-        .arg(&archive1)
-        .assert()
-        .success();
-
-    let out_path = root_path.join(format!("out_{}", merge_extensions(&ext, &extra_extensions)));
-    fs::create_dir_all(&out_path)?;
-
-    crate::utils::cargo_bin()
-        .arg("decompress")
-        .arg(archive)
-        .arg("-d")
-        .arg(&out_path)
-        .assert()
-        .success();
-
-    crate::utils::cargo_bin()
-        .arg("decompress")
-        .arg(archive1)
-        .arg("-d")
-        .arg(&out_path)
-        .write_stdin("m")
-        .assert()
-        .success();
-
-    assert_eq!(5, out_path.as_path().read_dir()?.count());
-}
-
-#[test]
-fn reading_nested_archives_with_two_archive_extensions_adjacent() {
-    let archive_formats = ["tar", "zip", "7z"].into_iter();
-
-    for (first_archive, second_archive) in archive_formats.clone().cartesian_product(archive_formats.rev()) {
-        let temp_dir = tempdir().unwrap();
-        let in_dir = |path: &str| format!("{}/{}", temp_dir.path().display(), path);
-
-        fs::write(in_dir("a.txt"), "contents").unwrap();
-
-        let files = [
-            "a.txt",
-            &format!("b.{first_archive}"),
-            &format!("c.{first_archive}.{second_archive}"),
-        ];
-        let transformations = [first_archive, second_archive];
-        let compressed_path = in_dir(files.last().unwrap());
-
-        for (window, format) in files.windows(2).zip(transformations.iter()) {
-            let [a, b] = [window[0], window[1]].map(in_dir);
-            crate::utils::cargo_bin()
-                .args(["compress", &a, &b, "--format", format])
-                .assert()
-                .success();
-        }
-
-        let output = crate::utils::cargo_bin()
-            .args(["list", &compressed_path, "--yes"])
-            .assert()
-            .failure()
-            .get_output()
-            .clone();
-        let stderr = output.stderr.to_str().unwrap();
-        assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
-
-        let output = crate::utils::cargo_bin()
-            .args(["decompress", &compressed_path, "--dir", &in_dir("out"), "--yes"])
-            .assert()
-            .failure()
-            .get_output()
-            .clone();
-        let stderr = output.stderr.to_str().unwrap();
-        assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
-    }
-}
-
-#[test]
-fn reading_nested_archives_with_two_archive_extensions_interleaved() {
-    let archive_formats = ["tar", "zip", "7z"].into_iter();
-
-    for (first_archive, second_archive) in archive_formats.clone().cartesian_product(archive_formats.rev()) {
-        let temp_dir = tempdir().unwrap();
-        let in_dir = |path: &str| format!("{}/{}", temp_dir.path().display(), path);
-
-        fs::write(in_dir("a.txt"), "contents").unwrap();
-
-        let files = [
-            "a.txt",
-            &format!("c.{first_archive}"),
-            &format!("d.{first_archive}.zst"),
-            &format!("e.{first_archive}.zst.{second_archive}"),
-            &format!("f.{first_archive}.zst.{second_archive}.lz4"),
-        ];
-        let transformations = [first_archive, "zst", second_archive, "lz4"];
-        let compressed_path = in_dir(files.last().unwrap());
-
-        for (window, format) in files.windows(2).zip(transformations.iter()) {
-            let [a, b] = [window[0], window[1]].map(in_dir);
-            crate::utils::cargo_bin()
-                .args(["compress", &a, &b, "--format", format])
-                .assert()
-                .success();
-        }
-
-        let output = crate::utils::cargo_bin()
-            .args(["list", &compressed_path, "--yes"])
-            .assert()
-            .failure()
-            .get_output()
-            .clone();
-        let stderr = output.stderr.to_str().unwrap();
-        assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
-
-        let output = crate::utils::cargo_bin()
-            .args(["decompress", &compressed_path, "--dir", &in_dir("out"), "--yes"])
-            .assert()
-            .failure()
-            .get_output()
-            .clone();
-        let stderr = output.stderr.to_str().unwrap();
-        assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
-    }
-}
-
-#[test]
-fn compressing_archive_with_two_archive_formats() {
-    let archive_formats = ["tar", "zip", "7z"].into_iter();
-
-    for (first_archive, second_archive) in archive_formats.clone().cartesian_product(archive_formats.rev()) {
-        let temp_dir = tempdir().unwrap();
-        let dir = temp_dir.path().display().to_string();
-
-        let output = crate::utils::cargo_bin()
-            .args([
-                "compress",
-                "README.md",
-                &format!("{dir}/out.{first_archive}.{second_archive}"),
-                "--yes",
-            ])
-            .assert()
-            .failure()
-            .get_output()
-            .clone();
-
-        let stderr = output.stderr.to_str().unwrap();
-        assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
-
-        let output = crate::utils::cargo_bin()
-            .args([
-                "compress",
-                "README.md",
-                &format!("{dir}/out.{first_archive}.{second_archive}"),
-                "--yes",
-                "--format",
-                &format!("{first_archive}.{second_archive}"),
-            ])
-            .assert()
-            .failure()
-            .get_output()
-            .clone();
-
-        let stderr = output.stderr.to_str().unwrap();
-        assert!(memmem::find(
-            stderr.as_bytes(),
-            b"can only be used at the start of the file extension",
-        )
-        .is_some());
-
-        crate::utils::cargo_bin()
-            .args([
-                "compress",
-                "README.md",
-                &format!("{dir}/out.{first_archive}.{second_archive}"),
-                "--yes",
-                "--format",
-                first_archive,
-            ])
-            .assert()
-            .success();
-    }
-}
-
-#[test]
-fn fail_when_compressing_archive_as_the_second_extension() {
-    for archive_format in ["tar", "zip", "7z"] {
-        let temp_dir = tempdir().unwrap();
-        let dir = temp_dir.path().display().to_string();
-
-        let output = crate::utils::cargo_bin()
-            .args([
-                "compress",
-                "README.md",
-                &format!("{dir}/out.zst.{archive_format}"),
-                "--yes",
-            ])
-            .assert()
-            .failure()
-            .get_output()
-            .clone();
-
-        let stderr = output.stderr.to_str().unwrap();
-        assert!(memmem::find(stderr.as_bytes(), b"use `--format` to specify what format to use").is_some());
-
-        let output = crate::utils::cargo_bin()
-            .args([
-                "compress",
-                "README.md",
-                &format!("{dir}/out_file"),
-                "--yes",
-                "--format",
-                &format!("zst.{archive_format}"),
-            ])
-            .assert()
-            .failure()
-            .get_output()
-            .clone();
-
-        let stderr = output.stderr.to_str().unwrap();
-        assert!(memmem::find(
-            stderr.as_bytes(),
-            format!("'{archive_format}' can only be used at the start of the file extension").as_bytes(),
-        )
-        .is_some());
-    }
-}
-
-#[test]
-fn sevenz_list_should_not_failed() {
-    let temp_dir = tempdir().unwrap();
-    let root_path = temp_dir.path();
-    let src_files_path = root_path.join("src_files");
-    fs::create_dir_all(&src_files_path).unwrap();
-
-    let archive = root_path.join("archive.7z.gz");
-    crate::utils::cargo_bin()
-        .arg("compress")
-        .arg("--yes")
-        .arg(fs::File::create(src_files_path.join("README.md")).unwrap().path())
-        .arg(&archive)
-        .assert()
-        .success();
-
-    crate::utils::cargo_bin()
-        .arg("list")
-        .arg("--yes")
-        .arg(&archive)
-        .assert()
-        .success();
-}
@@ -2,6 +2,7 @@
 ///
 /// See CONTRIBUTING.md for a brief guide on how to use [`insta`] for these tests.
 /// [`insta`]: https://docs.rs/insta
 
 #[macro_use]
 mod utils;
 