merge upstream

This commit is contained in:
valoq 2025-05-04 11:56:21 +02:00
commit bc65e9282f
No known key found for this signature in database
GPG Key ID: 19F09A0FB865CBD8
65 changed files with 2512 additions and 914 deletions

View File

@ -1,12 +0,0 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "daily"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"

View File

@ -1,4 +1,6 @@
<!--
Make sure to check out CONTRIBUTING.md.
Don't forget to add a CHANGELOG.md entry!
If your code changes text output, you might need to update snapshots
of UI tests; read more about `insta` in CONTRIBUTING.md.
Remember to edit `CHANGELOG.md` after opening the PR.
-->

17
.github/workflows/all-tests-slow.yml vendored Normal file
View File

@ -0,0 +1,17 @@
name: Run tests for all combinations
on:
schedule:
- cron: "0 0 1,15 * *" # twice a month, on the 1st and 15th
push:
branches:
- main
paths-ignore:
- "**/*.md"
jobs:
run-tests-for-all-combinations:
uses: ./.github/workflows/build-artifacts-and-run-tests.yml
with:
matrix_all_combinations: true
artifact_upload_mode: none

View File

@ -1,161 +0,0 @@
name: build-and-test
on:
push:
branches:
- main
tags:
- "[0-9]+.[0-9]+.[0-9]+"
pull_request:
jobs:
build:
name: build
runs-on: ${{ matrix.os }}
env:
CARGO: cargo
strategy:
fail-fast: false
matrix:
target:
# native
- x86_64-unknown-linux-gnu
- x86_64-pc-windows-gnu
- x86_64-pc-windows-msvc
- aarch64-pc-windows-msvc
- x86_64-apple-darwin
# cross
- x86_64-unknown-linux-musl
- aarch64-unknown-linux-gnu
- aarch64-unknown-linux-musl
- armv7-unknown-linux-gnueabihf
- armv7-unknown-linux-musleabihf
feature-use-zlib: [true, false]
feature-use-zstd-thin: [true, false]
feature-unrar: [true, false]
include:
# default runner
- os: ubuntu-latest
# runner overrides
- target: x86_64-pc-windows-gnu
os: windows-latest
- target: x86_64-pc-windows-msvc
os: windows-latest
- target: aarch64-pc-windows-msvc
os: windows-latest
- target: x86_64-apple-darwin
os: macos-latest
# targets that use cross
- target: x86_64-unknown-linux-musl
use-cross: true
- target: aarch64-unknown-linux-gnu
use-cross: true
- target: aarch64-unknown-linux-musl
use-cross: true
- target: armv7-unknown-linux-gnueabihf
use-cross: true
- target: armv7-unknown-linux-musleabihf
use-cross: true
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install cross
if: matrix.use-cross
run: |
pushd "$(mktemp -d)"
wget https://github.com/cross-rs/cross/releases/download/v0.2.4/cross-x86_64-unknown-linux-musl.tar.gz
tar xf cross-x86_64-unknown-linux-musl.tar.gz
cp cross ~/.cargo/bin
popd
echo CARGO=cross >> $GITHUB_ENV
- name: Concatenate features
id: concat-features
shell: bash
run: |
FEATURES=()
if [[ ${{ matrix.feature-use-zlib }} == true ]]; then FEATURES+=(use_zlib); fi
if [[ ${{ matrix.feature-use-zstd-thin }} == true ]]; then FEATURES+=(use_zstd_thin); fi
if [[ ${{ matrix.feature-unrar }} == true ]]; then FEATURES+=(unrar); fi
IFS=','
echo "FEATURES=${FEATURES[*]}" >> $GITHUB_OUTPUT
- name: Set up extra cargo flags
env:
FEATURES: ${{steps.concat-features.outputs.FEATURES}}
shell: bash
run: |
FLAGS="--no-default-features"
if [[ -n "$FEATURES" ]]; then FLAGS+=" --features $FEATURES"; fi
echo "EXTRA_CARGO_FLAGS=$FLAGS" >> $GITHUB_ENV
- name: Install Rust
run: |
rustup toolchain install stable nightly --profile minimal -t ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
with:
key: "${{ matrix.target }}-${{ matrix.feature-unrar }}-${{ matrix.feature-use-zstd-thin }}-${{ matrix.feature-unrar }}"
- name: Test on stable
# there's no way to run tests for ARM64 Windows for now
if: matrix.target != 'aarch64-pc-windows-msvc'
run: |
${{ env.CARGO }} +stable test --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
- name: Release on nightly
run: |
${{ env.CARGO }} +nightly build --release --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
env:
OUCH_ARTIFACTS_FOLDER: artifacts
RUSTFLAGS: -C strip=symbols
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: ouch-${{ matrix.target }}-${{ steps.concat-features.outputs.FEATURES }}
path: |
target/${{ matrix.target }}/release/ouch
target/${{ matrix.target }}/release/ouch.exe
artifacts/
clippy-rustfmt:
name: clippy-rustfmt
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: "Cargo: clippy, fmt"
run: |
rustup toolchain install stable --profile minimal -c clippy
rustup toolchain install nightly --profile minimal -c rustfmt
cargo +stable clippy -- -D warnings
cargo +nightly fmt -- --check
github-release:
name: github-release
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
needs: build
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Download artifacts
uses: dawidd6/action-download-artifact@v3
with:
path: artifacts
- name: Package release assets
run: scripts/package-release-assets.sh
- name: Create release
uses: softprops/action-gh-release@v2
with:
draft: true
files: release/ouch-*

View File

@ -0,0 +1,160 @@
# This is a reusable workflow
name: Build artifacts and run tests
on:
workflow_dispatch:
inputs:
matrix_all_combinations:
description: "if matrix should have all combinations of targets and features"
type: boolean
required: true
default: true
artifact_upload_mode:
description: "Control what artifacts to upload: 'none' for no uploads, 'with_default_features' to upload artifacts with default features (for releases), or 'all' for all feature combinations."
type: choice
options:
- none
- with_default_features
- all
required: true
workflow_call:
inputs:
matrix_all_combinations:
description: "if matrix should have all combinations of targets and features"
type: boolean
required: true
artifact_upload_mode:
description: "Control which artifacts to upload: 'none' for no uploads, 'with_default_features' to upload only artifacts with default features (use_zlib+use_zstd_thin+unrar+bzip3), or 'all' to upload all feature combinations."
type: string
required: true
jobs:
build-artifacts-and-run-tests:
runs-on: ${{ matrix.os || 'ubuntu-latest' }}
env:
CARGO: cargo
strategy:
fail-fast: false
matrix:
feature-unrar: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
feature-bzip3: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
feature-use-zlib: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
feature-use-zstd-thin: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
target:
# native
- x86_64-unknown-linux-gnu
- x86_64-pc-windows-gnu
- x86_64-pc-windows-msvc
- aarch64-pc-windows-msvc
- x86_64-apple-darwin
# cross
- x86_64-unknown-linux-musl
- aarch64-unknown-linux-gnu
- aarch64-unknown-linux-musl
- armv7-unknown-linux-gnueabihf
- armv7-unknown-linux-musleabihf
include:
# runner overrides
- target: x86_64-pc-windows-gnu
os: windows-latest
- target: x86_64-pc-windows-msvc
os: windows-latest
- target: aarch64-pc-windows-msvc
os: windows-latest
- target: x86_64-apple-darwin
os: macos-latest
# targets that use cross
- target: x86_64-unknown-linux-musl
use-cross: true
- target: aarch64-unknown-linux-gnu
use-cross: true
- target: aarch64-unknown-linux-musl
use-cross: true
- target: armv7-unknown-linux-gnueabihf
use-cross: true
- target: armv7-unknown-linux-musleabihf
use-cross: true
# features (unless `matrix_all_combinations` is true, we only run these on linux-gnu)
- feature-unrar: false
target: x86_64-unknown-linux-gnu
- feature-use-zlib: true
target: x86_64-unknown-linux-gnu
- feature-use-zstd-thin: true
target: x86_64-unknown-linux-gnu
- feature-bzip3: false
target: x86_64-unknown-linux-gnu
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install cross
if: matrix.use-cross
run: |
pushd "$(mktemp -d)"
wget https://github.com/cross-rs/cross/releases/download/v0.2.4/cross-x86_64-unknown-linux-musl.tar.gz
tar xf cross-x86_64-unknown-linux-musl.tar.gz
cp cross ~/.cargo/bin
popd
echo CARGO=cross >> $GITHUB_ENV
- name: Concatenate features
id: concat-features
shell: bash
run: |
FEATURES=(allow_piped_choice)
if [[ "${{ matrix.feature-unrar }}" == true ]]; then FEATURES+=(unrar); fi
if [[ "${{ matrix.feature-use-zlib }}" == true ]]; then FEATURES+=(use_zlib); fi
if [[ "${{ matrix.feature-use-zstd-thin }}" == true ]]; then FEATURES+=(use_zstd_thin); fi
if [[ "${{ matrix.feature-bzip3 }}" == true ]]; then FEATURES+=(bzip3); fi
# Output plus-separated list for artifact names
IFS='+'
echo "FEATURES_PLUS=${FEATURES[*]}" >> $GITHUB_OUTPUT
# Output comma-separated list for cargo flags
IFS=','
echo "FEATURES_COMMA=${FEATURES[*]}" >> $GITHUB_OUTPUT
- name: Set up extra cargo flags
env:
FEATURES: ${{steps.concat-features.outputs.FEATURES_COMMA}}
shell: bash
run: |
FLAGS="--no-default-features"
if [[ -n "$FEATURES" ]]; then FLAGS+=" --features $FEATURES"; fi
echo "EXTRA_CARGO_FLAGS=$FLAGS" >> $GITHUB_ENV
- name: Install Rust
run: |
rustup toolchain install stable --profile minimal -t ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
with:
key: "${{ matrix.target }}-${{ matrix.feature-unrar }}-${{ matrix.feature-use-zlib }}-${{ matrix.feature-use-zstd-thin }}-${{ matrix.feature-bzip3 }}"
- name: Test on stable
# there's no way to run tests for ARM64 Windows for now
if: matrix.target != 'aarch64-pc-windows-msvc'
run: |
${{ env.CARGO }} +stable test --profile fast --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
- name: Build release artifacts (binary and completions)
if: ${{ inputs.artifact_upload_mode != 'none' }}
run: |
${{ env.CARGO }} +stable build --release --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
env:
OUCH_ARTIFACTS_FOLDER: man-page-and-completions-artifacts
- name: Upload release artifacts
if: |
${{ inputs.artifact_upload_mode != 'none' &&
(inputs.artifact_upload_mode == 'all' ||
(matrix.feature-unrar && matrix.feature-use-zlib && matrix.feature-use-zstd-thin && matrix.feature-bzip3)) }}
uses: actions/upload-artifact@v4
with:
name: ouch-${{ matrix.target }}${{ steps.concat-features.outputs.FEATURES_PLUS != '' && format('-{0}', steps.concat-features.outputs.FEATURES_PLUS) || '' }}
path: |
target/${{ matrix.target }}/release/ouch
target/${{ matrix.target }}/release/ouch.exe
man-page-and-completions-artifacts/

View File

@ -1,33 +0,0 @@
name: create-draft-release-with-artifacts
on:
workflow_dispatch:
inputs:
run_id:
description: Run id of the action run to pull artifacts from
required: true
jobs:
github-release:
name: github-release
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Download artifacts
uses: dawidd6/action-download-artifact@v3
with:
path: artifacts
workflow: build-and-test.yml
run_id: ${{ github.event.inputs.run_id }}
- name: Package release assets
run: scripts/package-release-assets.sh
- name: Create release
uses: softprops/action-gh-release@v2
with:
draft: true
name: manual release ${{ github.event.inputs.run_id }}
files: release/ouch-*

View File

@ -0,0 +1,36 @@
name: Automatic trigger draft release
on:
push:
tags:
- "[0-9]+.[0-9]+.[0-9]+-rc[0-9]+"
jobs:
call-workflow-build-artifacts-and-run-tests:
uses: ./.github/workflows/build-artifacts-and-run-tests.yml
with:
matrix_all_combinations: true
artifact_upload_mode: with_default_features
automated-draft-release:
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
needs: call-workflow-build-artifacts-and-run-tests
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: downloaded_artifacts
pattern: ouch-*
- name: Package release assets
run: scripts/package-release-assets.sh
- name: Create release
uses: softprops/action-gh-release@v2
with:
draft: true
files: output_assets/ouch-*

35
.github/workflows/pr-workflow.yml vendored Normal file
View File

@ -0,0 +1,35 @@
name: PR workflow
on:
pull_request:
paths-ignore:
- "**/*.md"
jobs:
rustfmt-nightly-check:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: "Cargo: fmt"
run: |
rustup toolchain install nightly --profile minimal -c rustfmt
cargo +nightly fmt -- --check
clippy-checks:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: "Cargo: clippy"
run: |
rustup toolchain install stable --profile minimal -c clippy
cargo +stable clippy -- -D warnings
build-and-test:
uses: ./.github/workflows/build-artifacts-and-run-tests.yml
with:
matrix_all_combinations: false
artifact_upload_mode: none

View File

@ -18,12 +18,34 @@ Categories Used:
**Bullet points in chronological order by PR**
## [Unreleased](https://github.com/ouch-org/ouch/compare/0.5.1...HEAD)
## [Unreleased](https://github.com/ouch-org/ouch/compare/0.6.1...HEAD)
### New Features
- Merge folders in decompression [\#798](https://github.com/ouch-org/ouch/pull/798) ([tommady](https://github.com/tommady))
- Add `--no-smart-unpack` flag to decompression command to disable smart unpack [\#809](https://github.com/ouch-org/ouch/pull/809) ([talis-fb](https://github.com/talis-fb))
- Add landlock support for linux filesystem isolation [\#723](https://github.com/ouch-org/ouch/pull/723) ([valoq](https://github.com/valoq))
### Improvements
### Bug Fixes
### Tweaks
- Make `.bz3` opt-out [\#814](https://github.com/ouch-org/ouch/pull/814) ([amyspark](https://github.com/amyspark))
## [0.6.1](https://github.com/ouch-org/ouch/compare/0.6.0...0.6.1)
- Fix .zip crash when file mode isn't present [\#804](https://github.com/ouch-org/ouch/pull/804) ([marcospb19](https://github.com/marcospb19))
## [0.6.0](https://github.com/ouch-org/ouch/compare/0.5.1...0.6.0)
### New Features
- Add multithreading support for `zstd` compression [\#689](https://github.com/ouch-org/ouch/pull/689) ([nalabrie](https://github.com/nalabrie))
- Add landlock support for linux filesystem isolation [\#723](https://github.com/ouch-org/ouch/pull/723) ([valoq](https://github.com/valoq))
- Add `bzip3` support [\#522](https://github.com/ouch-org/ouch/pull/522) ([freijon](https://github.com/freijon))
- Add `--remove` flag for decompression subcommand to remove files after successful decompression [\#757](https://github.com/ouch-org/ouch/pull/757) ([ttys3](https://github.com/ttys3))
- Add `br` (Brotli) support [\#765](https://github.com/ouch-org/ouch/pull/765) ([killercup](https://github.com/killercup))
- Add rename option in overwrite menu [\#779](https://github.com/ouch-org/ouch/pull/779) ([talis-fb](https://github.com/talis-fb))
- Store symlinks by default and add `--follow-symlinks` to store the target files [\#789](https://github.com/ouch-org/ouch/pull/789) ([tommady](https://github.com/tommady))
### Bug Fixes
@ -32,12 +54,19 @@ Categories Used:
### Tweaks
- CI refactor [\#578](https://github.com/ouch-org/ouch/pull/578) ([cyqsimon](https://github.com/cyqsimon))
- Use a prefix `tmp-ouch-` for temporary decompression path name to avoid conflicts [\#725](https://github.com/ouch-org/ouch/pull/725) ([valoq](https://github.com/valoq)) & [\#788](https://github.com/ouch-org/ouch/pull/788) ([talis-fb](https://github.com/talis-fb))
- Ignore `.git/` when `-g/--gitignore` is set [\#507](https://github.com/ouch-org/ouch/pull/507) ([talis-fb](https://github.com/talis-fb))
- Run clippy for tests too [\#738](https://github.com/ouch-org/ouch/pull/738) ([marcospb19](https://github.com/marcospb19))
- Sevenz-rust is unmaintained, switch to sevenz-rust2 [\#796](https://github.com/ouch-org/ouch/pull/796) ([tommady](https://github.com/tommady))
### Improvements
- Fix logging IO bottleneck [\#642](https://github.com/ouch-org/ouch/pull/642) ([AntoniosBarotsis](https://github.com/AntoniosBarotsis))
- Support decompression over stdin [\#692](https://github.com/ouch-org/ouch/pull/692) ([rcorre](https://github.com/rcorre))
- Make `--format` more forgiving with the formatting of the provided format [\#519](https://github.com/ouch-org/ouch/pull/519) ([marcospb19](https://github.com/marcospb19))
- Use buffered writer for list output [\#764](https://github.com/ouch-org/ouch/pull/764) ([killercup](https://github.com/killercup))
- Disable smart unpack when `--dir` flag is provided in decompress command [\#782](https://github.com/ouch-org/ouch/pull/782) ([talis-fb](https://github.com/talis-fb))
- Align file sizes at left for each extracted file to make output clearer [\#792](https://github.com/ouch-org/ouch/pull/792) ([talis-fb](https://github.com/talis-fb))
## [0.5.1](https://github.com/ouch-org/ouch/compare/0.5.0...0.5.1)
@ -54,7 +83,7 @@ Categories Used:
### New Features
- Add support for listing and decompressing `.rar` archives [\#529](https://github.com/ouch-org/ouch/pull/529) ([lmkra](https://github.com/lmkra))
- Add support for 7z [\#555](https://github.com/ouch-org/ouch/pull/555) ([Flat](https://github.com/flat) & [MissileLab](https://github.com/MisileLab))
- Add support for 7z [\#555](https://github.com/ouch-org/ouch/pull/555) ([Flat](https://github.com/flat) & [MisileLab](https://github.com/MisileLab))
### Bug Fixes

837
Cargo.lock generated

File diff suppressed because it is too large

View File

@ -1,7 +1,10 @@
[package]
name = "ouch"
version = "0.5.1"
authors = ["Vinícius Rodrigues Miguel <vrmiguel99@gmail.com>", "João M. Bezerra <marcospb19@hotmail.com>"]
version = "0.6.1"
authors = [
"João Marcos <marcospb19@hotmail.com>",
"Vinícius Rodrigues Miguel <vrmiguel99@gmail.com>",
]
edition = "2021"
readme = "README.md"
repository = "https://github.com/ouch-org/ouch"
@ -12,58 +15,79 @@ description = "A command-line utility for easily compressing and decompressing f
[dependencies]
atty = "0.2.14"
brotli = "7.0.0"
bstr = { version = "1.10.0", default-features = false, features = ["std"] }
bytesize = "1.3.0"
bzip2 = "0.4.4"
clap = { version = "4.5.16", features = ["derive", "env"] }
bzip3 = { version = "0.9.0", features = ["bundled"] , optional = true }
clap = { version = "4.5.20", features = ["derive", "env"] }
filetime_creation = "0.2"
flate2 = { version = "1.0.30", default-features = false }
fs-err = "2.11.0"
gzp = { version = "0.11.3", default-features = false, features = ["snappy_default"] }
ignore = "0.4.22"
landlock = "0.4.1"
gzp = { version = "0.11.3", default-features = false, features = [
"snappy_default",
] }
ignore = "0.4.23"
landlock = "0.4.2"
libc = "0.2.155"
linked-hash-map = "0.5.6"
lz4_flex = "0.11.3"
num_cpus = "1.16.0"
once_cell = "1.19.0"
once_cell = "1.20.2"
rayon = "1.10.0"
same-file = "1.0.6"
sevenz-rust = { version = "0.6.1", features = ["compress", "aes256"] }
sevenz-rust2 = { version = "0.13.1", features = ["compress", "aes256"] }
snap = "1.1.1"
tar = "0.4.41"
tar = "0.4.42"
tempfile = "3.10.1"
thiserror = "1.0.64"
time = { version = "0.3.36", default-features = false }
unrar = { version = "0.5.6", optional = true }
unrar = { version = "0.5.7", optional = true }
xz2 = "0.1.7"
zip = { version = "0.6.6", default-features = false, features = ["time", "aes-crypto"] }
zstd = { version = "0.13.2", default-features = false, features = ["zstdmt"]}
zip = { version = "0.6.6", default-features = false, features = [
"time",
"aes-crypto",
] }
zstd = { version = "0.13.2", default-features = false, features = ["zstdmt"] }
[target.'cfg(not(unix))'.dependencies]
is_executable = "1.0.1"
[build-dependencies]
clap = { version = "4.5.16", features = ["derive", "env", "string"] }
clap_complete = "4.5.13"
clap_mangen = "0.2.20"
clap = { version = "4.5.20", features = ["derive", "env", "string"] }
clap_complete = "4.5.28"
clap_mangen = "0.2.24"
[dev-dependencies]
assert_cmd = "2.0.14"
infer = "0.16.0"
insta = { version = "1.39.0", features = ["filters"] }
insta = { version = "1.40.0", features = ["filters"] }
parse-display = "0.9.1"
proptest = "1.5.0"
rand = { version = "0.8.5", default-features = false, features = ["small_rng", "std"] }
rand = { version = "0.8.5", default-features = false, features = [
"small_rng",
"std",
] }
regex = "1.10.4"
test-strategy = "0.4.0"
[features]
default = ["use_zlib", "use_zstd_thin", "unrar"]
default = ["unrar", "use_zlib", "use_zstd_thin", "bzip3"]
use_zlib = ["flate2/zlib", "gzp/deflate_zlib", "zip/deflate-zlib"]
use_zstd_thin = ["zstd/thin"]
allow_piped_choice = []
# For generating binaries for releases
[profile.release]
lto = true
codegen-units = 1
opt-level = 3
strip = true
# When we need a fast binary that compiles slightly faster than `release` (useful for CI)
[profile.fast]
inherits = "release"
lto = false
opt-level = 2
incremental = true
codegen-units = 16

15
LICENSE
View File

@ -20,7 +20,18 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
---
Copyright notices from other projects:
Copyright (c) 2019 Bojan
https://github.com/bojand/infer
Infer crate (MIT LICENSE):
> Copyright (c) 2019 Bojan
> Code at https://github.com/bojand/infer
Bzip3-rs crate (LGPL 3.0):
> Code for this crate is available at https://github.com/bczhc/bzip3-rs
> See its license at https://github.com/bczhc/bzip3-rs/blob/master/LICENSE
Bzip3 library (LGPL 3.0):
> Code for this library is available at https://github.com/kspalaiologos/bzip3
> See its license at https://github.com/kspalaiologos/bzip3/blob/master/LICENSE

View File

@ -111,9 +111,9 @@ Output:
# Supported formats
| Format | `.tar` | `.zip` | `7z` | `.gz` | `.xz`, `.lzma` | `.bz`, `.bz2` | `.lz4` | `.sz` (Snappy) | `.zst` | `.rar` |
|:---------:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| Supported | ✓ | ✓¹ | ✓¹ | ✓² | ✓ | ✓ | ✓ | ✓² | ✓² | ✓³ |
| Format | `.tar` | `.zip` | `7z` | `.gz` | `.xz`, `.lzma` | `.bz`, `.bz2` | `.bz3` | `.lz4` | `.sz` (Snappy) | `.zst` | `.rar` | `.br` |
|:---------:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| Supported | ✓ | ✓¹ | ✓¹ | ✓² | ✓ | ✓ | ✓ | ✓ | ✓² | ✓² | ✓³ | ✓ |
✓: Supports compression and decompression.
@ -176,6 +176,7 @@ Otherwise, you'll need these libraries installed on your system:
* [liblzma](https://www.7-zip.org/sdk.html)
* [libbz2](https://www.sourceware.org/bzip2)
* [libbz3](https://github.com/kspalaiologos/bzip3)
* [libz](https://www.zlib.net)
These should be available in your system's package manager.

View File

@ -20,8 +20,8 @@
| Command | Mean [ms] | Min [ms] | Max [ms] | Relative |
|:---|---:|---:|---:|---:|
| `zip output.zip -r compiler` | 581.3 ± 9.1 | 573.2 | 600.9 | 1.06 ± 0.02 |
| `ouch compress compiler output.zip` | 549.7 ± 4.3 | 543.6 | 558.6 | 1.00 |
| `zip output.zip -r compiler` | 581.3 ± 9.1 | 573.2 | 600.9 | 1.06 ± 0.02 |
| Command | Mean [ms] | Min [ms] | Max [ms] | Relative |
|:---|---:|---:|---:|---:|

View File

@ -5,18 +5,12 @@
/// Set `OUCH_ARTIFACTS_FOLDER` to the name of the destination folder:
///
/// ```sh
/// OUCH_ARTIFACTS_FOLDER=my-folder cargo build
/// OUCH_ARTIFACTS_FOLDER=man-page-and-completions-artifacts cargo build
/// ```
///
/// All completion files will be generated inside of the folder "my-folder".
/// All completion files will be generated inside of the folder "man-page-and-completions-artifacts".
///
/// If the folder does not exist, it will be created.
///
/// We recommend you naming this folder "artifacts" for the sake of consistency.
///
/// ```sh
/// OUCH_ARTIFACTS_FOLDER=artifacts cargo build
/// ```
use std::{
env,
fs::{create_dir_all, File},

View File

@ -1,28 +1,60 @@
#!/usr/bin/env bash
set -e
mkdir release
cd artifacts
mkdir output_assets
echo "created folder 'output_assets/'"
ls -lA -w 1
cd downloaded_artifacts
echo "entered 'downloaded_artifacts/'"
ls -lA -w 1
for dir in ouch-*; do
cp -r "$dir/artifacts" "$dir/completions"
mkdir "$dir/man"
mv "$dir"/completions/*.1 "$dir/man"
PLATFORMS=(
"aarch64-pc-windows-msvc"
"aarch64-unknown-linux-gnu"
"aarch64-unknown-linux-musl"
"armv7-unknown-linux-gnueabihf"
"armv7-unknown-linux-musleabihf"
"x86_64-apple-darwin"
"x86_64-pc-windows-gnu"
"x86_64-pc-windows-msvc"
"x86_64-unknown-linux-gnu"
"x86_64-unknown-linux-musl"
)
# TODO: remove allow_piped_choice later
DEFAULT_FEATURES="allow_piped_choice+unrar+use_zlib+use_zstd_thin+bzip3"
cp ../{README.md,LICENSE,CHANGELOG.md} "$dir"
rm -r "$dir/artifacts"
for platform in "${PLATFORMS[@]}"; do
path="ouch-${platform}"
echo "Processing $path"
if [[ "$dir" = *.exe ]]; then
target=${dir%.exe}
mv "$dir/target/${target/ouch-/}/release/ouch.exe" "$dir"
rm -r "$dir/target"
mv "$dir" "$target"
zip -r "../release/$target.zip" "$target"
if [ ! -d "${path}-${DEFAULT_FEATURES}" ]; then
echo "ERROR: Could not find artifact directory for $platform with default features ($path)"
exit 1
fi
mv "${path}-${DEFAULT_FEATURES}" "$path" # remove the annoying suffix
cp ../{README.md,LICENSE,CHANGELOG.md} "$path"
mkdir -p "$path/man"
mkdir -p "$path/completions"
mv "$path"/man-page-and-completions-artifacts/*.1 "$path/man"
mv "$path"/man-page-and-completions-artifacts/* "$path/completions"
rm -r "$path/man-page-and-completions-artifacts"
if [[ "$platform" == *"-windows-"* ]]; then
mv "$path/target/$platform/release/ouch.exe" "$path"
rm -rf "$path/target"
zip -r "../output_assets/${path}.zip" "$path"
echo "Created output_assets/${path}.zip"
else
mv "$dir/target/${dir/ouch-/}/release/ouch" "$dir"
rm -r "$dir/target"
chmod +x "$dir/ouch"
tar czf "../release/$dir.tar.gz" "$dir"
mv "$path/target/$platform/release/ouch" "$path"
rm -rf "$path/target"
chmod +x "$path/ouch"
tar czf "../output_assets/${path}.tar.gz" "$path"
echo "Created output_assets/${path}.tar.gz"
fi
done
echo "Done."

View File

@ -0,0 +1,7 @@
use crate::Error;
pub fn no_support() -> Error {
Error::UnsupportedFormat {
reason: "BZip3 support is disabled for this build, possibly due to missing bindgen-cli dependency.".into(),
}
}

View File

@ -1,5 +1,7 @@
//! Archive compression algorithms
#[cfg(not(feature = "bzip3"))]
pub mod bzip3_stub;
#[cfg(feature = "unrar")]
pub mod rar;
#[cfg(not(feature = "unrar"))]

View File

@ -4,7 +4,11 @@ use std::path::Path;
use unrar::Archive;
use crate::{error::Error, list::FileInArchive, utils::logger::info};
use crate::{
error::{Error, Result},
list::FileInArchive,
utils::{logger::info, Bytes},
};
/// Unpacks the archive given by `archive_path` into the folder given by `output_folder`.
/// Assumes that output_folder is empty
@ -14,8 +18,6 @@ pub fn unpack_archive(
password: Option<&[u8]>,
quiet: bool,
) -> crate::Result<usize> {
assert!(output_folder.read_dir().expect("dir exists").count() == 0);
let archive = match password {
Some(password) => Archive::with_password(archive_path, password),
None => Archive::new(archive_path),
@ -29,9 +31,9 @@ pub fn unpack_archive(
archive = if entry.is_file() {
if !quiet {
info(format!(
"{} extracted. ({})",
"extracted ({}) {}",
Bytes::new(entry.unpacked_size),
entry.filename.display(),
entry.unpacked_size
));
}
unpacked += 1;
@ -48,15 +50,13 @@ pub fn unpack_archive(
pub fn list_archive(
archive_path: &Path,
password: Option<&[u8]>,
) -> crate::Result<impl Iterator<Item = crate::Result<FileInArchive>>> {
) -> Result<impl Iterator<Item = Result<FileInArchive>>> {
let archive = match password {
Some(password) => Archive::with_password(archive_path, password),
None => Archive::new(archive_path),
};
let archive = archive.open_for_listing()?;
Ok(archive.map(|item| {
Ok(archive.open_for_listing()?.map(|item| {
let item = item?;
let is_dir = item.is_directory();
let path = item.filename;

View File

@ -9,13 +9,13 @@ use std::{
use bstr::ByteSlice;
use fs_err as fs;
use same_file::Handle;
use sevenz_rust::SevenZArchiveEntry;
use sevenz_rust2::SevenZArchiveEntry;
use crate::{
error::{Error, FinalError},
error::{Error, FinalError, Result},
list::FileInArchive,
utils::{
self, cd_into_same_dir_as,
cd_into_same_dir_as,
logger::{info, warning},
Bytes, EscapedPathDisplay, FileVisibilityPolicy,
},
@ -31,7 +31,7 @@ pub fn compress_sevenz<W>(
where
W: Write + Seek,
{
let mut writer = sevenz_rust::SevenZWriter::new(writer)?;
let mut writer = sevenz_rust2::SevenZWriter::new(writer)?;
let output_handle = Handle::from_path(output_path);
for filename in files {
@ -49,7 +49,7 @@ where
if let Ok(handle) = &output_handle {
if matches!(Handle::from_path(path), Ok(x) if &x == handle) {
warning(format!(
"The output file and the input file are the same: `{}`, skipping...",
"Cannot compress `{}` into itself, skipping",
output_path.display()
));
@ -62,15 +62,14 @@ where
// spoken text for users using screen readers, braille displays
// and so on
if !quiet {
info(format!("Compressing '{}'.", EscapedPathDisplay::new(path)));
info(format!("Compressing '{}'", EscapedPathDisplay::new(path)));
}
let metadata = match path.metadata() {
Ok(metadata) => metadata,
Err(e) => {
if e.kind() == std::io::ErrorKind::NotFound && utils::is_symlink(path) {
// This path is for a broken symlink
// We just ignore it
if e.kind() == std::io::ErrorKind::NotFound && path.is_symlink() {
// This path is for a broken symlink, ignore it
continue;
}
return Err(e.into());
@ -82,7 +81,7 @@ where
.detail(format!("File at '{path:?}' has a non-UTF-8 name"))
})?;
let entry = sevenz_rust::SevenZArchiveEntry::from_path(path, entry_name.to_owned());
let entry = sevenz_rust2::SevenZArchiveEntry::from_path(path, entry_name.to_owned());
let entry_data = if metadata.is_dir() {
None
} else {
@ -128,9 +127,9 @@ where
} else {
if !quiet {
info(format!(
"{:?} extracted. ({})",
"extracted ({}) {:?}",
Bytes::new(entry.size()),
file_path.display(),
Bytes::new(entry.size())
));
}
@ -157,15 +156,15 @@ where
};
match password {
Some(password) => sevenz_rust::decompress_with_extract_fn_and_password(
Some(password) => sevenz_rust2::decompress_with_extract_fn_and_password(
reader,
output_path,
sevenz_rust::Password::from(password.to_str().map_err(|err| Error::InvalidPassword {
sevenz_rust2::Password::from(password.to_str().map_err(|err| Error::InvalidPassword {
reason: err.to_string(),
})?),
entry_extract_fn,
)?,
None => sevenz_rust::decompress_with_extract_fn(reader, output_path, entry_extract_fn)?,
None => sevenz_rust2::decompress_with_extract_fn(reader, output_path, entry_extract_fn)?,
}
Ok(count)
@ -175,7 +174,7 @@ where
pub fn list_archive(
archive_path: &Path,
password: Option<&[u8]>,
) -> crate::Result<impl Iterator<Item = crate::Result<FileInArchive>>> {
) -> Result<impl Iterator<Item = crate::Result<FileInArchive>>> {
let reader = fs::File::open(archive_path)?;
let mut files = Vec::new();
@ -188,7 +187,7 @@ pub fn list_archive(
Ok(true)
};
let result = match password {
match password {
Some(password) => {
let password = match password.to_str() {
Ok(p) => p,
@ -198,18 +197,14 @@ pub fn list_archive(
})
}
};
sevenz_rust::decompress_with_extract_fn_and_password(
sevenz_rust2::decompress_with_extract_fn_and_password(
reader,
".",
sevenz_rust::Password::from(password),
sevenz_rust2::Password::from(password),
entry_extract_fn,
)
)?;
}
None => sevenz_rust::decompress_with_extract_fn(reader, ".", entry_extract_fn),
};
if let Err(e) = result {
return Err(e.into());
None => sevenz_rust2::decompress_with_extract_fn(reader, ".", entry_extract_fn)?,
}
Ok(files.into_iter())

View File

@ -24,14 +24,30 @@ use crate::{
/// Unpacks the archive given by `archive` into the folder given by `into`.
/// Assumes that output_folder is empty
pub fn unpack_archive(reader: Box<dyn Read>, output_folder: &Path, quiet: bool) -> crate::Result<usize> {
assert!(output_folder.read_dir().expect("dir exists").count() == 0);
let mut archive = tar::Archive::new(reader);
let mut files_unpacked = 0;
for file in archive.entries()? {
let mut file = file?;
file.unpack_in(output_folder)?;
match file.header().entry_type() {
tar::EntryType::Symlink => {
let relative_path = file.path()?.to_path_buf();
let full_path = output_folder.join(&relative_path);
let target = file
.link_name()?
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::InvalidData, "Missing symlink target"))?;
#[cfg(unix)]
std::os::unix::fs::symlink(&target, &full_path)?;
#[cfg(windows)]
std::os::windows::fs::symlink_file(&target, &full_path)?;
}
tar::EntryType::Regular | tar::EntryType::Directory => {
file.unpack_in(output_folder)?;
}
_ => continue,
}
// This is printed for every file in the archive and has little
// importance for most users, but would generate lots of
@ -39,9 +55,9 @@ pub fn unpack_archive(reader: Box<dyn Read>, output_folder: &Path, quiet: bool)
// and so on
if !quiet {
info(format!(
"{:?} extracted. ({})",
utils::strip_cur_dir(&output_folder.join(file.path()?)),
"extracted ({}) {:?}",
Bytes::new(file.size()),
utils::strip_cur_dir(&output_folder.join(file.path()?)),
));
files_unpacked += 1;
@ -54,7 +70,7 @@ pub fn unpack_archive(reader: Box<dyn Read>, output_folder: &Path, quiet: bool)
/// List contents of `archive`, returning a vector of archive entries
pub fn list_archive(
mut archive: tar::Archive<impl Read + Send + 'static>,
) -> crate::Result<impl Iterator<Item = crate::Result<FileInArchive>>> {
) -> impl Iterator<Item = crate::Result<FileInArchive>> {
struct Files(Receiver<crate::Result<FileInArchive>>);
impl Iterator for Files {
type Item = crate::Result<FileInArchive>;
@ -77,7 +93,7 @@ pub fn list_archive(
}
});
Ok(Files(rx))
Files(rx)
}
/// Compresses the archives given by `input_filenames` into the file given previously to `writer`.
@ -87,6 +103,7 @@ pub fn build_archive_from_paths<W>(
writer: W,
file_visibility_policy: FileVisibilityPolicy,
quiet: bool,
follow_symlinks: bool,
) -> crate::Result<W>
where
W: Write,
@ -109,7 +126,7 @@ where
if let Ok(handle) = &output_handle {
if matches!(Handle::from_path(path), Ok(x) if &x == handle) {
warning(format!(
"The output file and the input file are the same: `{}`, skipping...",
"Cannot compress `{}` into itself, skipping",
output_path.display()
));
@ -122,18 +139,29 @@ where
// spoken text for users using screen readers, braille displays
// and so on
if !quiet {
info(format!("Compressing '{}'.", EscapedPathDisplay::new(path)));
info(format!("Compressing '{}'", EscapedPathDisplay::new(path)));
}
if path.is_dir() {
builder.append_dir(path, path)?;
} else if path.is_symlink() && !follow_symlinks {
let target_path = path.read_link()?;
let mut header = tar::Header::new_gnu();
header.set_entry_type(tar::EntryType::Symlink);
header.set_size(0);
builder.append_link(&mut header, path, &target_path).map_err(|err| {
FinalError::with_title("Could not create archive")
.detail("Unexpected error while trying to read link")
.detail(format!("Error: {err}."))
})?;
} else {
let mut file = match fs::File::open(path) {
Ok(f) => f,
Err(e) => {
if e.kind() == std::io::ErrorKind::NotFound && utils::is_symlink(path) {
// This path is for a broken symlink
// We just ignore it
if e.kind() == std::io::ErrorKind::NotFound && path.is_symlink() {
// This path is for a broken symlink, ignore it
continue;
}
return Err(e.into());

View File

@ -20,7 +20,7 @@ use crate::{
error::FinalError,
list::FileInArchive,
utils::{
self, cd_into_same_dir_as, get_invalid_utf8_paths,
cd_into_same_dir_as, get_invalid_utf8_paths,
logger::{info, info_accessible, warning},
pretty_format_list_of_paths, strip_cur_dir, Bytes, EscapedPathDisplay, FileVisibilityPolicy,
},
@ -37,8 +37,6 @@ pub fn unpack_archive<R>(
where
R: Read + Seek,
{
assert!(output_folder.read_dir().expect("dir exists").count() == 0);
let mut unpacked_files = 0;
for idx in 0..archive.len() {
@ -79,14 +77,27 @@ where
// same reason is in _is_dir: long, often not needed text
if !quiet {
info(format!(
"{:?} extracted. ({})",
"extracted ({}) {:?}",
Bytes::new(file.size()),
file_path.display(),
Bytes::new(file.size())
));
}
let mut output_file = fs::File::create(file_path)?;
io::copy(&mut file, &mut output_file)?;
let mode = file.unix_mode();
let is_symlink = mode.is_some_and(|mode| mode & 0o170000 == 0o120000);
if is_symlink {
let mut target = String::new();
file.read_to_string(&mut target)?;
#[cfg(unix)]
std::os::unix::fs::symlink(&target, file_path)?;
#[cfg(windows)]
std::os::windows::fs::symlink_file(&target, file_path)?;
} else {
let mut output_file = fs::File::create(file_path)?;
io::copy(&mut file, &mut output_file)?;
}
set_last_modified_time(&file, file_path)?;
}
@ -105,7 +116,7 @@ where
pub fn list_archive<R>(
mut archive: ZipArchive<R>,
password: Option<&[u8]>,
) -> crate::Result<impl Iterator<Item = crate::Result<FileInArchive>>>
) -> impl Iterator<Item = crate::Result<FileInArchive>>
where
R: Read + Seek + Send + 'static,
{
@ -145,7 +156,7 @@ where
}
});
Ok(Files(rx))
Files(rx)
}
/// Compresses the archives given by `input_filenames` into the file given previously to `writer`.
@ -155,6 +166,7 @@ pub fn build_archive_from_paths<W>(
writer: W,
file_visibility_policy: FileVisibilityPolicy,
quiet: bool,
follow_symlinks: bool,
) -> crate::Result<W>
where
W: Write + Seek,
@ -197,7 +209,7 @@ where
if let Ok(handle) = &output_handle {
if matches!(Handle::from_path(path), Ok(x) if &x == handle) {
warning(format!(
"The output file and the input file are the same: `{}`, skipping...",
"Cannot compress `{}` into itself, skipping",
output_path.display()
));
}
@ -208,15 +220,14 @@ where
// spoken text for users using screen readers, braille displays
// and so on
if !quiet {
info(format!("Compressing '{}'.", EscapedPathDisplay::new(path)));
info(format!("Compressing '{}'", EscapedPathDisplay::new(path)));
}
let metadata = match path.metadata() {
Ok(metadata) => metadata,
Err(e) => {
if e.kind() == std::io::ErrorKind::NotFound && utils::is_symlink(path) {
// This path is for a broken symlink
// We just ignore it
if e.kind() == std::io::ErrorKind::NotFound && path.is_symlink() {
// This path is for a broken symlink, ignore it
continue;
}
return Err(e.into());
@ -224,7 +235,7 @@ where
};
#[cfg(unix)]
let options = options.unix_permissions(metadata.permissions().mode());
let mode = metadata.permissions().mode();
let entry_name = path.to_str().ok_or_else(|| {
FinalError::with_title("Zip requires that all directories names are valid UTF-8")
@ -233,6 +244,21 @@ where
if metadata.is_dir() {
writer.add_directory(entry_name, options)?;
} else if path.is_symlink() && !follow_symlinks {
let target_path = path.read_link()?;
let target_name = target_path.to_str().ok_or_else(|| {
FinalError::with_title("Zip requires that all directories names are valid UTF-8")
.detail(format!("File at '{target_path:?}' has a non-UTF-8 name"))
})?;
// This approach writes the symlink target path as the content of the symlink entry.
// We detect symlinks during extraction by checking for the Unix symlink mode (0o120000) in the entry's permissions.
#[cfg(unix)]
let symlink_options = options.unix_permissions(0o120000 | (mode & 0o777));
#[cfg(windows)]
let symlink_options = options.unix_permissions(0o120777);
writer.add_symlink(entry_name, target_name, symlink_options)?;
} else {
#[cfg(not(unix))]
let options = if is_executable::is_executable(path) {
@ -243,6 +269,8 @@ where
let mut file = fs::File::open(path)?;
#[cfg(unix)]
let options = options.unix_permissions(mode);
// Update the last modified time
let last_modified_time = options.last_modified_time(get_last_modified_time(&file));

View File

@ -35,10 +35,9 @@ pub fn check_mime_type(
if let Some(detected_format) = try_infer_extension(path) {
// Inferring the file extension can have unpredicted consequences (e.g. the user just
// mistyped, ...) which we should always inform the user about.
info_accessible(format!(
"Detected file: `{}` extension as `{}`",
warning(format!(
"We detected a file named `{}`, do you want to decompress it?",
path.display(),
detected_format
));
if user_wants_to_continue(path, question_policy, QuestionAction::Decompression)? {

View File

@ -5,7 +5,7 @@ use clap::{Parser, ValueHint};
// Ouch command line options (docstrings below are part of --help)
/// A command-line utility for easily compressing and decompressing files and directories.
///
/// Supported formats: tar, zip, gz, 7z, xz/lzma, bz/bz2, lz4, sz (Snappy), zst and rar.
/// Supported formats: tar, zip, gz, 7z, xz/lzma, bz/bz2, bz3, lz4, sz (Snappy), zst, rar and br.
///
/// Repository: https://github.com/ouch-org/ouch
#[derive(Parser, Debug, PartialEq)]
@ -13,11 +13,11 @@ use clap::{Parser, ValueHint};
// Disable rustdoc::bare_urls because rustdoc parses URLs differently than Clap
#[allow(rustdoc::bare_urls)]
pub struct CliArgs {
/// Skip [Y/n] questions positively
/// Skip [Y/n] questions, default to yes
#[arg(short, long, conflicts_with = "no", global = true)]
pub yes: bool,
/// Skip [Y/n] questions negatively
/// Skip [Y/n] questions, default to no
#[arg(short, long, global = true)]
pub no: bool,
@ -25,15 +25,15 @@ pub struct CliArgs {
#[arg(short = 'A', long, env = "ACCESSIBLE", global = true)]
pub accessible: bool,
/// Ignores hidden files
/// Ignore hidden files
#[arg(short = 'H', long, global = true)]
pub hidden: bool,
/// Silences output
/// Silence output
#[arg(short = 'q', long, global = true)]
pub quiet: bool,
/// Ignores files matched by git's ignore files
/// Ignore files matched by git's ignore files
#[arg(short = 'g', long, global = true)]
pub gitignore: bool,
@ -41,10 +41,14 @@ pub struct CliArgs {
#[arg(short, long, global = true)]
pub format: Option<OsString>,
/// decompress or list with password
/// Decompress or list with password
#[arg(short = 'p', long = "password", global = true)]
pub password: Option<OsString>,
/// Concurrent working threads
#[arg(short = 'c', long, global = true)]
pub threads: Option<usize>,
// Ouch and claps subcommands
#[command(subcommand)]
pub cmd: Subcommand,
@ -77,6 +81,10 @@ pub enum Subcommand {
/// conflicts with --level and --fast
#[arg(long, group = "compression-level")]
slow: bool,
/// Archive target files instead of storing symlinks (supported by `tar` and `zip`)
#[arg(long, short = 'S')]
follow_symlinks: bool,
},
/// Decompresses one or more files, optionally into another folder
#[command(visible_alias = "d")]
@ -88,6 +96,14 @@ pub enum Subcommand {
/// Place results in a directory other than the current one
#[arg(short = 'd', long = "dir", value_hint = ValueHint::FilePath)]
output_dir: Option<PathBuf>,
/// Remove the source file after successful decompression
#[arg(short = 'r', long)]
remove: bool,
/// Disable Smart Unpack
#[arg(long)]
no_smart_unpack: bool,
},
/// List contents of an archive
#[command(visible_aliases = ["l", "ls"])]
@ -138,10 +154,13 @@ mod tests {
format: None,
// This is usually replaced in assertion tests
password: None,
threads: None,
cmd: Subcommand::Decompress {
// Put a crazy value here so no test can assert it unintentionally
files: vec!["\x00\x11\x22".into()],
output_dir: None,
remove: false,
no_smart_unpack: false,
},
}
}
@ -154,6 +173,8 @@ mod tests {
cmd: Subcommand::Decompress {
files: to_paths(["file.tar.gz"]),
output_dir: None,
remove: false,
no_smart_unpack: false,
},
..mock_cli_args()
}
@ -164,6 +185,8 @@ mod tests {
cmd: Subcommand::Decompress {
files: to_paths(["file.tar.gz"]),
output_dir: None,
remove: false,
no_smart_unpack: false,
},
..mock_cli_args()
}
@ -174,6 +197,8 @@ mod tests {
cmd: Subcommand::Decompress {
files: to_paths(["a", "b", "c"]),
output_dir: None,
remove: false,
no_smart_unpack: false,
},
..mock_cli_args()
}
@ -188,6 +213,7 @@ mod tests {
level: None,
fast: false,
slow: false,
follow_symlinks: false,
},
..mock_cli_args()
}
@ -201,6 +227,7 @@ mod tests {
level: None,
fast: false,
slow: false,
follow_symlinks: false,
},
..mock_cli_args()
}
@ -214,6 +241,7 @@ mod tests {
level: None,
fast: false,
slow: false,
follow_symlinks: false,
},
..mock_cli_args()
}
@ -238,6 +266,7 @@ mod tests {
level: None,
fast: false,
slow: false,
follow_symlinks: false,
},
format: Some("tar.gz".into()),
..mock_cli_args()

View File

@ -54,7 +54,7 @@ fn canonicalize_files(files: &[impl AsRef<Path>]) -> io::Result<Vec<PathBuf>> {
files
.iter()
.map(|f| {
if is_path_stdin(f.as_ref()) {
if is_path_stdin(f.as_ref()) || f.as_ref().is_symlink() {
Ok(f.as_ref().to_path_buf())
} else {
fs::canonicalize(f)

View File

@ -31,6 +31,7 @@ pub fn compress_files(
output_file: fs::File,
output_path: &Path,
quiet: bool,
follow_symlinks: bool,
question_policy: QuestionPolicy,
file_visibility_policy: FileVisibilityPolicy,
level: Option<i16>,
@ -56,6 +57,16 @@ pub fn compress_files(
encoder,
level.map_or_else(Default::default, |l| bzip2::Compression::new((l as u32).clamp(1, 9))),
)),
Bzip3 => {
#[cfg(not(feature = "bzip3"))]
return Err(archive::bzip3_stub::no_support());
#[cfg(feature = "bzip3")]
Box::new(
// Use block size of 16 MiB
bzip3::write::Bz3Encoder::new(encoder, 16 * 2_usize.pow(20))?,
)
}
Lz4 => Box::new(lz4_flex::frame::FrameEncoder::new(encoder).auto_finish()),
Lzma => Box::new(xz2::write::XzEncoder::new(
encoder,
@ -79,6 +90,12 @@ pub fn compress_files(
zstd_encoder.multithread(num_cpus::get_physical() as u32)?;
Box::new(zstd_encoder.auto_finish())
}
Brotli => {
let default_level = 11; // Same as brotli CLI, default to highest compression
let level = level.unwrap_or(default_level).clamp(0, 11) as u32;
let win_size = 22; // default to 2^22 = 4 MiB window size
Box::new(brotli::CompressorWriter::new(encoder, BUFFER_CAPACITY, level, win_size))
}
Tar | Zip | Rar | SevenZip => unreachable!(),
};
Ok(encoder)
@ -91,14 +108,21 @@ pub fn compress_files(
}
match first_format {
Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
writer = chain_writer_encoder(&first_format, writer)?;
let mut reader = fs::File::open(&files[0]).unwrap();
let mut reader = fs::File::open(&files[0])?;
io::copy(&mut reader, &mut writer)?;
}
Tar => {
archive::tar::build_archive_from_paths(&files, output_path, &mut writer, file_visibility_policy, quiet)?;
archive::tar::build_archive_from_paths(
&files,
output_path,
&mut writer,
file_visibility_policy,
quiet,
follow_symlinks,
)?;
writer.flush()?;
}
Zip => {
@ -121,6 +145,7 @@ pub fn compress_files(
&mut vec_buffer,
file_visibility_policy,
quiet,
follow_symlinks,
)?;
vec_buffer.rewind()?;
io::copy(&mut vec_buffer, &mut writer)?;

View File

@ -6,6 +6,8 @@ use std::{
use fs_err as fs;
#[cfg(not(feature = "bzip3"))]
use crate::archive;
use crate::{
commands::{warn_user_about_loading_sevenz_in_memory, warn_user_about_loading_zip_in_memory},
extension::{
@ -14,8 +16,11 @@ use crate::{
Extension,
},
utils::{
self, io::lock_and_flush_output_stdio, is_path_stdin, logger::info_accessible, nice_directory_display,
user_wants_to_continue,
self,
io::lock_and_flush_output_stdio,
is_path_stdin,
logger::{info, info_accessible},
nice_directory_display, user_wants_to_continue,
},
QuestionAction, QuestionPolicy, BUFFER_CAPACITY,
};
@ -23,23 +28,28 @@ use crate::{
trait ReadSeek: Read + io::Seek {}
impl<T: Read + io::Seek> ReadSeek for T {}
pub struct DecompressOptions<'a> {
pub input_file_path: &'a Path,
pub formats: Vec<Extension>,
pub output_dir: &'a Path,
pub output_file_path: PathBuf,
pub is_output_dir_provided: bool,
pub is_smart_unpack: bool,
pub question_policy: QuestionPolicy,
pub quiet: bool,
pub password: Option<&'a [u8]>,
pub remove: bool,
}
/// Decompress a file
///
/// File at input_file_path is opened for reading, example: "archive.tar.gz"
/// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
/// output_dir it's where the file will be decompressed to, this function assumes that the directory exists
/// output_file_path is only used when extracting single file formats, not archive formats like .tar or .zip
pub fn decompress_file(
input_file_path: &Path,
formats: Vec<Extension>,
output_dir: &Path,
output_file_path: PathBuf,
question_policy: QuestionPolicy,
quiet: bool,
password: Option<&[u8]>,
) -> crate::Result<()> {
assert!(output_dir.exists());
let input_is_stdin = is_path_stdin(input_file_path);
pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
assert!(options.output_dir.exists());
let input_is_stdin = is_path_stdin(options.input_file_path);
// Zip archives are special, because they require io::Seek, so it requires it's logic separated
// from decoder chaining.
@ -51,7 +61,7 @@ pub fn decompress_file(
if let [Extension {
compression_formats: [Zip],
..
}] = formats.as_slice()
}] = options.formats.as_slice()
{
let mut vec = vec![];
let reader: Box<dyn ReadSeek> = if input_is_stdin {
@ -59,14 +69,16 @@ pub fn decompress_file(
io::copy(&mut io::stdin(), &mut vec)?;
Box::new(io::Cursor::new(vec))
} else {
Box::new(fs::File::open(input_file_path)?)
Box::new(fs::File::open(options.input_file_path)?)
};
let zip_archive = zip::ZipArchive::new(reader)?;
let files_unpacked = if let ControlFlow::Continue(files) = smart_unpack(
|output_dir| crate::archive::zip::unpack_archive(zip_archive, output_dir, password, quiet),
output_dir,
&output_file_path,
question_policy,
let files_unpacked = if let ControlFlow::Continue(files) = execute_decompression(
|output_dir| crate::archive::zip::unpack_archive(zip_archive, output_dir, options.password, options.quiet),
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
@ -78,11 +90,19 @@ pub fn decompress_file(
// as screen readers may not read a commands exit code, making it hard to reason
// about whether the command succeeded without such a message
info_accessible(format!(
"Successfully decompressed archive in {} ({} files).",
nice_directory_display(output_dir),
"Successfully decompressed archive in {} ({} files)",
nice_directory_display(options.output_dir),
files_unpacked
));
if !input_is_stdin && options.remove {
fs::remove_file(options.input_file_path)?;
info(format!(
"Removed input file {}",
nice_directory_display(options.input_file_path)
));
}
return Ok(());
}
@ -90,7 +110,7 @@ pub fn decompress_file(
let reader: Box<dyn Read> = if input_is_stdin {
Box::new(io::stdin())
} else {
Box::new(fs::File::open(input_file_path)?)
Box::new(fs::File::open(options.input_file_path)?)
};
let reader = BufReader::with_capacity(BUFFER_CAPACITY, reader);
let mut reader: Box<dyn Read> = Box::new(reader);
@ -100,26 +120,38 @@ pub fn decompress_file(
let decoder: Box<dyn Read> = match format {
Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
Bzip3 => {
#[cfg(not(feature = "bzip3"))]
return Err(archive::bzip3_stub::no_support());
#[cfg(feature = "bzip3")]
Box::new(bzip3::read::Bz3Decoder::new(decoder)?)
}
Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
Tar | Zip | Rar | SevenZip => unreachable!(),
};
Ok(decoder)
};
let (first_extension, extensions) = split_first_compression_format(&formats);
let (first_extension, extensions) = split_first_compression_format(&options.formats);
for format in extensions.iter().rev() {
reader = chain_reader_decoder(format, reader)?;
}
let files_unpacked = match first_extension {
Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
reader = chain_reader_decoder(&first_extension, reader)?;
let mut writer = match utils::ask_to_create_file(&output_file_path, question_policy)? {
let mut writer = match utils::ask_to_create_file(
&options.output_file_path,
options.question_policy,
QuestionAction::Decompression,
)? {
Some(file) => file,
None => return Ok(()),
};
@ -129,11 +161,13 @@ pub fn decompress_file(
1
}
Tar => {
if let ControlFlow::Continue(files) = smart_unpack(
|output_dir| crate::archive::tar::unpack_archive(reader, output_dir, quiet),
output_dir,
&output_file_path,
question_policy,
if let ControlFlow::Continue(files) = execute_decompression(
|output_dir| crate::archive::tar::unpack_archive(reader, output_dir, options.quiet),
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
@ -141,13 +175,17 @@ pub fn decompress_file(
}
}
Zip => {
if formats.len() > 1 {
if options.formats.len() > 1 {
// Locking necessary to guarantee that warning and question
// messages stay adjacent
let _locks = lock_and_flush_output_stdio();
warn_user_about_loading_zip_in_memory();
if !user_wants_to_continue(input_file_path, question_policy, QuestionAction::Decompression)? {
if !user_wants_to_continue(
options.input_file_path,
options.question_policy,
QuestionAction::Decompression,
)? {
return Ok(());
}
}
@ -156,11 +194,15 @@ pub fn decompress_file(
io::copy(&mut reader, &mut vec)?;
let zip_archive = zip::ZipArchive::new(io::Cursor::new(vec))?;
if let ControlFlow::Continue(files) = smart_unpack(
|output_dir| crate::archive::zip::unpack_archive(zip_archive, output_dir, password, quiet),
output_dir,
&output_file_path,
question_policy,
if let ControlFlow::Continue(files) = execute_decompression(
|output_dir| {
crate::archive::zip::unpack_archive(zip_archive, output_dir, options.password, options.quiet)
},
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
@ -170,19 +212,31 @@ pub fn decompress_file(
#[cfg(feature = "unrar")]
Rar => {
type UnpackResult = crate::Result<usize>;
let unpack_fn: Box<dyn FnOnce(&Path) -> UnpackResult> = if formats.len() > 1 || input_is_stdin {
let unpack_fn: Box<dyn FnOnce(&Path) -> UnpackResult> = if options.formats.len() > 1 || input_is_stdin {
let mut temp_file = tempfile::NamedTempFile::new()?;
io::copy(&mut reader, &mut temp_file)?;
Box::new(move |output_dir| {
crate::archive::rar::unpack_archive(temp_file.path(), output_dir, password, quiet)
crate::archive::rar::unpack_archive(temp_file.path(), output_dir, options.password, options.quiet)
})
} else {
Box::new(|output_dir| crate::archive::rar::unpack_archive(input_file_path, output_dir, password, quiet))
Box::new(|output_dir| {
crate::archive::rar::unpack_archive(
options.input_file_path,
output_dir,
options.password,
options.quiet,
)
})
};
if let ControlFlow::Continue(files) =
smart_unpack(unpack_fn, output_dir, &output_file_path, question_policy)?
{
if let ControlFlow::Continue(files) = execute_decompression(
unpack_fn,
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
return Ok(());
@ -193,13 +247,17 @@ pub fn decompress_file(
return Err(crate::archive::rar_stub::no_support());
}
SevenZip => {
if formats.len() > 1 {
if options.formats.len() > 1 {
// Locking necessary to guarantee that warning and question
// messages stay adjacent
let _locks = lock_and_flush_output_stdio();
warn_user_about_loading_sevenz_in_memory();
if !user_wants_to_continue(input_file_path, question_policy, QuestionAction::Decompression)? {
if !user_wants_to_continue(
options.input_file_path,
options.question_policy,
QuestionAction::Decompression,
)? {
return Ok(());
}
}
@ -207,13 +265,20 @@ pub fn decompress_file(
let mut vec = vec![];
io::copy(&mut reader, &mut vec)?;
if let ControlFlow::Continue(files) = smart_unpack(
if let ControlFlow::Continue(files) = execute_decompression(
|output_dir| {
crate::archive::sevenz::decompress_sevenz(io::Cursor::new(vec), output_dir, password, quiet)
crate::archive::sevenz::decompress_sevenz(
io::Cursor::new(vec),
output_dir,
options.password,
options.quiet,
)
},
output_dir,
&output_file_path,
question_policy,
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
@ -227,14 +292,72 @@ pub fn decompress_file(
// as screen readers may not read a commands exit code, making it hard to reason
// about whether the command succeeded without such a message
info_accessible(format!(
"Successfully decompressed archive in {}.",
nice_directory_display(output_dir)
"Successfully decompressed archive in {}",
nice_directory_display(options.output_dir)
));
info_accessible(format!("Files unpacked: {}", files_unpacked));
if !input_is_stdin && options.remove {
fs::remove_file(options.input_file_path)?;
info(format!(
"Removed input file {}",
nice_directory_display(options.input_file_path)
));
}
Ok(())
}
fn execute_decompression(
unpack_fn: impl FnOnce(&Path) -> crate::Result<usize>,
output_dir: &Path,
output_file_path: &Path,
question_policy: QuestionPolicy,
is_output_dir_provided: bool,
is_smart_unpack: bool,
) -> crate::Result<ControlFlow<(), usize>> {
if is_smart_unpack {
return smart_unpack(unpack_fn, output_dir, output_file_path, question_policy);
}
let target_output_dir = if is_output_dir_provided {
output_dir
} else {
output_file_path
};
unpack(unpack_fn, target_output_dir, question_policy)
}
/// Unpacks an archive into `output_dir`, creating that directory if it does not exist.
/// - If `output_dir` does not exist or is an empty directory, the archive is unpacked there
/// - If `output_dir` already exists and is not empty, the user is asked how to resolve the conflict
fn unpack(
unpack_fn: impl FnOnce(&Path) -> crate::Result<usize>,
output_dir: &Path,
question_policy: QuestionPolicy,
) -> crate::Result<ControlFlow<(), usize>> {
let is_valid_output_dir = !output_dir.exists() || (output_dir.is_dir() && output_dir.read_dir()?.next().is_none());
let output_dir_cleaned = if is_valid_output_dir {
output_dir.to_owned()
} else {
match utils::resolve_path_conflict(output_dir, question_policy, QuestionAction::Decompression)? {
Some(path) => path,
None => return Ok(ControlFlow::Break(())),
}
};
if !output_dir_cleaned.exists() {
fs::create_dir(&output_dir_cleaned)?;
}
let files = unpack_fn(&output_dir_cleaned)?;
Ok(ControlFlow::Continue(files))
}
/// Unpacks an archive with some heuristics
/// - If the archive contains only one file, it will be extracted to the `output_dir`
/// - If the archive contains multiple files, it will be extracted to a subdirectory of the
@ -248,18 +371,19 @@ fn smart_unpack(
question_policy: QuestionPolicy,
) -> crate::Result<ControlFlow<(), usize>> {
assert!(output_dir.exists());
let temp_dir = tempfile::tempdir_in(output_dir)?;
let temp_dir = tempfile::Builder::new().prefix("tmp-ouch-").tempdir_in(output_dir)?;
let temp_dir_path = temp_dir.path();
info_accessible(format!(
"Created temporary directory {} to hold decompressed elements.",
"Created temporary directory {} to hold decompressed elements",
nice_directory_display(temp_dir_path)
));
let files = unpack_fn(temp_dir_path)?;
let root_contains_only_one_element = fs::read_dir(temp_dir_path)?.count() == 1;
if root_contains_only_one_element {
let root_contains_only_one_element = fs::read_dir(temp_dir_path)?.take(2).count() == 1;
let (previous_path, mut new_path) = if root_contains_only_one_element {
// Only one file in the root directory, so we can just move it to the output directory
let file = fs::read_dir(temp_dir_path)?.next().expect("item exists")?;
let file_path = file.path();
@ -267,31 +391,26 @@ fn smart_unpack(
.file_name()
.expect("Should be safe because paths in archives should not end with '..'");
let correct_path = output_dir.join(file_name);
// Before moving, need to check if a file with the same name already exists
if !utils::clear_path(&correct_path, question_policy)? {
return Ok(ControlFlow::Break(()));
}
fs::rename(&file_path, &correct_path)?;
info_accessible(format!(
"Successfully moved {} to {}.",
nice_directory_display(&file_path),
nice_directory_display(&correct_path)
));
(file_path, correct_path)
} else {
// Multiple files in the root directory, so:
// Rename the temporary directory to the archive name, which is output_file_path
// One case to handle though is we need to check if a file with the same name already exists
if !utils::clear_path(output_file_path, question_policy)? {
return Ok(ControlFlow::Break(()));
}
fs::rename(temp_dir_path, output_file_path)?;
info_accessible(format!(
"Successfully moved {} to {}.",
nice_directory_display(temp_dir_path),
nice_directory_display(output_file_path)
));
}
(temp_dir_path.to_owned(), output_file_path.to_owned())
};
// Before moving, need to check if a file with the same name already exists
// If it does, need to ask the user what to do
new_path = match utils::resolve_path_conflict(&new_path, question_policy, QuestionAction::Decompression)? {
Some(path) => path,
None => return Ok(ControlFlow::Break(())),
};
// Move the extracted content (the single file, or the whole temporary directory) to its final path
fs::rename(&previous_path, &new_path)?;
info_accessible(format!(
"Successfully moved \"{}\" to \"{}\"",
nice_directory_display(&previous_path),
nice_directory_display(&new_path),
));
Ok(ControlFlow::Continue(files))
}

View File

@ -6,7 +6,7 @@ use std::{
use fs_err as fs;
use crate::{
archive::sevenz,
archive,
commands::warn_user_about_loading_zip_in_memory,
extension::CompressionFormat::{self, *},
list::{self, FileInArchive, ListOptions},
@ -34,7 +34,7 @@ pub fn list_archive_contents(
// Any other Zip decompression done can take up the whole RAM and freeze ouch.
if let &[Zip] = formats.as_slice() {
let zip_archive = zip::ZipArchive::new(reader)?;
let files = crate::archive::zip::list_archive(zip_archive, password)?;
let files = crate::archive::zip::list_archive(zip_archive, password);
list::list_files(archive_path, files, list_options)?;
return Ok(());
@ -50,10 +50,18 @@ pub fn list_archive_contents(
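// Each format in the chain wraps the previous reader, so a compound extension such as
// `.tar.gz` ends up as a gzip decoder feeding the tar lister further down.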
let decoder: Box<dyn Read + Send> = match format {
Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
Bzip3 => {
#[cfg(not(feature = "bzip3"))]
return Err(archive::bzip3_stub::no_support());
#[cfg(feature = "bzip3")]
Box::new(bzip3::read::Bz3Decoder::new(decoder).unwrap())
}
Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
Tar | Zip | Rar | SevenZip => unreachable!(),
};
Ok(decoder)
@ -64,7 +72,7 @@ pub fn list_archive_contents(
}
let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match formats[0] {
Tar => Box::new(crate::archive::tar::list_archive(tar::Archive::new(reader))?),
Tar => Box::new(crate::archive::tar::list_archive(tar::Archive::new(reader))),
Zip => {
if formats.len() > 1 {
// Locking necessary to guarantee that warning and question
@ -81,7 +89,7 @@ pub fn list_archive_contents(
io::copy(&mut reader, &mut vec)?;
let zip_archive = zip::ZipArchive::new(io::Cursor::new(vec))?;
Box::new(crate::archive::zip::list_archive(zip_archive, password)?)
Box::new(crate::archive::zip::list_archive(zip_archive, password))
}
#[cfg(feature = "unrar")]
Rar => {
@ -109,12 +117,12 @@ pub fn list_archive_contents(
}
}
Box::new(sevenz::list_archive(archive_path, password)?)
Box::new(archive::sevenz::list_archive(archive_path, password)?)
}
Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
panic!("Not an archive! This should never happen, if it does, something is wrong with `CompressionFormat::is_archive()`. Please report this error!");
}
};
list::list_files(archive_path, files, list_options)?;
Ok(())
list::list_files(archive_path, files, list_options)
}

View File

@ -7,6 +7,7 @@ mod list;
use std::{ops::ControlFlow, path::PathBuf};
use bstr::ByteSlice;
use decompress::DecompressOptions;
use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator};
use utils::colors;
@ -19,6 +20,7 @@ use crate::{
list::ListOptions,
utils::{
self, colors::*, is_path_stdin, logger::info_accessible, path_to_str, EscapedPathDisplay, FileVisibilityPolicy,
QuestionAction,
},
CliArgs, QuestionPolicy,
};
@ -52,6 +54,13 @@ pub fn run(
question_policy: QuestionPolicy,
file_visibility_policy: FileVisibilityPolicy,
) -> crate::Result<()> {
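// Build the global rayon thread pool up front if the user limited the number of threads;
// build_global may only be called once, before any parallel work starts.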
if let Some(threads) = args.threads {
rayon::ThreadPoolBuilder::new()
.num_threads(threads)
.build_global()
.unwrap();
}
match args.cmd {
Subcommand::Compress {
files,
@ -59,6 +68,7 @@ pub fn run(
level,
fast,
slow,
follow_symlinks,
} => {
// After cleaning, if there are no input files left, exit
if files.is_empty() {
@ -82,10 +92,11 @@ pub fn run(
)?;
check::check_archive_formats_position(&formats, &output_path)?;
let output_file = match utils::ask_to_create_file(&output_path, question_policy)? {
Some(writer) => writer,
None => return Ok(()),
};
let output_file =
match utils::ask_to_create_file(&output_path, question_policy, QuestionAction::Compression)? {
Some(writer) => writer,
None => return Ok(()),
};
let level = if fast {
Some(1) // Lowest level of compression
@ -101,6 +112,7 @@ pub fn run(
output_file,
&output_path,
args.quiet,
follow_symlinks,
question_policy,
file_visibility_policy,
level,
@ -111,7 +123,7 @@ pub fn run(
// having a final status message is important especially in an accessibility context
// as screen readers may not read a command's exit code, making it hard to reason
// about whether the command succeeded without such a message
info_accessible(format!("Successfully compressed '{}'.", path_to_str(&output_path)));
info_accessible(format!("Successfully compressed '{}'", path_to_str(&output_path)));
} else {
// If Ok(false) or Err() occurred, delete incomplete file at `output_path`
//
@ -134,7 +146,12 @@ pub fn run(
compress_result.map(|_| ())
}
Subcommand::Decompress { files, output_dir } => {
Subcommand::Decompress {
files,
output_dir,
remove,
no_smart_unpack,
} => {
let mut output_paths = vec![];
let mut formats = vec![];
@ -162,6 +179,9 @@ pub fn run(
check::check_missing_formats_when_decompressing(&files, &formats)?;
let is_output_dir_provided = output_dir.is_some();
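// Smart unpack only kicks in when the user did not pick an output directory
// and did not pass --no-smart-unpack.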
let is_smart_unpack = !is_output_dir_provided && !no_smart_unpack;
// The directory that will contain the output files
// We default to the current directory if the user didn't specify an output directory with --dir
let output_dir = if let Some(dir) = output_dir {
@ -182,17 +202,20 @@ pub fn run(
} else {
output_dir.join(file_name)
};
decompress_file(
input_path,
decompress_file(DecompressOptions {
input_file_path: input_path,
formats,
&output_dir,
is_output_dir_provided,
output_dir: &output_dir,
output_file_path,
is_smart_unpack,
question_policy,
args.quiet,
args.password.as_deref().map(|str| {
quiet: args.quiet,
password: args.password.as_deref().map(|str| {
<[u8] as ByteSlice>::from_os_str(str).expect("convert password to bytes failed")
}),
)
remove,
})
})
}
Subcommand::List { archives: files, tree } => {

View File

@ -32,7 +32,7 @@ pub enum Error {
PermissionDenied { error_title: String },
/// From zip::result::ZipError::UnsupportedArchive
UnsupportedZipArchive(&'static str),
/// TO BE REMOVED
/// We don't support compressing the root folder.
CompressingRootFolder,
/// Specialized walkdir's io::Error wrapper with additional information on the error
WalkdirError { reason: String },
@ -200,6 +200,19 @@ impl From<std::io::Error> for Error {
}
}
#[cfg(feature = "bzip3")]
impl From<bzip3::Error> for Error {
fn from(err: bzip3::Error) -> Self {
use bzip3::Error as Bz3Error;
match err {
Bz3Error::Io(inner) => inner.into(),
Bz3Error::BlockSize | Bz3Error::ProcessBlock(_) | Bz3Error::InvalidSignature => {
FinalError::with_title("bzip3 error").detail(err.to_string()).into()
}
}
}
}
impl From<zip::result::ZipError> for Error {
fn from(err: zip::result::ZipError) -> Self {
use zip::result::ZipError;
@ -223,8 +236,8 @@ impl From<unrar::error::UnrarError> for Error {
}
}
impl From<sevenz_rust::Error> for Error {
fn from(err: sevenz_rust::Error) -> Self {
impl From<sevenz_rust2::Error> for Error {
fn from(err: sevenz_rust2::Error) -> Self {
Self::SevenzipError {
reason: err.to_string(),
}

View File

@ -21,14 +21,15 @@ pub const SUPPORTED_EXTENSIONS: &[&str] = &[
#[cfg(feature = "unrar")]
"rar",
"7z",
"br",
];
pub const SUPPORTED_ALIASES: &[&str] = &["tgz", "tbz", "tlz4", "txz", "tzlma", "tsz", "tzst"];
#[cfg(not(feature = "unrar"))]
pub const PRETTY_SUPPORTED_EXTENSIONS: &str = "tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, 7z";
pub const PRETTY_SUPPORTED_EXTENSIONS: &str = "tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z";
#[cfg(feature = "unrar")]
pub const PRETTY_SUPPORTED_EXTENSIONS: &str = "tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, rar, 7z";
pub const PRETTY_SUPPORTED_EXTENSIONS: &str = "tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z";
pub const PRETTY_SUPPORTED_ALIASES: &str = "tgz, tbz, tlz4, txz, tzlma, tsz, tzst";
@ -77,13 +78,15 @@ pub enum CompressionFormat {
Gzip,
/// .bz .bz2
Bzip,
/// .bz3
Bzip3,
/// .lz4
Lz4,
/// .xz .lzma
Lzma,
/// .sz
Snappy,
/// tar, tgz, tbz, tbz2, txz, tlz4, tlzma, tsz, tzst
/// tar, tgz, tbz, tbz2, tbz3, txz, tlz4, tlzma, tsz, tzst
Tar,
/// .zst
Zstd,
@ -94,6 +97,8 @@ pub enum CompressionFormat {
Rar,
/// .7z
SevenZip,
/// .br
Brotli,
}
impl CompressionFormat {
@ -104,10 +109,12 @@ impl CompressionFormat {
Tar | Zip | Rar | SevenZip => true,
Gzip => false,
Bzip => false,
Bzip3 => false,
Lz4 => false,
Lzma => false,
Snappy => false,
Zstd => false,
Brotli => false,
}
}
}
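// For example, `Tar.is_archive()` and `Zip.is_archive()` are true, while single-stream
// formats such as `Gzip` or `Brotli` return false and can only wrap one inner file.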
@ -118,12 +125,14 @@ fn to_extension(ext: &[u8]) -> Option<Extension> {
b"tar" => &[Tar],
b"tgz" => &[Tar, Gzip],
b"tbz" | b"tbz2" => &[Tar, Bzip],
b"tbz3" => &[Tar, Bzip3],
b"tlz4" => &[Tar, Lz4],
b"txz" | b"tlzma" => &[Tar, Lzma],
b"tsz" => &[Tar, Snappy],
b"tzst" => &[Tar, Zstd],
b"zip" => &[Zip],
b"bz" | b"bz2" => &[Bzip],
b"bz3" => &[Bzip3],
b"gz" => &[Gzip],
b"lz4" => &[Lz4],
b"xz" | b"lzma" => &[Lzma],
@ -131,6 +140,7 @@ fn to_extension(ext: &[u8]) -> Option<Extension> {
b"zst" => &[Zstd],
b"rar" => &[Rar],
b"7z" => &[SevenZip],
b"br" => &[Brotli],
_ => return None,
},
ext.to_str_lossy(),
@ -195,7 +205,7 @@ pub fn separate_known_extensions_from_name(path: &Path) -> (&Path, Vec<Extension
let file_stem = name.trim_matches('.');
if SUPPORTED_EXTENSIONS.contains(&file_stem) || SUPPORTED_ALIASES.contains(&file_stem) {
warning(format!(
"Received a file with name '{file_stem}', but {file_stem} was expected as the extension."
"Received a file with name '{file_stem}', but {file_stem} was expected as the extension"
));
}
}
@ -276,7 +286,7 @@ mod tests {
#[test]
/// Test extension parsing for input/output files
fn test_separate_known_extensions_from_name() {
let _handler = spawn_logger_thread();
spawn_logger_thread();
assert_eq!(
separate_known_extensions_from_name("file".as_ref()),
("file".as_ref(), vec![])

View File

@ -1,7 +1,7 @@
//! Some implementation helpers related to the 'list' command.
use std::{
io::{stdout, Write},
io::{stdout, BufWriter, Write},
path::{Path, PathBuf},
};
@ -32,16 +32,16 @@ pub fn list_files(
files: impl IntoIterator<Item = crate::Result<FileInArchive>>,
list_options: ListOptions,
) -> crate::Result<()> {
let out = &mut stdout().lock();
let mut out = BufWriter::new(stdout().lock());
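// Wrapping stdout in a BufWriter avoids issuing a write per entry when listing large archives.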
let _ = writeln!(out, "Archive: {}", EscapedPathDisplay::new(archive));
if list_options.tree {
let tree = files.into_iter().collect::<crate::Result<Tree>>()?;
tree.print(out);
tree.print(&mut out);
} else {
for file in files {
let FileInArchive { path, is_dir } = file?;
print_entry(out, EscapedPathDisplay::new(&path), is_dir);
print_entry(&mut out, EscapedPathDisplay::new(&path), is_dir);
}
}
Ok(())
@ -143,7 +143,7 @@ mod tree {
false => draw::FINAL_BRANCH,
};
print!("{prefix}{final_part}");
let _ = write!(out, "{prefix}{final_part}");
let is_dir = match self.file {
Some(FileInArchive { is_dir, .. }) => is_dir,
None => true,

View File

@ -12,11 +12,15 @@ pub mod sandbox;
use std::{env, path::PathBuf};
use cli::CliArgs;
use error::{Error, Result};
use once_cell::sync::Lazy;
use utils::{QuestionAction, QuestionPolicy};
use crate::utils::logger::spawn_logger_thread;
use self::{
error::{Error, Result},
utils::{
logger::{shutdown_logger_and_wait, spawn_logger_thread},
QuestionAction, QuestionPolicy,
},
};
// Used in BufReader and BufWriter to perform less syscalls
const BUFFER_CAPACITY: usize = 1024 * 32;
@ -28,17 +32,19 @@ static CURRENT_DIRECTORY: Lazy<PathBuf> = Lazy::new(|| env::current_dir().unwrap
pub const EXIT_FAILURE: i32 = libc::EXIT_FAILURE;
fn main() {
let handler = spawn_logger_thread();
spawn_logger_thread();
// Restrict write permissions to the current working directory
let working_dir = get_current_working_dir().expect("Cannot get current working dir");
let path_str = working_dir.to_str().expect("Cannot convert path");
let status = sandbox::restrict_paths(&[path_str]).expect("failed to build the ruleset");
//todo: check status and report error or warnign if landlock restriction failed
//todo: check status and report error or warning if landlock restriction failed
spawn_logger_thread();
let result = run();
handler.shutdown_and_wait();
shutdown_logger_and_wait();
if let Err(err) = result {
eprintln!("{err}");

View File

@ -69,11 +69,18 @@ impl FileVisibilityPolicy {
/// Walks through a directory using [`ignore::Walk`]
pub fn build_walker(&self, path: impl AsRef<Path>) -> ignore::Walk {
ignore::WalkBuilder::new(path)
let mut builder = ignore::WalkBuilder::new(path);
builder
.git_exclude(self.read_git_exclude)
.git_ignore(self.read_git_ignore)
.ignore(self.read_ignore)
.hidden(self.read_hidden)
.build()
.hidden(self.read_hidden);
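// When gitignore handling is enabled, also skip the `.git` directory itself.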
if self.read_git_ignore {
builder.filter_entry(|p| p.path().file_name().is_some_and(|name| name != ".git"));
}
builder.build()
}
}

View File

@ -105,11 +105,11 @@ impl Bytes {
impl std::fmt::Display for Bytes {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let &Self(num) = self;
let num = self.0;
debug_assert!(num >= 0.0);
if num < 1_f64 {
return write!(f, "{} B", num);
return write!(f, "{:>6.2} B", num);
}
let delimiter = 1000_f64;
@ -117,9 +117,9 @@ impl std::fmt::Display for Bytes {
write!(
f,
"{:.2} {}B",
"{:>6.2} {:>2}B",
num / delimiter.powi(exponent),
Bytes::UNIT_PREFIXES[exponent as usize]
Bytes::UNIT_PREFIXES[exponent as usize],
)
}
}
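// The `{:>6.2}` format pads the number to a fixed width of six characters, so byte sizes
// printed in a column line up, as the updated tests below expect.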
@ -138,33 +138,33 @@ mod tests {
let mb = kb * 1000;
let gb = mb * 1000;
assert_eq!("0 B", format_bytes(0)); // This is weird
assert_eq!("1.00 B", format_bytes(b));
assert_eq!("999.00 B", format_bytes(b * 999));
assert_eq!("12.00 MiB", format_bytes(mb * 12));
assert_eq!(" 0.00 B", format_bytes(0)); // This is weird
assert_eq!(" 1.00 B", format_bytes(b));
assert_eq!("999.00 B", format_bytes(b * 999));
assert_eq!(" 12.00 MiB", format_bytes(mb * 12));
assert_eq!("123.00 MiB", format_bytes(mb * 123));
assert_eq!("5.50 MiB", format_bytes(mb * 5 + kb * 500));
assert_eq!("7.54 GiB", format_bytes(gb * 7 + 540 * mb));
assert_eq!("1.20 TiB", format_bytes(gb * 1200));
assert_eq!(" 5.50 MiB", format_bytes(mb * 5 + kb * 500));
assert_eq!(" 7.54 GiB", format_bytes(gb * 7 + 540 * mb));
assert_eq!(" 1.20 TiB", format_bytes(gb * 1200));
// bytes
assert_eq!("234.00 B", format_bytes(234));
assert_eq!("999.00 B", format_bytes(999));
assert_eq!("234.00 B", format_bytes(234));
assert_eq!("999.00 B", format_bytes(999));
// kilobytes
assert_eq!("2.23 kiB", format_bytes(2234));
assert_eq!("62.50 kiB", format_bytes(62500));
assert_eq!(" 2.23 kiB", format_bytes(2234));
assert_eq!(" 62.50 kiB", format_bytes(62500));
assert_eq!("329.99 kiB", format_bytes(329990));
// megabytes
assert_eq!("2.75 MiB", format_bytes(2750000));
assert_eq!("55.00 MiB", format_bytes(55000000));
assert_eq!(" 2.75 MiB", format_bytes(2750000));
assert_eq!(" 55.00 MiB", format_bytes(55000000));
assert_eq!("987.65 MiB", format_bytes(987654321));
// gigabytes
assert_eq!("5.28 GiB", format_bytes(5280000000));
assert_eq!("95.20 GiB", format_bytes(95200000000));
assert_eq!(" 5.28 GiB", format_bytes(5280000000));
assert_eq!(" 95.20 GiB", format_bytes(95200000000));
assert_eq!("302.00 GiB", format_bytes(302000000000));
assert_eq!("302.99 GiB", format_bytes(302990000000));
// Weird approximation cases:
assert_eq!("999.90 GiB", format_bytes(999900000000));
assert_eq!("1.00 TiB", format_bytes(999990000000));
assert_eq!(" 1.00 TiB", format_bytes(999990000000));
}
}

View File

@ -8,10 +8,10 @@ use std::{
use fs_err as fs;
use super::user_wants_to_overwrite;
use super::{question::FileConflitOperation, user_wants_to_overwrite};
use crate::{
extension::Extension,
utils::{logger::info_accessible, EscapedPathDisplay},
utils::{logger::info_accessible, EscapedPathDisplay, QuestionAction},
QuestionPolicy,
};
@ -19,19 +19,34 @@ pub fn is_path_stdin(path: &Path) -> bool {
path.as_os_str() == "-"
}
/// Remove `path` asking the user to overwrite if necessary.
/// Check if `path` exists; if it does, ask the user whether they want to overwrite or rename it.
/// If the user wants to overwrite, the file or directory is removed and the same input path is returned.
/// If the user wants to rename, nothing is removed and a new, non-conflicting path is returned.
///
/// * `Ok(true)` means the path is clear,
/// * `Ok(false)` means the user doesn't want to overwrite
/// * `Ok(None)` means the user wants to cancel the operation
/// * `Ok(Some(path))` returns a valid PathBuf that does not collide with any existing file or directory
/// * `Err(_)` is an error
pub fn clear_path(path: &Path, question_policy: QuestionPolicy) -> crate::Result<bool> {
if path.exists() && !user_wants_to_overwrite(path, question_policy)? {
return Ok(false);
pub fn resolve_path_conflict(
path: &Path,
question_policy: QuestionPolicy,
question_action: QuestionAction,
) -> crate::Result<Option<PathBuf>> {
if path.exists() {
match user_wants_to_overwrite(path, question_policy, question_action)? {
FileConflitOperation::Cancel => Ok(None),
FileConflitOperation::Overwrite => {
remove_file_or_dir(path)?;
Ok(Some(path.to_path_buf()))
}
FileConflitOperation::Rename => {
let renamed_path = rename_for_available_filename(path);
Ok(Some(renamed_path))
}
FileConflitOperation::Merge => Ok(Some(path.to_path_buf())),
}
} else {
Ok(Some(path.to_path_buf()))
}
remove_file_or_dir(path)?;
Ok(true)
}
pub fn remove_file_or_dir(path: &Path) -> crate::Result<()> {
@ -43,13 +58,48 @@ pub fn remove_file_or_dir(path: &Path) -> crate::Result<()> {
Ok(())
}
/// Create a new path by renaming the file name of `path` to an available name in the same directory
pub fn rename_for_available_filename(path: &Path) -> PathBuf {
let mut renamed_path = rename_or_increment_filename(path);
while renamed_path.exists() {
renamed_path = rename_or_increment_filename(&renamed_path);
}
renamed_path
}
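// e.g. if `archive` already exists this yields `archive_1`, then `archive_2`, and so on,
// looping until a free name is found.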
/// Create a new path by renaming the file name of `path` to `filename_1`;
/// if the name already ends with `_` and a number, the number is incremented instead.
/// Example:
/// - `file.txt` -> `file_1.txt`
/// - `file_1.txt` -> `file_2.txt`
pub fn rename_or_increment_filename(path: &Path) -> PathBuf {
let parent = path.parent().unwrap_or_else(|| Path::new(""));
let filename = path.file_stem().and_then(|s| s.to_str()).unwrap_or("");
let extension = path.extension().and_then(|s| s.to_str()).unwrap_or("");
let new_filename = match filename.rsplit_once('_') {
Some((base, number_str)) if number_str.chars().all(char::is_numeric) => {
let number = number_str.parse::<u32>().unwrap_or(0);
format!("{}_{}", base, number + 1)
}
_ => format!("{}_1", filename),
};
let mut new_path = parent.join(new_filename);
if !extension.is_empty() {
new_path.set_extension(extension);
}
new_path
}
/// Creates a directory at the path, if there is nothing there.
pub fn create_dir_if_non_existent(path: &Path) -> crate::Result<()> {
if !path.exists() {
fs::create_dir_all(path)?;
// creating a directory is an important change to the file system, so we
// should always inform the user about it
info_accessible(format!("Directory {} created.", EscapedPathDisplay::new(path)));
info_accessible(format!("Directory {} created", EscapedPathDisplay::new(path)));
}
Ok(())
}
@ -82,6 +132,9 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
fn is_bz2(buf: &[u8]) -> bool {
buf.starts_with(&[0x42, 0x5A, 0x68])
}
fn is_bz3(buf: &[u8]) -> bool {
buf.starts_with(b"BZ3v1")
}
fn is_xz(buf: &[u8]) -> bool {
buf.starts_with(&[0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])
}
@ -95,6 +148,9 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
buf.starts_with(&[0x28, 0xB5, 0x2F, 0xFD])
}
fn is_rar(buf: &[u8]) -> bool {
// ref https://www.rarlab.com/technote.htm#rarsign
// RAR 5.0 8 bytes length signature: 0x52 0x61 0x72 0x21 0x1A 0x07 0x01 0x00
// RAR 4.x 7 bytes length signature: 0x52 0x61 0x72 0x21 0x1A 0x07 0x00
buf.len() >= 7
&& buf.starts_with(&[0x52, 0x61, 0x72, 0x21, 0x1A, 0x07])
&& (buf[6] == 0x00 || (buf.len() >= 8 && buf[6..=7] == [0x01, 0x00]))
@ -125,6 +181,8 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
Some(Extension::new(&[Gzip], "gz"))
} else if is_bz2(&buf) {
Some(Extension::new(&[Bzip], "bz2"))
} else if is_bz3(&buf) {
Some(Extension::new(&[Bzip3], "bz3"))
} else if is_xz(&buf) {
Some(Extension::new(&[Lzma], "xz"))
} else if is_lz4(&buf) {
@ -141,12 +199,3 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
None
}
}
/// Returns true if a path is a symlink.
/// This is the same as the nightly <https://doc.rust-lang.org/std/path/struct.Path.html#method.is_symlink>
/// Useful to detect broken symlinks when compressing. (So we can safely ignore them)
pub fn is_symlink(path: &Path) -> bool {
fs::symlink_metadata(path)
.map(|m| m.file_type().is_symlink())
.unwrap_or(false)
}

View File

@ -1,8 +1,12 @@
use std::io::{self, stderr, stdout, StderrLock, StdoutLock, Write};
use crate::utils::logger;
type StdioOutputLocks = (StdoutLock<'static>, StderrLock<'static>);
pub fn lock_and_flush_output_stdio() -> io::Result<StdioOutputLocks> {
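// Flush any queued log messages first so they cannot interleave with whatever is printed
// while stdout/stderr are locked (e.g. an interactive prompt).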
logger::flush_messages();
let mut stdout = stdout().lock();
stdout.flush()?;
let mut stderr = stderr().lock();

View File

@ -1,10 +1,25 @@
use std::sync::{mpsc, OnceLock};
use std::{
sync::{mpsc, Arc, Barrier, OnceLock},
thread,
};
pub use logger_thread::spawn_logger_thread;
use super::colors::{ORANGE, RESET, YELLOW};
use crate::accessible::is_running_in_accessible_mode;
/// Asks the logger to shut down and waits until it has flushed all pending messages.
#[track_caller]
pub fn shutdown_logger_and_wait() {
logger_thread::send_shutdown_command_and_wait();
}
/// Asks the logger to flush all messages; useful before starting STDIN interaction.
#[track_caller]
pub fn flush_messages() {
logger_thread::send_flush_command_and_wait();
}
/// An `[INFO]` log to be displayed if we're not running accessibility mode.
///
/// Same as `.info_accessible()`, but only displayed if accessibility mode
@ -30,7 +45,7 @@ pub fn info_accessible(contents: String) {
#[track_caller]
fn info_with_accessibility(contents: String, accessible: bool) {
logger_thread::send_log_message(PrintMessage {
logger_thread::send_print_command(PrintMessage {
contents,
accessible,
level: MessageLevel::Info,
@ -39,7 +54,7 @@ fn info_with_accessibility(contents: String, accessible: bool) {
#[track_caller]
pub fn warning(contents: String) {
logger_thread::send_log_message(PrintMessage {
logger_thread::send_print_command(PrintMessage {
contents,
// Warnings are important and unlikely to flood, so they should be displayed
accessible: true,
@ -48,9 +63,10 @@ pub fn warning(contents: String) {
}
#[derive(Debug)]
enum Message {
FlushAndShutdown,
PrintMessage(PrintMessage),
enum LoggerCommand {
Print(PrintMessage),
Flush { finished_barrier: Arc<Barrier> },
FlushAndShutdown { finished_barrier: Arc<Barrier> },
}
/// Message object used for sending logs from worker threads to a logging thread via channels.
@ -63,7 +79,7 @@ struct PrintMessage {
}
impl PrintMessage {
fn to_processed_message(&self) -> Option<String> {
fn to_formatted_message(&self) -> Option<String> {
match self.level {
MessageLevel::Info => {
if self.accessible {
@ -103,8 +119,8 @@ mod logger_thread {
use super::*;
type LogReceiver = mpsc::Receiver<Message>;
type LogSender = mpsc::Sender<Message>;
type LogReceiver = mpsc::Receiver<LoggerCommand>;
type LogSender = mpsc::Sender<LoggerCommand>;
static SENDER: OnceLock<LogSender> = OnceLock::new();
@ -121,59 +137,45 @@ mod logger_thread {
}
#[track_caller]
pub(super) fn send_log_message(msg: PrintMessage) {
pub(super) fn send_print_command(msg: PrintMessage) {
get_sender()
.send(Message::PrintMessage(msg))
.expect("Failed to send print message");
.send(LoggerCommand::Print(msg))
.expect("Failed to send print command");
}
#[track_caller]
fn send_shutdown_message() {
pub(super) fn send_flush_command_and_wait() {
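// Barrier of size 2: one wait() happens here, the other in the logger thread once it has
// flushed, so returning from this function guarantees the messages were written.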
let barrier = Arc::new(Barrier::new(2));
get_sender()
.send(Message::FlushAndShutdown)
.expect("Failed to send shutdown message");
.send(LoggerCommand::Flush {
finished_barrier: barrier.clone(),
})
.expect("Failed to send flush command");
barrier.wait();
}
pub struct LoggerThreadHandle {
shutdown_barrier: Arc<Barrier>,
#[track_caller]
pub(super) fn send_shutdown_command_and_wait() {
let barrier = Arc::new(Barrier::new(2));
get_sender()
.send(LoggerCommand::FlushAndShutdown {
finished_barrier: barrier.clone(),
})
.expect("Failed to send shutdown command");
barrier.wait();
}
impl LoggerThreadHandle {
/// Tell logger to shutdown and waits till it does.
pub fn shutdown_and_wait(self) {
// Signal the shutdown
send_shutdown_message();
// Wait for confirmation
self.shutdown_barrier.wait();
}
}
#[cfg(test)]
// shutdown_and_wait must be called manually, but to keep 'em clean, in
// case of tests just do it on drop
impl Drop for LoggerThreadHandle {
fn drop(&mut self) {
send_shutdown_message();
self.shutdown_barrier.wait();
}
}
pub fn spawn_logger_thread() -> LoggerThreadHandle {
pub fn spawn_logger_thread() {
let log_receiver = setup_channel();
let shutdown_barrier = Arc::new(Barrier::new(2));
let handle = LoggerThreadHandle {
shutdown_barrier: shutdown_barrier.clone(),
};
rayon::spawn(move || run_logger(log_receiver, shutdown_barrier));
handle
thread::spawn(move || run_logger(log_receiver));
}
fn run_logger(log_receiver: LogReceiver, shutdown_barrier: Arc<Barrier>) {
const FLUSH_TIMEOUT: Duration = Duration::from_millis(250);
fn run_logger(log_receiver: LogReceiver) {
const FLUSH_TIMEOUT: Duration = Duration::from_millis(200);
let mut buffer = Vec::<String>::with_capacity(16);
@ -188,9 +190,9 @@ mod logger_thread {
};
match msg {
Message::PrintMessage(msg) => {
LoggerCommand::Print(msg) => {
// Append message to buffer
if let Some(msg) = msg.to_processed_message() {
if let Some(msg) = msg.to_formatted_message() {
buffer.push(msg);
}
@ -198,14 +200,17 @@ mod logger_thread {
flush_logs_to_stderr(&mut buffer);
}
}
Message::FlushAndShutdown => {
LoggerCommand::Flush { finished_barrier } => {
flush_logs_to_stderr(&mut buffer);
break;
finished_barrier.wait();
}
LoggerCommand::FlushAndShutdown { finished_barrier } => {
flush_logs_to_stderr(&mut buffer);
finished_barrier.wait();
return;
}
}
}
shutdown_barrier.wait();
}
fn flush_logs_to_stderr(buffer: &mut Vec<String>) {

View File

@ -18,10 +18,13 @@ pub use self::{
EscapedPathDisplay,
},
fs::{
cd_into_same_dir_as, clear_path, create_dir_if_non_existent, is_path_stdin, is_symlink, remove_file_or_dir,
try_infer_extension,
cd_into_same_dir_as, create_dir_if_non_existent, is_path_stdin, remove_file_or_dir,
rename_for_available_filename, resolve_path_conflict, try_infer_extension,
},
question::{
ask_to_create_file, user_wants_to_continue, user_wants_to_overwrite, FileConflitOperation, QuestionAction,
QuestionPolicy,
},
question::{ask_to_create_file, user_wants_to_continue, user_wants_to_overwrite, QuestionAction, QuestionPolicy},
utf8::{get_invalid_utf8_paths, is_invalid_utf8},
};

View File

@ -37,31 +37,91 @@ pub enum QuestionAction {
Decompression,
}
#[derive(Default)]
/// Determines which action to do when there is a file conflict
pub enum FileConflitOperation {
#[default]
/// Cancel the operation
Cancel,
/// Overwrite the existing file with the new one
Overwrite,
/// Rename the file
/// Appends "_1" to the filename, or "_2", "_3", "_4", ... if that name already exists
Rename,
/// Merge conflicting folders
Merge,
}
/// Check if QuestionPolicy flags were set, otherwise ask the user if they want to overwrite.
pub fn user_wants_to_overwrite(path: &Path, question_policy: QuestionPolicy) -> crate::Result<bool> {
pub fn user_wants_to_overwrite(
path: &Path,
question_policy: QuestionPolicy,
question_action: QuestionAction,
) -> crate::Result<FileConflitOperation> {
use FileConflitOperation as Op;
match question_policy {
QuestionPolicy::AlwaysYes => Ok(true),
QuestionPolicy::AlwaysNo => Ok(false),
QuestionPolicy::Ask => {
let path = path_to_str(strip_cur_dir(path));
let path = Some(&*path);
let placeholder = Some("FILE");
Confirmation::new("Do you want to overwrite 'FILE'?", placeholder).ask(path)
}
QuestionPolicy::AlwaysYes => Ok(Op::Overwrite),
QuestionPolicy::AlwaysNo => Ok(Op::Cancel),
QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action),
}
}
/// Ask the user if they want to overwrite or rename the &Path
pub fn ask_file_conflict_operation(path: &Path, question_action: QuestionAction) -> Result<FileConflitOperation> {
use FileConflitOperation as Op;
let path = path_to_str(strip_cur_dir(path));
match question_action {
QuestionAction::Compression => ChoicePrompt::new(
format!("Do you want to overwrite {path}?"),
[
("yes", Op::Overwrite, *colors::GREEN),
("no", Op::Cancel, *colors::RED),
("rename", Op::Rename, *colors::BLUE),
],
)
.ask(),
QuestionAction::Decompression => ChoicePrompt::new(
format!("Do you want to overwrite {path}?"),
[
("yes", Op::Overwrite, *colors::GREEN),
("no", Op::Cancel, *colors::RED),
("rename", Op::Rename, *colors::BLUE),
("merge", Op::Merge, *colors::ORANGE),
],
)
.ask(),
}
}
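// In non-accessible mode the prompt shows just the first letter of each label
// (e.g. "[y/n/r/m]" when decompressing) and the answer is matched as a prefix of the label.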
/// Create the file if it doesn't exist and if it does then ask to overwrite it.
/// If the user doesn't want to overwrite then we return [`Ok(None)`]
pub fn ask_to_create_file(path: &Path, question_policy: QuestionPolicy) -> Result<Option<fs::File>> {
pub fn ask_to_create_file(
path: &Path,
question_policy: QuestionPolicy,
question_action: QuestionAction,
) -> Result<Option<fs::File>> {
match fs::OpenOptions::new().write(true).create_new(true).open(path) {
Ok(w) => Ok(Some(w)),
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {
if user_wants_to_overwrite(path, question_policy)? {
utils::remove_file_or_dir(path)?;
Ok(Some(fs::File::create(path)?))
} else {
Ok(None)
let action = match question_policy {
QuestionPolicy::AlwaysYes => FileConflitOperation::Overwrite,
QuestionPolicy::AlwaysNo => FileConflitOperation::Cancel,
QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action)?,
};
match action {
FileConflitOperation::Merge => Ok(Some(fs::File::create(path)?)),
FileConflitOperation::Overwrite => {
utils::remove_file_or_dir(path)?;
Ok(Some(fs::File::create(path)?))
}
FileConflitOperation::Cancel => Ok(None),
FileConflitOperation::Rename => {
let renamed_file_path = utils::rename_for_available_filename(path);
Ok(Some(fs::File::create(renamed_file_path)?))
}
}
}
Err(e) => Err(Error::from(e)),
@ -90,6 +150,108 @@ pub fn user_wants_to_continue(
}
}
/// Choice dialog for the end user with an [option1/option2/...] question.
/// Each option is a [Choice] entity holding a value `T` that is returned when that option is selected
pub struct ChoicePrompt<'a, T: Default> {
/// The message to be displayed before the options
/// e.g.: "Do you want to overwrite 'FILE'?"
pub prompt: String,
pub choises: Vec<Choice<'a, T>>,
}
/// A single choice shown as an option to the user in a [ChoicePrompt]
/// It holds a label and a color to display to the user, and a real value to be returned
pub struct Choice<'a, T: Default> {
label: &'a str,
value: T,
color: &'a str,
}
impl<'a, T: Default> ChoicePrompt<'a, T> {
/// Creates a new ChoicePrompt.
pub fn new(prompt: impl Into<String>, choises: impl IntoIterator<Item = (&'a str, T, &'a str)>) -> Self {
Self {
prompt: prompt.into(),
choises: choises
.into_iter()
.map(|(label, value, color)| Choice { label, value, color })
.collect(),
}
}
/// Prints the prompt, reads input, and compares it against each choice's label,
/// returning the real value of the selected choice
pub fn ask(mut self) -> crate::Result<T> {
let message = self.prompt;
#[cfg(not(feature = "allow_piped_choice"))]
if !stdin().is_terminal() {
eprintln!("{}", message);
eprintln!("Pass --yes to proceed");
return Ok(T::default());
}
let _locks = lock_and_flush_output_stdio()?;
let mut stdin_lock = stdin().lock();
// Keep asking the same question until a valid answer is given
loop {
let choice_prompt = if is_running_in_accessible_mode() {
self.choises
.iter()
.map(|choise| format!("{}{}{}", choise.color, choise.label, *colors::RESET))
.collect::<Vec<_>>()
.join("/")
} else {
let choises = self
.choises
.iter()
.map(|choise| {
format!(
"{}{}{}",
choise.color,
choise
.label
.chars()
.nth(0)
.expect("dev error, should be reported, we checked this won't happen"),
*colors::RESET
)
})
.collect::<Vec<_>>()
.join("/");
format!("[{}]", choises)
};
eprintln!("{} {}", message, choice_prompt);
let mut answer = String::new();
let bytes_read = stdin_lock.read_line(&mut answer)?;
if bytes_read == 0 {
let error = FinalError::with_title("Unexpected EOF when asking question.")
.detail("When asking the user:")
.detail(format!(" \"{message}\""))
.detail("Expected one of the options as answer, but found EOF instead.")
.hint("If using Ouch in scripting, consider using `--yes` and `--no`.");
return Err(error.into());
}
answer.make_ascii_lowercase();
let answer = answer.trim();
let chosen_index = self.choises.iter().position(|choise| choise.label.starts_with(answer));
if let Some(i) = chosen_index {
return Ok(self.choises.remove(i).value);
}
}
}
}
/// Confirmation dialog for end user with [Y/n] question.
///
/// If the placeholder is found in the prompt text, it will be replaced to form the final message.
@ -120,6 +282,7 @@ impl<'a> Confirmation<'a> {
(Some(placeholder), Some(subs)) => Cow::Owned(self.prompt.replace(placeholder, subs)),
};
#[cfg(not(feature = "allow_piped_choice"))]
if !stdin().is_terminal() {
eprintln!("{}", message);
eprintln!("Pass --yes to proceed");

View File

@ -1,7 +1,11 @@
#[macro_use]
mod utils;
use std::{iter::once, path::PathBuf};
use std::{
io::Write,
iter::once,
path::{Path, PathBuf},
};
use fs_err as fs;
use parse_display::Display;
@ -21,6 +25,8 @@ enum DirectoryExtension {
Tar,
Tbz,
Tbz2,
#[cfg(feature = "bzip3")]
Tbz3,
Tgz,
Tlz4,
Tlzma,
@ -36,12 +42,15 @@ enum DirectoryExtension {
enum FileExtension {
Bz,
Bz2,
#[cfg(feature = "bzip3")]
Bz3,
Gz,
Lz4,
Lzma,
Sz,
Xz,
Zst,
Br,
}
#[derive(Arbitrary, Debug, Display)]
@ -52,9 +61,9 @@ enum Extension {
}
/// Converts a list of extension structs to string
fn merge_extensions(ext: impl ToString, exts: Vec<FileExtension>) -> String {
fn merge_extensions(ext: impl ToString, exts: &[FileExtension]) -> String {
once(ext.to_string())
.chain(exts.into_iter().map(|x| x.to_string()))
.chain(exts.iter().map(|x| x.to_string()))
.collect::<Vec<_>>()
.join(".")
}
@ -77,7 +86,26 @@ fn create_random_files(dir: impl Into<PathBuf>, depth: u8, rng: &mut SmallRng) {
// create more random files in 0 to 2 new directories
for _ in 0..rng.gen_range(0..=2u32) {
create_random_files(&tempfile::tempdir_in(dir).unwrap().into_path(), depth - 1, rng);
create_random_files(tempfile::tempdir_in(dir).unwrap().into_path(), depth - 1, rng);
}
}
/// Create `n` random files in directory `dir`
#[cfg_attr(not(feature = "allow_piped_choice"), allow(dead_code))]
fn create_n_random_files(n: usize, dir: impl Into<PathBuf>, rng: &mut SmallRng) {
let dir: &PathBuf = &dir.into();
for _ in 0..n {
write_random_content(
&mut tempfile::Builder::new()
.prefix("file")
.tempfile_in(dir)
.unwrap()
.keep()
.unwrap()
.0,
rng,
);
}
}
@ -89,7 +117,7 @@ fn single_empty_file(ext: Extension, #[any(size_range(0..8).lift())] exts: Vec<F
let before = &dir.join("before");
fs::create_dir(before).unwrap();
let before_file = &before.join("file");
let archive = &dir.join(format!("file.{}", merge_extensions(ext, exts)));
let archive = &dir.join(format!("file.{}", merge_extensions(ext, &exts)));
let after = &dir.join("after");
fs::write(before_file, []).unwrap();
ouch!("-A", "c", before_file, archive);
@ -98,14 +126,13 @@ fn single_empty_file(ext: Extension, #[any(size_range(0..8).lift())] exts: Vec<F
}
/// Compress and decompress a single file
#[proptest(cases = 250)]
#[proptest(cases = 150)]
fn single_file(
ext: Extension,
#[any(size_range(0..8).lift())] exts: Vec<FileExtension>,
#[cfg_attr(not(target_arch = "arm"), strategy(proptest::option::of(0i16..12)))]
// Decrease the value of --level flag for `arm` systems, because our GitHub
// Actions CI runs QEMU which makes the memory consumption higher.
#[cfg_attr(target_arch = "arm", strategy(proptest::option::of(0i16..8)))]
#[any(size_range(0..6).lift())] exts: Vec<FileExtension>,
// Use faster --level for slower CI targets
#[cfg_attr(not(any(target_arch = "arm", target_abi = "eabihf")), strategy(proptest::option::of(0i16..12)))]
#[cfg_attr(target_arch = "arm", strategy(proptest::option::of(0i16..6)))]
level: Option<i16>,
) {
let dir = tempdir().unwrap();
@ -113,7 +140,7 @@ fn single_file(
let before = &dir.join("before");
fs::create_dir(before).unwrap();
let before_file = &before.join("file");
let archive = &dir.join(format!("file.{}", merge_extensions(ext, exts)));
let archive = &dir.join(format!("file.{}", merge_extensions(ext, &exts)));
let after = &dir.join("after");
write_random_content(
&mut fs::File::create(before_file).unwrap(),
@ -133,10 +160,9 @@ fn single_file(
fn single_file_stdin(
ext: Extension,
#[any(size_range(0..8).lift())] exts: Vec<FileExtension>,
#[cfg_attr(not(target_arch = "arm"), strategy(proptest::option::of(0i16..12)))]
// Decrease the value of --level flag for `arm` systems, because our GitHub
// Actions CI runs QEMU which makes the memory consumption higher.
#[cfg_attr(target_arch = "arm", strategy(proptest::option::of(0i16..8)))]
// Use faster --level for slower CI targets
#[cfg_attr(not(any(target_arch = "arm", target_abi = "eabihf")), strategy(proptest::option::of(0i16..12)))]
#[cfg_attr(target_arch = "arm", strategy(proptest::option::of(0i16..6)))]
level: Option<i16>,
) {
let dir = tempdir().unwrap();
@ -144,7 +170,7 @@ fn single_file_stdin(
let before = &dir.join("before");
fs::create_dir(before).unwrap();
let before_file = &before.join("file");
let format = merge_extensions(&ext, exts);
let format = merge_extensions(&ext, &exts);
let archive = &dir.join(format!("file.{}", format));
let after = &dir.join("after");
write_random_content(
@ -173,22 +199,19 @@ fn single_file_stdin(
assert_same_directory(before, after, false);
}
/// Compress and decompress a directory with random content generated with create_random_files
///
/// This one runs only 50 times because there are only `.zip` and `.tar` to be tested, and
/// single-file formats testing is done in the other test
#[proptest(cases = 50)]
/// Compress and decompress a directory with random content generated with `create_random_files`
#[proptest(cases = 25)]
fn multiple_files(
ext: DirectoryExtension,
#[any(size_range(0..5).lift())] exts: Vec<FileExtension>,
#[strategy(0u8..4)] depth: u8,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
#[strategy(0u8..3)] depth: u8,
) {
let dir = tempdir().unwrap();
let dir = dir.path();
let before = &dir.join("before");
let before_dir = &before.join("dir");
fs::create_dir_all(before_dir).unwrap();
let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, exts)));
let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
let after = &dir.join("after");
create_random_files(before_dir, depth, &mut SmallRng::from_entropy());
ouch!("-A", "c", before_dir, archive);
@ -196,6 +219,396 @@ fn multiple_files(
assert_same_directory(before, after, !matches!(ext, DirectoryExtension::Zip));
}
#[proptest(cases = 25)]
fn multiple_files_with_conflict_and_choice_to_overwrite(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
#[strategy(0u8..3)] depth: u8,
) {
let dir = tempdir().unwrap();
let dir = dir.path();
let before = &dir.join("before");
let before_dir = &before.join("dir");
fs::create_dir_all(before_dir).unwrap();
create_random_files(before_dir, depth, &mut SmallRng::from_entropy());
let after = &dir.join("after");
let after_dir = &after.join("dir");
fs::create_dir_all(after_dir).unwrap();
create_random_files(after_dir, depth, &mut SmallRng::from_entropy());
let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
ouch!("-A", "c", before_dir, archive);
crate::utils::cargo_bin()
.arg("decompress")
.arg(archive)
.arg("-d")
.arg(after)
.arg("--yes")
.assert()
.success();
assert_same_directory(before, after, false);
}
#[proptest(cases = 25)]
fn multiple_files_with_conflict_and_choice_to_not_overwrite(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
#[strategy(0u8..3)] depth: u8,
) {
let dir = tempdir().unwrap();
let dir = dir.path();
let before = &dir.join("before");
let before_dir = &before.join("dir");
fs::create_dir_all(before_dir).unwrap();
create_random_files(before_dir, depth, &mut SmallRng::from_entropy());
let after = &dir.join("after");
let after_dir = &after.join("dir");
fs::create_dir_all(after_dir).unwrap();
let after_backup = &dir.join("after_backup");
let after_backup_dir = &after_backup.join("dir");
fs::create_dir_all(after_backup_dir).unwrap();
// Create a file with the same name as one of the files in the after directory
fs::write(after_dir.join("something.txt"), "Some content").unwrap();
fs::copy(after_dir.join("something.txt"), after_backup_dir.join("something.txt")).unwrap();
let archive = &dir.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
ouch!("-A", "c", before_dir, archive);
crate::utils::cargo_bin()
.arg("decompress")
.arg(archive)
.arg("-d")
.arg(after)
.arg("--no")
.assert()
.success();
assert_same_directory(after, after_backup, false);
}
#[cfg(feature = "allow_piped_choice")]
#[proptest(cases = 25)]
fn multiple_files_with_conflict_and_choice_to_rename(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
let temp_dir = tempdir().unwrap();
let root_path = temp_dir.path();
let src_files_path = root_path.join("src_files");
fs::create_dir_all(&src_files_path).unwrap();
create_n_random_files(5, &src_files_path, &mut SmallRng::from_entropy());
// Pre-fill the destination to force a conflict
let dest_files_path = root_path.join("dest_files");
fs::create_dir_all(&dest_files_path).unwrap();
create_n_random_files(5, &dest_files_path, &mut SmallRng::from_entropy());
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
ouch!("-A", "c", &src_files_path, archive);
let dest_files_path_renamed = &root_path.join("dest_files_1");
assert_eq!(false, dest_files_path_renamed.exists());
crate::utils::cargo_bin()
.arg("decompress")
.arg(archive)
.arg("-d")
.arg(&dest_files_path)
.write_stdin("r")
.assert()
.success();
assert_same_directory(src_files_path, dest_files_path_renamed.join("src_files"), false);
}
#[cfg(feature = "allow_piped_choice")]
#[proptest(cases = 25)]
fn multiple_files_with_conflict_and_choice_to_rename_with_already_a_renamed(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
let temp_dir = tempdir().unwrap();
let root_path = temp_dir.path();
let src_files_path = root_path.join("src_files");
fs::create_dir_all(&src_files_path).unwrap();
create_n_random_files(5, &src_files_path, &mut SmallRng::from_entropy());
// Pre-fill the destination, plus a destination already renamed with '_1'
let dest_files_path = root_path.join("dest_files");
fs::create_dir_all(&dest_files_path).unwrap();
create_n_random_files(5, &dest_files_path, &mut SmallRng::from_entropy());
let dest_files_path_1 = root_path.join("dest_files_1");
fs::create_dir_all(&dest_files_path_1).unwrap();
create_n_random_files(5, &dest_files_path_1, &mut SmallRng::from_entropy());
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
ouch!("-A", "c", &src_files_path, archive);
let dest_files_path_renamed = &root_path.join("dest_files_2");
assert_eq!(false, dest_files_path_renamed.exists());
crate::utils::cargo_bin()
.arg("decompress")
.arg(archive)
.arg("-d")
.arg(&dest_files_path)
.write_stdin("r")
.assert()
.success();
assert_same_directory(src_files_path, dest_files_path_renamed.join("src_files"), false);
}
#[proptest(cases = 25)]
fn smart_unpack_with_single_file(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
let temp_dir = tempdir().unwrap();
let root_path = temp_dir.path();
let src_files_path = root_path.join("src_files");
fs::create_dir_all(&src_files_path).unwrap();
let files_path = ["file1.txt"]
.into_iter()
.map(|f| src_files_path.join(f))
.inspect(|path| {
let mut file = fs::File::create(path).unwrap();
file.write_all("Some content".as_bytes()).unwrap();
})
.collect::<Vec<_>>();
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
.args(files_path)
.arg(archive)
.assert()
.success();
let output_file = root_path.join("file1.txt");
assert!(!output_file.exists());
// Decompress the archive with Smart Unpack
crate::utils::cargo_bin()
.current_dir(root_path)
.arg("decompress")
.arg(archive)
.assert()
.success();
assert!(output_file.exists());
let output_content = fs::read_to_string(&output_file).unwrap();
assert_eq!(output_content, "Some content");
}
#[proptest(cases = 25)]
fn smart_unpack_with_multiple_files(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
let temp_dir = tempdir().unwrap();
let root_path = temp_dir.path();
let src_files_path = root_path.join("src_files");
fs::create_dir_all(&src_files_path).unwrap();
["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"]
.into_iter()
.map(|f| src_files_path.join(f))
.for_each(|path| {
let mut file = fs::File::create(&path).unwrap();
file.write_all("Some content".as_bytes()).unwrap();
});
let input_files = src_files_path
.read_dir()
.unwrap()
.map(|entry| entry.unwrap().path())
.collect::<Vec<PathBuf>>();
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
let output_path = root_path.join("archive");
assert!(!output_path.exists());
crate::utils::cargo_bin()
.arg("compress")
.args(input_files)
.arg(archive)
.assert()
.success();
crate::utils::cargo_bin()
.current_dir(root_path)
.arg("decompress")
.arg(archive)
.assert()
.success();
assert!(output_path.exists(), "Output directory does not exist");
assert_same_directory(src_files_path, output_path, false);
}
#[proptest(cases = 25)]
fn no_smart_unpack_with_single_file(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
let temp_dir = tempdir().unwrap();
let root_path = temp_dir.path();
let src_files_path = root_path.join("src_files");
fs::create_dir_all(&src_files_path).unwrap();
["file1.txt"]
.into_iter()
.map(|f| src_files_path.join(f))
.for_each(|path| {
let mut file = fs::File::create(&path).unwrap();
file.write_all("Some content".as_bytes()).unwrap();
});
let input_files = src_files_path
.read_dir()
.unwrap()
.map(|entry| entry.unwrap().path())
.collect::<Vec<PathBuf>>();
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
let output_path = root_path.join("archive");
assert!(!output_path.exists());
crate::utils::cargo_bin()
.arg("compress")
.args(input_files)
.arg(archive)
.assert()
.success();
crate::utils::cargo_bin()
.current_dir(root_path)
.arg("decompress")
.arg("--no-smart-unpack")
.arg(archive)
.assert()
.success();
assert!(output_path.exists(), "Output directory does not exist");
assert_same_directory(src_files_path, output_path, false);
}
#[proptest(cases = 25)]
fn no_smart_unpack_with_multiple_files(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
let temp_dir = tempdir().unwrap();
let root_path = temp_dir.path();
let src_files_path = root_path.join("src_files");
fs::create_dir_all(&src_files_path).unwrap();
["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"]
.into_iter()
.map(|f| src_files_path.join(f))
.for_each(|path| {
let mut file = fs::File::create(&path).unwrap();
file.write_all("Some content".as_bytes()).unwrap();
});
let input_files = src_files_path
.read_dir()
.unwrap()
.map(|entry| entry.unwrap().path())
.collect::<Vec<PathBuf>>();
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
let output_path = root_path.join("archive");
assert!(!output_path.exists());
crate::utils::cargo_bin()
.arg("compress")
.args(input_files)
.arg(archive)
.assert()
.success();
crate::utils::cargo_bin()
.current_dir(root_path)
.arg("decompress")
.arg("--no-smart-unpack")
.arg(archive)
.assert()
.success();
assert!(output_path.exists(), "Output directory does not exist");
assert_same_directory(src_files_path, output_path, false);
}
#[proptest(cases = 25)]
fn multiple_files_with_disabled_smart_unpack_by_dir(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
let temp_dir = tempdir().unwrap();
let root_path = temp_dir.path();
let src_files_path = root_path.join("src_files");
fs::create_dir_all(&src_files_path).unwrap();
let files_path = ["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"]
.into_iter()
.map(|f| src_files_path.join(f))
.inspect(|path| {
let mut file = fs::File::create(path).unwrap();
file.write_all("Some content".as_bytes()).unwrap();
})
.collect::<Vec<_>>();
let dest_files_path = root_path.join("dest_files");
fs::create_dir_all(&dest_files_path).unwrap();
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
.args(files_path)
.arg(archive)
.assert()
.success();
crate::utils::cargo_bin()
.arg("decompress")
.arg(archive)
.arg("-d")
.arg(&dest_files_path)
.write_stdin("r")
.assert()
.success();
assert_same_directory(src_files_path, dest_files_path, false);
}
#[cfg(feature = "unrar")]
#[test]
fn unpack_rar() -> Result<(), Box<dyn std::error::Error>> {
@ -255,3 +668,200 @@ fn unpack_rar_stdin() -> Result<(), Box<dyn std::error::Error>> {
Ok(())
}
#[proptest(cases = 25)]
fn symlink_pack_and_unpack(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
if matches!(ext, DirectoryExtension::SevenZ) {
// Skip 7z because the 7z format does not support symlinks
return Ok(());
}
let temp_dir = tempdir()?;
let root_path = temp_dir.path();
let src_files_path = root_path.join("src_files");
fs::create_dir_all(&src_files_path)?;
let mut files_path = ["file1.txt", "file2.txt", "file3.txt", "file4.txt", "file5.txt"]
.into_iter()
.map(|f| src_files_path.join(f))
.inspect(|path| {
let mut file = fs::File::create(path).unwrap();
file.write_all("Some content".as_bytes()).unwrap();
})
.collect::<Vec<_>>();
let dest_files_path = root_path.join("dest_files");
fs::create_dir_all(&dest_files_path)?;
let symlink_path = src_files_path.join(Path::new("symlink"));
#[cfg(unix)]
std::os::unix::fs::symlink(&files_path[0], &symlink_path)?;
#[cfg(windows)]
std::os::windows::fs::symlink_file(&files_path[0], &symlink_path)?;
files_path.push(symlink_path);
let archive = &root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
.args(files_path.clone())
.arg(archive)
.assert()
.success();
crate::utils::cargo_bin()
.arg("decompress")
.arg(archive)
.arg("-d")
.arg(&dest_files_path)
.assert()
.success();
assert_same_directory(&src_files_path, &dest_files_path, false);
// check that the symlink is preserved as a symlink
for f in dest_files_path.as_path().read_dir()? {
let f = f?;
if f.file_name() == "symlink" {
assert!(f.file_type()?.is_symlink())
}
}
fs::remove_file(archive)?;
fs::remove_dir_all(&dest_files_path)?;
crate::utils::cargo_bin()
.arg("compress")
.arg("--follow-symlinks")
.args(files_path)
.arg(archive)
.assert()
.success();
crate::utils::cargo_bin()
.arg("decompress")
.arg(archive)
.arg("-d")
.arg(&dest_files_path)
.assert()
.success();
// check there are no symlinks
for f in dest_files_path.as_path().read_dir()? {
let f = f?;
assert!(!f.file_type().unwrap().is_symlink())
}
}
#[test]
fn no_git_folder_after_decompression_with_gitignore_flag_active() {
use std::process::Command;
let dir = tempdir().unwrap();
let dir_path = dir.path();
let before = dir_path.join("before");
let decompressed = dir_path.join("decompressed");
// Create directory and a dummy file
fs::create_dir(&before).unwrap();
fs::write(before.join("hello.txt"), b"Hello, world!").unwrap();
// Run `git init` inside it
Command::new("git")
.arg("init")
.current_dir(&before)
.output()
.expect("failed to run git init");
assert!(before.join(".git").exists(), ".git folder should exist after git init");
// Compress it
let archive = dir_path.join("archive.zip");
ouch!("c", &before, &archive, "--gitignore");
// Decompress it
ouch!("d", &archive, "-d", &decompressed);
// Find the subdirectory inside decompressed (e.g., "before")
let decompressed_subdir = fs::read_dir(&decompressed)
.unwrap()
.find_map(Result::ok)
.map(|entry| entry.path())
.expect("Expected one directory inside decompressed");
// Assert that the decompressed folder does not include `.git/`
assert!(
!decompressed_subdir.join(".git").exists(),
".git folder should not exist after decompression"
);
}
#[cfg(feature = "allow_piped_choice")]
#[proptest(cases = 25)]
fn unpack_multiple_sources_into_the_same_destination_with_merge(
ext: DirectoryExtension,
#[any(size_range(0..1).lift())] extra_extensions: Vec<FileExtension>,
) {
let temp_dir = tempdir()?;
let root_path = temp_dir.path();
let source_path = root_path
.join(format!("example_{}", merge_extensions(&ext, &extra_extensions)))
.join("sub_a")
.join("sub_b")
.join("sub_c");
fs::create_dir_all(&source_path)?;
let archive = root_path.join(format!("archive.{}", merge_extensions(&ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
.args([
fs::File::create(source_path.join("file1.txt"))?.path(),
fs::File::create(source_path.join("file2.txt"))?.path(),
fs::File::create(source_path.join("file3.txt"))?.path(),
])
.arg(&archive)
.assert()
.success();
fs::remove_dir_all(&source_path)?;
fs::create_dir_all(&source_path)?;
let archive1 = root_path.join(format!("archive1.{}", merge_extensions(&ext, &extra_extensions)));
crate::utils::cargo_bin()
.arg("compress")
.args([
fs::File::create(source_path.join("file3.txt"))?.path(),
fs::File::create(source_path.join("file4.txt"))?.path(),
fs::File::create(source_path.join("file5.txt"))?.path(),
])
.arg(&archive1)
.assert()
.success();
let out_path = root_path.join(format!("out_{}", merge_extensions(&ext, &extra_extensions)));
fs::create_dir_all(&out_path)?;
crate::utils::cargo_bin()
.arg("decompress")
.arg(archive)
.arg("-d")
.arg(&out_path)
.assert()
.success();
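// Decompress the second archive into the same directory; "m" answers the conflict prompt with "merge".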
crate::utils::cargo_bin()
.arg("decompress")
.arg(archive1)
.arg("-d")
.arg(&out_path)
.write_stdin("m")
.assert()
.success();
assert_eq!(5, out_path.as_path().read_dir()?.count());
}

View File

@ -6,7 +6,7 @@ expression: "run_ouch(\"ouch decompress a\", dir)"
- Files with missing extensions: <TMP_DIR>/a
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Alternatively, you can pass an extension to the '--format' flag:

View File

@ -7,5 +7,5 @@ expression: "run_ouch(\"ouch decompress a b.unknown\", dir)"
- Files with missing extensions: <TMP_DIR>/a
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst

View File

@ -6,7 +6,7 @@ expression: "run_ouch(\"ouch decompress b.unknown\", dir)"
- Files with unsupported extensions: <TMP_DIR>/b.unknown
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Alternatively, you can pass an extension to the '--format' flag:

View File

@ -6,7 +6,7 @@ expression: "run_ouch(\"ouch decompress a\", dir)"
- Files with missing extensions: <TMP_DIR>/a
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Alternatively, you can pass an extension to the '--format' flag:

View File

@ -7,5 +7,5 @@ expression: "run_ouch(\"ouch decompress a b.unknown\", dir)"
- Files with missing extensions: <TMP_DIR>/a
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst

View File

@ -6,7 +6,7 @@ expression: "run_ouch(\"ouch decompress b.unknown\", dir)"
- Files with unsupported extensions: <TMP_DIR>/b.unknown
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Alternatively, you can pass an extension to the '--format' flag:

View File

@ -5,7 +5,7 @@ expression: "run_ouch(\"ouch compress input output --format tar.gz.unknown\", di
[ERROR] Failed to parse `--format tar.gz.unknown`
- Unsupported extension 'unknown'
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:

View File

@ -5,7 +5,7 @@ expression: "run_ouch(\"ouch compress input output --format targz\", dir)"
[ERROR] Failed to parse `--format targz`
- Unsupported extension 'targz'
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:

View File

@ -5,7 +5,7 @@ expression: "run_ouch(\"ouch compress input output --format .tar.$#!@.rest\", di
[ERROR] Failed to parse `--format .tar.$#!@.rest`
- Unsupported extension '$#!@'
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:

View File

@ -5,7 +5,7 @@ expression: "run_ouch(\"ouch compress input output --format tar.gz.unknown\", di
[ERROR] Failed to parse `--format tar.gz.unknown`
- Unsupported extension 'unknown'
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:

View File

@ -5,7 +5,7 @@ expression: "run_ouch(\"ouch compress input output --format targz\", dir)"
[ERROR] Failed to parse `--format targz`
- Unsupported extension 'targz'
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:

View File

@ -5,7 +5,7 @@ expression: "run_ouch(\"ouch compress input output --format .tar.$#!@.rest\", di
[ERROR] Failed to parse `--format .tar.$#!@.rest`
- Unsupported extension '$#!@'
hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:

View File

@ -2,4 +2,4 @@
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output.gz\", dir)"
---
[INFO] Successfully compressed 'output.gz'.
[INFO] Successfully compressed 'output.gz'

View File

@ -2,5 +2,5 @@
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output.zip\", dir)"
---
[INFO] Compressing 'input'.
[INFO] Successfully compressed 'output.zip'.
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output.zip'

View File

@ -2,5 +2,5 @@
source: tests/ui.rs
expression: "run_ouch(\"ouch decompress output.zst\", dir)"
---
[INFO] Successfully decompressed archive in current directory.
[INFO] Successfully decompressed archive in current directory
[INFO] Files unpacked: 1

View File

@ -0,0 +1,13 @@
---
source: tests/ui.rs
expression: stdout_lines
---
{
"",
"[INFO] Files unpacked: 4",
"[INFO] Successfully decompressed archive in <TMP_DIR>/outputs",
"[INFO] extracted ( 0.00 B) \"outputs/inputs\"",
"[INFO] extracted ( 0.00 B) \"outputs/inputs/input\"",
"[INFO] extracted ( 0.00 B) \"outputs/inputs/input2\"",
"[INFO] extracted ( 0.00 B) \"outputs/inputs/input3\"",
}

View File

@ -2,5 +2,5 @@
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output1 --format tar.gz\", dir)"
---
[INFO] Compressing 'input'.
[INFO] Successfully compressed 'output1'.
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output1'

View File

@ -2,5 +2,5 @@
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output2 --format .tar.gz\", dir)"
---
[INFO] Compressing 'input'.
[INFO] Successfully compressed 'output2'.
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output2'

View File

@ -2,5 +2,5 @@
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output1 --format tar.gz\", dir)"
---
[INFO] Compressing 'input'.
[INFO] Successfully compressed 'output1'.
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output1'

View File

@ -2,5 +2,5 @@
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output2 --format .tar.gz\", dir)"
---
[INFO] Compressing 'input'.
[INFO] Successfully compressed 'output2'.
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output2'

View File

@ -1,6 +1,7 @@
---
source: tests/ui.rs
expression: "output_to_string(ouch!(\"-h\"))"
snapshot_kind: text
---
A command-line utility for easily compressing and decompressing files and directories.
@ -13,13 +14,14 @@ Commands:
help Print this message or the help of the given subcommand(s)
Options:
-y, --yes Skip [Y/n] questions positively
-n, --no Skip [Y/n] questions negatively
-y, --yes Skip [Y/n] questions, default to yes
-n, --no Skip [Y/n] questions, default to no
-A, --accessible Activate accessibility mode, reducing visual noise [env: ACCESSIBLE=]
-H, --hidden Ignores hidden files
-q, --quiet Silences output
-g, --gitignore Ignores files matched by git's ignore files
-H, --hidden Ignore hidden files
-q, --quiet Silence output
-g, --gitignore Ignore files matched by git's ignore files
-f, --format <FORMAT> Specify the format of the archive
-p, --password <PASSWORD> decompress or list with password
-p, --password <PASSWORD> Decompress or list with password
-c, --threads <THREADS> Concurrent working threads
-h, --help Print help (see more with '--help')
-V, --version Print version

View File

@ -1,10 +1,11 @@
---
source: tests/ui.rs
expression: "output_to_string(ouch!(\"--help\"))"
snapshot_kind: text
---
A command-line utility for easily compressing and decompressing files and directories.
Supported formats: tar, zip, gz, 7z, xz/lzma, bz/bz2, lz4, sz (Snappy), zst and rar.
Supported formats: tar, zip, gz, 7z, xz/lzma, bz/bz2, bz3, lz4, sz (Snappy), zst, rar and br.
Repository: https://github.com/ouch-org/ouch
@ -18,10 +19,10 @@ Commands:
Options:
-y, --yes
Skip [Y/n] questions positively
Skip [Y/n] questions, default to yes
-n, --no
Skip [Y/n] questions negatively
Skip [Y/n] questions, default to no
-A, --accessible
Activate accessibility mode, reducing visual noise
@ -29,19 +30,22 @@ Options:
[env: ACCESSIBLE=]
-H, --hidden
Ignores hidden files
Ignore hidden files
-q, --quiet
Silences output
Silence output
-g, --gitignore
Ignores files matched by git's ignore files
Ignore files matched by git's ignore files
-f, --format <FORMAT>
Specify the format of the archive
-p, --password <PASSWORD>
decompress or list with password
Decompress or list with password
-c, --threads <THREADS>
Concurrent working threads
-h, --help
Print help (see a summary with '-h')

View File

@ -2,11 +2,10 @@
///
/// See CONTRIBUTING.md for a brief guide on how to use [`insta`] for these tests.
/// [`insta`]: https://docs.rs/insta
#[macro_use]
mod utils;
use std::{ffi::OsStr, io, path::Path, process::Output};
use std::{collections::BTreeSet, ffi::OsStr, io, path::Path, process::Output};
use insta::assert_snapshot as ui;
use regex::Regex;
@ -142,6 +141,29 @@ fn ui_test_ok_decompress() {
ui!(run_ouch("ouch decompress output.zst", dir));
}
#[cfg(target_os = "linux")]
#[test]
fn ui_test_ok_decompress_multiple_files() {
let (_dropper, dir) = testdir().unwrap();
let inputs_dir = dir.join("inputs");
std::fs::create_dir(&inputs_dir).unwrap();
let outputs_dir = dir.join("outputs");
std::fs::create_dir(&outputs_dir).unwrap();
// prepare
create_files_in(&inputs_dir, &["input", "input2", "input3"]);
let compress_command = format!("ouch compress {} output.tar.zst", inputs_dir.to_str().unwrap());
run_ouch(&compress_command, dir);
let decompress_command = format!("ouch decompress output.tar.zst --dir {}", outputs_dir.to_str().unwrap());
let stdout = run_ouch(&decompress_command, dir);
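// Collect the lines into a BTreeSet so the snapshot stays stable regardless of output line ordering.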
let stdout_lines = stdout.split('\n').collect::<BTreeSet<_>>();
insta::assert_debug_snapshot!(stdout_lines);
}
#[test]
fn ui_test_usage_help_flag() {
insta::with_settings!({filters => vec![

View File

@ -51,7 +51,7 @@ pub fn create_files_in(dir: &Path, files: &[&str]) {
/// Write random content to a file
pub fn write_random_content(file: &mut impl Write, rng: &mut impl RngCore) {
let mut data = vec![0; rng.gen_range(0..4096)];
let mut data = vec![0; rng.gen_range(0..8192)];
rng.fill_bytes(&mut data);
file.write_all(&data).unwrap();
@ -88,7 +88,7 @@ pub fn assert_same_directory(x: impl Into<PathBuf>, y: impl Into<PathBuf>, prese
if ft_x.is_dir() && ft_y.is_dir() {
assert_same_directory(x.path(), y.path(), preserve_permissions);
} else if ft_x.is_file() && ft_y.is_file() {
} else if (ft_x.is_file() && ft_y.is_file()) || (ft_x.is_symlink() && ft_y.is_symlink()) {
assert_eq!(meta_x.len(), meta_y.len());
assert_eq!(fs::read(x.path()).unwrap(), fs::read(y.path()).unwrap());
} else {