Compare commits


No commits in common. "main" and "0.4.2" have entirely different histories.
main...0.4.2

78 changed files with 1177 additions and 5347 deletions

.github/dependabot.yml

@ -0,0 +1,12 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "daily"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"


@ -1,6 +1,4 @@
<!--
If your code changes text output, you might need to update snapshots
of UI tests, read more about `insta` at CONTRIBUTING.md.
Remember to edit `CHANGELOG.md` after opening the PR.
Make sure to check out CONTRIBUTING.md.
Don't forget to add a CHANGELOG.md entry!
-->


@ -1,17 +0,0 @@
name: Run tests for all combinations
on:
schedule:
- cron: "0 0 1,15 * *" # biweekly
push:
branches:
- main
paths-ignore:
- "**/*.md"
jobs:
run-tests-for-all-combinations:
uses: ./.github/workflows/build-artifacts-and-run-tests.yml
with:
matrix_all_combinations: true
artifact_upload_mode: none

.github/workflows/build-and-test.yml

@ -0,0 +1,148 @@
name: build-and-test
on:
push:
branches:
- main
tags:
- "[0-9]+.[0-9]+.[0-9]+"
pull_request:
jobs:
build:
name: build
runs-on: ${{ matrix.os }}
env:
CARGO: cargo
strategy:
matrix:
include:
- target: aarch64-unknown-linux-gnu
os: ubuntu-latest
no-zstd-thin: true
- target: aarch64-unknown-linux-musl
os: ubuntu-latest
no-zstd-thin: true
- target: armv7-unknown-linux-gnueabihf
os: ubuntu-latest
no-zstd-thin: true
- target: armv7-unknown-linux-musleabihf
os: ubuntu-latest
no-zstd-thin: true
- target: x86_64-apple-darwin
os: macos-latest
- target: x86_64-pc-windows-gnu
os: windows-latest
no-zstd-thin: true
ext: .exe
- target: x86_64-pc-windows-msvc
os: windows-latest
ext: .exe
- target: aarch64-pc-windows-msvc
os: windows-latest
ext: .exe
skip-test: true
- target: x86_64-unknown-linux-gnu
os: ubuntu-latest
- target: x86_64-unknown-linux-musl
os: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Install cross (non-x86_64 linux)
if: "!contains(matrix.target, 'x86_64') && runner.os == 'Linux'"
run: |
pushd "$(mktemp -d)"
wget https://github.com/cross-rs/cross/releases/download/v0.2.4/cross-x86_64-unknown-linux-musl.tar.gz
tar xf cross-x86_64-unknown-linux-musl.tar.gz
cp cross ~/.cargo/bin
popd
echo CARGO=cross >> $GITHUB_ENV
- name: Install dependencies (x86_64-unknown-linux-musl)
if: matrix.target == 'x86_64-unknown-linux-musl'
run: |
sudo apt-get update
sudo apt-get install musl-tools
- name: Set up extra cargo flags
if: matrix.no-zstd-thin
run: |
echo "EXTRA_CARGO_FLAGS=--no-default-features --features flate2/zlib,zip/deflate-zlib" >> $GITHUB_ENV
- name: Install Rust
run: |
rustup toolchain install stable nightly --profile minimal -t ${{ matrix.target }}
- name: Test on stable
if: ${{ ! matrix.skip-test }}
run: |
${{ env.CARGO }} +stable test --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
- name: Release on nightly
run: |
${{ env.CARGO }} +nightly build --release --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
env:
OUCH_ARTIFACTS_FOLDER: artifacts
RUSTFLAGS: -C strip=symbols
- name: Upload binary
uses: actions/upload-artifact@v3
with:
name: ouch-${{ matrix.target }}${{ matrix.ext }}
path: target/${{ matrix.target }}/release/ouch${{ matrix.ext }}
- name: Upload artifacts (musl)
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: actions/upload-artifact@v3
with:
name: artifacts
path: artifacts
clippy-rustfmt:
name: clippy-rustfmt
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: "Cargo: clippy, fmt"
run: |
rustup toolchain install stable --profile minimal -c clippy
rustup toolchain install nightly --profile minimal -c rustfmt
cargo +stable clippy -- -D warnings
cargo +nightly fmt -- --check
github-release:
name: github-release
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
needs: build
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Download artifacts
uses: dawidd6/action-download-artifact@v2
with:
path: artifacts
- name: Package release assets
run: scripts/package-release-assets.sh
- name: Create release
uses: softprops/action-gh-release@v1
with:
draft: true
files: release/ouch-*


@ -1,162 +0,0 @@
# This is a reusable workflow
name: Build artifacts and run tests
on:
workflow_dispatch:
inputs:
matrix_all_combinations:
description: "if matrix should have all combinations of targets and features"
type: boolean
required: true
default: true
artifact_upload_mode:
description: "Control what artifacts to upload: 'none' for no uploads, 'with_default_features' to upload artifacts with default features (for releases), or 'all' for all feature combinations."
type: choice
options:
- none
- with_default_features
- all
required: true
workflow_call:
inputs:
matrix_all_combinations:
description: "if matrix should have all combinations of targets and features"
type: boolean
required: true
artifact_upload_mode:
description: "Control which artifacts to upload: 'none' for no uploads, 'with_default_features' to upload only artifacts with default features (use_zlib+use_zstd_thin+unrar+bzip3), or 'all' to upload all feature combinations."
type: string
required: true
jobs:
build-artifacts-and-run-tests:
runs-on: ${{ matrix.os || 'ubuntu-latest' }}
env:
CARGO: cargo
strategy:
fail-fast: false
matrix:
# TODO: avoid exploding the matrix by removing unrar and bzip3 from the all combinations runs
# I can add a monthly run with all combinations
feature-unrar: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
feature-bzip3: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
feature-use-zlib: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
feature-use-zstd-thin: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
target:
# native
- x86_64-unknown-linux-gnu
- x86_64-pc-windows-gnu
- x86_64-pc-windows-msvc
- aarch64-pc-windows-msvc
- x86_64-apple-darwin
# cross
- x86_64-unknown-linux-musl
- aarch64-unknown-linux-gnu
- aarch64-unknown-linux-musl
- armv7-unknown-linux-gnueabihf
- armv7-unknown-linux-musleabihf
include:
# runner overrides
- target: x86_64-pc-windows-gnu
os: windows-latest
- target: x86_64-pc-windows-msvc
os: windows-latest
- target: aarch64-pc-windows-msvc
os: windows-latest
- target: x86_64-apple-darwin
os: macos-latest
# targets that use cross
- target: x86_64-unknown-linux-musl
use-cross: true
- target: aarch64-unknown-linux-gnu
use-cross: true
- target: aarch64-unknown-linux-musl
use-cross: true
- target: armv7-unknown-linux-gnueabihf
use-cross: true
- target: armv7-unknown-linux-musleabihf
use-cross: true
# features (unless `matrix_all_combinations` is true, we only run these on linux-gnu)
- feature-unrar: false
target: x86_64-unknown-linux-gnu
- feature-use-zlib: true
target: x86_64-unknown-linux-gnu
- feature-use-zstd-thin: true
target: x86_64-unknown-linux-gnu
- feature-bzip3: false
target: x86_64-unknown-linux-gnu
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install cross
if: matrix.use-cross
run: |
pushd "$(mktemp -d)"
wget https://github.com/cross-rs/cross/releases/download/v0.2.4/cross-x86_64-unknown-linux-musl.tar.gz
tar xf cross-x86_64-unknown-linux-musl.tar.gz
cp cross ~/.cargo/bin
popd
echo CARGO=cross >> $GITHUB_ENV
- name: Concatenate features
id: concat-features
shell: bash
run: |
FEATURES=(allow_piped_choice)
if [[ "${{ matrix.feature-unrar }}" == true ]]; then FEATURES+=(unrar); fi
if [[ "${{ matrix.feature-use-zlib }}" == true ]]; then FEATURES+=(use_zlib); fi
if [[ "${{ matrix.feature-use-zstd-thin }}" == true ]]; then FEATURES+=(use_zstd_thin); fi
if [[ "${{ matrix.feature-bzip3 }}" == true ]]; then FEATURES+=(bzip3); fi
# Output plus-separated list for artifact names
IFS='+'
echo "FEATURES_PLUS=${FEATURES[*]}" >> $GITHUB_OUTPUT
# Output comma-separated list for cargo flags
IFS=','
echo "FEATURES_COMMA=${FEATURES[*]}" >> $GITHUB_OUTPUT
- name: Set up extra cargo flags
env:
FEATURES: ${{steps.concat-features.outputs.FEATURES_COMMA}}
shell: bash
run: |
FLAGS="--no-default-features"
if [[ -n "$FEATURES" ]]; then FLAGS+=" --features $FEATURES"; fi
echo "EXTRA_CARGO_FLAGS=$FLAGS" >> $GITHUB_ENV
- name: Install Rust
run: |
rustup toolchain install stable --profile minimal -t ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
with:
key: "${{ matrix.target }}-${{ matrix.feature-unrar }}-${{ matrix.feature-use-zlib }}-${{ matrix.feature-use-zstd-thin }}-${{ matrix.feature-bzip3 }}"
- name: Test on stable
# there's no way to run tests for ARM64 Windows for now
if: matrix.target != 'aarch64-pc-windows-msvc'
run: |
${{ env.CARGO }} +stable test --profile fast --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
- name: Build release artifacts (binary and completions)
if: ${{ inputs.artifact_upload_mode != 'none' }}
run: |
${{ env.CARGO }} +stable build --release --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
env:
OUCH_ARTIFACTS_FOLDER: man-page-and-completions-artifacts
- name: Upload release artifacts
if: |
${{ inputs.artifact_upload_mode != 'none' &&
(inputs.artifact_upload_mode == 'all' ||
(matrix.feature-unrar && matrix.feature-use-zlib && matrix.feature-use-zstd-thin && matrix.feature-bzip3)) }}
uses: actions/upload-artifact@v4
with:
name: ouch-${{ matrix.target }}${{ steps.concat-features.outputs.FEATURES_PLUS != '' && format('-{0}', steps.concat-features.outputs.FEATURES_PLUS) || '' }}
path: |
target/${{ matrix.target }}/release/ouch
target/${{ matrix.target }}/release/ouch.exe
man-page-and-completions-artifacts/
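
The "Concatenate features" step above joins the same bash array twice by changing `IFS` before expanding `${FEATURES[*]}`: once with `+` for artifact names and once with `,` for cargo flags. A minimal standalone sketch of that trick (the feature names here are just placeholders):

```sh
#!/usr/bin/env bash
FEATURES=(allow_piped_choice unrar use_zlib)

# Inside double quotes, "${FEATURES[*]}" joins elements with the first character of IFS
IFS='+'
echo "plus-separated:  ${FEATURES[*]}"   # allow_piped_choice+unrar+use_zlib

IFS=','
echo "comma-separated: ${FEATURES[*]}"   # allow_piped_choice,unrar,use_zlib
```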


@ -1,36 +0,0 @@
name: Automatic trigger draft release
on:
push:
tags:
- "[0-9]+.[0-9]+.[0-9]+-rc[0-9]+"
jobs:
call-workflow-build-artifacts-and-run-tests:
uses: ./.github/workflows/build-artifacts-and-run-tests.yml
with:
matrix_all_combinations: true
artifact_upload_mode: with_default_features
automated-draft-release:
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
needs: call-workflow-build-artifacts-and-run-tests
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: downloaded_artifacts
pattern: ouch-*
- name: Package release assets
run: scripts/package-release-assets.sh
- name: Create release
uses: softprops/action-gh-release@v2
with:
draft: true
files: output_assets/ouch-*

.github/workflows/manual-release.yml

@ -0,0 +1,33 @@
name: manual-release
on:
workflow_dispatch:
inputs:
run_id:
description: Run id of the action run to pull artifacts from
required: true
jobs:
github-release:
name: github-release
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Download artifacts
uses: dawidd6/action-download-artifact@v2
with:
path: artifacts
workflow: build-and-test.yml
run_id: ${{ github.event.inputs.run_id }}
- name: Package release assets
run: scripts/package-release-assets.sh
- name: Create release
uses: softprops/action-gh-release@v1
with:
draft: true
name: manual release ${{ github.event.inputs.run_id }}
files: release/ouch-*
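
Since the workflow above is a `workflow_dispatch` with a `run_id` input, it could be kicked off from the command line with the GitHub CLI; a rough sketch (the run id is a placeholder):

```sh
# Dispatch the manual release, pulling artifacts from a previous build-and-test run
gh workflow run manual-release.yml --repo ouch-org/ouch -f run_id=1234567890
```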


@ -1,35 +0,0 @@
name: PR workflow
on:
pull_request:
paths-ignore:
- "**/*.md"
jobs:
rustfmt-nightly-check:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: "Cargo: fmt"
run: |
rustup toolchain install nightly --profile minimal -c rustfmt
cargo +nightly fmt -- --check
clippy-checks:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: "Cargo: clippy"
run: |
rustup toolchain install stable --profile minimal -c clippy
cargo +stable clippy -- -D warnings
build-and-test:
uses: ./.github/workflows/build-artifacts-and-run-tests.yml
with:
matrix_all_combinations: false
artifact_upload_mode: none

.gitignore

@ -16,7 +16,3 @@ artifacts/
/benchmarks/input.*
/benchmarks/*.md
!/benchmarks/results.md
# IDE-specific setting
.vscode
.idea


@ -5,7 +5,7 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
_This changelog was created after v0.3.1. As a result, there may be slight inaccuracies with prior versions._
_This changelog was created after v0.3.1 was released. As a result, there may be slight inaccuracies with versions <= v0.3.1._
Categories Used:
@ -18,87 +18,7 @@ Categories Used:
**Bullet points in chronological order by PR**
## [Unreleased](https://github.com/ouch-org/ouch/compare/0.6.1...HEAD)
### New Features
- Merge folders in decompression [\#798](https://github.com/ouch-org/ouch/pull/798) ([tommady](https://github.com/tommady))
- Add `--no-smart-unpack` flag to decompression command to disable smart unpack [\#809](https://github.com/ouch-org/ouch/pull/809) ([talis-fb](https://github.com/talis-fb))
### Improvements
- Give better error messages when archive extensions are invalid [\#817](https://github.com/ouch-org/ouch/pull/817) ([marcospb19](https://github.com/marcospb19))
### Bug Fixes
- Fix tar extraction count when --quiet [\#824](https://github.com/ouch-org/ouch/pull/824) ([marcospb19](https://github.com/marcospb19))
- Fix 7z BadSignature error when compressing and then listing [\#819](https://github.com/ouch-org/ouch/pull/819) ([tommady](https://github.com/tommady))
### Tweaks
- Make `.bz3` opt-out [\#814](https://github.com/ouch-org/ouch/pull/814) ([amyspark](https://github.com/amyspark))
## [0.6.1](https://github.com/ouch-org/ouch/compare/0.6.0...0.6.1)
- Fix .zip crash when file mode isn't present [\#804](https://github.com/ouch-org/ouch/pull/804) ([marcospb19](https://github.com/marcospb19))
## [0.6.0](https://github.com/ouch-org/ouch/compare/0.5.1...0.6.0)
### New Features
- Add multithreading support for `zstd` compression [\#689](https://github.com/ouch-org/ouch/pull/689) ([nalabrie](https://github.com/nalabrie))
- Add `bzip3` support [\#522](https://github.com/ouch-org/ouch/pull/522) ([freijon](https://github.com/freijon))
- Add `--remove` flag for decompression subcommand to remove files after successful decompression [\#757](https://github.com/ouch-org/ouch/pull/757) ([ttys3](https://github.com/ttys3))
- Add `br` (Brotli) support [\#765](https://github.com/ouch-org/ouch/pull/765) ([killercup](https://github.com/killercup))
- Add rename option in overwrite menu [\#779](https://github.com/ouch-org/ouch/pull/779) ([talis-fb](https://github.com/talis-fb))
- Store symlinks by default and add `--follow-symlinks` to store the target files [\#789](https://github.com/ouch-org/ouch/pull/789) ([tommady](https://github.com/tommady))
### Bug Fixes
- Fix output corrupted on parallel decompression [\#642](https://github.com/ouch-org/ouch/pull/642) ([AntoniosBarotsis](https://github.com/AntoniosBarotsis))
### Tweaks
- CI refactor [\#578](https://github.com/ouch-org/ouch/pull/578) ([cyqsimon](https://github.com/cyqsimon))
- Use a prefix `tmp-ouch-` for temporary decompression path name to avoid conflicts [\#725](https://github.com/ouch-org/ouch/pull/725) ([valoq](https://github.com/valoq)) & [\#788](https://github.com/ouch-org/ouch/pull/788) ([talis-fb](https://github.com/talis-fb))
- Ignore `.git/` when `-g/--gitignore` is set [\#507](https://github.com/ouch-org/ouch/pull/507) ([talis-fb](https://github.com/talis-fb))
- Run clippy for tests too [\#738](https://github.com/ouch-org/ouch/pull/738) ([marcospb19](https://github.com/marcospb19))
- Sevenz-rust is unmaintained, switch to sevenz-rust2 [\#796](https://github.com/ouch-org/ouch/pull/796) ([tommady](https://github.com/tommady))
### Improvements
- Fix logging IO bottleneck [\#642](https://github.com/ouch-org/ouch/pull/642) ([AntoniosBarotsis](https://github.com/AntoniosBarotsis))
- Support decompression over stdin [\#692](https://github.com/ouch-org/ouch/pull/692) ([rcorre](https://github.com/rcorre))
- Make `--format` more forgiving with the formatting of the provided format [\#519](https://github.com/ouch-org/ouch/pull/519) ([marcospb19](https://github.com/marcospb19))
- Use buffered writer for list output [\#764](https://github.com/ouch-org/ouch/pull/764) ([killercup](https://github.com/killercup))
- Disable smart unpack when `--dir` flag is provided in decompress command [\#782](https://github.com/ouch-org/ouch/pull/782) ([talis-fb](https://github.com/talis-fb))
- Align file sizes at left for each extracted file to make output clearer [\#792](https://github.com/ouch-org/ouch/pull/792) ([talis-fb](https://github.com/talis-fb))
## [0.5.1](https://github.com/ouch-org/ouch/compare/0.5.0...0.5.1)
### Improvements
- Explicitly declare feature flags `use_zlib` & `use_zstd_thin` [\#564](https://github.com/ouch-org/ouch/pull/564) ([cyqsimon](https://github.com/cyqsimon))
### Tweaks
- Mention support for `7z` and `rar` in help message.
## [0.5.0](https://github.com/ouch-org/ouch/compare/0.4.2...0.5.0)
### New Features
- Add support for listing and decompressing `.rar` archives [\#529](https://github.com/ouch-org/ouch/pull/529) ([lmkra](https://github.com/lmkra))
- Add support for 7z [\#555](https://github.com/ouch-org/ouch/pull/555) ([Flat](https://github.com/flat) & [MisileLab](https://github.com/MisileLab))
### Bug Fixes
- Fix mime type detection [\#529](https://github.com/ouch-org/ouch/pull/529) ([lmkra](https://github.com/lmkra))
- Fix size unit inconsistency [\#502](https://github.com/ouch-org/ouch/pull/502) ([marcospb19](https://github.com/marcospb19))
### Improvements
- Hint completions generator to expand file paths [\#508](https://github.com/ouch-org/ouch/pull/508) ([marcospb19](https://github.com/marcospb19))
## [Unreleased](https://github.com/ouch-org/ouch/compare/0.4.2...HEAD)
## [0.4.2](https://github.com/ouch-org/ouch/compare/0.4.1...0.4.2)


@ -1,48 +1,19 @@
Thanks for your interest in contributing to `ouch`!
# Table of contents:
- [Code of Conduct](#code-of-conduct)
- [I want to ask a question or provide feedback](#i-want-to-ask-a-question-or-provide-feedback)
- [Adding a new feature](#adding-a-new-feature)
- [PRs](#prs)
- [Dealing with UI tests](#dealing-with-ui-tests)
## Code of Conduct
# Code of Conduct
We follow the [Rust Official Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct).
## I want to ask a question or provide feedback
# I want to ask a question or provide feedback
Create [an issue](https://github.com/ouch-org/ouch/issues) or go to [Ouch Discussions](https://github.com/ouch-org/ouch/discussions).
## Adding a new feature
# Adding a new feature
Before opening the PR, open an issue to discuss your addition, this increases the chance of your PR being accepted.
Before creating a PR with a new feature, please, open an issue to suggest your addition.
## PRs
This allows us to discuss the problem and solution, increasing the chance of your PR to be accepted.
- Pass all CI checks.
- After opening the PR, add a [CHANGELOG.md] entry.
# Don't forget to
[CHANGELOG.md]: https://github.com/ouch-org/ouch
## Dealing with UI tests
We use snapshots to do UI testing and guarantee a consistent output, this way, you can catch accidental changes or see what output changed in the PR diff.
- Run tests with `cargo` normally, or with a filter:
```sh
cargo test
# Only run UI tests
cargo test -- ui
```
- If some UI test failed, you should review it:
```sh
cargo insta review
```
- After addressing all, you should be able to `git add` and `commit` accordingly.
- In your PR, add a CHANGELOG.md entry.

Cargo.lock (generated)
File diff suppressed because it is too large.


@ -1,10 +1,7 @@
[package]
name = "ouch"
version = "0.6.1"
authors = [
"João Marcos <marcospb19@hotmail.com>",
"Vinícius Rodrigues Miguel <vrmiguel99@gmail.com>",
]
version = "0.4.2"
authors = ["Vinícius Rodrigues Miguel <vrmiguel99@gmail.com>", "João M. Bezerra <marcospb19@hotmail.com>"]
edition = "2021"
readme = "README.md"
repository = "https://github.com/ouch-org/ouch"
@ -15,82 +12,49 @@ description = "A command-line utility for easily compressing and decompressing f
[dependencies]
atty = "0.2.14"
brotli = "7.0.0"
bstr = { version = "1.10.0", default-features = false, features = ["std"] }
bytesize = "1.3.0"
bstr = { version = "1.6.0", default-features = false, features = ["std"] }
bzip2 = "0.4.4"
bzip3 = { version = "0.9.0", features = ["bundled"], optional = true }
clap = { version = "4.5.20", features = ["derive", "env"] }
filetime_creation = "0.2"
flate2 = { version = "1.0.30", default-features = false }
fs-err = "2.11.0"
gzp = { version = "0.11.3", default-features = false, features = [
"snappy_default",
] }
ignore = "0.4.23"
libc = "0.2.155"
clap = { version = "4.3.19", features = ["derive", "env"] }
filetime = "0.2.22"
flate2 = { version = "1.0.26", default-features = false }
fs-err = "2.9.0"
gzp = { version = "0.11.3", default-features = false, features = ["snappy_default"] }
ignore = "0.4.20"
libc = "0.2.147"
linked-hash-map = "0.5.6"
lz4_flex = "0.11.3"
num_cpus = "1.16.0"
once_cell = "1.20.2"
rayon = "1.10.0"
lzzzz = "1.0.4"
once_cell = "1.18.0"
rayon = "1.7.0"
same-file = "1.0.6"
sevenz-rust2 = { version = "0.13.1", features = ["compress", "aes256"] }
snap = "1.1.1"
tar = "0.4.42"
tempfile = "3.10.1"
time = { version = "0.3.36", default-features = false }
unrar = { version = "0.5.7", optional = true }
snap = "1.1.0"
tar = "0.4.39"
tempfile = "3.7.0"
time = { version = "0.3.25", default-features = false }
ubyte = { version = "0.10.3", default-features = false }
xz2 = "0.1.7"
zip = { version = "0.6.6", default-features = false, features = [
"time",
"aes-crypto",
] }
zstd = { version = "0.13.2", default-features = false, features = ["zstdmt"] }
zip = { version = "0.6.6", default-features = false, features = ["time"] }
zstd = { version = "0.12.4", default-features = false }
[target.'cfg(not(unix))'.dependencies]
is_executable = "1.0.1"
[build-dependencies]
clap = { version = "4.5.20", features = ["derive", "env", "string"] }
clap_complete = "4.5.28"
clap_mangen = "0.2.24"
clap = { version = "4.3.19", features = ["derive", "env", "string"] }
clap_complete = "4.3.2"
clap_mangen = "0.2.12"
[dev-dependencies]
assert_cmd = "2.0.14"
glob = "0.3.2"
infer = "0.16.0"
insta = { version = "1.40.0", features = ["filters"] }
itertools = "0.14.0"
memchr = "2.7.4"
parse-display = "0.9.1"
pretty_assertions = "1.4.1"
proptest = "1.5.0"
rand = { version = "0.8.5", default-features = false, features = [
"small_rng",
"std",
] }
regex = "1.10.4"
test-strategy = "0.4.0"
assert_cmd = "2.0.12"
infer = "0.15.0"
parse-display = "0.8.2"
proptest = "1.2.0"
rand = { version = "0.8.5", default-features = false, features = ["small_rng", "std"] }
test-strategy = "0.3.1"
[features]
default = ["unrar", "use_zlib", "use_zstd_thin", "bzip3"]
use_zlib = ["flate2/zlib", "gzp/deflate_zlib", "zip/deflate-zlib"]
use_zstd_thin = ["zstd/thin"]
allow_piped_choice = []
default = ["flate2/zlib", "gzp/deflate_zlib", "zip/deflate-zlib", "zstd/thin"]
# For generating binaries for releases
[profile.release]
lto = true
codegen-units = 1
opt-level = 3
strip = true
# When we need a fast binary that compiles slightly faster `release` (useful for CI)
[profile.fast]
inherits = "release"
lto = false
opt-level = 2
incremental = true
codegen-units = 32
strip = false


@ -1,8 +1,2 @@
[build.env]
passthrough = ["RUSTFLAGS", "OUCH_ARTIFACTS_FOLDER"]
[target.aarch64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:edge"
[target.armv7-unknown-linux-gnueabihf]
image = "ghcr.io/cross-rs/armv7-unknown-linux-gnueabihf:edge"
passthrough = ["RUSTFLAGS"]

LICENSE

@ -20,18 +20,7 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
---
Copyright notices from other projects:
Infer crate (MIT LICENSE):
> Copyright (c) 2019 Bojan
> Code at https://github.com/bojand/infer
Bzip3-rs crate (LGPL 3.0):
> Code for this crate is available at https://github.com/bczhc/bzip3-rs
> See its license at https://github.com/bczhc/bzip3-rs/blob/master/LICENSE
Bzip3 library (LGPL 3.0):
> Code for this library is available at https://github.com/kspalaiologos/bzip3
> See its license at https://github.com/kspalaiologos/bzip3/blob/master/LICENSE
Copyright (c) 2019 Bojan
https://github.com/bojand/infer


@ -111,28 +111,25 @@ Output:
# Supported formats
| Format | `.tar` | `.zip` | `7z` | `.gz` | `.xz`, `.lzma` | `.bz`, `.bz2` | `.bz3` | `.lz4` | `.sz` (Snappy) | `.zst` | `.rar` | `.br` |
|:---------:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| Supported | ✓ | ✓¹ | ✓¹ | ✓² | ✓ | ✓ | ✓ | ✓ | ✓² | ✓² | ✓³ | ✓ |
| Format | `.tar` | `.zip` | `.gz` | `.xz`, `.lzma` | `.bz`, `.bz2` | `.lz4` | `.sz` | `.zst` |
|:---------:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| Supported | ✓ | ✓¹ | ✓² | ✓ | ✓ | ✓ | ✓² | ✓ |
✓: Supports compression and decompression.
✓¹: Due to limitations of the compression format itself, (de)compression can't be done with streaming.
✓¹: Due to limitations of `.zip`, it doesn't support streaming (de)compression.
✓²: Supported, and compression runs in parallel.
✓³: Due to RAR's restrictive license, only decompression and listing can be supported.
If you wish to exclude non-free code from your build, you can disable RAR support
by building without the `unrar` feature.
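
As a sketch of what building without the `unrar` feature looks like when compiling from a source checkout, keeping the other default features declared in `Cargo.toml` (`use_zlib`, `use_zstd_thin`, `bzip3`):

```sh
# Build ouch without RAR support but with the remaining default features
cargo build --release --no-default-features --features use_zlib,use_zstd_thin,bzip3
```
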
`tar` aliases are also supported: `tgz`, `tbz`, `tbz2`, `tlz4`, `txz`, `tlzma`, `tsz`, `tzst`.
Formats can be chained:
- `.tar.gz`
- `.tar.gz.xz.zst.gz.lz4.sz`
- `.zst.gz`
- `.tar.gz.gz`
- `.tar.gz.gz.gz.zst.xz.bz.lz4`
If the filename has no extensions, `Ouch` will try to infer the format by the [file signature](https://en.wikipedia.org/wiki/List_of_file_signatures) and ask the user for confirmation.
If the filename has no extensions, `Ouch` will try to infer the format by the [file signature](https://en.wikipedia.org/wiki/List_of_file_signatures).
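
A small usage sketch of chained formats (the file names here are made up):

```sh
# Compress a folder through tar, then gzip
ouch compress notes/ notes.tar.gz

# Decompress it again; the chain is read back from the extensions
ouch decompress notes.tar.gz
```
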
# Installation
@ -166,20 +163,17 @@ Check the [releases page](https://github.com/ouch-org/ouch/releases).
Check the [wiki guide on compiling](https://github.com/ouch-org/ouch/wiki/Compiling-and-installing-from-source-code).
# Runtime Dependencies
# Dependencies
If running `ouch` results in a linking error, it means you're missing a runtime dependency.
If you're downloading binaries from the [releases page](https://github.com/ouch-org/ouch/releases), try the `musl` variants, those are static binaries that require no runtime dependencies.
If you installed `ouch` using the download script, you will need no dependencies (static MUSL binary).
Otherwise, you'll need these libraries installed on your system:
* [liblzma](https://www.7-zip.org/sdk.html)
* [libbz2](https://www.sourceware.org/bzip2)
* [libbz3](https://github.com/kspalaiologos/bzip3)
* [libz](https://www.zlib.net)
* [libbz2](https://www.sourceware.org/bzip2/)
* [libz](https://www.zlib.net/)
These should be available in your system's package manager.
These are available on all mainstream _Linux_ distributions and on _macOS_.
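
On Linux, a quick way to check whether one of these shared libraries is missing is to inspect the binary's dynamic dependencies; a diagnostic sketch (the path to the binary is an assumption):

```sh
# List the shared libraries ouch links against and flag any that are missing
ldd ./ouch | grep "not found"
```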
# Benchmarks
@ -201,14 +195,12 @@ Versions used:
# Contributing
`ouch` is made out of voluntary work, contributors are very welcome! Contributions of all sizes are appreciated.
`ouch` is made out of voluntary work, contributors are very welcome! No contribution is too small and all contributions are valued.
- Open an [issue](https://github.com/ouch-org/ouch/issues).
- Package it for your favorite distribution or package manager.
- Share it with a friend!
- Open a pull request.
If you're creating a Pull Request, check [CONTRIBUTING.md](./CONTRIBUTING.md).
- Share it with a friend!
[`tar`]: https://www.gnu.org/software/tar/
[infozip]: http://www.info-zip.org/


@ -20,8 +20,8 @@
| Command | Mean [ms] | Min [ms] | Max [ms] | Relative |
|:---|---:|---:|---:|---:|
| `ouch compress compiler output.zip` | 549.7 ± 4.3 | 543.6 | 558.6 | 1.00 |
| `zip output.zip -r compiler` | 581.3 ± 9.1 | 573.2 | 600.9 | 1.06 ± 0.02 |
| `ouch compress compiler output.zip` | 549.7 ± 4.3 | 543.6 | 558.6 | 1.00 |
| Command | Mean [ms] | Min [ms] | Max [ms] | Relative |
|:---|---:|---:|---:|---:|


@ -5,12 +5,18 @@
/// Set `OUCH_ARTIFACTS_FOLDER` to the name of the destination folder:
///
/// ```sh
/// OUCH_ARTIFACTS_FOLDER=man-page-and-completions-artifacts cargo build
/// OUCH_ARTIFACTS_FOLDER=my-folder cargo build
/// ```
///
/// All completion files will be generated inside of the folder "man-page-and-completions-artifacts".
/// All completion files will be generated inside of the folder "my-folder".
///
/// If the folder does not exist, it will be created.
///
/// We recommend you naming this folder "artifacts" for the sake of consistency.
///
/// ```sh
/// OUCH_ARTIFACTS_FOLDER=artifacts cargo build
/// ```
use std::{
env,
fs::{create_dir_all, File},


@ -1,60 +1,22 @@
#!/usr/bin/env bash
set -e
mkdir output_assets
echo "created folder 'output_assets/'"
ls -lA -w 1
cd downloaded_artifacts
echo "entered 'downloaded_artifacts/'"
ls -lA -w 1
mkdir release
cd artifacts
PLATFORMS=(
"aarch64-pc-windows-msvc"
"aarch64-unknown-linux-gnu"
"aarch64-unknown-linux-musl"
"armv7-unknown-linux-gnueabihf"
"armv7-unknown-linux-musleabihf"
"x86_64-apple-darwin"
"x86_64-pc-windows-gnu"
"x86_64-pc-windows-msvc"
"x86_64-unknown-linux-gnu"
"x86_64-unknown-linux-musl"
)
# TODO: remove allow_piped_choice later
DEFAULT_FEATURES="allow_piped_choice+unrar+use_zlib+use_zstd_thin+bzip3"
for dir in ouch-*; do
cp -r artifacts "$dir/completions"
mkdir "$dir/man"
mv "$dir"/completions/*.1 "$dir/man"
cp ../{README.md,LICENSE,CHANGELOG.md} "$dir"
for platform in "${PLATFORMS[@]}"; do
path="ouch-${platform}"
echo "Processing $path"
if [ ! -d "${path}-${DEFAULT_FEATURES}" ]; then
echo "ERROR: Could not find artifact directory for $platform with default features ($path)"
exit 1
fi
mv "${path}-${DEFAULT_FEATURES}" "$path" # remove the annoying suffix
cp ../{README.md,LICENSE,CHANGELOG.md} "$path"
mkdir -p "$path/man"
mkdir -p "$path/completions"
mv "$path"/man-page-and-completions-artifacts/*.1 "$path/man"
mv "$path"/man-page-and-completions-artifacts/* "$path/completions"
rm -r "$path/man-page-and-completions-artifacts"
if [[ "$platform" == *"-windows-"* ]]; then
mv "$path/target/$platform/release/ouch.exe" "$path"
rm -rf "$path/target"
zip -r "../output_assets/${path}.zip" "$path"
echo "Created output_assets/${path}.zip"
if [[ "$dir" = *.exe ]]; then
target=${dir%.exe}
mv "$dir" "$target"
zip -r "../release/$target.zip" "$target"
else
mv "$path/target/$platform/release/ouch" "$path"
rm -rf "$path/target"
chmod +x "$path/ouch"
tar czf "../output_assets/${path}.tar.gz" "$path"
echo "Created output_assets/${path}.tar.gz"
chmod +x "$dir/ouch"
tar czf "../release/$dir.tar.gz" "$dir"
fi
done
echo "Done."


@ -1,51 +1,13 @@
//! Accessibility mode functions.
//!
//! # Problem
//!
//! `Ouch`'s default output contains symbols which make it visually easier to
//! read, but harder for people who are visually impaired and rely on
//! text-to-voice readers.
//!
//! On top of that, people who use text-to-voice tools can't easily skim
//! through verbose lines of text, so they strongly benefit from fewer lines
//! of output.
//!
//! # Solution
//!
//! To tackle that, `Ouch` has an accessibility mode that filters out most of
//! the verbose logging, displaying only the most important pieces of
//! information.
//!
//! Accessible mode also changes how logs are displayed, to remove symbols
//! which are "noise" to text-to-voice tools and change formatting of error
//! messages.
//!
//! # Are impaired people actually benefiting from this?
//!
//! So far we don't know. Most CLI tools aren't accessible, so we can't expect
//! many impaired people to be using the terminal and CLI tools, including
//! `Ouch`.
//!
//! I consider this to be an experiment, and a tiny step towards the right
//! direction, `Ouch` shows that this is possible and easy to do, hopefully
//! we can use our experience to later create guides or libraries for other
//! developers.
use once_cell::sync::OnceCell;
/// Global flag for accessible mode.
/// Whether to enable accessible output (removes info output and reduces other
/// output, removes visual markers like '[' and ']').
pub static ACCESSIBLE: OnceCell<bool> = OnceCell::new();
/// Check if `Ouch` is running in accessible mode.
///
/// Check the module-level documentation for more details.
pub fn is_running_in_accessible_mode() -> bool {
ACCESSIBLE.get().copied().unwrap_or(false)
}
/// Set the value of the global [`ACCESSIBLE`] flag.
///
/// Check the module-level documentation for more details.
pub fn set_accessible(value: bool) {
if ACCESSIBLE.get().is_none() {
ACCESSIBLE.set(value).unwrap();


@ -1,7 +0,0 @@
use crate::Error;
pub fn no_support() -> Error {
Error::UnsupportedFormat {
reason: "BZip3 support is disabled for this build, possibly due to missing bindgen-cli dependency.".into(),
}
}


@ -1,11 +1,4 @@
//! Archive compression algorithms
#[cfg(not(feature = "bzip3"))]
pub mod bzip3_stub;
#[cfg(feature = "unrar")]
pub mod rar;
#[cfg(not(feature = "unrar"))]
pub mod rar_stub;
pub mod sevenz;
pub mod tar;
pub mod zip;


@ -1,72 +0,0 @@
//! Contains RAR-specific building and unpacking functions
use std::path::Path;
use unrar::Archive;
use crate::{
error::{Error, Result},
list::FileInArchive,
utils::{logger::info, Bytes},
};
/// Unpacks the archive given by `archive_path` into the folder given by `output_folder`.
/// Assumes that output_folder is empty
pub fn unpack_archive(
archive_path: &Path,
output_folder: &Path,
password: Option<&[u8]>,
quiet: bool,
) -> crate::Result<usize> {
let archive = match password {
Some(password) => Archive::with_password(archive_path, password),
None => Archive::new(archive_path),
};
let mut archive = archive.open_for_processing()?;
let mut unpacked = 0;
while let Some(header) = archive.read_header()? {
let entry = header.entry();
archive = if entry.is_file() {
if !quiet {
info(format!(
"extracted ({}) {}",
Bytes::new(entry.unpacked_size),
entry.filename.display(),
));
}
unpacked += 1;
header.extract_with_base(output_folder)?
} else {
header.skip()?
};
}
Ok(unpacked)
}
/// List contents of `archive_path`, returning a vector of archive entries
pub fn list_archive(
archive_path: &Path,
password: Option<&[u8]>,
) -> Result<impl Iterator<Item = Result<FileInArchive>>> {
let archive = match password {
Some(password) => Archive::with_password(archive_path, password),
None => Archive::new(archive_path),
};
Ok(archive.open_for_listing()?.map(|item| {
let item = item?;
let is_dir = item.is_directory();
let path = item.filename;
Ok(FileInArchive { path, is_dir })
}))
}
pub fn no_compression() -> Error {
Error::UnsupportedFormat {
reason: "Creating RAR archives is not allowed due to licensing restrictions.".into(),
}
}


@ -1,7 +0,0 @@
use crate::Error;
pub fn no_support() -> Error {
Error::UnsupportedFormat {
reason: "RAR support is disabled for this build, possibly due to licensing restrictions.".into(),
}
}


@ -1,209 +0,0 @@
//! SevenZip archive format compress function
use std::{
env,
io::{self, Read, Seek, Write},
path::{Path, PathBuf},
};
use bstr::ByteSlice;
use fs_err as fs;
use same_file::Handle;
use sevenz_rust2::SevenZArchiveEntry;
use crate::{
error::{Error, FinalError, Result},
list::FileInArchive,
utils::{
cd_into_same_dir_as,
logger::{info, warning},
Bytes, EscapedPathDisplay, FileVisibilityPolicy,
},
};
pub fn compress_sevenz<W>(
files: &[PathBuf],
output_path: &Path,
writer: W,
file_visibility_policy: FileVisibilityPolicy,
quiet: bool,
) -> crate::Result<W>
where
W: Write + Seek,
{
let mut writer = sevenz_rust2::SevenZWriter::new(writer)?;
let output_handle = Handle::from_path(output_path);
for filename in files {
let previous_location = cd_into_same_dir_as(filename)?;
// Unwrap safety:
// paths should be canonicalized by now, and the root directory rejected.
let filename = filename.file_name().unwrap();
for entry in file_visibility_policy.build_walker(filename) {
let entry = entry?;
let path = entry.path();
// If the output_path is the same as the input file, warn the user and skip the input (in order to avoid compression recursion)
if let Ok(handle) = &output_handle {
if matches!(Handle::from_path(path), Ok(x) if &x == handle) {
warning(format!(
"Cannot compress `{}` into itself, skipping",
output_path.display()
));
continue;
}
}
// This is printed for every file in `input_filenames` and has
// little importance for most users, but would generate lots of
// spoken text for users using screen readers, braille displays
// and so on
if !quiet {
info(format!("Compressing '{}'", EscapedPathDisplay::new(path)));
}
let metadata = match path.metadata() {
Ok(metadata) => metadata,
Err(e) => {
if e.kind() == std::io::ErrorKind::NotFound && path.is_symlink() {
// This path is for a broken symlink, ignore it
continue;
}
return Err(e.into());
}
};
let entry_name = path.to_str().ok_or_else(|| {
FinalError::with_title("7z requires that all entry names are valid UTF-8")
.detail(format!("File at '{path:?}' has a non-UTF-8 name"))
})?;
let entry = sevenz_rust2::SevenZArchiveEntry::from_path(path, entry_name.to_owned());
let entry_data = if metadata.is_dir() {
None
} else {
Some(fs::File::open(path)?)
};
writer.push_archive_entry::<fs::File>(entry, entry_data)?;
}
env::set_current_dir(previous_location)?;
}
let bytes = writer.finish()?;
Ok(bytes)
}
pub fn decompress_sevenz<R>(reader: R, output_path: &Path, password: Option<&[u8]>, quiet: bool) -> crate::Result<usize>
where
R: Read + Seek,
{
let mut count: usize = 0;
let entry_extract_fn = |entry: &SevenZArchiveEntry, reader: &mut dyn Read, path: &PathBuf| {
count += 1;
// Manually handle writing all files from 7z archive, due to the library excluding empty files
use std::io::BufWriter;
use filetime_creation as ft;
let file_path = output_path.join(entry.name());
if entry.is_directory() {
if !quiet {
info(format!(
"File {} extracted to \"{}\"",
entry.name(),
file_path.display()
));
}
if !path.exists() {
fs::create_dir_all(path)?;
}
} else {
if !quiet {
info(format!(
"extracted ({}) {:?}",
Bytes::new(entry.size()),
file_path.display(),
));
}
if let Some(parent) = path.parent() {
if !parent.exists() {
fs::create_dir_all(parent)?;
}
}
let file = fs::File::create(path)?;
let mut writer = BufWriter::new(file);
io::copy(reader, &mut writer)?;
ft::set_file_handle_times(
writer.get_ref().file(),
Some(ft::FileTime::from_system_time(entry.access_date().into())),
Some(ft::FileTime::from_system_time(entry.last_modified_date().into())),
Some(ft::FileTime::from_system_time(entry.creation_date().into())),
)
.unwrap_or_default();
}
Ok(true)
};
match password {
Some(password) => sevenz_rust2::decompress_with_extract_fn_and_password(
reader,
output_path,
sevenz_rust2::Password::from(password.to_str().map_err(|err| Error::InvalidPassword {
reason: err.to_string(),
})?),
entry_extract_fn,
)?,
None => sevenz_rust2::decompress_with_extract_fn(reader, output_path, entry_extract_fn)?,
}
Ok(count)
}
/// List contents of `archive_path`, returning a vector of archive entries
pub fn list_archive<R>(reader: R, password: Option<&[u8]>) -> Result<impl Iterator<Item = crate::Result<FileInArchive>>>
where
R: Read + Seek,
{
let mut files = Vec::new();
let entry_extract_fn = |entry: &SevenZArchiveEntry, _: &mut dyn Read, _: &PathBuf| {
files.push(Ok(FileInArchive {
path: entry.name().into(),
is_dir: entry.is_directory(),
}));
Ok(true)
};
match password {
Some(password) => {
let password = match password.to_str() {
Ok(p) => p,
Err(err) => {
return Err(Error::InvalidPassword {
reason: err.to_string(),
})
}
};
sevenz_rust2::decompress_with_extract_fn_and_password(
reader,
".",
sevenz_rust2::Password::from(password),
entry_extract_fn,
)?;
}
None => sevenz_rust2::decompress_with_extract_fn(reader, ".", entry_extract_fn)?,
}
Ok(files.into_iter())
}


@ -10,58 +10,43 @@ use std::{
use fs_err as fs;
use same_file::Handle;
use ubyte::ToByteUnit;
use crate::{
error::FinalError,
info,
list::FileInArchive,
utils::{
self,
logger::{info, warning},
Bytes, EscapedPathDisplay, FileVisibilityPolicy,
},
utils::{self, EscapedPathDisplay, FileVisibilityPolicy},
warning,
};
/// Unpacks the archive given by `archive` into the folder given by `into`.
/// Assumes that output_folder is empty
pub fn unpack_archive(reader: Box<dyn Read>, output_folder: &Path, quiet: bool) -> crate::Result<usize> {
assert!(output_folder.read_dir().expect("dir exists").count() == 0);
let mut archive = tar::Archive::new(reader);
let mut files_unpacked = 0;
for file in archive.entries()? {
let mut file = file?;
match file.header().entry_type() {
tar::EntryType::Symlink => {
let relative_path = file.path()?.to_path_buf();
let full_path = output_folder.join(&relative_path);
let target = file
.link_name()?
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::InvalidData, "Missing symlink target"))?;
#[cfg(unix)]
std::os::unix::fs::symlink(&target, &full_path)?;
#[cfg(windows)]
std::os::windows::fs::symlink_file(&target, &full_path)?;
}
tar::EntryType::Regular | tar::EntryType::Directory => {
file.unpack_in(output_folder)?;
}
_ => continue,
}
// This is printed for every file in the archive and has little
// importance for most users, but would generate lots of
// spoken text for users using screen readers, braille displays
// and so on
if !quiet {
info(format!(
"extracted ({}) {:?}",
Bytes::new(file.size()),
info!(
inaccessible,
"{:?} extracted. ({})",
utils::strip_cur_dir(&output_folder.join(file.path()?)),
));
}
file.size().bytes(),
);
files_unpacked += 1;
}
}
Ok(files_unpacked)
}
@ -102,7 +87,6 @@ pub fn build_archive_from_paths<W>(
writer: W,
file_visibility_policy: FileVisibilityPolicy,
quiet: bool,
follow_symlinks: bool,
) -> crate::Result<W>
where
W: Write,
@ -113,8 +97,7 @@ where
for filename in input_filenames {
let previous_location = utils::cd_into_same_dir_as(filename)?;
// Unwrap safety:
// paths should be canonicalized by now, and the root directory rejected.
// Safe unwrap, input shall be treated before
let filename = filename.file_name().unwrap();
for entry in file_visibility_policy.build_walker(filename) {
@ -122,13 +105,12 @@ where
let path = entry.path();
// If the output_path is the same as the input file, warn the user and skip the input (in order to avoid compression recursion)
if let Ok(handle) = &output_handle {
if let Ok(ref handle) = output_handle {
if matches!(Handle::from_path(path), Ok(x) if &x == handle) {
warning(format!(
"Cannot compress `{}` into itself, skipping",
warning!(
"The output file and the input file are the same: `{}`, skipping...",
output_path.display()
));
);
continue;
}
}
@ -138,29 +120,18 @@ where
// spoken text for users using screen readers, braille displays
// and so on
if !quiet {
info(format!("Compressing '{}'", EscapedPathDisplay::new(path)));
info!(inaccessible, "Compressing '{}'.", EscapedPathDisplay::new(path));
}
if path.is_dir() {
builder.append_dir(path, path)?;
} else if path.is_symlink() && !follow_symlinks {
let target_path = path.read_link()?;
let mut header = tar::Header::new_gnu();
header.set_entry_type(tar::EntryType::Symlink);
header.set_size(0);
builder.append_link(&mut header, path, &target_path).map_err(|err| {
FinalError::with_title("Could not create archive")
.detail("Unexpected error while trying to read link")
.detail(format!("Error: {err}."))
})?;
} else {
let mut file = match fs::File::open(path) {
Ok(f) => f,
Err(e) => {
if e.kind() == std::io::ErrorKind::NotFound && path.is_symlink() {
// This path is for a broken symlink, ignore it
if e.kind() == std::io::ErrorKind::NotFound && utils::is_symlink(path) {
// This path is for a broken symlink
// We just ignore it
continue;
}
return Err(e.into());


@ -10,42 +10,36 @@ use std::{
thread,
};
use filetime_creation::{set_file_mtime, FileTime};
use filetime::{set_file_mtime, FileTime};
use fs_err as fs;
use same_file::Handle;
use time::OffsetDateTime;
use ubyte::ToByteUnit;
use zip::{self, read::ZipFile, DateTime, ZipArchive};
use crate::{
error::FinalError,
info,
list::FileInArchive,
utils::{
cd_into_same_dir_as, get_invalid_utf8_paths,
logger::{info, info_accessible, warning},
pretty_format_list_of_paths, strip_cur_dir, Bytes, EscapedPathDisplay, FileVisibilityPolicy,
self, cd_into_same_dir_as, get_invalid_utf8_paths, pretty_format_list_of_paths, strip_cur_dir,
EscapedPathDisplay, FileVisibilityPolicy,
},
warning,
};
/// Unpacks the archive given by `archive` into the folder given by `output_folder`.
/// Assumes that output_folder is empty
pub fn unpack_archive<R>(
mut archive: ZipArchive<R>,
output_folder: &Path,
password: Option<&[u8]>,
quiet: bool,
) -> crate::Result<usize>
pub fn unpack_archive<R>(mut archive: ZipArchive<R>, output_folder: &Path, quiet: bool) -> crate::Result<usize>
where
R: Read + Seek,
{
assert!(output_folder.read_dir().expect("dir exists").count() == 0);
let mut unpacked_files = 0;
for idx in 0..archive.len() {
let mut file = match password {
Some(password) => archive
.by_index_decrypt(idx, password)?
.map_err(|_| zip::result::ZipError::UnsupportedArchive("Password required to decrypt file"))?,
None => archive.by_index(idx)?,
};
let mut file = archive.by_index(idx)?;
let file_path = match file.enclosed_name() {
Some(path) => path.to_owned(),
None => continue,
@ -62,7 +56,7 @@ where
// spoken text for users using screen readers, braille displays
// and so on
if !quiet {
info(format!("File {} extracted to \"{}\"", idx, file_path.display()));
info!(inaccessible, "File {} extracted to \"{}\"", idx, file_path.display());
}
fs::create_dir_all(&file_path)?;
}
@ -76,28 +70,16 @@ where
// same reason is in _is_dir: long, often not needed text
if !quiet {
info(format!(
"extracted ({}) {:?}",
Bytes::new(file.size()),
info!(
inaccessible,
"{:?} extracted. ({})",
file_path.display(),
));
file.size().bytes()
);
}
let mode = file.unix_mode();
let is_symlink = mode.is_some_and(|mode| mode & 0o170000 == 0o120000);
if is_symlink {
let mut target = String::new();
file.read_to_string(&mut target)?;
#[cfg(unix)]
std::os::unix::fs::symlink(&target, file_path)?;
#[cfg(windows)]
std::os::windows::fs::symlink_file(&target, file_path)?;
} else {
let mut output_file = fs::File::create(file_path)?;
io::copy(&mut file, &mut output_file)?;
}
set_last_modified_time(&file, file_path)?;
}
@ -113,10 +95,7 @@ where
}
/// List contents of `archive`, returning a vector of archive entries
pub fn list_archive<R>(
mut archive: ZipArchive<R>,
password: Option<&[u8]>,
) -> impl Iterator<Item = crate::Result<FileInArchive>>
pub fn list_archive<R>(mut archive: ZipArchive<R>) -> impl Iterator<Item = crate::Result<FileInArchive>>
where
R: Read + Seek + Send + 'static,
{
@ -129,31 +108,24 @@ where
}
}
let password = password.map(|p| p.to_owned());
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
for idx in 0..archive.len() {
let file_in_archive = (|| {
let zip_result = match password.clone() {
Some(password) => archive
.by_index_decrypt(idx, &password)?
.map_err(|_| zip::result::ZipError::UnsupportedArchive("Password required to decrypt file")),
None => archive.by_index(idx),
};
let file = match zip_result {
let maybe_file_in_archive = (|| {
let file = match archive.by_index(idx) {
Ok(f) => f,
Err(e) => return Err(e.into()),
Err(e) => return Some(Err(e.into())),
};
let path = file.enclosed_name().unwrap_or(&*file.mangled_name()).to_owned();
let path = file.enclosed_name()?.to_owned();
let is_dir = file.is_dir();
Ok(FileInArchive { path, is_dir })
Some(Ok(FileInArchive { path, is_dir }))
})();
if let Some(file_in_archive) = maybe_file_in_archive {
tx.send(file_in_archive).unwrap();
}
}
});
Files(rx)
@ -166,7 +138,6 @@ pub fn build_archive_from_paths<W>(
writer: W,
file_visibility_policy: FileVisibilityPolicy,
quiet: bool,
follow_symlinks: bool,
) -> crate::Result<W>
where
W: Write + Seek,
@ -197,8 +168,7 @@ where
for filename in input_filenames {
let previous_location = cd_into_same_dir_as(filename)?;
// Unwrap safety:
// paths should be canonicalized by now, and the root directory rejected.
// Safe unwrap, input shall be treated before
let filename = filename.file_name().unwrap();
for entry in file_visibility_policy.build_walker(filename) {
@ -206,12 +176,13 @@ where
let path = entry.path();
// If the output_path is the same as the input file, warn the user and skip the input (in order to avoid compression recursion)
if let Ok(handle) = &output_handle {
if let Ok(ref handle) = output_handle {
if matches!(Handle::from_path(path), Ok(x) if &x == handle) {
warning(format!(
"Cannot compress `{}` into itself, skipping",
warning!(
"The output file and the input file are the same: `{}`, skipping...",
output_path.display()
));
);
continue;
}
}
@ -220,14 +191,15 @@ where
// spoken text for users using screen readers, braille displays
// and so on
if !quiet {
info(format!("Compressing '{}'", EscapedPathDisplay::new(path)));
info!(inaccessible, "Compressing '{}'.", EscapedPathDisplay::new(path));
}
let metadata = match path.metadata() {
Ok(metadata) => metadata,
Err(e) => {
if e.kind() == std::io::ErrorKind::NotFound && path.is_symlink() {
// This path is for a broken symlink, ignore it
if e.kind() == std::io::ErrorKind::NotFound && utils::is_symlink(path) {
// This path is for a broken symlink
// We just ignore it
continue;
}
return Err(e.into());
@ -235,30 +207,10 @@ where
};
#[cfg(unix)]
let mode = metadata.permissions().mode();
let entry_name = path.to_str().ok_or_else(|| {
FinalError::with_title("Zip requires that all directories names are valid UTF-8")
.detail(format!("File at '{path:?}' has a non-UTF-8 name"))
})?;
let options = options.unix_permissions(metadata.permissions().mode());
if metadata.is_dir() {
writer.add_directory(entry_name, options)?;
} else if path.is_symlink() && !follow_symlinks {
let target_path = path.read_link()?;
let target_name = target_path.to_str().ok_or_else(|| {
FinalError::with_title("Zip requires that all directories names are valid UTF-8")
.detail(format!("File at '{target_path:?}' has a non-UTF-8 name"))
})?;
// This approach writes the symlink target path as the content of the symlink entry.
// We detect symlinks during extraction by checking for the Unix symlink mode (0o120000) in the entry's permissions.
#[cfg(unix)]
let symlink_options = options.unix_permissions(0o120000 | (mode & 0o777));
#[cfg(windows)]
let symlink_options = options.unix_permissions(0o120777);
writer.add_symlink(entry_name, target_name, symlink_options)?;
writer.add_directory(path.to_str().unwrap().to_owned(), options)?;
} else {
#[cfg(not(unix))]
let options = if is_executable::is_executable(path) {
@ -268,13 +220,10 @@ where
};
let mut file = fs::File::open(path)?;
#[cfg(unix)]
let options = options.unix_permissions(mode);
// Updated last modified time
let last_modified_time = options.last_modified_time(get_last_modified_time(&file));
writer.start_file(entry_name, last_modified_time)?;
writer.start_file(
path.to_str().unwrap(),
options.last_modified_time(get_last_modified_time(&file)),
)?;
io::copy(&mut file, &mut writer)?;
}
}
@ -299,7 +248,7 @@ fn display_zip_comment_if_exists(file: &ZipFile) {
// the future, maybe asking the user if he wants to display the comment
// (informing him of its size) would be sensible for both normal and
// accessibility mode..
info_accessible(format!("Found comment in {}: {}", file.name(), comment));
info!(accessible, "Found comment in {}: {}", file.name(), comment);
}
}


@ -11,11 +11,9 @@ use std::{
use crate::{
error::FinalError,
extension::{build_archive_file_suggestion, Extension},
utils::{
logger::{info_accessible, warning},
pretty_format_list_of_paths, try_infer_extension, user_wants_to_continue, EscapedPathDisplay,
},
QuestionAction, QuestionPolicy, Result,
info,
utils::{pretty_format_list_of_paths, try_infer_extension, user_wants_to_continue, EscapedPathDisplay},
warning, QuestionAction, QuestionPolicy, Result,
};
/// Check if the mime type matches the detected extensions.
@ -35,11 +33,12 @@ pub fn check_mime_type(
if let Some(detected_format) = try_infer_extension(path) {
// Inferring the file extension can have unpredicted consequences (e.g. the user just
// mistyped, ...) which we should always inform the user about.
warning(format!(
"We detected a file named `{}`, do you want to decompress it?",
info!(
accessible,
"Detected file: `{}` extension as `{}`",
path.display(),
));
detected_format
);
if user_wants_to_continue(path, question_policy, QuestionAction::Decompression)? {
formats.push(detected_format);
} else {
@ -55,11 +54,11 @@ pub fn check_mime_type(
.compression_formats
.ends_with(detected_format.compression_formats)
{
warning(format!(
warning!(
"The file extension: `{}` differ from the detected extension: `{}`",
outer_ext, detected_format
));
outer_ext,
detected_format
);
if !user_wants_to_continue(path, question_policy, QuestionAction::Decompression)? {
return Ok(ControlFlow::Break(()));
}
@ -67,10 +66,7 @@ pub fn check_mime_type(
} else {
// NOTE: If this actually produces no false positives, we can upgrade it in the future
// to a warning and ask the user if he wants to continue decompressing.
info_accessible(format!(
"Failed to confirm the format of `{}` by sniffing the contents, file might be misnamed",
path.display()
));
info!(accessible, "Could not detect the extension of `{}`", path.display());
}
Ok(ControlFlow::Continue(()))
}
@ -127,53 +123,32 @@ pub fn check_archive_formats_position(formats: &[Extension], output_path: &Path)
/// Check if all provided files have formats to decompress.
pub fn check_missing_formats_when_decompressing(files: &[PathBuf], formats: &[Vec<Extension>]) -> Result<()> {
let files_with_broken_extension: Vec<&PathBuf> = files
let files_missing_format: Vec<PathBuf> = files
.iter()
.zip(formats)
.filter(|(_, format)| format.is_empty())
.map(|(input_path, _)| input_path)
.map(|(input_path, _)| PathBuf::from(input_path))
.collect();
if files_with_broken_extension.is_empty() {
return Ok(());
}
let (files_with_unsupported_extensions, files_missing_extension): (Vec<&PathBuf>, Vec<&PathBuf>) =
files_with_broken_extension
.iter()
.partition(|path| path.extension().is_some());
let mut error = FinalError::with_title("Cannot decompress files");
if !files_with_unsupported_extensions.is_empty() {
error = error.detail(format!(
"Files with unsupported extensions: {}",
pretty_format_list_of_paths(&files_with_unsupported_extensions)
));
}
if !files_missing_extension.is_empty() {
error = error.detail(format!(
"Files with missing extensions: {}",
pretty_format_list_of_paths(&files_missing_extension)
));
}
error = error.detail("Decompression formats are detected automatically from file extension");
error = error.hint_all_supported_formats();
// If there's exactly one file, give a suggestion to use `--format`
if let &[path] = files_with_broken_extension.as_slice() {
error = error
if let Some(path) = files_missing_format.first() {
let error = FinalError::with_title("Cannot decompress files without extensions")
.detail(format!(
"Files without supported extensions: {}",
pretty_format_list_of_paths(&files_missing_format)
))
.detail("Decompression formats are detected automatically by the file extension")
.hint("Provide a file with a supported extension:")
.hint(" ouch decompress example.tar.gz")
.hint("")
.hint("Alternatively, you can pass an extension to the '--format' flag:")
.hint("Or overwrite this option with the '--format' flag:")
.hint(format!(
" ouch decompress {} --format tar.gz",
EscapedPathDisplay::new(path),
));
}
Err(error.into())
return Err(error.into());
}
Ok(())
}
/// Check if there is a first format when compressing, and returns it.


@ -5,19 +5,19 @@ use clap::{Parser, ValueHint};
// Ouch command line options (docstrings below are part of --help)
/// A command-line utility for easily compressing and decompressing files and directories.
///
/// Supported formats: tar, zip, gz, 7z, xz/lzma, bz/bz2, bz3, lz4, sz (Snappy), zst, rar and br.
/// Supported formats: tar, zip, gz, xz/lzma, bz/bz2, lz4, sz, zst.
///
/// Repository: https://github.com/ouch-org/ouch
#[derive(Parser, Debug, PartialEq)]
#[derive(Parser, Debug)]
#[command(about, version)]
// Disable rustdoc::bare_urls because rustdoc parses URLs differently than Clap
#[allow(rustdoc::bare_urls)]
pub struct CliArgs {
/// Skip [Y/n] questions, default to yes
/// Skip [Y/n] questions positively
#[arg(short, long, conflicts_with = "no", global = true)]
pub yes: bool,
/// Skip [Y/n] questions, default to no
/// Skip [Y/n] questions negatively
#[arg(short, long, global = true)]
pub no: bool,
@ -25,15 +25,15 @@ pub struct CliArgs {
#[arg(short = 'A', long, env = "ACCESSIBLE", global = true)]
pub accessible: bool,
/// Ignore hidden files
/// Ignores hidden files
#[arg(short = 'H', long, global = true)]
pub hidden: bool,
/// Silence output
/// Silences output
#[arg(short = 'q', long, global = true)]
pub quiet: bool,
/// Ignore files matched by git's ignore files
/// Ignores files matched by git's ignore files
#[arg(short = 'g', long, global = true)]
pub gitignore: bool,
@ -41,15 +41,7 @@ pub struct CliArgs {
#[arg(short, long, global = true)]
pub format: Option<OsString>,
/// Decompress or list with password
#[arg(short = 'p', long = "password", global = true)]
pub password: Option<OsString>,
/// Concurrent working threads
#[arg(short = 'c', long, global = true)]
pub threads: Option<usize>,
// Ouch and clap's subcommands
/// Ouch and clap's subcommands
#[command(subcommand)]
pub cmd: Subcommand,
}
@ -61,7 +53,7 @@ pub enum Subcommand {
#[command(visible_alias = "c")]
Compress {
/// Files to be compressed
#[arg(required = true, value_hint = ValueHint::FilePath)]
#[arg(required = true, num_args = 1..)]
files: Vec<PathBuf>,
/// The resulting file. Its extensions can be used to specify the compression formats
@ -81,35 +73,23 @@ pub enum Subcommand {
/// conflicts with --level and --fast
#[arg(long, group = "compression-level")]
slow: bool,
/// Archive target files instead of storing symlinks (supported by `tar` and `zip`)
#[arg(long, short = 'S')]
follow_symlinks: bool,
},
/// Decompresses one or more files, optionally into another folder
#[command(visible_alias = "d")]
Decompress {
/// Files to be decompressed, or "-" for stdin
#[arg(required = true, num_args = 1.., value_hint = ValueHint::FilePath)]
/// Files to be decompressed
#[arg(required = true, num_args = 1..)]
files: Vec<PathBuf>,
/// Place results in a directory other than the current one
#[arg(short = 'd', long = "dir", value_hint = ValueHint::FilePath)]
#[arg(short = 'd', long = "dir", value_hint = ValueHint::DirPath)]
output_dir: Option<PathBuf>,
/// Remove the source file after successful decompression
#[arg(short = 'r', long)]
remove: bool,
/// Disable Smart Unpack
#[arg(long)]
no_smart_unpack: bool,
},
/// List contents of an archive
#[command(visible_aliases = ["l", "ls"])]
List {
/// Archives whose contents should be listed
#[arg(required = true, num_args = 1.., value_hint = ValueHint::FilePath)]
#[arg(required = true, num_args = 1..)]
archives: Vec<PathBuf>,
/// Show archive contents as a tree
@ -117,169 +97,3 @@ pub enum Subcommand {
tree: bool,
},
}
#[cfg(test)]
mod tests {
use super::*;
fn args_splitter(input: &str) -> impl Iterator<Item = &str> {
input.split_whitespace()
}
fn to_paths(iter: impl IntoIterator<Item = &'static str>) -> Vec<PathBuf> {
iter.into_iter().map(PathBuf::from).collect()
}
macro_rules! test {
($args:expr, $expected:expr) => {
let result = match CliArgs::try_parse_from(args_splitter($args)) {
Ok(result) => result,
Err(err) => panic!(
"CLI result is Err, expected Ok, input: '{}'.\nResult: '{err}'",
$args
),
};
assert_eq!(result, $expected, "CLI result mismatched, input: '{}'.", $args);
};
}
fn mock_cli_args() -> CliArgs {
CliArgs {
yes: false,
no: false,
accessible: false,
hidden: false,
quiet: false,
gitignore: false,
format: None,
// This is usually replaced in assertion tests
password: None,
threads: None,
cmd: Subcommand::Decompress {
// Put a crazy value here so no test can assert it unintentionally
files: vec!["\x00\x11\x22".into()],
output_dir: None,
remove: false,
no_smart_unpack: false,
},
}
}
#[test]
fn test_clap_cli_ok() {
test!(
"ouch decompress file.tar.gz",
CliArgs {
cmd: Subcommand::Decompress {
files: to_paths(["file.tar.gz"]),
output_dir: None,
remove: false,
no_smart_unpack: false,
},
..mock_cli_args()
}
);
test!(
"ouch d file.tar.gz",
CliArgs {
cmd: Subcommand::Decompress {
files: to_paths(["file.tar.gz"]),
output_dir: None,
remove: false,
no_smart_unpack: false,
},
..mock_cli_args()
}
);
test!(
"ouch d a b c",
CliArgs {
cmd: Subcommand::Decompress {
files: to_paths(["a", "b", "c"]),
output_dir: None,
remove: false,
no_smart_unpack: false,
},
..mock_cli_args()
}
);
test!(
"ouch compress file file.tar.gz",
CliArgs {
cmd: Subcommand::Compress {
files: to_paths(["file"]),
output: PathBuf::from("file.tar.gz"),
level: None,
fast: false,
slow: false,
follow_symlinks: false,
},
..mock_cli_args()
}
);
test!(
"ouch compress a b c archive.tar.gz",
CliArgs {
cmd: Subcommand::Compress {
files: to_paths(["a", "b", "c"]),
output: PathBuf::from("archive.tar.gz"),
level: None,
fast: false,
slow: false,
follow_symlinks: false,
},
..mock_cli_args()
}
);
test!(
"ouch compress a b c archive.tar.gz",
CliArgs {
cmd: Subcommand::Compress {
files: to_paths(["a", "b", "c"]),
output: PathBuf::from("archive.tar.gz"),
level: None,
fast: false,
slow: false,
follow_symlinks: false,
},
..mock_cli_args()
}
);
let inputs = [
"ouch compress a b c output --format tar.gz",
// https://github.com/clap-rs/clap/issues/5115
// "ouch compress a b c --format tar.gz output",
// "ouch compress a b --format tar.gz c output",
// "ouch compress a --format tar.gz b c output",
"ouch compress --format tar.gz a b c output",
"ouch --format tar.gz compress a b c output",
];
for input in inputs {
test!(
input,
CliArgs {
cmd: Subcommand::Compress {
files: to_paths(["a", "b", "c"]),
output: PathBuf::from("output"),
level: None,
fast: false,
slow: false,
follow_symlinks: false,
},
format: Some("tar.gz".into()),
..mock_cli_args()
}
);
}
}
#[test]
fn test_clap_cli_err() {
assert!(CliArgs::try_parse_from(args_splitter("ouch c")).is_err());
assert!(CliArgs::try_parse_from(args_splitter("ouch c input")).is_err());
assert!(CliArgs::try_parse_from(args_splitter("ouch d")).is_err());
assert!(CliArgs::try_parse_from(args_splitter("ouch l")).is_err());
}
}


@ -5,17 +5,14 @@ mod args;
use std::{
io,
path::{Path, PathBuf},
vec::Vec,
};
use clap::Parser;
use fs_err as fs;
pub use self::args::{CliArgs, Subcommand};
use crate::{
accessible::set_accessible,
utils::{is_path_stdin, FileVisibilityPolicy},
QuestionPolicy,
};
use crate::{accessible::set_accessible, utils::FileVisibilityPolicy, QuestionPolicy};
impl CliArgs {
/// A helper method that calls `clap::Parser::parse`.
@ -23,7 +20,7 @@ impl CliArgs {
/// And:
/// 1. Make paths absolute.
/// 2. Checks the QuestionPolicy.
pub fn parse_and_validate_args() -> crate::Result<(Self, QuestionPolicy, FileVisibilityPolicy)> {
pub fn parse_args() -> crate::Result<(Self, QuestionPolicy, FileVisibilityPolicy)> {
let mut args = Self::parse();
set_accessible(args.accessible);
@ -51,14 +48,5 @@ impl CliArgs {
}
fn canonicalize_files(files: &[impl AsRef<Path>]) -> io::Result<Vec<PathBuf>> {
files
.iter()
.map(|f| {
if is_path_stdin(f.as_ref()) || f.as_ref().is_symlink() {
Ok(f.as_ref().to_path_buf())
} else {
fs::canonicalize(f)
}
})
.collect()
files.iter().map(fs::canonicalize).collect()
}


@ -5,12 +5,11 @@ use std::{
use fs_err as fs;
use super::warn_user_about_loading_sevenz_in_memory;
use crate::{
archive,
commands::warn_user_about_loading_zip_in_memory,
extension::{split_first_compression_format, CompressionFormat::*, Extension},
utils::{io::lock_and_flush_output_stdio, user_wants_to_continue, FileVisibilityPolicy},
utils::{user_wants_to_continue, FileVisibilityPolicy},
QuestionAction, QuestionPolicy, BUFFER_CAPACITY,
};
@ -31,7 +30,6 @@ pub fn compress_files(
output_file: fs::File,
output_path: &Path,
quiet: bool,
follow_symlinks: bool,
question_policy: QuestionPolicy,
file_visibility_policy: FileVisibilityPolicy,
level: Option<i16>,
@ -57,17 +55,12 @@ pub fn compress_files(
encoder,
level.map_or_else(Default::default, |l| bzip2::Compression::new((l as u32).clamp(1, 9))),
)),
Bzip3 => {
#[cfg(not(feature = "bzip3"))]
return Err(archive::bzip3_stub::no_support());
#[cfg(feature = "bzip3")]
Box::new(
// Use block size of 16 MiB
bzip3::write::Bz3Encoder::new(encoder, 16 * 2_usize.pow(20))?,
)
}
Lz4 => Box::new(lz4_flex::frame::FrameEncoder::new(encoder).auto_finish()),
Lz4 => Box::new(lzzzz::lz4f::WriteCompressor::new(
encoder,
lzzzz::lz4f::PreferencesBuilder::new()
.compression_level(level.map_or(1, |l| (l as i32).clamp(1, lzzzz::lz4f::CLEVEL_MAX)))
.build(),
)?),
Lzma => Box::new(xz2::write::XzEncoder::new(
encoder,
level.map_or(6, |l| (l as u32).clamp(0, 9)),
@ -80,23 +73,18 @@ pub fn compress_files(
.from_writer(encoder),
),
Zstd => {
let mut zstd_encoder = zstd::stream::write::Encoder::new(
let zstd_encoder = zstd::stream::write::Encoder::new(
encoder,
level.map_or(zstd::DEFAULT_COMPRESSION_LEVEL, |l| {
(l as i32).clamp(zstd::zstd_safe::min_c_level(), zstd::zstd_safe::max_c_level())
}),
)?;
// Use all available PHYSICAL cores for compression
zstd_encoder.multithread(num_cpus::get_physical() as u32)?;
Box::new(zstd_encoder.auto_finish())
);
// Safety:
// Encoder::new() can only fail if `level` is invalid, but the level
// is `clamp`ed and therefore guaranteed to be valid
Box::new(zstd_encoder.unwrap().auto_finish())
}
Brotli => {
let default_level = 11; // Same as brotli CLI, default to highest compression
let level = level.unwrap_or(default_level).clamp(0, 11) as u32;
let win_size = 22; // default to 2^22 = 4 MiB window size
Box::new(brotli::CompressorWriter::new(encoder, BUFFER_CAPACITY, level, win_size))
}
Tar | Zip | Rar | SevenZip => unreachable!(),
Tar | Zip => unreachable!(),
};
Ok(encoder)
};
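The closure above is the writer-chaining half of the pipeline: every single-stream format simply wraps the previous Write in another encoder. A minimal sketch of that idea with one gzip layer, assuming only the flate2 crate (an illustration, not the project's chain_writer_encoder):
use std::io::Write;
use flate2::{write::GzEncoder, Compression};
fn main() -> std::io::Result<()> {
    // The innermost writer is the output file; the encoder wraps it, so bytes
    // written here are compressed before they reach disk.
    let file = std::fs::File::create("example.gz")?;
    let mut encoder = GzEncoder::new(file, Compression::default());
    encoder.write_all(b"hello world")?;
    // finish() writes the gzip trailer and returns the underlying file.
    encoder.finish()?;
    Ok(())
}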
@ -108,30 +96,20 @@ pub fn compress_files(
}
match first_format {
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
writer = chain_writer_encoder(&first_format, writer)?;
let mut reader = fs::File::open(&files[0])?;
let mut reader = fs::File::open(&files[0]).unwrap();
io::copy(&mut reader, &mut writer)?;
}
Tar => {
archive::tar::build_archive_from_paths(
&files,
output_path,
&mut writer,
file_visibility_policy,
quiet,
follow_symlinks,
)?;
archive::tar::build_archive_from_paths(&files, output_path, &mut writer, file_visibility_policy, quiet)?;
writer.flush()?;
}
Zip => {
if !formats.is_empty() {
// Locking necessary to guarantee that warning and question
// messages stay adjacent
let _locks = lock_and_flush_output_stdio();
warn_user_about_loading_zip_in_memory();
if !user_wants_to_continue(output_path, question_policy, QuestionAction::Compression)? {
return Ok(false);
}
@ -145,35 +123,10 @@ pub fn compress_files(
&mut vec_buffer,
file_visibility_policy,
quiet,
follow_symlinks,
)?;
vec_buffer.rewind()?;
io::copy(&mut vec_buffer, &mut writer)?;
}
Rar => {
#[cfg(feature = "unrar")]
return Err(archive::rar::no_compression());
#[cfg(not(feature = "unrar"))]
return Err(archive::rar_stub::no_support());
}
SevenZip => {
if !formats.is_empty() {
// Locking necessary to guarantee that warning and question
// messages stay adjacent
let _locks = lock_and_flush_output_stdio();
warn_user_about_loading_sevenz_in_memory();
if !user_wants_to_continue(output_path, question_policy, QuestionAction::Compression)? {
return Ok(false);
}
}
let mut vec_buffer = Cursor::new(vec![]);
archive::sevenz::compress_sevenz(&files, output_path, &mut vec_buffer, file_visibility_policy, quiet)?;
vec_buffer.rewind()?;
io::copy(&mut vec_buffer, &mut writer)?;
}
}
Ok(true)


@ -6,50 +6,34 @@ use std::{
use fs_err as fs;
#[cfg(not(feature = "bzip3"))]
use crate::archive;
use crate::{
commands::{warn_user_about_loading_sevenz_in_memory, warn_user_about_loading_zip_in_memory},
commands::warn_user_about_loading_zip_in_memory,
extension::{
split_first_compression_format,
CompressionFormat::{self, *},
Extension,
},
utils::{
self,
io::lock_and_flush_output_stdio,
is_path_stdin,
logger::{info, info_accessible},
nice_directory_display, user_wants_to_continue,
},
info,
utils::{self, nice_directory_display, user_wants_to_continue},
QuestionAction, QuestionPolicy, BUFFER_CAPACITY,
};
trait ReadSeek: Read + io::Seek {}
impl<T: Read + io::Seek> ReadSeek for T {}
pub struct DecompressOptions<'a> {
pub input_file_path: &'a Path,
pub formats: Vec<Extension>,
pub output_dir: &'a Path,
pub output_file_path: PathBuf,
pub is_output_dir_provided: bool,
pub is_smart_unpack: bool,
pub question_policy: QuestionPolicy,
pub quiet: bool,
pub password: Option<&'a [u8]>,
pub remove: bool,
}
/// Decompress a file
///
/// File at input_file_path is opened for reading, example: "archive.tar.gz"
/// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
/// output_dir is where the file will be decompressed to; this function assumes that the directory exists
/// output_file_path is only used when extracting single file formats, not archive formats like .tar or .zip
pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
assert!(options.output_dir.exists());
let input_is_stdin = is_path_stdin(options.input_file_path);
// Decompress a file
//
// File at input_file_path is opened for reading, example: "archive.tar.gz"
// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
// output_dir is where the file will be decompressed to; this function assumes that the directory exists
// output_file_path is only used when extracting single file formats, not archive formats like .tar or .zip
pub fn decompress_file(
input_file_path: &Path,
formats: Vec<Extension>,
output_dir: &Path,
output_file_path: PathBuf,
question_policy: QuestionPolicy,
quiet: bool,
) -> crate::Result<()> {
assert!(output_dir.exists());
let reader = fs::File::open(input_file_path)?;
// Zip archives are special because they require io::Seek, so their logic needs to be separated
// from decoder chaining.
@ -61,24 +45,14 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
if let [Extension {
compression_formats: [Zip],
..
}] = options.formats.as_slice()
}] = formats.as_slice()
{
let mut vec = vec![];
let reader: Box<dyn ReadSeek> = if input_is_stdin {
warn_user_about_loading_zip_in_memory();
io::copy(&mut io::stdin(), &mut vec)?;
Box::new(io::Cursor::new(vec))
} else {
Box::new(fs::File::open(options.input_file_path)?)
};
let zip_archive = zip::ZipArchive::new(reader)?;
let files_unpacked = if let ControlFlow::Continue(files) = execute_decompression(
|output_dir| crate::archive::zip::unpack_archive(zip_archive, output_dir, options.password, options.quiet),
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
let files_unpacked = if let ControlFlow::Continue(files) = smart_unpack(
|output_dir| crate::archive::zip::unpack_archive(zip_archive, output_dir, quiet),
output_dir,
&output_file_path,
question_policy,
)? {
files
} else {
@ -89,29 +63,17 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
// having a final status message is important especially in an accessibility context
// as screen readers may not read a command's exit code, making it hard to reason
// about whether the command succeeded without such a message
info_accessible(format!(
"Successfully decompressed archive in {} ({} files)",
nice_directory_display(options.output_dir),
info!(
accessible,
"Successfully decompressed archive in {} ({} files).",
nice_directory_display(output_dir),
files_unpacked
));
if !input_is_stdin && options.remove {
fs::remove_file(options.input_file_path)?;
info(format!(
"Removed input file {}",
nice_directory_display(options.input_file_path)
));
}
);
return Ok(());
}
// Will be used in decoder chaining
let reader: Box<dyn Read> = if input_is_stdin {
Box::new(io::stdin())
} else {
Box::new(fs::File::open(options.input_file_path)?)
};
let reader = BufReader::with_capacity(BUFFER_CAPACITY, reader);
let mut reader: Box<dyn Read> = Box::new(reader);
@ -120,38 +82,26 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
let decoder: Box<dyn Read> = match format {
Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
Bzip3 => {
#[cfg(not(feature = "bzip3"))]
return Err(archive::bzip3_stub::no_support());
#[cfg(feature = "bzip3")]
Box::new(bzip3::read::Bz3Decoder::new(decoder)?)
}
Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
Lz4 => Box::new(lzzzz::lz4f::ReadDecompressor::new(decoder)?),
Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
Tar | Zip | Rar | SevenZip => decoder,
Tar | Zip => unreachable!(),
};
Ok(decoder)
};
let (first_extension, extensions) = split_first_compression_format(&options.formats);
let (first_extension, extensions) = split_first_compression_format(&formats);
for format in extensions.iter().rev() {
reader = chain_reader_decoder(format, reader)?;
}
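Decompression mirrors the compression side: each non-archive format wraps the previous Read in another decoder, and the archive format (if any) is handled last. A self-contained sketch of a single-layer reader chain, assuming only the flate2 crate:
use std::io::{self, Read};
use flate2::read::GzDecoder;
// Reading from the outermost decoder transparently decompresses the stream;
// additional layers would simply wrap `reader` again.
fn gunzip_to_string(path: &std::path::Path) -> io::Result<String> {
    let file = std::fs::File::open(path)?;
    let mut reader: Box<dyn Read> = Box::new(GzDecoder::new(file));
    let mut contents = String::new();
    reader.read_to_string(&mut contents)?;
    Ok(contents)
}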
let files_unpacked = match first_extension {
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
reader = chain_reader_decoder(&first_extension, reader)?;
let mut writer = match utils::ask_to_create_file(
&options.output_file_path,
options.question_policy,
QuestionAction::Decompression,
)? {
let mut writer = match utils::ask_to_create_file(&output_file_path, question_policy)? {
Some(file) => file,
None => return Ok(()),
};
@ -161,13 +111,11 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
1
}
Tar => {
if let ControlFlow::Continue(files) = execute_decompression(
|output_dir| crate::archive::tar::unpack_archive(reader, output_dir, options.quiet),
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
if let ControlFlow::Continue(files) = smart_unpack(
|output_dir| crate::archive::tar::unpack_archive(reader, output_dir, quiet),
output_dir,
&output_file_path,
question_policy,
)? {
files
} else {
@ -175,17 +123,10 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
}
}
Zip => {
if options.formats.len() > 1 {
// Locking necessary to guarantee that warning and question
// messages stay adjacent
let _locks = lock_and_flush_output_stdio();
if formats.len() > 1 {
warn_user_about_loading_zip_in_memory();
if !user_wants_to_continue(
options.input_file_path,
options.question_policy,
QuestionAction::Decompression,
)? {
if !user_wants_to_continue(input_file_path, question_policy, QuestionAction::Decompression)? {
return Ok(());
}
}
@ -194,91 +135,11 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
io::copy(&mut reader, &mut vec)?;
let zip_archive = zip::ZipArchive::new(io::Cursor::new(vec))?;
if let ControlFlow::Continue(files) = execute_decompression(
|output_dir| {
crate::archive::zip::unpack_archive(zip_archive, output_dir, options.password, options.quiet)
},
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
return Ok(());
}
}
#[cfg(feature = "unrar")]
Rar => {
type UnpackResult = crate::Result<usize>;
let unpack_fn: Box<dyn FnOnce(&Path) -> UnpackResult> = if options.formats.len() > 1 || input_is_stdin {
let mut temp_file = tempfile::NamedTempFile::new()?;
io::copy(&mut reader, &mut temp_file)?;
Box::new(move |output_dir| {
crate::archive::rar::unpack_archive(temp_file.path(), output_dir, options.password, options.quiet)
})
} else {
Box::new(|output_dir| {
crate::archive::rar::unpack_archive(
options.input_file_path,
if let ControlFlow::Continue(files) = smart_unpack(
|output_dir| crate::archive::zip::unpack_archive(zip_archive, output_dir, quiet),
output_dir,
options.password,
options.quiet,
)
})
};
if let ControlFlow::Continue(files) = execute_decompression(
unpack_fn,
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
)? {
files
} else {
return Ok(());
}
}
#[cfg(not(feature = "unrar"))]
Rar => {
return Err(crate::archive::rar_stub::no_support());
}
SevenZip => {
if options.formats.len() > 1 {
// Locking necessary to guarantee that warning and question
// messages stay adjacent
let _locks = lock_and_flush_output_stdio();
warn_user_about_loading_sevenz_in_memory();
if !user_wants_to_continue(
options.input_file_path,
options.question_policy,
QuestionAction::Decompression,
)? {
return Ok(());
}
}
let mut vec = vec![];
io::copy(&mut reader, &mut vec)?;
if let ControlFlow::Continue(files) = execute_decompression(
|output_dir| {
crate::archive::sevenz::decompress_sevenz(
io::Cursor::new(vec),
output_dir,
options.password,
options.quiet,
)
},
options.output_dir,
&options.output_file_path,
options.question_policy,
options.is_output_dir_provided,
options.is_smart_unpack,
&output_file_path,
question_policy,
)? {
files
} else {
@ -291,78 +152,20 @@ pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
// having a final status message is important especially in an accessibility context
// as screen readers may not read a command's exit code, making it hard to reason
// about whether the command succeeded without such a message
info_accessible(format!(
"Successfully decompressed archive in {}",
nice_directory_display(options.output_dir)
));
info_accessible(format!("Files unpacked: {}", files_unpacked));
if !input_is_stdin && options.remove {
fs::remove_file(options.input_file_path)?;
info(format!(
"Removed input file {}",
nice_directory_display(options.input_file_path)
));
}
info!(
accessible,
"Successfully decompressed archive in {}.",
nice_directory_display(output_dir)
);
info!(accessible, "Files unpacked: {}", files_unpacked);
Ok(())
}
fn execute_decompression(
unpack_fn: impl FnOnce(&Path) -> crate::Result<usize>,
output_dir: &Path,
output_file_path: &Path,
question_policy: QuestionPolicy,
is_output_dir_provided: bool,
is_smart_unpack: bool,
) -> crate::Result<ControlFlow<(), usize>> {
if is_smart_unpack {
return smart_unpack(unpack_fn, output_dir, output_file_path, question_policy);
}
let target_output_dir = if is_output_dir_provided {
output_dir
} else {
output_file_path
};
unpack(unpack_fn, target_output_dir, question_policy)
}
/// Unpacks an archive, creating the output directory; this function will create the `output_dir`
/// directory or replace it if it already exists. The `output_dir` needs to be empty:
/// - If `output_dir` does not exist OR is an empty directory, it will unpack there
/// - If `output_dir` exists and is not an empty directory, the user will be asked what to do
fn unpack(
unpack_fn: impl FnOnce(&Path) -> crate::Result<usize>,
output_dir: &Path,
question_policy: QuestionPolicy,
) -> crate::Result<ControlFlow<(), usize>> {
let is_valid_output_dir = !output_dir.exists() || (output_dir.is_dir() && output_dir.read_dir()?.next().is_none());
let output_dir_cleaned = if is_valid_output_dir {
output_dir.to_owned()
} else {
match utils::resolve_path_conflict(output_dir, question_policy, QuestionAction::Decompression)? {
Some(path) => path,
None => return Ok(ControlFlow::Break(())),
}
};
if !output_dir_cleaned.exists() {
fs::create_dir(&output_dir_cleaned)?;
}
let files = unpack_fn(&output_dir_cleaned)?;
Ok(ControlFlow::Continue(files))
}
/// Unpacks an archive with some heuristics
/// - If the archive contains only one file, it will be extracted to the `output_dir`
/// - If the archive contains multiple files, it will be extracted to a subdirectory of the
/// output_dir named after the archive (given by `output_file_path`)
///
/// Note: This function assumes that `output_dir` exists
fn smart_unpack(
unpack_fn: impl FnOnce(&Path) -> crate::Result<usize>,
@ -371,19 +174,18 @@ fn smart_unpack(
question_policy: QuestionPolicy,
) -> crate::Result<ControlFlow<(), usize>> {
assert!(output_dir.exists());
let temp_dir = tempfile::Builder::new().prefix("tmp-ouch-").tempdir_in(output_dir)?;
let temp_dir = tempfile::tempdir_in(output_dir)?;
let temp_dir_path = temp_dir.path();
info_accessible(format!(
"Created temporary directory {} to hold decompressed elements",
info!(
accessible,
"Created temporary directory {} to hold decompressed elements.",
nice_directory_display(temp_dir_path)
));
);
let files = unpack_fn(temp_dir_path)?;
let root_contains_only_one_element = fs::read_dir(temp_dir_path)?.take(2).count() == 1;
let (previous_path, mut new_path) = if root_contains_only_one_element {
let root_contains_only_one_element = fs::read_dir(temp_dir_path)?.count() == 1;
if root_contains_only_one_element {
// Only one file in the root directory, so we can just move it to the output directory
let file = fs::read_dir(temp_dir_path)?.next().expect("item exists")?;
let file_path = file.path();
@ -391,26 +193,32 @@ fn smart_unpack(
.file_name()
.expect("Should be safe because paths in archives should not end with '..'");
let correct_path = output_dir.join(file_name);
(file_path, correct_path)
} else {
(temp_dir_path.to_owned(), output_file_path.to_owned())
};
// Before moving, need to check if a file with the same name already exists
// If it does, need to ask the user what to do
new_path = match utils::resolve_path_conflict(&new_path, question_policy, QuestionAction::Decompression)? {
Some(path) => path,
None => return Ok(ControlFlow::Break(())),
};
if !utils::clear_path(&correct_path, question_policy)? {
return Ok(ControlFlow::Break(()));
}
fs::rename(&file_path, &correct_path)?;
info!(
accessible,
"Successfully moved {} to {}.",
nice_directory_display(&file_path),
nice_directory_display(&correct_path)
);
} else {
// Multiple files in the root directory, so:
// Rename the temporary directory to the archive name, which is output_file_path
fs::rename(&previous_path, &new_path)?;
info_accessible(format!(
"Successfully moved \"{}\" to \"{}\"",
nice_directory_display(&previous_path),
nice_directory_display(&new_path),
));
// One case to handle, though: we need to check if a file with the same name already exists
if !utils::clear_path(output_file_path, question_policy)? {
return Ok(ControlFlow::Break(()));
}
fs::rename(temp_dir_path, output_file_path)?;
info!(
accessible,
"Successfully moved {} to {}.",
nice_directory_display(temp_dir_path),
nice_directory_display(output_file_path)
);
}
Ok(ControlFlow::Continue(files))
}
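The single-element heuristic only needs to know whether the temporary directory holds exactly one entry, which is why the main branch limits the iterator with take(2) instead of counting every entry. A small sketch of that check, using std alone:
use std::path::Path;
// Stop reading directory entries as soon as a second one shows up.
fn has_exactly_one_entry(dir: &Path) -> std::io::Result<bool> {
    Ok(std::fs::read_dir(dir)?.take(2).count() == 1)
}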


@ -6,22 +6,20 @@ use std::{
use fs_err as fs;
use crate::{
archive,
commands::warn_user_about_loading_zip_in_memory,
extension::CompressionFormat::{self, *},
list::{self, FileInArchive, ListOptions},
utils::{io::lock_and_flush_output_stdio, user_wants_to_continue},
utils::user_wants_to_continue,
QuestionAction, QuestionPolicy, BUFFER_CAPACITY,
};
/// File at input_file_path is opened for reading, example: "archive.tar.gz"
/// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
// File at input_file_path is opened for reading, example: "archive.tar.gz"
// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
pub fn list_archive_contents(
archive_path: &Path,
formats: Vec<CompressionFormat>,
list_options: ListOptions,
question_policy: QuestionPolicy,
password: Option<&[u8]>,
) -> crate::Result<()> {
let reader = fs::File::open(archive_path)?;
@ -34,8 +32,9 @@ pub fn list_archive_contents(
// Decompressing a Zip any other way can take up the whole RAM and freeze ouch.
if let &[Zip] = formats.as_slice() {
let zip_archive = zip::ZipArchive::new(reader)?;
let files = crate::archive::zip::list_archive(zip_archive, password);
let files = crate::archive::zip::list_archive(zip_archive);
list::list_files(archive_path, files, list_options)?;
return Ok(());
}
@ -45,46 +44,29 @@ pub fn list_archive_contents(
// Grab previous decoder and wrap it inside of a new one
let chain_reader_decoder =
|format: CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
|format: &CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
let decoder: Box<dyn Read + Send> = match format {
Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
Bzip3 => {
#[cfg(not(feature = "bzip3"))]
return Err(archive::bzip3_stub::no_support());
#[cfg(feature = "bzip3")]
Box::new(bzip3::read::Bz3Decoder::new(decoder).unwrap())
}
Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
Lz4 => Box::new(lzzzz::lz4f::ReadDecompressor::new(decoder)?),
Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
Tar | Zip | Rar | SevenZip => unreachable!("should be treated by caller"),
Tar | Zip => unreachable!(),
};
Ok(decoder)
};
let mut misplaced_archive_format = None;
for &format in formats.iter().skip(1).rev() {
if format.archive_format() {
misplaced_archive_format = Some(format);
break;
}
for format in formats.iter().skip(1).rev() {
reader = chain_reader_decoder(format, reader)?;
}
let archive_format = misplaced_archive_format.unwrap_or(formats[0]);
let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match archive_format {
let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match formats[0] {
Tar => Box::new(crate::archive::tar::list_archive(tar::Archive::new(reader))),
Zip => {
if formats.len() > 1 {
// Locking necessary to guarantee that warning and question
// messages stay adjacent
let _locks = lock_and_flush_output_stdio();
warn_user_about_loading_zip_in_memory();
if !user_wants_to_continue(archive_path, question_policy, QuestionAction::Decompression)? {
return Ok(());
}
@ -94,43 +76,12 @@ pub fn list_archive_contents(
io::copy(&mut reader, &mut vec)?;
let zip_archive = zip::ZipArchive::new(io::Cursor::new(vec))?;
Box::new(crate::archive::zip::list_archive(zip_archive, password))
Box::new(crate::archive::zip::list_archive(zip_archive))
}
#[cfg(feature = "unrar")]
Rar => {
if formats.len() > 1 {
let mut temp_file = tempfile::NamedTempFile::new()?;
io::copy(&mut reader, &mut temp_file)?;
Box::new(crate::archive::rar::list_archive(temp_file.path(), password)?)
} else {
Box::new(crate::archive::rar::list_archive(archive_path, password)?)
}
}
#[cfg(not(feature = "unrar"))]
Rar => {
return Err(crate::archive::rar_stub::no_support());
}
SevenZip => {
if formats.len() > 1 {
// Locking necessary to guarantee that warning and question
// messages stay adjacent
let _locks = lock_and_flush_output_stdio();
warn_user_about_loading_zip_in_memory();
if !user_wants_to_continue(archive_path, question_policy, QuestionAction::Decompression)? {
return Ok(());
}
}
let mut vec = vec![];
io::copy(&mut reader, &mut vec)?;
Box::new(archive::sevenz::list_archive(io::Cursor::new(vec), password)?)
}
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
unreachable!("Not an archive, should be validated before calling this function.");
Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
panic!("Not an archive! This should never happen, if it does, something is wrong with `CompressionFormat::is_archive()`. Please report this error!");
}
};
list::list_files(archive_path, files, list_options)
list::list_files(archive_path, files, list_options)?;
Ok(())
}


@ -6,8 +6,6 @@ mod list;
use std::{ops::ControlFlow, path::PathBuf};
use bstr::ByteSlice;
use decompress::DecompressOptions;
use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator};
use utils::colors;
@ -16,33 +14,21 @@ use crate::{
cli::Subcommand,
commands::{compress::compress_files, decompress::decompress_file, list::list_archive_contents},
error::{Error, FinalError},
extension::{self, parse_format_flag},
extension::{self, parse_format},
info,
list::ListOptions,
utils::{
self, colors::*, is_path_stdin, logger::info_accessible, path_to_str, EscapedPathDisplay, FileVisibilityPolicy,
QuestionAction,
},
CliArgs, QuestionPolicy,
utils::{self, to_utf, EscapedPathDisplay, FileVisibilityPolicy},
warning, CliArgs, QuestionPolicy,
};
/// Warn the user that (de)compressing this .zip archive might freeze their system.
fn warn_user_about_loading_zip_in_memory() {
const ZIP_IN_MEMORY_LIMITATION_WARNING: &str = "\n \
The format '.zip' is limited by design and cannot be (de)compressed with encoding streams.\n \
When chaining '.zip' with other formats, all (de)compression needs to be done in-memory\n \
Careful, you might run out of RAM if the archive is too large!";
const ZIP_IN_MEMORY_LIMITATION_WARNING: &str = "\n\
\tThe format '.zip' is limited and cannot be (de)compressed using encoding streams.\n\
\tWhen using '.zip' with other formats, (de)compression must be done in-memory\n\
\tCareful, you might run out of RAM if the archive is too large!";
eprintln!("{}[WARNING]{}: {ZIP_IN_MEMORY_LIMITATION_WARNING}", *ORANGE, *RESET);
}
/// Warn the user that (de)compressing this .7z archive might freeze their system.
fn warn_user_about_loading_sevenz_in_memory() {
const SEVENZ_IN_MEMORY_LIMITATION_WARNING: &str = "\n \
The format '.7z' is limited by design and cannot be (de)compressed with encoding streams.\n \
When chaining '.7z' with other formats, all (de)compression needs to be done in-memory\n \
Careful, you might run out of RAM if the archive is too large!";
eprintln!("{}[WARNING]{}: {SEVENZ_IN_MEMORY_LIMITATION_WARNING}", *ORANGE, *RESET);
warning!("{}", ZIP_IN_MEMORY_LIMITATION_WARNING);
}
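The limitation both warnings describe comes from zip (and 7z) needing random access: the archive readers must implement Seek, so a chained, non-seekable stream has to be buffered into RAM first. A minimal sketch of that workaround, assuming the zip crate (the helper name is made up for the example):
use std::io::{Cursor, Read};
// Buffer a non-seekable stream into memory so `ZipArchive` (which needs
// Read + Seek) can open it. This is exactly why large archives may exhaust RAM.
fn open_zip_from_stream(mut stream: impl Read) -> zip::result::ZipResult<zip::ZipArchive<Cursor<Vec<u8>>>> {
    let mut buffer = Vec::new();
    stream.read_to_end(&mut buffer)?;
    zip::ZipArchive::new(Cursor::new(buffer))
}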
/// This function checks what command needs to be run and performs A LOT of ahead-of-time checks
@ -54,13 +40,6 @@ pub fn run(
question_policy: QuestionPolicy,
file_visibility_policy: FileVisibilityPolicy,
) -> crate::Result<()> {
if let Some(threads) = args.threads {
rayon::ThreadPoolBuilder::new()
.num_threads(threads)
.build_global()
.unwrap();
}
match args.cmd {
Subcommand::Compress {
files,
@ -68,7 +47,6 @@ pub fn run(
level,
fast,
slow,
follow_symlinks,
} => {
// After cleaning, if there are no input files left, exit
if files.is_empty() {
@ -78,10 +56,10 @@ pub fn run(
// Formats from path extension, like "file.tar.gz.xz" -> vec![Tar, Gzip, Lzma]
let (formats_from_flag, formats) = match args.format {
Some(formats) => {
let parsed_formats = parse_format_flag(&formats)?;
let parsed_formats = parse_format(&formats)?;
(Some(formats), parsed_formats)
}
None => (None, extension::extensions_from_path(&output_path)?),
None => (None, extension::extensions_from_path(&output_path)),
};
check::check_invalid_compression_with_non_archive_format(
@ -92,8 +70,7 @@ pub fn run(
)?;
check::check_archive_formats_position(&formats, &output_path)?;
let output_file =
match utils::ask_to_create_file(&output_path, question_policy, QuestionAction::Compression)? {
let output_file = match utils::ask_to_create_file(&output_path, question_policy)? {
Some(writer) => writer,
None => return Ok(()),
};
@ -112,7 +89,6 @@ pub fn run(
output_file,
&output_path,
args.quiet,
follow_symlinks,
question_policy,
file_visibility_policy,
level,
@ -123,7 +99,7 @@ pub fn run(
// having a final status message is important especially in an accessibility context
// as screen readers may not read a command's exit code, making it hard to reason
// about whether the command succeeded without such a message
info_accessible(format!("Successfully compressed '{}'", path_to_str(&output_path)));
info!(accessible, "Successfully compressed '{}'.", to_utf(&output_path));
} else {
// If Ok(false) or Err() occurred, delete incomplete file at `output_path`
//
@ -144,21 +120,15 @@ pub fn run(
}
}
compress_result.map(|_| ())
compress_result?;
}
Subcommand::Decompress {
files,
output_dir,
remove,
no_smart_unpack,
} => {
Subcommand::Decompress { files, output_dir } => {
let mut output_paths = vec![];
let mut formats = vec![];
if let Some(format) = args.format {
let format = parse_format_flag(&format)?;
let format = parse_format(&format)?;
for path in files.iter() {
// TODO: use Error::Custom
let file_name = path.file_name().ok_or_else(|| Error::NotFound {
error_title: format!("{} does not have a file name", EscapedPathDisplay::new(path)),
})?;
@ -167,22 +137,19 @@ pub fn run(
}
} else {
for path in files.iter() {
let (pathbase, mut file_formats) = extension::separate_known_extensions_from_name(path)?;
let (path, mut file_formats) = extension::separate_known_extensions_from_name(path);
if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
return Ok(());
}
output_paths.push(pathbase);
output_paths.push(path);
formats.push(file_formats);
}
}
check::check_missing_formats_when_decompressing(&files, &formats)?;
let is_output_dir_provided = output_dir.is_some();
let is_smart_unpack = !is_output_dir_provided && !no_smart_unpack;
// The directory that will contain the output files
// We default to the current directory if the user didn't specify an output directory with --dir
let output_dir = if let Some(dir) = output_dir {
@ -197,39 +164,28 @@ pub fn run(
.zip(formats)
.zip(output_paths)
.try_for_each(|((input_path, formats), file_name)| {
// Path used by single file format archives
let output_file_path = if is_path_stdin(file_name) {
output_dir.join("stdin-output")
} else {
output_dir.join(file_name)
};
decompress_file(DecompressOptions {
input_file_path: input_path,
let output_file_path = output_dir.join(file_name); // Path used by single file format archives
decompress_file(
input_path,
formats,
is_output_dir_provided,
output_dir: &output_dir,
&output_dir,
output_file_path,
is_smart_unpack,
question_policy,
quiet: args.quiet,
password: args.password.as_deref().map(|str| {
<[u8] as ByteSlice>::from_os_str(str).expect("convert password to bytes failed")
}),
remove,
})
})
args.quiet,
)
})?;
}
Subcommand::List { archives: files, tree } => {
let mut formats = vec![];
if let Some(format) = args.format {
let format = parse_format_flag(&format)?;
let format = parse_format(&format)?;
for _ in 0..files.len() {
formats.push(format.clone());
}
} else {
for path in files.iter() {
let mut file_formats = extension::extensions_from_path(path)?;
let mut file_formats = extension::extensions_from_path(path);
if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
return Ok(());
@ -249,18 +205,9 @@ pub fn run(
println!();
}
let formats = extension::flatten_compression_formats(&formats);
list_archive_contents(
archive_path,
formats,
list_options,
question_policy,
args.password
.as_deref()
.map(|str| <[u8] as ByteSlice>::from_os_str(str).expect("convert password to bytes failed")),
)?;
list_archive_contents(archive_path, formats, list_options, question_policy)?;
}
}
}
Ok(())
}
}
}


@ -4,21 +4,15 @@
use std::{
borrow::Cow,
ffi::OsString,
fmt::{self, Display},
io,
};
use crate::{
accessible::is_running_in_accessible_mode,
extension::{PRETTY_SUPPORTED_ALIASES, PRETTY_SUPPORTED_EXTENSIONS},
utils::os_str_to_str,
};
use crate::{accessible::is_running_in_accessible_mode, utils::colors::*};
/// All errors that can be generated by `ouch`
#[derive(Debug, Clone)]
#[derive(Debug)]
pub enum Error {
/// An IoError that doesn't have a dedicated error variant
/// Not every IoError, some of them get filtered by `From<io::Error>` into other variants
IoError { reason: String },
/// From lzzzz::lz4f::Error
Lz4Error { reason: String },
@ -32,21 +26,14 @@ pub enum Error {
PermissionDenied { error_title: String },
/// From zip::result::ZipError::UnsupportedArchive
UnsupportedZipArchive(&'static str),
/// We don't support compressing the root folder.
/// TO BE REMOVED
CompressingRootFolder,
/// Specialized walkdir's io::Error wrapper with additional information on the error
WalkdirError { reason: String },
/// Custom and unique errors are reported in this variant
Custom { reason: FinalError },
/// Invalid format passed to `--format`
InvalidFormatFlag { text: OsString, reason: String },
/// From sevenz_rust::Error
SevenzipError { reason: String },
/// Recognised but unsupported format
// currently only RAR when built without the `unrar` feature
UnsupportedFormat { reason: String },
/// Invalid password provided
InvalidPassword { reason: String },
InvalidFormat { reason: String },
}
/// Alias to std's Result with ouch's Error
@ -68,8 +55,6 @@ pub struct FinalError {
impl Display for FinalError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use crate::utils::colors::*;
// Title
//
// When in ACCESSIBLE mode, the square brackets are suppressed
@ -130,85 +115,59 @@ impl FinalError {
self.hints.push(hint.into());
self
}
/// Adds all supported formats as hints.
///
/// This is what it looks like:
/// ```
/// hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst
/// hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
/// ```
pub fn hint_all_supported_formats(self) -> Self {
self.hint(format!("Supported extensions are: {}", PRETTY_SUPPORTED_EXTENSIONS))
.hint(format!("Supported aliases are: {}", PRETTY_SUPPORTED_ALIASES))
}
}
impl From<Error> for FinalError {
fn from(err: Error) -> Self {
match err {
Error::WalkdirError { reason } => FinalError::with_title(reason),
Error::NotFound { error_title } => FinalError::with_title(error_title).detail("File not found"),
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let err = match self {
Error::WalkdirError { reason } => FinalError::with_title(reason.to_string()),
Error::NotFound { error_title } => FinalError::with_title(error_title.to_string()).detail("File not found"),
Error::CompressingRootFolder => {
FinalError::with_title("It seems you're trying to compress the root folder.")
.detail("This is unadvisable since ouch does compressions in-memory.")
.hint("Use a more appropriate tool for this, such as rsync.")
}
Error::IoError { reason } => FinalError::with_title(reason),
Error::Lz4Error { reason } => FinalError::with_title(reason),
Error::AlreadyExists { error_title } => FinalError::with_title(error_title).detail("File already exists"),
Error::InvalidZipArchive(reason) => FinalError::with_title("Invalid zip archive").detail(reason),
Error::PermissionDenied { error_title } => FinalError::with_title(error_title).detail("Permission denied"),
Error::UnsupportedZipArchive(reason) => FinalError::with_title("Unsupported zip archive").detail(reason),
Error::InvalidFormatFlag { reason, text } => {
FinalError::with_title(format!("Failed to parse `--format {}`", os_str_to_str(&text)))
.detail(reason)
.hint_all_supported_formats()
.hint("")
.hint("Examples:")
.hint(" --format tar")
.hint(" --format gz")
.hint(" --format tar.gz")
Error::IoError { reason } => FinalError::with_title(reason.to_string()),
Error::Lz4Error { reason } => FinalError::with_title(reason.to_string()),
Error::AlreadyExists { error_title } => {
FinalError::with_title(error_title.to_string()).detail("File already exists")
}
Error::InvalidZipArchive(reason) => FinalError::with_title("Invalid zip archive").detail(*reason),
Error::PermissionDenied { error_title } => {
FinalError::with_title(error_title.to_string()).detail("Permission denied")
}
Error::UnsupportedZipArchive(reason) => FinalError::with_title("Unsupported zip archive").detail(*reason),
Error::InvalidFormat { reason } => FinalError::with_title("Invalid archive format").detail(reason.clone()),
Error::Custom { reason } => reason.clone(),
Error::SevenzipError { reason } => FinalError::with_title("7z error").detail(reason),
Error::UnsupportedFormat { reason } => {
FinalError::with_title("Recognised but unsupported format").detail(reason.clone())
}
Error::InvalidPassword { reason } => FinalError::with_title("Invalid password").detail(reason.clone()),
}
}
}
};
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let err = FinalError::from(self.clone());
write!(f, "{err}")
}
}
impl From<std::io::Error> for Error {
fn from(err: std::io::Error) -> Self {
let error_title = err.to_string();
match err.kind() {
io::ErrorKind::NotFound => Self::NotFound { error_title },
io::ErrorKind::PermissionDenied => Self::PermissionDenied { error_title },
io::ErrorKind::AlreadyExists => Self::AlreadyExists { error_title },
_other => Self::IoError { reason: error_title },
std::io::ErrorKind::NotFound => Self::NotFound {
error_title: err.to_string(),
},
std::io::ErrorKind::PermissionDenied => Self::PermissionDenied {
error_title: err.to_string(),
},
std::io::ErrorKind::AlreadyExists => Self::AlreadyExists {
error_title: err.to_string(),
},
_other => Self::IoError {
reason: err.to_string(),
},
}
}
}
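This conversion is what lets the rest of the codebase lean on the ? operator: any io::Error is classified into the matching variant automatically. A hypothetical helper illustrating the effect (the function name is made up for the example):
use std::path::Path;
use fs_err as fs;
// A missing file surfaces as `Error::NotFound` and a permissions problem as
// `Error::PermissionDenied`, purely through the `From<io::Error>` impl above.
fn open_input(path: &Path) -> crate::Result<fs::File> {
    Ok(fs::File::open(path)?)
}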
#[cfg(feature = "bzip3")]
impl From<bzip3::Error> for Error {
fn from(err: bzip3::Error) -> Self {
use bzip3::Error as Bz3Error;
match err {
Bz3Error::Io(inner) => inner.into(),
Bz3Error::BlockSize | Bz3Error::ProcessBlock(_) | Bz3Error::InvalidSignature => {
FinalError::with_title("bzip3 error").detail(err.to_string()).into()
}
impl From<lzzzz::lz4f::Error> for Error {
fn from(err: lzzzz::lz4f::Error) -> Self {
Self::Lz4Error {
reason: err.to_string(),
}
}
}
@ -227,23 +186,6 @@ impl From<zip::result::ZipError> for Error {
}
}
#[cfg(feature = "unrar")]
impl From<unrar::error::UnrarError> for Error {
fn from(err: unrar::error::UnrarError) -> Self {
Self::Custom {
reason: FinalError::with_title("Unexpected error in rar archive").detail(format!("{:?}", err.code)),
}
}
}
impl From<sevenz_rust2::Error> for Error {
fn from(err: sevenz_rust2::Error) -> Self {
Self::SevenzipError {
reason: err.to_string(),
}
}
}
impl From<ignore::Error> for Error {
fn from(err: ignore::Error) -> Self {
Self::WalkdirError {


@ -3,45 +3,12 @@
use std::{ffi::OsStr, fmt, path::Path};
use bstr::ByteSlice;
use CompressionFormat::*;
use crate::{
error::{Error, FinalError, Result},
utils::logger::warning,
};
pub const SUPPORTED_EXTENSIONS: &[&str] = &[
"tar",
"zip",
"bz",
"bz2",
"gz",
"lz4",
"xz",
"lzma",
"sz",
"zst",
#[cfg(feature = "unrar")]
"rar",
"7z",
"br",
];
pub const SUPPORTED_ALIASES: &[&str] = &["tgz", "tbz", "tlz4", "txz", "tzlma", "tsz", "tzst"];
#[cfg(not(feature = "unrar"))]
pub const PRETTY_SUPPORTED_EXTENSIONS: &str = "tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z";
#[cfg(feature = "unrar")]
pub const PRETTY_SUPPORTED_EXTENSIONS: &str = "tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z";
pub const PRETTY_SUPPORTED_ALIASES: &str = "tgz, tbz, tlz4, txz, tzlma, tsz, tzst";
use self::CompressionFormat::*;
use crate::{error::Error, warning};
/// A wrapper around `CompressionFormat` that allows combinations like `tgz`
#[derive(Debug, Clone)]
// Keep `PartialEq` only for testing because two formats are the same even if
// their `display_text` does not match (beware of aliases)
#[cfg_attr(test, derive(PartialEq))]
// Should only be built with constructors
#[derive(Debug, Clone, Eq)]
#[non_exhaustive]
pub struct Extension {
/// One extension like "tgz" can be made of multiple CompressionFormats ([Tar, Gz])
@ -50,6 +17,13 @@ pub struct Extension {
display_text: String,
}
// The display_text should be ignored when comparing extensions
impl PartialEq for Extension {
fn eq(&self, other: &Self) -> bool {
self.compression_formats == other.compression_formats
}
}
impl Extension {
/// # Panics:
/// Will panic if `formats` is empty
@ -63,8 +37,8 @@ impl Extension {
/// Checks if the first format in `compression_formats` is an archive
pub fn is_archive(&self) -> bool {
// Index Safety: we check that `compression_formats` is not empty in `Self::new`
self.compression_formats[0].archive_format()
// Safety: we check that `compression_formats` is not empty in `Self::new`
self.compression_formats[0].is_archive_format()
}
}
@ -81,108 +55,85 @@ pub enum CompressionFormat {
Gzip,
/// .bz .bz2
Bzip,
/// .bz3
Bzip3,
/// .lz4
Lz4,
/// .xz .lzma
Lzma,
/// .sz
Snappy,
/// tar, tgz, tbz, tbz2, tbz3, txz, tlz4, tlzma, tsz, tzst
/// tar, tgz, tbz, tbz2, txz, tlz4, tlzma, tsz, tzst
Tar,
/// .zst
Zstd,
/// .zip
Zip,
// even if built without RAR support, we still want to recognise the format
/// .rar
Rar,
/// .7z
SevenZip,
/// .br
Brotli,
}
impl CompressionFormat {
/// Currently supported archive formats are .tar (and aliases to it) and .zip
pub fn archive_format(&self) -> bool {
fn is_archive_format(&self) -> bool {
// Keep this match like that without a wildcard `_` so we don't forget to update it
match self {
Tar | Zip | Rar | SevenZip => true,
Tar | Zip => true,
Gzip => false,
Bzip => false,
Bzip3 => false,
Lz4 => false,
Lzma => false,
Snappy => false,
Zstd => false,
Brotli => false,
}
}
}
pub const SUPPORTED_EXTENSIONS: &[&str] = &[
"tar", "tgz", "tbz", "tlz4", "txz", "tzlma", "tsz", "tzst", "zip", "bz", "bz2", "gz", "lz4", "xz", "lzma", "sz",
"zst",
];
fn to_extension(ext: &[u8]) -> Option<Extension> {
Some(Extension::new(
match ext {
b"tar" => &[Tar],
b"tgz" => &[Tar, Gzip],
b"tbz" | b"tbz2" => &[Tar, Bzip],
b"tbz3" => &[Tar, Bzip3],
b"tlz4" => &[Tar, Lz4],
b"txz" | b"tlzma" => &[Tar, Lzma],
b"tsz" => &[Tar, Snappy],
b"tzst" => &[Tar, Zstd],
b"zip" => &[Zip],
b"bz" | b"bz2" => &[Bzip],
b"bz3" => &[Bzip3],
b"gz" => &[Gzip],
b"lz4" => &[Lz4],
b"xz" | b"lzma" => &[Lzma],
b"sz" => &[Snappy],
b"zst" => &[Zstd],
b"rar" => &[Rar],
b"7z" => &[SevenZip],
b"br" => &[Brotli],
_ => return None,
},
ext.to_str_lossy(),
))
}
fn split_extension_at_end(name: &[u8]) -> Option<(&[u8], Extension)> {
fn split_extension(name: &mut &[u8]) -> Option<Extension> {
let (new_name, ext) = name.rsplit_once_str(b".")?;
if matches!(new_name, b"" | b"." | b"..") {
return None;
}
let ext = to_extension(ext)?;
Some((new_name, ext))
*name = new_name;
Some(ext)
}
pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
let format = input.as_encoded_bytes();
let format = std::str::from_utf8(format).map_err(|_| Error::InvalidFormatFlag {
text: input.to_owned(),
reason: "Invalid UTF-8.".to_string(),
pub fn parse_format(fmt: &OsStr) -> crate::Result<Vec<Extension>> {
let fmt = <[u8] as ByteSlice>::from_os_str(fmt).ok_or_else(|| Error::InvalidFormat {
reason: "Invalid UTF-8".into(),
})?;
let extensions: Vec<Extension> = format
.split('.')
.filter(|extension| !extension.is_empty())
.map(|extension| {
to_extension(extension.as_bytes()).ok_or_else(|| Error::InvalidFormatFlag {
text: input.to_owned(),
reason: format!("Unsupported extension '{}'", extension),
})
})
.collect::<crate::Result<_>>()?;
if extensions.is_empty() {
return Err(Error::InvalidFormatFlag {
text: input.to_owned(),
reason: "Parsing got an empty list of extensions.".to_string(),
});
let mut extensions = Vec::new();
for extension in fmt.split_str(b".") {
let extension = to_extension(extension).ok_or_else(|| Error::InvalidFormat {
reason: format!("Unsupported extension: {}", extension.to_str_lossy()),
})?;
extensions.push(extension);
}
Ok(extensions)
@ -190,60 +141,36 @@ pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
/// Extracts extensions from a path.
///
/// Returns both the remaining path and the list of extension objects.
pub fn separate_known_extensions_from_name(path: &Path) -> Result<(&Path, Vec<Extension>)> {
/// Returns both the remaining path and the list of extension objects
pub fn separate_known_extensions_from_name(path: &Path) -> (&Path, Vec<Extension>) {
let mut extensions = vec![];
let Some(mut name) = path.file_name().and_then(<[u8] as ByteSlice>::from_os_str) else {
return Ok((path, extensions));
return (path, extensions);
};
while let Some((new_name, extension)) = split_extension_at_end(name) {
name = new_name;
// While there are known extensions at the tail, grab them
while let Some(extension) = split_extension(&mut name) {
extensions.insert(0, extension);
if extensions[0].is_archive() {
if let Some((_, misplaced_extension)) = split_extension_at_end(name) {
let mut error = FinalError::with_title("File extensions are invalid for operation").detail(format!(
"The archive extension '.{}' can only be placed at the start of the extension list",
extensions[0].display_text,
));
if misplaced_extension.compression_formats == extensions[0].compression_formats {
error = error.detail(format!(
"File: '{path:?}' contains '.{}' and '.{}'",
misplaced_extension.display_text, extensions[0].display_text,
));
}
return Err(error
.hint("You can use `--format` to specify what format to use, examples:")
.hint(" ouch compress file.zip.zip file --format zip")
.hint(" ouch decompress file --format zst")
.hint(" ouch list archive --format tar.gz")
.into());
}
break;
}
}
if let Ok(name) = name.to_str() {
let file_stem = name.trim_matches('.');
if SUPPORTED_EXTENSIONS.contains(&file_stem) || SUPPORTED_ALIASES.contains(&file_stem) {
warning(format!(
"Received a file with name '{file_stem}', but {file_stem} was expected as the extension"
));
if SUPPORTED_EXTENSIONS.contains(&file_stem) {
warning!("Received a file with name '{file_stem}', but {file_stem} was expected as the extension.");
}
}
Ok((name.to_path().unwrap(), extensions))
(name.to_path().unwrap(), extensions)
}
/// Extracts extensions from a path, return only the list of extension objects
pub fn extensions_from_path(path: &Path) -> Result<Vec<Extension>> {
separate_known_extensions_from_name(path).map(|(_, extensions)| extensions)
pub fn extensions_from_path(path: &Path) -> Vec<Extension> {
let (_, extensions) = separate_known_extensions_from_name(path);
extensions
}
/// Panics if formats has an empty list of compression formats
// Panics if formats has an empty list of compression formats
pub fn split_first_compression_format(formats: &[Extension]) -> (CompressionFormat, Vec<CompressionFormat>) {
let mut extensions: Vec<CompressionFormat> = flatten_compression_formats(formats);
let first_extension = extensions.remove(0);
@ -281,7 +208,7 @@ pub fn build_archive_file_suggestion(path: &Path, suggested_extension: &str) ->
// If the extension we got is a supported extension, generate the suggestion
// at the position we found
if SUPPORTED_EXTENSIONS.contains(&maybe_extension) || SUPPORTED_ALIASES.contains(&maybe_extension) {
if SUPPORTED_EXTENSIONS.contains(&maybe_extension) {
let mut path = path.to_string();
path.insert_str(position_to_insert - 1, suggested_extension);
@ -294,81 +221,21 @@ pub fn build_archive_file_suggestion(path: &Path, suggested_extension: &str) ->
#[cfg(test)]
mod tests {
use std::path::Path;
use super::*;
#[test]
fn test_extensions_from_path() {
use CompressionFormat::*;
let path = Path::new("bolovo.tar.gz");
let extensions = extensions_from_path(path).unwrap();
let formats = flatten_compression_formats(&extensions);
let extensions: Vec<Extension> = extensions_from_path(path);
let formats: Vec<CompressionFormat> = flatten_compression_formats(&extensions);
assert_eq!(formats, vec![Tar, Gzip]);
}
#[test]
/// Test extension parsing for input/output files
fn test_separate_known_extensions_from_name() {
assert_eq!(
separate_known_extensions_from_name("file".as_ref()).unwrap(),
("file".as_ref(), vec![])
);
assert_eq!(
separate_known_extensions_from_name("tar".as_ref()).unwrap(),
("tar".as_ref(), vec![])
);
assert_eq!(
separate_known_extensions_from_name(".tar".as_ref()).unwrap(),
(".tar".as_ref(), vec![])
);
assert_eq!(
separate_known_extensions_from_name("file.tar".as_ref()).unwrap(),
("file".as_ref(), vec![Extension::new(&[Tar], "tar")])
);
assert_eq!(
separate_known_extensions_from_name("file.tar.gz".as_ref()).unwrap(),
(
"file".as_ref(),
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
)
);
assert_eq!(
separate_known_extensions_from_name(".tar.gz".as_ref()).unwrap(),
(".tar".as_ref(), vec![Extension::new(&[Gzip], "gz")])
);
}
#[test]
/// Test extension parsing of `--format FORMAT`
fn test_parse_of_format_flag() {
assert_eq!(
parse_format_flag(OsStr::new("tar")).unwrap(),
vec![Extension::new(&[Tar], "tar")]
);
assert_eq!(
parse_format_flag(OsStr::new(".tar")).unwrap(),
vec![Extension::new(&[Tar], "tar")]
);
assert_eq!(
parse_format_flag(OsStr::new("tar.gz")).unwrap(),
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
);
assert_eq!(
parse_format_flag(OsStr::new(".tar.gz")).unwrap(),
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
);
assert_eq!(
parse_format_flag(OsStr::new("..tar..gz.....")).unwrap(),
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
);
assert!(parse_format_flag(OsStr::new("../tar.gz")).is_err());
assert!(parse_format_flag(OsStr::new("targz")).is_err());
assert!(parse_format_flag(OsStr::new("tar.gz.unknown")).is_err());
assert!(parse_format_flag(OsStr::new(".tar.gz.unknown")).is_err());
assert!(parse_format_flag(OsStr::new(".tar.!@#.gz")).is_err());
}
#[test]
fn builds_suggestion_correctly() {
assert_eq!(build_archive_file_suggestion(Path::new("linux.png"), ".tar"), None);
@ -389,10 +256,4 @@ mod tests {
"linux.pkg.info.tar.zst"
);
}
#[test]
fn test_extension_parsing_with_multiple_archive_formats() {
assert!(separate_known_extensions_from_name("file.tar.zip".as_ref()).is_err());
assert!(separate_known_extensions_from_name("file.7z.zst.zip.lz4".as_ref()).is_err());
}
}


@ -1,7 +1,7 @@
//! Some implementation helpers related to the 'list' command.
use std::{
io::{stdout, BufWriter, Write},
io::{stdout, Write},
path::{Path, PathBuf},
};
@ -32,16 +32,16 @@ pub fn list_files(
files: impl IntoIterator<Item = crate::Result<FileInArchive>>,
list_options: ListOptions,
) -> crate::Result<()> {
let mut out = BufWriter::new(stdout().lock());
let out = &mut stdout().lock();
let _ = writeln!(out, "Archive: {}", EscapedPathDisplay::new(archive));
if list_options.tree {
let tree = files.into_iter().collect::<crate::Result<Tree>>()?;
tree.print(&mut out);
tree.print(out);
} else {
for file in files {
let FileInArchive { path, is_dir } = file?;
print_entry(&mut out, EscapedPathDisplay::new(&path), is_dir);
print_entry(out, EscapedPathDisplay::new(&path), is_dir);
}
}
Ok(())
@ -78,6 +78,7 @@ mod tree {
use std::{
ffi::{OsStr, OsString},
io::Write,
iter::FromIterator,
path,
};
@ -85,7 +86,7 @@ mod tree {
use linked_hash_map::LinkedHashMap;
use super::FileInArchive;
use crate::utils::{logger::warning, EscapedPathDisplay};
use crate::{utils::EscapedPathDisplay, warning};
/// Directory tree
#[derive(Debug, Default)]
@ -119,10 +120,10 @@ mod tree {
match &self.file {
None => self.file = Some(file),
Some(file) => {
warning(format!(
warning!(
"multiple files with the same name in a single directory ({})",
EscapedPathDisplay::new(&file.path),
));
);
}
}
}
@ -143,7 +144,7 @@ mod tree {
false => draw::FINAL_BRANCH,
};
let _ = write!(out, "{prefix}{final_part}");
print!("{prefix}{final_part}");
let is_dir = match self.file {
Some(FileInArchive { is_dir, .. }) => is_dir,
None => true,

58
src/macros.rs Normal file
View File

@ -0,0 +1,58 @@
//! Macros used on ouch.
/// Macro that prints \[INFO\] messages, wraps [`eprintln`].
///
/// There are essentially two different versions of the `info!()` macro:
/// - `info!(accessible, ...)` should only be used for short, important
/// information which is expected to be useful for e.g. blind users whose
/// text-to-speech systems read out every output line, which is why we
/// should reduce nonessential output to a minimum when running in
/// ACCESSIBLE mode
/// - `info!(inaccessible, ...)` can be used more carelessly / for less
/// important information. A seeing user can easily skim through more lines
/// of output, so e.g. reporting every single processed file can be helpful,
/// while it would generate long and hard to navigate text for blind people
/// who have to have each line of output read to them aloud, without the
/// ability to skip some lines deemed unimportant like a seeing person would.
#[macro_export]
macro_rules! info {
// Accessible (short/important) info message.
// Show info message even in ACCESSIBLE mode
(accessible, $($arg:tt)*) => {{
use $crate::utils::colors::{YELLOW, RESET};
if $crate::accessible::is_running_in_accessible_mode() {
eprint!("{}Info:{} ", *YELLOW, *RESET);
} else {
eprint!("{}[INFO]{} ", *YELLOW, *RESET);
}
eprintln!($($arg)*);
}};
// Inaccessible (long/less important) info message.
// Print info message if ACCESSIBLE is not turned on
(inaccessible, $($arg:tt)*) => {{
use $crate::utils::colors::{YELLOW, RESET};
if !$crate::accessible::is_running_in_accessible_mode() {
eprint!("{}[INFO]{} ", *YELLOW, *RESET);
eprintln!($($arg)*);
}
}};
}
/// Macro that prints WARNING messages, wraps [`eprintln`].
#[macro_export]
macro_rules! warning {
($($arg:tt)*) => {{
use $crate::utils::colors::{ORANGE, RESET};
if $crate::accessible::is_running_in_accessible_mode() {
eprint!("{}Warning:{} ", *ORANGE, *RESET);
} else {
eprint!("{}[WARNING]{} ", *ORANGE, *RESET);
}
eprintln!($($arg)*);
}};
}
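// Illustrative usage sketch, not part of the diff: how call sites inside the ouch crate
// are expected to invoke these macros (the helper name and messages below are hypothetical,
// assuming `crate::accessible` and `crate::utils::colors` exist as referenced above).
fn _macro_usage_sketch() {
    info!(accessible, "Successfully compressed '{}'", "output.zip"); // shown even in ACCESSIBLE mode
    info!(inaccessible, "Compressing '{}'", "input"); // suppressed when ACCESSIBLE mode is on
    warning!("multiple files with the same name in a single directory"); // warnings are always shown
}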

View File

@ -1,3 +1,6 @@
// Macros should be declared first
pub mod macros;
pub mod accessible;
pub mod archive;
pub mod check;
@ -11,15 +14,9 @@ pub mod utils;
use std::{env, path::PathBuf};
use cli::CliArgs;
use error::{Error, Result};
use once_cell::sync::Lazy;
use self::{
error::{Error, Result},
utils::{
logger::{shutdown_logger_and_wait, spawn_logger_thread},
QuestionAction, QuestionPolicy,
},
};
use utils::{QuestionAction, QuestionPolicy};
// Used in BufReader and BufWriter to perform less syscalls
const BUFFER_CAPACITY: usize = 1024 * 32;
@ -31,17 +28,13 @@ static CURRENT_DIRECTORY: Lazy<PathBuf> = Lazy::new(|| env::current_dir().unwrap
pub const EXIT_FAILURE: i32 = libc::EXIT_FAILURE;
fn main() {
spawn_logger_thread();
let result = run();
shutdown_logger_and_wait();
if let Err(err) = result {
if let Err(err) = run() {
eprintln!("{err}");
std::process::exit(EXIT_FAILURE);
}
}
fn run() -> Result<()> {
let (args, skip_questions_positively, file_visibility_policy) = CliArgs::parse_and_validate_args()?;
let (args, skip_questions_positively, file_visibility_policy) = CliArgs::parse_args()?;
commands::run(args, skip_questions_positively, file_visibility_policy)
}

View File

@ -69,18 +69,11 @@ impl FileVisibilityPolicy {
/// Walks through a directory using [`ignore::Walk`]
pub fn build_walker(&self, path: impl AsRef<Path>) -> ignore::Walk {
let mut builder = ignore::WalkBuilder::new(path);
builder
ignore::WalkBuilder::new(path)
.git_exclude(self.read_git_exclude)
.git_ignore(self.read_git_ignore)
.ignore(self.read_ignore)
.hidden(self.read_hidden);
if self.read_git_ignore {
builder.filter_entry(|p| p.path().file_name().is_some_and(|name| name != ".git"));
}
builder.build()
.hidden(self.read_hidden)
.build()
}
}
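// Illustrative sketch, not part of the diff: consuming the walker built above. `_walk_sketch`
// is a hypothetical helper; `ignore::Walk` yields `Result<DirEntry, Error>` items, so errors
// are simply skipped here via `flatten()`.
fn _walk_sketch(policy: &FileVisibilityPolicy) {
    for entry in policy.build_walker("some/dir").flatten() {
        println!("{}", entry.path().display());
    }
}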

View File

@ -1,4 +1,4 @@
use std::{borrow::Cow, cmp, ffi::OsStr, fmt::Display, path::Path};
use std::{borrow::Cow, fmt::Display, path::Path};
use crate::CURRENT_DIRECTORY;
@ -45,11 +45,7 @@ impl Display for EscapedPathDisplay<'_> {
/// This is different from [`Path::display`].
///
/// See <https://gist.github.com/marcospb19/ebce5572be26397cf08bbd0fd3b65ac1> for a comparison.
pub fn path_to_str(path: &Path) -> Cow<str> {
os_str_to_str(path.as_ref())
}
pub fn os_str_to_str(os_str: &OsStr) -> Cow<str> {
pub fn to_utf(os_str: &Path) -> Cow<str> {
let format = || {
let text = format!("{os_str:?}");
Cow::Owned(text.trim_matches('"').to_string())
@ -69,15 +65,15 @@ pub fn strip_cur_dir(source_path: &Path) -> &Path {
/// Converts a slice of `AsRef<OsStr>` to a comma-separated String
///
/// Panics if the slice is empty.
pub fn pretty_format_list_of_paths(paths: &[impl AsRef<Path>]) -> String {
let mut iter = paths.iter().map(AsRef::as_ref);
pub fn pretty_format_list_of_paths(os_strs: &[impl AsRef<Path>]) -> String {
let mut iter = os_strs.iter().map(AsRef::as_ref);
let first_path = iter.next().unwrap();
let mut string = path_to_str(first_path).into_owned();
let first_element = iter.next().unwrap();
let mut string = to_utf(first_element).into_owned();
for path in iter {
for os_str in iter {
string += ", ";
string += &path_to_str(path);
string += &to_utf(os_str);
}
string
}
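// Illustrative sketch, not part of the diff, of the expected formatting:
//   pretty_format_list_of_paths(&["a.txt", "b.txt", "c.txt"]) == "a.txt, b.txt, c.txt"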
@ -87,84 +83,6 @@ pub fn nice_directory_display(path: &Path) -> Cow<str> {
if path == Path::new(".") {
Cow::Borrowed("current directory")
} else {
path_to_str(path)
}
}
/// Struct useful to printing bytes as kB, MB, GB, etc.
pub struct Bytes(f64);
impl Bytes {
const UNIT_PREFIXES: [&'static str; 6] = ["", "ki", "Mi", "Gi", "Ti", "Pi"];
/// Create a new Bytes.
pub fn new(bytes: u64) -> Self {
Self(bytes as f64)
}
}
impl std::fmt::Display for Bytes {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let num = self.0;
debug_assert!(num >= 0.0);
if num < 1_f64 {
return write!(f, "{:>6.2} B", num);
}
let delimiter = 1000_f64;
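// note: 6.90775 ≈ ln(1000), so `exponent` counts how many powers of 1000 fit in `num`,
// capped at index 4 ("Ti") of UNIT_PREFIXES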
let exponent = cmp::min((num.ln() / 6.90775).floor() as i32, 4);
write!(
f,
"{:>6.2} {:>2}B",
num / delimiter.powi(exponent),
Bytes::UNIT_PREFIXES[exponent as usize],
)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_pretty_bytes_formatting() {
fn format_bytes(bytes: u64) -> String {
format!("{}", Bytes::new(bytes))
}
let b = 1;
let kb = b * 1000;
let mb = kb * 1000;
let gb = mb * 1000;
assert_eq!(" 0.00 B", format_bytes(0)); // This is weird
assert_eq!(" 1.00 B", format_bytes(b));
assert_eq!("999.00 B", format_bytes(b * 999));
assert_eq!(" 12.00 MiB", format_bytes(mb * 12));
assert_eq!("123.00 MiB", format_bytes(mb * 123));
assert_eq!(" 5.50 MiB", format_bytes(mb * 5 + kb * 500));
assert_eq!(" 7.54 GiB", format_bytes(gb * 7 + 540 * mb));
assert_eq!(" 1.20 TiB", format_bytes(gb * 1200));
// bytes
assert_eq!("234.00 B", format_bytes(234));
assert_eq!("999.00 B", format_bytes(999));
// kilobytes
assert_eq!(" 2.23 kiB", format_bytes(2234));
assert_eq!(" 62.50 kiB", format_bytes(62500));
assert_eq!("329.99 kiB", format_bytes(329990));
// megabytes
assert_eq!(" 2.75 MiB", format_bytes(2750000));
assert_eq!(" 55.00 MiB", format_bytes(55000000));
assert_eq!("987.65 MiB", format_bytes(987654321));
// gigabytes
assert_eq!(" 5.28 GiB", format_bytes(5280000000));
assert_eq!(" 95.20 GiB", format_bytes(95200000000));
assert_eq!("302.00 GiB", format_bytes(302000000000));
assert_eq!("302.99 GiB", format_bytes(302990000000));
// Weird approximation cases:
assert_eq!("999.90 GiB", format_bytes(999900000000));
assert_eq!(" 1.00 TiB", format_bytes(999990000000));
to_utf(path)
}
}

View File

@ -8,45 +8,22 @@ use std::{
use fs_err as fs;
use super::{question::FileConflitOperation, user_wants_to_overwrite};
use crate::{
extension::Extension,
utils::{logger::info_accessible, EscapedPathDisplay, QuestionAction},
QuestionPolicy,
};
use super::user_wants_to_overwrite;
use crate::{extension::Extension, info, utils::EscapedPathDisplay, QuestionPolicy};
pub fn is_path_stdin(path: &Path) -> bool {
path.as_os_str() == "-"
}
/// Check if the given `&Path` exists; if it does, ask the user whether they want to overwrite or rename it.
/// If the user wants to overwrite, the file or directory is removed and the same input path is returned.
/// If the user wants to rename, nothing is removed and a new path with a new name is returned.
/// Remove `path` asking the user to overwrite if necessary.
///
/// * `Ok(None)` means the user wants to cancel the operation
/// * `Ok(Some(path))` returns a valid PathBuf with no other file or directory sharing the same name
/// * `Ok(true)` means the path is clear,
/// * `Ok(false)` means the user doesn't want to overwrite
/// * `Err(_)` is an error
pub fn resolve_path_conflict(
path: &Path,
question_policy: QuestionPolicy,
question_action: QuestionAction,
) -> crate::Result<Option<PathBuf>> {
if path.exists() {
match user_wants_to_overwrite(path, question_policy, question_action)? {
FileConflitOperation::Cancel => Ok(None),
FileConflitOperation::Overwrite => {
pub fn clear_path(path: &Path, question_policy: QuestionPolicy) -> crate::Result<bool> {
if path.exists() && !user_wants_to_overwrite(path, question_policy)? {
return Ok(false);
}
remove_file_or_dir(path)?;
Ok(Some(path.to_path_buf()))
}
FileConflitOperation::Rename => {
let renamed_path = rename_for_available_filename(path);
Ok(Some(renamed_path))
}
FileConflitOperation::Merge => Ok(Some(path.to_path_buf())),
}
} else {
Ok(Some(path.to_path_buf()))
}
Ok(true)
}
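// Illustrative sketch, not part of the diff: a hypothetical caller picking a writable output
// path with the `resolve_path_conflict` API above (relies on the imports at the top of this file).
fn _conflict_sketch(output: &Path, policy: QuestionPolicy) -> crate::Result<()> {
    match resolve_path_conflict(output, policy, QuestionAction::Compression)? {
        Some(_path) => { /* `_path` is now free to be created, possibly renamed to `*_1` */ }
        None => { /* the user cancelled the operation */ }
    }
    Ok(())
}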
pub fn remove_file_or_dir(path: &Path) -> crate::Result<()> {
@ -58,48 +35,13 @@ pub fn remove_file_or_dir(path: &Path) -> crate::Result<()> {
Ok(())
}
/// Create a new path, renaming the "filename" from `&Path` to an available name in the same directory
pub fn rename_for_available_filename(path: &Path) -> PathBuf {
let mut renamed_path = rename_or_increment_filename(path);
while renamed_path.exists() {
renamed_path = rename_or_increment_filename(&renamed_path);
}
renamed_path
}
/// Create a new path, renaming the "filename" from `&Path` to `filename_1`;
/// if the name already ends with `_` and a number, the number is incremented instead.
/// Example:
/// - `file.txt` -> `file_1.txt`
/// - `file_1.txt` -> `file_2.txt`
pub fn rename_or_increment_filename(path: &Path) -> PathBuf {
let parent = path.parent().unwrap_or_else(|| Path::new(""));
let filename = path.file_stem().and_then(|s| s.to_str()).unwrap_or("");
let extension = path.extension().and_then(|s| s.to_str()).unwrap_or("");
let new_filename = match filename.rsplit_once('_') {
Some((base, number_str)) if number_str.chars().all(char::is_numeric) => {
let number = number_str.parse::<u32>().unwrap_or(0);
format!("{}_{}", base, number + 1)
}
_ => format!("{}_1", filename),
};
let mut new_path = parent.join(new_filename);
if !extension.is_empty() {
new_path.set_extension(extension);
}
new_path
}
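// Illustrative sketch, not part of the diff: a hypothetical test spelling out the renaming
// rule documented above (assumes `Path`/`PathBuf` and the parent module's functions are in scope).
#[cfg(test)]
mod rename_sketch {
    use std::path::{Path, PathBuf};

    use super::*;

    #[test]
    fn increments_or_appends_suffix() {
        assert_eq!(rename_or_increment_filename(Path::new("file.txt")), PathBuf::from("file_1.txt"));
        assert_eq!(rename_or_increment_filename(Path::new("file_1.txt")), PathBuf::from("file_2.txt"));
        assert_eq!(rename_or_increment_filename(Path::new("notes_a.txt")), PathBuf::from("notes_a_1.txt"));
    }
}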
/// Creates a directory at the path, if there is nothing there.
pub fn create_dir_if_non_existent(path: &Path) -> crate::Result<()> {
if !path.exists() {
fs::create_dir_all(path)?;
// creating a directory is an important change to the file system, so we
// should always inform the user about it
info_accessible(format!("Directory {} created", EscapedPathDisplay::new(path)));
info!(accessible, "directory {} created.", EscapedPathDisplay::new(path));
}
Ok(())
}
@ -132,9 +74,6 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
fn is_bz2(buf: &[u8]) -> bool {
buf.starts_with(&[0x42, 0x5A, 0x68])
}
fn is_bz3(buf: &[u8]) -> bool {
buf.starts_with(b"BZ3v1")
}
fn is_xz(buf: &[u8]) -> bool {
buf.starts_with(&[0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])
}
@ -147,17 +86,6 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
fn is_zst(buf: &[u8]) -> bool {
buf.starts_with(&[0x28, 0xB5, 0x2F, 0xFD])
}
fn is_rar(buf: &[u8]) -> bool {
// ref https://www.rarlab.com/technote.htm#rarsign
// RAR 5.0 8 bytes length signature: 0x52 0x61 0x72 0x21 0x1A 0x07 0x01 0x00
// RAR 4.x 7 bytes length signature: 0x52 0x61 0x72 0x21 0x1A 0x07 0x00
buf.len() >= 7
&& buf.starts_with(&[0x52, 0x61, 0x72, 0x21, 0x1A, 0x07])
&& (buf[6] == 0x00 || (buf.len() >= 8 && buf[6..=7] == [0x01, 0x00]))
}
fn is_sevenz(buf: &[u8]) -> bool {
buf.starts_with(&[0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
}
let buf = {
let mut buf = [0; 270];
@ -181,8 +109,6 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
Some(Extension::new(&[Gzip], "gz"))
} else if is_bz2(&buf) {
Some(Extension::new(&[Bzip], "bz2"))
} else if is_bz3(&buf) {
Some(Extension::new(&[Bzip3], "bz3"))
} else if is_xz(&buf) {
Some(Extension::new(&[Lzma], "xz"))
} else if is_lz4(&buf) {
@ -191,11 +117,16 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
Some(Extension::new(&[Snappy], "sz"))
} else if is_zst(&buf) {
Some(Extension::new(&[Zstd], "zst"))
} else if is_rar(&buf) {
Some(Extension::new(&[Rar], "rar"))
} else if is_sevenz(&buf) {
Some(Extension::new(&[SevenZip], "7z"))
} else {
None
}
}
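// Illustrative sketch, not part of the diff: a hypothetical fallback that sniffs the format
// from the magic bytes above when a file carries no recognizable extension.
fn _sniff_sketch(path: &Path) {
    match try_infer_extension(path) {
        Some(_ext) => { /* treat the file as if it had this extension explicitly */ }
        None => { /* the format really is unknown, bail out with an error */ }
    }
}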
/// Returns true if a path is a symlink.
/// This is the same as the nightly <https://doc.rust-lang.org/std/path/struct.Path.html#method.is_symlink>
/// Useful to detect broken symlinks when compressing. (So we can safely ignore them)
pub fn is_symlink(path: &Path) -> bool {
fs::symlink_metadata(path)
.map(|m| m.file_type().is_symlink())
.unwrap_or(false)
}

View File

@ -1,16 +0,0 @@
use std::io::{self, stderr, stdout, StderrLock, StdoutLock, Write};
use crate::utils::logger;
type StdioOutputLocks = (StdoutLock<'static>, StderrLock<'static>);
pub fn lock_and_flush_output_stdio() -> io::Result<StdioOutputLocks> {
logger::flush_messages();
let mut stdout = stdout().lock();
stdout.flush()?;
let mut stderr = stderr().lock();
stderr.flush()?;
Ok((stdout, stderr))
}

View File

@ -1,231 +0,0 @@
use std::{
sync::{mpsc, Arc, Barrier, OnceLock},
thread,
};
pub use logger_thread::spawn_logger_thread;
use super::colors::{ORANGE, RESET, YELLOW};
use crate::accessible::is_running_in_accessible_mode;
/// Asks the logger to shut down and waits until it flushes all pending messages.
#[track_caller]
pub fn shutdown_logger_and_wait() {
logger_thread::send_shutdown_command_and_wait();
}
/// Asks logger to flush all messages, useful before starting STDIN interaction.
#[track_caller]
pub fn flush_messages() {
logger_thread::send_flush_command_and_wait();
}
/// An `[INFO]` log to be displayed if we're not running accessibility mode.
///
/// Same as `.info_accessible()`, but only displayed if accessibility mode
/// is turned off, which is detected by the function
/// `is_running_in_accessible_mode`.
///
/// Read more about accessibility mode in `accessible.rs`.
#[track_caller]
pub fn info(contents: String) {
info_with_accessibility(contents, false);
}
/// An `[INFO]` log to be displayed.
///
/// Same as `.info()`, but also displays if `is_running_in_accessible_mode`
/// returns `true`.
///
/// Read more about accessibility mode in `accessible.rs`.
#[track_caller]
pub fn info_accessible(contents: String) {
info_with_accessibility(contents, true);
}
#[track_caller]
fn info_with_accessibility(contents: String, accessible: bool) {
logger_thread::send_print_command(PrintMessage {
contents,
accessible,
level: MessageLevel::Info,
});
}
#[track_caller]
pub fn warning(contents: String) {
logger_thread::send_print_command(PrintMessage {
contents,
// Warnings are important and unlikely to flood, so they should be displayed
accessible: true,
level: MessageLevel::Warning,
});
}
#[derive(Debug)]
enum LoggerCommand {
Print(PrintMessage),
Flush { finished_barrier: Arc<Barrier> },
FlushAndShutdown { finished_barrier: Arc<Barrier> },
}
/// Message object used for sending logs from worker threads to a logging thread via channels.
/// See <https://github.com/ouch-org/ouch/issues/643>
#[derive(Debug)]
struct PrintMessage {
contents: String,
accessible: bool,
level: MessageLevel,
}
impl PrintMessage {
fn to_formatted_message(&self) -> Option<String> {
match self.level {
MessageLevel::Info => {
if self.accessible {
if is_running_in_accessible_mode() {
Some(format!("{}Info:{} {}", *YELLOW, *RESET, self.contents))
} else {
Some(format!("{}[INFO]{} {}", *YELLOW, *RESET, self.contents))
}
} else if !is_running_in_accessible_mode() {
Some(format!("{}[INFO]{} {}", *YELLOW, *RESET, self.contents))
} else {
None
}
}
MessageLevel::Warning => {
if is_running_in_accessible_mode() {
Some(format!("{}Warning:{} {}", *ORANGE, *RESET, self.contents))
} else {
Some(format!("{}[WARNING]{} {}", *ORANGE, *RESET, self.contents))
}
}
}
}
}
#[derive(Debug, PartialEq)]
enum MessageLevel {
Info,
Warning,
}
mod logger_thread {
use std::{
sync::{mpsc::RecvTimeoutError, Arc, Barrier},
time::Duration,
};
use super::*;
type LogReceiver = mpsc::Receiver<LoggerCommand>;
type LogSender = mpsc::Sender<LoggerCommand>;
static SENDER: OnceLock<LogSender> = OnceLock::new();
#[track_caller]
fn setup_channel() -> Option<LogReceiver> {
let mut optional = None;
SENDER.get_or_init(|| {
let (tx, rx) = mpsc::channel();
optional = Some(rx);
tx
});
optional
}
#[track_caller]
fn get_sender() -> &'static LogSender {
SENDER.get().expect("No sender, you need to call `setup_channel` first")
}
#[track_caller]
pub(super) fn send_print_command(msg: PrintMessage) {
if cfg!(test) {
spawn_logger_thread();
}
get_sender()
.send(LoggerCommand::Print(msg))
.expect("Failed to send print command");
}
#[track_caller]
pub(super) fn send_flush_command_and_wait() {
let barrier = Arc::new(Barrier::new(2));
get_sender()
.send(LoggerCommand::Flush {
finished_barrier: barrier.clone(),
})
.expect("Failed to send flush command");
barrier.wait();
}
#[track_caller]
pub(super) fn send_shutdown_command_and_wait() {
let barrier = Arc::new(Barrier::new(2));
get_sender()
.send(LoggerCommand::FlushAndShutdown {
finished_barrier: barrier.clone(),
})
.expect("Failed to send shutdown command");
barrier.wait();
}
pub fn spawn_logger_thread() {
if let Some(log_receiver) = setup_channel() {
thread::spawn(move || run_logger(log_receiver));
}
}
fn run_logger(log_receiver: LogReceiver) {
const FLUSH_TIMEOUT: Duration = Duration::from_millis(200);
let mut buffer = Vec::<String>::with_capacity(16);
loop {
let msg = match log_receiver.recv_timeout(FLUSH_TIMEOUT) {
Ok(msg) => msg,
Err(RecvTimeoutError::Timeout) => {
flush_logs_to_stderr(&mut buffer);
continue;
}
Err(RecvTimeoutError::Disconnected) => unreachable!("sender is static"),
};
match msg {
LoggerCommand::Print(msg) => {
// Append message to buffer
if let Some(msg) = msg.to_formatted_message() {
buffer.push(msg);
}
if buffer.len() == buffer.capacity() {
flush_logs_to_stderr(&mut buffer);
}
}
LoggerCommand::Flush { finished_barrier } => {
flush_logs_to_stderr(&mut buffer);
finished_barrier.wait();
}
LoggerCommand::FlushAndShutdown { finished_barrier } => {
flush_logs_to_stderr(&mut buffer);
finished_barrier.wait();
return;
}
}
}
}
fn flush_logs_to_stderr(buffer: &mut Vec<String>) {
if !buffer.is_empty() {
let text = buffer.join("\n");
eprintln!("{text}");
buffer.clear();
}
}
}
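// Illustrative lifecycle sketch, not part of the diff: the intended call order, mirroring how
// `main` drives this module (spawn once, log from any thread, then flush-and-shutdown).
fn _logger_lifecycle_sketch() {
    spawn_logger_thread(); // start the background printer once
    info("Compressing 'input'".to_string()); // worker threads only send messages over the channel
    warning("output file already exists".to_string());
    shutdown_logger_and_wait(); // blocks until every queued message has been flushed to stderr
}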

View File

@ -7,26 +7,17 @@ pub mod colors;
mod file_visibility;
mod formatting;
mod fs;
pub mod io;
pub mod logger;
mod question;
pub use self::{
file_visibility::FileVisibilityPolicy,
formatting::{
nice_directory_display, os_str_to_str, path_to_str, pretty_format_list_of_paths, strip_cur_dir, Bytes,
EscapedPathDisplay,
},
fs::{
cd_into_same_dir_as, create_dir_if_non_existent, is_path_stdin, remove_file_or_dir,
rename_for_available_filename, resolve_path_conflict, try_infer_extension,
},
question::{
ask_to_create_file, user_wants_to_continue, user_wants_to_overwrite, FileConflitOperation, QuestionAction,
QuestionPolicy,
},
utf8::{get_invalid_utf8_paths, is_invalid_utf8},
pub use file_visibility::FileVisibilityPolicy;
pub use formatting::{nice_directory_display, pretty_format_list_of_paths, strip_cur_dir, to_utf, EscapedPathDisplay};
pub use fs::{
cd_into_same_dir_as, clear_path, create_dir_if_non_existent, is_symlink, remove_file_or_dir, try_infer_extension,
};
pub use question::{
ask_to_create_file, user_wants_to_continue, user_wants_to_overwrite, QuestionAction, QuestionPolicy,
};
pub use utf8::{get_invalid_utf8_paths, is_invalid_utf8};
mod utf8 {
use std::{ffi::OsStr, path::PathBuf};
@ -38,6 +29,9 @@ mod utf8 {
/// Filter out list of paths that are not utf8 valid
pub fn get_invalid_utf8_paths(paths: &[PathBuf]) -> Vec<&PathBuf> {
paths.iter().filter(|path| is_invalid_utf8(path)).collect()
paths
.iter()
.filter_map(|path| is_invalid_utf8(path).then_some(path))
.collect()
}
}

View File

@ -5,16 +5,17 @@
use std::{
borrow::Cow,
io::{stdin, BufRead, IsTerminal},
io::{self, Write},
path::Path,
};
use fs_err as fs;
use super::{strip_cur_dir, to_utf};
use crate::{
accessible::is_running_in_accessible_mode,
error::{Error, FinalError, Result},
utils::{self, colors, formatting::path_to_str, io::lock_and_flush_output_stdio, strip_cur_dir},
utils::{self, colors},
};
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
@ -37,91 +38,31 @@ pub enum QuestionAction {
Decompression,
}
#[derive(Default)]
/// Determines which action to do when there is a file conflict
pub enum FileConflitOperation {
#[default]
/// Cancel the operation
Cancel,
/// Overwrite the existing file with the new one
Overwrite,
/// Rename the file
/// A "_1" suffix is appended to the filename, or "_2", "_3", "_4", ... if that name already exists
Rename,
/// Merge conflicting folders
Merge,
}
/// Check if QuestionPolicy flags were set, otherwise, ask user if they want to overwrite.
pub fn user_wants_to_overwrite(
path: &Path,
question_policy: QuestionPolicy,
question_action: QuestionAction,
) -> crate::Result<FileConflitOperation> {
use FileConflitOperation as Op;
pub fn user_wants_to_overwrite(path: &Path, question_policy: QuestionPolicy) -> crate::Result<bool> {
match question_policy {
QuestionPolicy::AlwaysYes => Ok(Op::Overwrite),
QuestionPolicy::AlwaysNo => Ok(Op::Cancel),
QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action),
QuestionPolicy::AlwaysYes => Ok(true),
QuestionPolicy::AlwaysNo => Ok(false),
QuestionPolicy::Ask => {
let path = to_utf(strip_cur_dir(path));
let path = Some(&*path);
let placeholder = Some("FILE");
Confirmation::new("Do you want to overwrite 'FILE'?", placeholder).ask(path)
}
}
/// Ask the user if they want to overwrite or rename the &Path
pub fn ask_file_conflict_operation(path: &Path, question_action: QuestionAction) -> Result<FileConflitOperation> {
use FileConflitOperation as Op;
let path = path_to_str(strip_cur_dir(path));
match question_action {
QuestionAction::Compression => ChoicePrompt::new(
format!("Do you want to overwrite {path}?"),
[
("yes", Op::Overwrite, *colors::GREEN),
("no", Op::Cancel, *colors::RED),
("rename", Op::Rename, *colors::BLUE),
],
)
.ask(),
QuestionAction::Decompression => ChoicePrompt::new(
format!("Do you want to overwrite {path}?"),
[
("yes", Op::Overwrite, *colors::GREEN),
("no", Op::Cancel, *colors::RED),
("rename", Op::Rename, *colors::BLUE),
("merge", Op::Merge, *colors::ORANGE),
],
)
.ask(),
}
}
/// Create the file if it doesn't exist; if it does, ask whether to overwrite it.
/// If the user doesn't want to overwrite, we return [`Ok(None)`]
pub fn ask_to_create_file(
path: &Path,
question_policy: QuestionPolicy,
question_action: QuestionAction,
) -> Result<Option<fs::File>> {
pub fn ask_to_create_file(path: &Path, question_policy: QuestionPolicy) -> Result<Option<fs::File>> {
match fs::OpenOptions::new().write(true).create_new(true).open(path) {
Ok(w) => Ok(Some(w)),
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {
let action = match question_policy {
QuestionPolicy::AlwaysYes => FileConflitOperation::Overwrite,
QuestionPolicy::AlwaysNo => FileConflitOperation::Cancel,
QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action)?,
};
match action {
FileConflitOperation::Merge => Ok(Some(fs::File::create(path)?)),
FileConflitOperation::Overwrite => {
if user_wants_to_overwrite(path, question_policy)? {
utils::remove_file_or_dir(path)?;
Ok(Some(fs::File::create(path)?))
}
FileConflitOperation::Cancel => Ok(None),
FileConflitOperation::Rename => {
let renamed_file_path = utils::rename_for_available_filename(path);
Ok(Some(fs::File::create(renamed_file_path)?))
}
} else {
Ok(None)
}
}
Err(e) => Err(Error::from(e)),
@ -142,7 +83,7 @@ pub fn user_wants_to_continue(
QuestionAction::Compression => "compress",
QuestionAction::Decompression => "decompress",
};
let path = path_to_str(strip_cur_dir(path));
let path = to_utf(strip_cur_dir(path));
let path = Some(&*path);
let placeholder = Some("FILE");
Confirmation::new(&format!("Do you want to {action} 'FILE'?"), placeholder).ask(path)
@ -150,108 +91,6 @@ pub fn user_wants_to_continue(
}
}
/// Choice dialog for the end user with an [option1/option2/...] question.
/// Each option is a [Choice] entity, holding a value `T` returned when that option is selected
pub struct ChoicePrompt<'a, T: Default> {
/// The message to be displayed before the options
/// e.g.: "Do you want to overwrite 'FILE'?"
pub prompt: String,
pub choises: Vec<Choice<'a, T>>,
}
/// A single choice shown as an option to the user in a [ChoicePrompt]
/// It holds a label and a color to display to the user, and a real value to be returned
pub struct Choice<'a, T: Default> {
label: &'a str,
value: T,
color: &'a str,
}
impl<'a, T: Default> ChoicePrompt<'a, T> {
/// Creates a new ChoicePrompt.
pub fn new(prompt: impl Into<String>, choises: impl IntoIterator<Item = (&'a str, T, &'a str)>) -> Self {
Self {
prompt: prompt.into(),
choises: choises
.into_iter()
.map(|(label, value, color)| Choice { label, value, color })
.collect(),
}
}
/// Creates the user message and reads input to be compared against each choice's label,
/// returning the real value of the selected choice
pub fn ask(mut self) -> crate::Result<T> {
let message = self.prompt;
#[cfg(not(feature = "allow_piped_choice"))]
if !stdin().is_terminal() {
eprintln!("{}", message);
eprintln!("Pass --yes to proceed");
return Ok(T::default());
}
let _locks = lock_and_flush_output_stdio()?;
let mut stdin_lock = stdin().lock();
// Keep asking the same question until a valid answer is given
loop {
let choice_prompt = if is_running_in_accessible_mode() {
self.choises
.iter()
.map(|choise| format!("{}{}{}", choise.color, choise.label, *colors::RESET))
.collect::<Vec<_>>()
.join("/")
} else {
let choises = self
.choises
.iter()
.map(|choise| {
format!(
"{}{}{}",
choise.color,
choise
.label
.chars()
.nth(0)
.expect("dev error, should be reported, we checked this won't happen"),
*colors::RESET
)
})
.collect::<Vec<_>>()
.join("/");
format!("[{}]", choises)
};
eprintln!("{} {}", message, choice_prompt);
let mut answer = String::new();
let bytes_read = stdin_lock.read_line(&mut answer)?;
if bytes_read == 0 {
let error = FinalError::with_title("Unexpected EOF when asking question.")
.detail("When asking the user:")
.detail(format!(" \"{message}\""))
.detail("Expected one of the options as answer, but found EOF instead.")
.hint("If using Ouch in scripting, consider using `--yes` and `--no`.");
return Err(error.into());
}
answer.make_ascii_lowercase();
let answer = answer.trim();
let chosen_index = self.choises.iter().position(|choise| choise.label.starts_with(answer));
if let Some(i) = chosen_index {
return Ok(self.choises.remove(i).value);
}
}
}
}
/// Confirmation dialog for end user with [Y/n] question.
///
/// If the placeholder is found in the prompt text, it will be replaced to form the final message.
@ -282,20 +121,10 @@ impl<'a> Confirmation<'a> {
(Some(placeholder), Some(subs)) => Cow::Owned(self.prompt.replace(placeholder, subs)),
};
#[cfg(not(feature = "allow_piped_choice"))]
if !stdin().is_terminal() {
eprintln!("{}", message);
eprintln!("Pass --yes to proceed");
return Ok(false);
}
let _locks = lock_and_flush_output_stdio()?;
let mut stdin_lock = stdin().lock();
// Keep asking the same question until a valid answer is given
loop {
if is_running_in_accessible_mode() {
eprintln!(
print!(
"{} {}yes{}/{}no{}: ",
message,
*colors::GREEN,
@ -304,7 +133,7 @@ impl<'a> Confirmation<'a> {
*colors::RESET
);
} else {
eprintln!(
print!(
"{} [{}Y{}/{}n{}] ",
message,
*colors::GREEN,
@ -313,9 +142,10 @@ impl<'a> Confirmation<'a> {
*colors::RESET
);
}
io::stdout().flush()?;
let mut answer = String::new();
let bytes_read = stdin_lock.read_line(&mut answer)?;
let bytes_read = io::stdin().read_line(&mut answer)?;
if bytes_read == 0 {
let error = FinalError::with_title("Unexpected EOF when asking question.")

Binary file not shown.

Binary file not shown.

File diff suppressed because it is too large

View File

@ -17,12 +17,11 @@ fn sanity_check_through_mime() {
write_random_content(test_file, &mut SmallRng::from_entropy());
let formats = [
"7z", "tar", "zip", "tar.gz", "tgz", "tbz", "tbz2", "txz", "tlzma", "tzst", "tar.bz", "tar.bz2", "tar.lzma",
"tar", "zip", "tar.gz", "tgz", "tbz", "tbz2", "txz", "tlzma", "tzst", "tar.bz", "tar.bz2", "tar.lzma",
"tar.xz", "tar.zst",
];
let expected_mimes = [
"application/x-7z-compressed",
"application/x-tar",
"application/zip",
"application/gzip",

View File

@ -1,13 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output\", dir)"
---
[ERROR] Cannot compress to 'output'.
- You shall supply the compression format
hint: Try adding supported extensions (see --help):
hint: ouch compress <FILES>... output.tar.gz
hint: ouch compress <FILES>... output.zip
hint:
hint: Alternatively, you can overwrite this option by using the '--format' flag:
hint: ouch compress <FILES>... output --format tar.gz

View File

@ -1,13 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch decompress a\", dir)"
---
[ERROR] Cannot decompress files
- Files with missing extensions: <TMP_DIR>/a
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Alternatively, you can pass an extension to the '--format' flag:
hint: ouch decompress <TMP_DIR>/a --format tar.gz

View File

@ -1,11 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch decompress a b.unknown\", dir)"
---
[ERROR] Cannot decompress files
- Files with unsupported extensions: <TMP_DIR>/b.unknown
- Files with missing extensions: <TMP_DIR>/a
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst

View File

@ -1,13 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch decompress b.unknown\", dir)"
---
[ERROR] Cannot decompress files
- Files with unsupported extensions: <TMP_DIR>/b.unknown
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Alternatively, you can pass an extension to the '--format' flag:
hint: ouch decompress <TMP_DIR>/b.unknown --format tar.gz

View File

@ -1,13 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch decompress a\", dir)"
---
[ERROR] Cannot decompress files
- Files with missing extensions: <TMP_DIR>/a
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Alternatively, you can pass an extension to the '--format' flag:
hint: ouch decompress <TMP_DIR>/a --format tar.gz

View File

@ -1,11 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch decompress a b.unknown\", dir)"
---
[ERROR] Cannot decompress files
- Files with unsupported extensions: <TMP_DIR>/b.unknown
- Files with missing extensions: <TMP_DIR>/a
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst

View File

@ -1,13 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch decompress b.unknown\", dir)"
---
[ERROR] Cannot decompress files
- Files with unsupported extensions: <TMP_DIR>/b.unknown
- Decompression formats are detected automatically from file extension
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Alternatively, you can pass an extension to the '--format' flag:
hint: ouch decompress <TMP_DIR>/b.unknown --format tar.gz

View File

@ -1,14 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output --format tar.gz.unknown\", dir)"
---
[ERROR] Failed to parse `--format tar.gz.unknown`
- Unsupported extension 'unknown'
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:
hint: --format tar
hint: --format gz
hint: --format tar.gz

View File

@ -1,14 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output --format targz\", dir)"
---
[ERROR] Failed to parse `--format targz`
- Unsupported extension 'targz'
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:
hint: --format tar
hint: --format gz
hint: --format tar.gz

View File

@ -1,14 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output --format .tar.$#!@.rest\", dir)"
---
[ERROR] Failed to parse `--format .tar.$#!@.rest`
- Unsupported extension '$#!@'
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:
hint: --format tar
hint: --format gz
hint: --format tar.gz

View File

@ -1,14 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output --format tar.gz.unknown\", dir)"
---
[ERROR] Failed to parse `--format tar.gz.unknown`
- Unsupported extension 'unknown'
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:
hint: --format tar
hint: --format gz
hint: --format tar.gz

View File

@ -1,14 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output --format targz\", dir)"
---
[ERROR] Failed to parse `--format targz`
- Unsupported extension 'targz'
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:
hint: --format tar
hint: --format gz
hint: --format tar.gz

View File

@ -1,14 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output --format .tar.$#!@.rest\", dir)"
---
[ERROR] Failed to parse `--format .tar.$#!@.rest`
- Unsupported extension '$#!@'
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
hint:
hint: Examples:
hint: --format tar
hint: --format gz
hint: --format tar.gz

View File

@ -1,6 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch decompress a b\", dir)"
---
[ERROR] failed to canonicalize path `a`
- File not found

View File

@ -1,6 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch list a b\", dir)"
---
[ERROR] failed to canonicalize path `a`
- File not found

View File

@ -1,6 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress a b\", dir)"
---
[ERROR] failed to canonicalize path `a`
- File not found

View File

@ -1,5 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output.gz\", dir)"
---
[INFO] Successfully compressed 'output.gz'

View File

@ -1,6 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output.zip\", dir)"
---
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output.zip'

View File

@ -1,6 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch decompress output.zst\", dir)"
---
[INFO] Successfully decompressed archive in current directory
[INFO] Files unpacked: 1

View File

@ -1,13 +0,0 @@
---
source: tests/ui.rs
expression: stdout_lines
---
{
"",
"[INFO] Files unpacked: 4",
"[INFO] Successfully decompressed archive in <TMP_DIR>/outputs",
"[INFO] extracted ( 0.00 B) \"outputs/inputs\"",
"[INFO] extracted ( 0.00 B) \"outputs/inputs/input\"",
"[INFO] extracted ( 0.00 B) \"outputs/inputs/input2\"",
"[INFO] extracted ( 0.00 B) \"outputs/inputs/input3\"",
}

View File

@ -1,6 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output1 --format tar.gz\", dir)"
---
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output1'

View File

@ -1,6 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output2 --format .tar.gz\", dir)"
---
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output2'

View File

@ -1,6 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output1 --format tar.gz\", dir)"
---
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output1'

View File

@ -1,6 +0,0 @@
---
source: tests/ui.rs
expression: "run_ouch(\"ouch compress input output2 --format .tar.gz\", dir)"
---
[INFO] Compressing 'input'
[INFO] Successfully compressed 'output2'

View File

@ -1,27 +0,0 @@
---
source: tests/ui.rs
expression: "output_to_string(ouch!(\"-h\"))"
snapshot_kind: text
---
A command-line utility for easily compressing and decompressing files and directories.
Usage: <OUCH_BIN> [OPTIONS] <COMMAND>
Commands:
compress Compress one or more files into one output file [aliases: c]
decompress Decompresses one or more files, optionally into another folder [aliases: d]
list List contents of an archive [aliases: l, ls]
help Print this message or the help of the given subcommand(s)
Options:
-y, --yes Skip [Y/n] questions, default to yes
-n, --no Skip [Y/n] questions, default to no
-A, --accessible Activate accessibility mode, reducing visual noise [env: ACCESSIBLE=]
-H, --hidden Ignore hidden files
-q, --quiet Silence output
-g, --gitignore Ignore files matched by git's ignore files
-f, --format <FORMAT> Specify the format of the archive
-p, --password <PASSWORD> Decompress or list with password
-c, --threads <THREADS> Concurrent working threads
-h, --help Print help (see more with '--help')
-V, --version Print version

View File

@ -1,54 +0,0 @@
---
source: tests/ui.rs
expression: "output_to_string(ouch!(\"--help\"))"
snapshot_kind: text
---
A command-line utility for easily compressing and decompressing files and directories.
Supported formats: tar, zip, gz, 7z, xz/lzma, bz/bz2, bz3, lz4, sz (Snappy), zst, rar and br.
Repository: https://github.com/ouch-org/ouch
Usage: <OUCH_BIN> [OPTIONS] <COMMAND>
Commands:
compress Compress one or more files into one output file [aliases: c]
decompress Decompresses one or more files, optionally into another folder [aliases: d]
list List contents of an archive [aliases: l, ls]
help Print this message or the help of the given subcommand(s)
Options:
-y, --yes
Skip [Y/n] questions, default to yes
-n, --no
Skip [Y/n] questions, default to no
-A, --accessible
Activate accessibility mode, reducing visual noise
[env: ACCESSIBLE=]
-H, --hidden
Ignore hidden files
-q, --quiet
Silence output
-g, --gitignore
Ignore files matched by git's ignore files
-f, --format <FORMAT>
Specify the format of the archive
-p, --password <PASSWORD>
Decompress or list with password
-c, --threads <THREADS>
Concurrent working threads
-h, --help
Print help (see a summary with '-h')
-V, --version
Print version

View File

@ -1,187 +0,0 @@
/// Snapshot tests for Ouch's output.
///
/// See CONTRIBUTING.md for a brief guide on how to use [`insta`] for these tests.
/// [`insta`]: https://docs.rs/insta
#[macro_use]
mod utils;
use std::{collections::BTreeSet, ffi::OsStr, io, path::Path, process::Output};
use insta::assert_snapshot as ui;
use regex::Regex;
use crate::utils::create_files_in;
fn testdir() -> io::Result<(tempfile::TempDir, &'static Path)> {
let dir = tempfile::tempdir()?;
let path = dir.path().to_path_buf().into_boxed_path();
Ok((dir, Box::leak(path)))
}
fn run_ouch(argv: &str, dir: &Path) -> String {
let output = utils::cargo_bin()
.args(argv.split_whitespace().skip(1))
.current_dir(dir)
.output()
.unwrap_or_else(|err| {
panic!(
"Failed to run command\n\
argv: {argv}\n\
path: {dir:?}\n\
err: {err}"
)
});
redact_paths(&output_to_string(output), dir)
}
/// Remove random tempdir paths from snapshots to make them deterministic.
fn redact_paths(text: &str, dir: &Path) -> String {
let dir_name = dir.file_name().and_then(OsStr::to_str).unwrap();
// this regex should be good as long as the path does not contain whitespace characters
let re = Regex::new(&format!(r"\S*[/\\]{dir_name}[/\\]")).unwrap();
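// e.g. "/tmp/.tmpAbC123/a" (or the Windows equivalent) collapses to "<TMP_DIR>/a"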
re.replace_all(text, "<TMP_DIR>/").into()
}
fn output_to_string(output: Output) -> String {
String::from_utf8(output.stdout).unwrap() + std::str::from_utf8(&output.stderr).unwrap()
}
#[test]
fn ui_test_err_compress_missing_extension() {
let (_dropper, dir) = testdir().unwrap();
// prepare
create_files_in(dir, &["input"]);
ui!(run_ouch("ouch compress input output", dir));
}
#[test]
fn ui_test_err_decompress_missing_extension() {
let (_dropper, dir) = testdir().unwrap();
create_files_in(dir, &["a", "b.unknown"]);
let snapshot = concat_snapshot_filename_rar_feature("ui_test_err_decompress_missing_extension");
ui!(format!("{snapshot}-1"), run_ouch("ouch decompress a", dir));
ui!(format!("{snapshot}-2"), run_ouch("ouch decompress a b.unknown", dir));
ui!(format!("{snapshot}-3"), run_ouch("ouch decompress b.unknown", dir));
}
#[test]
fn ui_test_err_missing_files() {
let (_dropper, dir) = testdir().unwrap();
ui!(run_ouch("ouch compress a b", dir));
ui!(run_ouch("ouch decompress a b", dir));
ui!(run_ouch("ouch list a b", dir));
}
#[test]
fn ui_test_err_format_flag() {
let (_dropper, dir) = testdir().unwrap();
// prepare
create_files_in(dir, &["input"]);
let snapshot = concat_snapshot_filename_rar_feature("ui_test_err_format_flag");
ui!(
format!("{snapshot}-1"),
run_ouch("ouch compress input output --format tar.gz.unknown", dir),
);
ui!(
format!("{snapshot}-2"),
run_ouch("ouch compress input output --format targz", dir),
);
ui!(
format!("{snapshot}-3"),
run_ouch("ouch compress input output --format .tar.$#!@.rest", dir),
);
}
#[test]
fn ui_test_ok_format_flag() {
let (_dropper, dir) = testdir().unwrap();
// prepare
create_files_in(dir, &["input"]);
let snapshot = concat_snapshot_filename_rar_feature("ui_test_ok_format_flag");
ui!(
format!("{snapshot}-1"),
run_ouch("ouch compress input output1 --format tar.gz", dir),
);
ui!(
format!("{snapshot}-2"),
run_ouch("ouch compress input output2 --format .tar.gz", dir),
);
}
#[test]
fn ui_test_ok_compress() {
let (_dropper, dir) = testdir().unwrap();
// prepare
create_files_in(dir, &["input"]);
ui!(run_ouch("ouch compress input output.zip", dir));
ui!(run_ouch("ouch compress input output.gz", dir));
}
#[test]
fn ui_test_ok_decompress() {
let (_dropper, dir) = testdir().unwrap();
// prepare
create_files_in(dir, &["input"]);
run_ouch("ouch compress input output.zst", dir);
ui!(run_ouch("ouch decompress output.zst", dir));
}
#[cfg(target_os = "linux")]
#[test]
fn ui_test_ok_decompress_multiple_files() {
let (_dropper, dir) = testdir().unwrap();
let inputs_dir = dir.join("inputs");
std::fs::create_dir(&inputs_dir).unwrap();
let outputs_dir = dir.join("outputs");
std::fs::create_dir(&outputs_dir).unwrap();
// prepare
create_files_in(&inputs_dir, &["input", "input2", "input3"]);
let compress_command = format!("ouch compress {} output.tar.zst", inputs_dir.to_str().unwrap());
run_ouch(&compress_command, dir);
let decompress_command = format!("ouch decompress output.tar.zst --dir {}", outputs_dir.to_str().unwrap());
let stdout = run_ouch(&decompress_command, dir);
let stdout_lines = stdout.split('\n').collect::<BTreeSet<_>>();
insta::assert_debug_snapshot!(stdout_lines);
}
#[test]
fn ui_test_usage_help_flag() {
insta::with_settings!({filters => vec![
// binary name is `ouch.exe` on Windows and `ouch` on everywhere else
(r"(Usage:.*\b)ouch(\.exe)?\b", "${1}<OUCH_BIN>"),
]}, {
ui!(output_to_string(ouch!("--help")));
ui!(output_to_string(ouch!("-h")));
});
}
/// Concatenates `with_rar` or `without_rar` depending on whether the feature is enabled.
fn concat_snapshot_filename_rar_feature(name: &str) -> String {
let suffix = if cfg!(feature = "unrar") {
"with_rar"
} else {
"without_rar"
};
format!("{name}_{suffix}")
}

View File

@ -1,24 +1,16 @@
// This warning is unavoidable when reusing testing utils.
#![allow(dead_code)]
use std::{
env,
io::Write,
path::{Path, PathBuf},
};
use std::{env, io::Write, path::PathBuf};
use assert_cmd::Command;
use fs_err as fs;
use rand::{Rng, RngCore};
/// Run ouch with the provided arguments, returns [`assert_cmd::Output`]
#[macro_export]
macro_rules! ouch {
($($e:expr),*) => {
$crate::utils::cargo_bin()
$(.arg($e))*
.arg("--yes")
.unwrap()
.unwrap();
}
}
@ -35,32 +27,16 @@ pub fn cargo_bin() -> Command {
.unwrap_or_else(|| Command::cargo_bin("ouch").expect("Failed to find ouch executable"))
}
/// Creates files in the specified directory.
///
/// ## Example
///
/// ```no_run
/// let (_dropper, dir) = testdir().unwrap();
/// create_files_in(dir, &["file1.txt", "file2.txt"]);
/// ```
pub fn create_files_in(dir: &Path, files: &[&str]) {
for f in files {
std::fs::File::create(dir.join(f)).unwrap();
}
}
/// Write random content to a file
// write random content to a file
pub fn write_random_content(file: &mut impl Write, rng: &mut impl RngCore) {
let mut data = vec![0; rng.gen_range(0..8192)];
let mut data = Vec::new();
data.resize(rng.gen_range(0..8192), 0);
rng.fill_bytes(&mut data);
file.write_all(&data).unwrap();
}
/// Check that two directories have the exact same content recursively.
/// Checks equality of file types if preserve_permissions is true, ignored on non-unix
// Silence clippy warning that triggers because of the `#[cfg(unix)]` on Windows.
#[allow(clippy::only_used_in_recursion)]
// check that two directories have the exact same content recursively
// checks equality of file types if preserve_permissions is true, ignored on non-unix
pub fn assert_same_directory(x: impl Into<PathBuf>, y: impl Into<PathBuf>, preserve_permissions: bool) {
fn read_dir(dir: impl Into<PathBuf>) -> impl Iterator<Item = fs::DirEntry> {
let mut dir: Vec<_> = fs::read_dir(dir).unwrap().map(|entry| entry.unwrap()).collect();
@ -88,7 +64,7 @@ pub fn assert_same_directory(x: impl Into<PathBuf>, y: impl Into<PathBuf>, prese
if ft_x.is_dir() && ft_y.is_dir() {
assert_same_directory(x.path(), y.path(), preserve_permissions);
} else if (ft_x.is_file() && ft_y.is_file()) || (ft_x.is_symlink() && ft_y.is_symlink()) {
} else if ft_x.is_file() && ft_y.is_file() {
assert_eq!(meta_x.len(), meta_y.len());
assert_eq!(fs::read(x.path()).unwrap(), fs::read(y.path()).unwrap());
} else {