mirror of
https://github.com/ouch-org/ouch.git
synced 2025-06-05 02:55:31 +00:00
Compare commits
436 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
11344a6ffd | ||
![]() |
da9b32a366 | ||
![]() |
c3b89b038d | ||
![]() |
c8f97197c3 | ||
![]() |
07967927dd | ||
![]() |
1ff1932e3d | ||
![]() |
c97bb6a2d6 | ||
![]() |
2b9da1e441 | ||
![]() |
add1793d75 | ||
![]() |
c3ff0e963f | ||
![]() |
0b122fa05c | ||
![]() |
739dfa9507 | ||
![]() |
4961a2c478 | ||
![]() |
ab5dd00b86 | ||
![]() |
267ce7672e | ||
![]() |
08416c7a22 | ||
![]() |
fdab666bf8 | ||
![]() |
b9b1e11303 | ||
![]() |
7b082b59c5 | ||
![]() |
21e7fdf3d6 | ||
![]() |
c584170a24 | ||
![]() |
c6cbf6e157 | ||
![]() |
61dab2af29 | ||
![]() |
3bf6aaa810 | ||
![]() |
8c8d00cab8 | ||
![]() |
35a3f3627c | ||
![]() |
081642724e | ||
![]() |
184bafc0fa | ||
![]() |
8c478a3f9d | ||
![]() |
bb22cbb738 | ||
![]() |
3258cbef5b | ||
![]() |
f65444d2fb | ||
![]() |
4f9a786e57 | ||
![]() |
82c551ddef | ||
![]() |
31dd9eb923 | ||
![]() |
27e727ced3 | ||
![]() |
ecc05cdd60 | ||
![]() |
58271ab77f | ||
![]() |
fadfe1a213 | ||
![]() |
f3b7c0277f | ||
![]() |
aeefa694bf | ||
![]() |
28d0933d6c | ||
![]() |
e1d7f1424a | ||
![]() |
77b01d170f | ||
![]() |
6b38e1dd46 | ||
![]() |
195483a182 | ||
![]() |
499ad77657 | ||
![]() |
5941afe66e | ||
![]() |
353c360f6f | ||
![]() |
62f3d78f44 | ||
![]() |
55aa05b631 | ||
![]() |
493213e393 | ||
![]() |
22d4e0faf0 | ||
![]() |
8c32d2c31a | ||
![]() |
e405690d35 | ||
![]() |
97b4608693 | ||
![]() |
162dfbd29f | ||
![]() |
7ea0625860 | ||
![]() |
2f7c7e8ff2 | ||
![]() |
3e890eb307 | ||
![]() |
cc530bea94 | ||
![]() |
df6d2cea98 | ||
![]() |
2eca233132 | ||
![]() |
e108e5b778 | ||
![]() |
223f82d538 | ||
![]() |
60d5897de1 | ||
![]() |
b21b757af1 | ||
![]() |
065124cd30 | ||
![]() |
4bb759b21c | ||
![]() |
5b78b96fa1 | ||
![]() |
639ef19fbc | ||
![]() |
a99aee6a42 | ||
![]() |
92059c3de4 | ||
![]() |
917355685c | ||
![]() |
17499d7b5d | ||
![]() |
269058089f | ||
![]() |
534d39c069 | ||
![]() |
48f83e75f7 | ||
![]() |
256fedbcc2 | ||
![]() |
40f1234ad0 | ||
![]() |
1c6fb9a0b3 | ||
![]() |
1d70a810e5 | ||
![]() |
ca31742394 | ||
![]() |
e7d6c5e0f5 | ||
![]() |
b65ee9c3f4 | ||
![]() |
730ccbcf2a | ||
![]() |
28060ded3a | ||
![]() |
011a29f208 | ||
![]() |
179b055545 | ||
![]() |
ae9f4e0151 | ||
![]() |
8edd8d2e1c | ||
![]() |
32b50e9c7a | ||
![]() |
ba9f9c00f3 | ||
![]() |
362418364f | ||
![]() |
896562d76d | ||
![]() |
d098fec5cf | ||
![]() |
d7427b25ab | ||
![]() |
49c8a079a6 | ||
![]() |
3f6368dbe1 | ||
![]() |
1876f5eef1 | ||
![]() |
be40b4439c | ||
![]() |
f3af70915d | ||
![]() |
7e30545e96 | ||
![]() |
e2151c93d5 | ||
![]() |
83f28cf64a | ||
![]() |
3d717ec3bc | ||
![]() |
09899389ec | ||
![]() |
5dac8431f2 | ||
![]() |
1a80b919e3 | ||
![]() |
a3d4f6ac6b | ||
![]() |
c3e37e22d1 | ||
![]() |
f8f1439ec5 | ||
![]() |
84f1ec4daf | ||
![]() |
56f69e19a3 | ||
![]() |
39cef75dfe | ||
![]() |
9e1c30bb86 | ||
![]() |
9c69fbd911 | ||
![]() |
7e830d9f53 | ||
![]() |
bafbd83bd7 | ||
![]() |
08ab63837e | ||
![]() |
67ca183a25 | ||
![]() |
61868dfb56 | ||
![]() |
309d165606 | ||
![]() |
2dad11d0ba | ||
![]() |
d4e252a732 | ||
![]() |
a241397d95 | ||
![]() |
9b092fef71 | ||
![]() |
8987992c21 | ||
![]() |
058be43cf4 | ||
![]() |
3392730d66 | ||
![]() |
d21db763f1 | ||
![]() |
75e16510df | ||
![]() |
512d2445b2 | ||
![]() |
4ac8e2ba91 | ||
![]() |
b8f78d2925 | ||
![]() |
b814599d77 | ||
![]() |
f2800c7b41 | ||
![]() |
dbde18668a | ||
![]() |
7b9ec7d49a | ||
![]() |
6f183fbe6a | ||
![]() |
b2d0ff75c8 | ||
![]() |
bee5ae05ae | ||
![]() |
61f96cab13 | ||
![]() |
a7fe78fc68 | ||
![]() |
77c1a4e9db | ||
![]() |
3867fa33e9 | ||
![]() |
01b919043c | ||
![]() |
0ec7d4489d | ||
![]() |
4a323aeba8 | ||
![]() |
b7ebcd3703 | ||
![]() |
e2dd3186ce | ||
![]() |
ee505878c1 | ||
![]() |
f02e7fff26 | ||
![]() |
4d660f678c | ||
![]() |
1e383d65cd | ||
![]() |
419abbf4d7 | ||
![]() |
0d6045eacd | ||
![]() |
fbebf549e9 | ||
![]() |
4994d00616 | ||
![]() |
e08631f4d5 | ||
![]() |
a14bbfc904 | ||
![]() |
a3e5bac438 | ||
![]() |
ccbdceac34 | ||
![]() |
88e9d9afc7 | ||
![]() |
62d70225ab | ||
![]() |
615a7d3c49 | ||
![]() |
c56978d1e7 | ||
![]() |
25d7d3d870 | ||
![]() |
792db000bb | ||
![]() |
0b760aadf7 | ||
![]() |
39395c797a | ||
![]() |
d0280f0579 | ||
![]() |
28daa9e8c4 | ||
![]() |
bed8ea0276 | ||
![]() |
1e56bb8f1f | ||
![]() |
b04122a6de | ||
![]() |
380893b6df | ||
![]() |
93752d5fb5 | ||
![]() |
e2ac5c4c9b | ||
![]() |
e989db7a3a | ||
![]() |
c2873f77d3 | ||
![]() |
ba65d6e965 | ||
![]() |
4d263c3e37 | ||
![]() |
f82d7a46b0 | ||
![]() |
30ac0a821e | ||
![]() |
43e0f09f36 | ||
![]() |
cb75a7cd20 | ||
![]() |
6f566b9c4d | ||
![]() |
e992da26be | ||
![]() |
bddd727d86 | ||
![]() |
3ca479c256 | ||
![]() |
5b0e0b6991 | ||
![]() |
c4f2f6fcb6 | ||
![]() |
8e43cd2afa | ||
![]() |
c894829c37 | ||
![]() |
f847040e69 | ||
![]() |
63ad6b419c | ||
![]() |
5954e98427 | ||
![]() |
2a60ca836a | ||
![]() |
aa77929e5c | ||
![]() |
ed1d758fc3 | ||
![]() |
589328d6a5 | ||
![]() |
4dec225126 | ||
![]() |
5bba632331 | ||
![]() |
fd887d59c5 | ||
![]() |
ab8fa28e30 | ||
![]() |
90b60b0556 | ||
![]() |
7dfa54e5b5 | ||
![]() |
2c345776fc | ||
![]() |
e05f707d87 | ||
![]() |
05f6cd975e | ||
![]() |
72b2bb5021 | ||
![]() |
b3a6363b10 | ||
![]() |
17e3f4a06c | ||
![]() |
be29d80388 | ||
![]() |
2913452de6 | ||
![]() |
753027326d | ||
![]() |
3b98726b69 | ||
![]() |
2c654d1960 | ||
![]() |
51baeed0d7 | ||
![]() |
7f45924233 | ||
![]() |
ddbc5ac8d6 | ||
![]() |
7c374c281f | ||
![]() |
6593b70ea3 | ||
![]() |
d08ee99608 | ||
![]() |
5c55130917 | ||
![]() |
b376f57865 | ||
![]() |
c1d6480be8 | ||
![]() |
49e2481d06 | ||
![]() |
43f983170e | ||
![]() |
8328be8ee0 | ||
![]() |
a59d1635a0 | ||
![]() |
07a4051d49 | ||
![]() |
31bd1598e9 | ||
![]() |
f97aff0aa8 | ||
![]() |
f3551214d0 | ||
![]() |
2f715413ad | ||
![]() |
083d95257d | ||
![]() |
f4364a0902 | ||
![]() |
88b60b423b | ||
![]() |
c004face1e | ||
![]() |
aede974999 | ||
![]() |
f24445ece1 | ||
![]() |
703cd66010 | ||
![]() |
572fb0e203 | ||
![]() |
5a35618625 | ||
![]() |
bdb28895ce | ||
![]() |
3e4895a39b | ||
![]() |
cbfd0fa1d6 | ||
![]() |
fa33d351fc | ||
![]() |
d9c3b05146 | ||
![]() |
c86ebe12ee | ||
![]() |
cf18296e12 | ||
![]() |
ade7dce740 | ||
![]() |
036e1207ef | ||
![]() |
0f4e1daa0e | ||
![]() |
a2232afdc1 | ||
![]() |
fa2d214fee | ||
![]() |
642552f75b | ||
![]() |
d5a8767705 | ||
![]() |
9c5262fdbf | ||
![]() |
f293d59892 | ||
![]() |
b68186282d | ||
![]() |
88eab748b3 | ||
![]() |
fdaaf9a3ec | ||
![]() |
e385f5f367 | ||
![]() |
1c30f51051 | ||
![]() |
21007e8ee1 | ||
![]() |
23936c3d2c | ||
![]() |
0231a50e90 | ||
![]() |
a77438ae1c | ||
![]() |
0d1e581aee | ||
![]() |
9efe7f4155 | ||
![]() |
796a03a41d | ||
![]() |
53df5f17de | ||
![]() |
a7394cddab | ||
![]() |
4314a3bbdd | ||
![]() |
e2796cef78 | ||
![]() |
9f82c9a655 | ||
![]() |
e09d82ee42 | ||
![]() |
be199d07a6 | ||
![]() |
dd3b8e852f | ||
![]() |
0dbbd3b882 | ||
![]() |
6aec0372ec | ||
![]() |
5fff79e4e2 | ||
![]() |
e6ea9f7238 | ||
![]() |
e81adf15f0 | ||
![]() |
63e71085d0 | ||
![]() |
29791461f4 | ||
![]() |
83b1131fda | ||
![]() |
f3db923d83 | ||
![]() |
5b11f9cbc0 | ||
![]() |
f44ad70b70 | ||
![]() |
a8a580e2e3 | ||
![]() |
a7a65d2510 | ||
![]() |
d07c65508a | ||
![]() |
8483739184 | ||
![]() |
69b1db4a35 | ||
![]() |
633d1dedd0 | ||
![]() |
97b4356aa8 | ||
![]() |
9a6d73bf57 | ||
![]() |
8023a9157e | ||
![]() |
4bea6af526 | ||
![]() |
db0bc8a7d9 | ||
![]() |
d0d227b234 | ||
![]() |
30d269fc8f | ||
![]() |
2ebdaf6ecf | ||
![]() |
f2b984bc7a | ||
![]() |
76a459e02e | ||
![]() |
d825feaeff | ||
![]() |
862181ae0a | ||
![]() |
77c968fb2d | ||
![]() |
ceed0156a0 | ||
![]() |
9e6a9cf40b | ||
![]() |
d4581090d8 | ||
![]() |
3cb2367793 | ||
![]() |
f794f58328 | ||
![]() |
36c2b7fe31 | ||
![]() |
2ecf146a69 | ||
![]() |
1aba1a2dfa | ||
![]() |
dade163243 | ||
![]() |
a3dca85cdd | ||
![]() |
4c616ced10 | ||
![]() |
87b025bef2 | ||
![]() |
48af246562 | ||
![]() |
8ae68573ac | ||
![]() |
2fc4136ec7 | ||
![]() |
37ce4eb71f | ||
![]() |
0e829fd246 | ||
![]() |
197b584e79 | ||
![]() |
d190788c28 | ||
![]() |
822fe58cc6 | ||
![]() |
cbd9eef9a0 | ||
![]() |
be8e2be482 | ||
![]() |
6335f0ef66 | ||
![]() |
4459d651a0 | ||
![]() |
3bb54c8010 | ||
![]() |
3cc402bf4e | ||
![]() |
e1a232d9ef | ||
![]() |
385b99edb8 | ||
![]() |
fe3212b9ee | ||
![]() |
f07283c764 | ||
![]() |
dc21932102 | ||
![]() |
a81cc6e461 | ||
![]() |
0dfded3457 | ||
![]() |
9faad52c63 | ||
![]() |
7275df5337 | ||
![]() |
6005b3146b | ||
![]() |
79d15cdfae | ||
![]() |
202f8350ad | ||
![]() |
349fd3c2bf | ||
![]() |
85edb521d8 | ||
![]() |
cdad75e431 | ||
![]() |
6c2e1b6d82 | ||
![]() |
74ea2894bf | ||
![]() |
d701f73fa5 | ||
![]() |
ceaa8b876c | ||
![]() |
14626a9278 | ||
![]() |
c10f5db8f0 | ||
![]() |
5afd95b2db | ||
![]() |
2aad3fac0c | ||
![]() |
2d141c47a1 | ||
![]() |
91eae7ea3a | ||
![]() |
b7da9b02ac | ||
![]() |
86524ae6b6 | ||
![]() |
bab8023594 | ||
![]() |
156b3288ab | ||
![]() |
00c8dbf773 | ||
![]() |
a26d3d34ce | ||
![]() |
bc1d9457f0 | ||
![]() |
af7e95ae98 | ||
![]() |
192eaca5dc | ||
![]() |
c7f69194e8 | ||
![]() |
dd51525c6c | ||
![]() |
0151238c87 | ||
![]() |
4703f1b821 | ||
![]() |
c7d48297fc | ||
![]() |
6d4e8beb71 | ||
![]() |
864c268607 | ||
![]() |
d2e80a8b89 | ||
![]() |
b03a979335 | ||
![]() |
09a75e8a93 | ||
![]() |
10d530d236 | ||
![]() |
7c6989de0e | ||
![]() |
c0e053136f | ||
![]() |
709f100451 | ||
![]() |
1f49a812f4 | ||
![]() |
0c3f3bbaab | ||
![]() |
9507c4d93a | ||
![]() |
3706f0dcb1 | ||
![]() |
c32cbd8c7b | ||
![]() |
cbd327aad7 | ||
![]() |
674816ad5b | ||
![]() |
253edb5090 | ||
![]() |
693167e933 | ||
![]() |
b1c50556da | ||
![]() |
76116ed5ec | ||
![]() |
3541691731 | ||
![]() |
fe6fa928f9 | ||
![]() |
0a66d3215a | ||
![]() |
b6a0b94015 | ||
![]() |
c8e7a805f2 | ||
![]() |
5de4947a36 | ||
![]() |
cb4643eefd | ||
![]() |
bbc5d1d528 | ||
![]() |
fdcfe2f1fb | ||
![]() |
bb9d9bed5f | ||
![]() |
2174c803cd | ||
![]() |
c7fa9b6dbd | ||
![]() |
bff09e7588 | ||
![]() |
218429be48 | ||
![]() |
dbd0b235b8 | ||
![]() |
6e30b1490e | ||
![]() |
a93c2cf392 | ||
![]() |
bf40f1dc86 | ||
![]() |
7d449bb37f | ||
![]() |
8a831b2c99 | ||
![]() |
7d3f6e2d3a | ||
![]() |
73e4279197 | ||
![]() |
5cd63bfab7 | ||
![]() |
cd391f7cf9 | ||
![]() |
26f606c64d | ||
![]() |
29e01bc834 | ||
![]() |
3ee30609a1 | ||
![]() |
536fc1f963 | ||
![]() |
cd14b0038b | ||
![]() |
9ff7be7182 | ||
![]() |
fd170d5f3a | ||
![]() |
4f46608d6a | ||
![]() |
a1d6688a73 | ||
![]() |
9b295c5dd5 | ||
![]() |
51b02fe60a | ||
![]() |
cf89445226 | ||
![]() |
03d21253a4 | ||
![]() |
d6f012241f | ||
![]() |
c117517bbe |
12
.github/dependabot.yml
vendored
12
.github/dependabot.yml
vendored
@ -1,12 +0,0 @@
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
- package-ecosystem: "cargo"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
6
.github/pull_request_template.md
vendored
6
.github/pull_request_template.md
vendored
@ -1,4 +1,6 @@
|
||||
<!--
|
||||
Make sure to check out CONTRIBUTING.md.
|
||||
Don't forget to add a CHANGELOG.md entry!
|
||||
If your code changes text output, you might need to update snapshots
|
||||
of UI tests, read more about `insta` at CONTRIBUTING.md.
|
||||
|
||||
Remember to edit `CHANGELOG.md` after opening the PR.
|
||||
-->
|
||||
|
17
.github/workflows/all-tests-slow.yml
vendored
Normal file
17
.github/workflows/all-tests-slow.yml
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
name: Run tests for all combinations
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 1,15 * *" # biweekly
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths-ignore:
|
||||
- "**/*.md"
|
||||
|
||||
jobs:
|
||||
run-tests-for-all-combinations:
|
||||
uses: ./.github/workflows/build-artifacts-and-run-tests.yml
|
||||
with:
|
||||
matrix_all_combinations: true
|
||||
artifact_upload_mode: none
|
148
.github/workflows/build-and-test.yml
vendored
148
.github/workflows/build-and-test.yml
vendored
@ -1,148 +0,0 @@
|
||||
name: build-and-test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
tags:
|
||||
- "[0-9]+.[0-9]+.[0-9]+"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: build
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
CARGO: cargo
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- target: aarch64-unknown-linux-gnu
|
||||
os: ubuntu-latest
|
||||
no-zstd-thin: true
|
||||
|
||||
- target: aarch64-unknown-linux-musl
|
||||
os: ubuntu-latest
|
||||
no-zstd-thin: true
|
||||
|
||||
- target: armv7-unknown-linux-gnueabihf
|
||||
os: ubuntu-latest
|
||||
no-zstd-thin: true
|
||||
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
os: ubuntu-latest
|
||||
no-zstd-thin: true
|
||||
|
||||
- target: x86_64-apple-darwin
|
||||
os: macos-latest
|
||||
|
||||
- target: x86_64-pc-windows-gnu
|
||||
os: windows-latest
|
||||
no-zstd-thin: true
|
||||
ext: .exe
|
||||
|
||||
- target: x86_64-pc-windows-msvc
|
||||
os: windows-latest
|
||||
ext: .exe
|
||||
|
||||
- target: aarch64-pc-windows-msvc
|
||||
os: windows-latest
|
||||
ext: .exe
|
||||
skip-test: true
|
||||
|
||||
- target: x86_64-unknown-linux-gnu
|
||||
os: ubuntu-latest
|
||||
|
||||
- target: x86_64-unknown-linux-musl
|
||||
os: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install cross (non-x86_64 linux)
|
||||
if: "!contains(matrix.target, 'x86_64') && runner.os == 'Linux'"
|
||||
run: |
|
||||
pushd "$(mktemp -d)"
|
||||
wget https://github.com/cross-rs/cross/releases/download/v0.2.4/cross-x86_64-unknown-linux-musl.tar.gz
|
||||
tar xf cross-x86_64-unknown-linux-musl.tar.gz
|
||||
cp cross ~/.cargo/bin
|
||||
popd
|
||||
echo CARGO=cross >> $GITHUB_ENV
|
||||
|
||||
- name: Install dependencies (x86_64-unknown-linux-musl)
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install musl-tools
|
||||
|
||||
- name: Set up extra cargo flags
|
||||
if: matrix.no-zstd-thin
|
||||
run: |
|
||||
echo "EXTRA_CARGO_FLAGS=--no-default-features --features flate2/zlib,zip/deflate-zlib" >> $GITHUB_ENV
|
||||
|
||||
- name: Install Rust
|
||||
run: |
|
||||
rustup toolchain install stable nightly --profile minimal -t ${{ matrix.target }}
|
||||
|
||||
- name: Test on stable
|
||||
if: ${{ ! matrix.skip-test }}
|
||||
run: |
|
||||
${{ env.CARGO }} +stable test --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
|
||||
|
||||
- name: Release on nightly
|
||||
run: |
|
||||
${{ env.CARGO }} +nightly build --release --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
|
||||
env:
|
||||
OUCH_ARTIFACTS_FOLDER: artifacts
|
||||
RUSTFLAGS: -C strip=symbols
|
||||
|
||||
- name: Upload binary
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ouch-${{ matrix.target }}${{ matrix.ext }}
|
||||
path: target/${{ matrix.target }}/release/ouch${{ matrix.ext }}
|
||||
|
||||
- name: Upload artifacts (musl)
|
||||
if: matrix.target == 'x86_64-unknown-linux-musl'
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: artifacts
|
||||
path: artifacts
|
||||
|
||||
clippy-rustfmt:
|
||||
name: clippy-rustfmt
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: "Cargo: clippy, fmt"
|
||||
run: |
|
||||
rustup toolchain install stable --profile minimal -c clippy
|
||||
rustup toolchain install nightly --profile minimal -c rustfmt
|
||||
cargo +stable clippy -- -D warnings
|
||||
cargo +nightly fmt -- --check
|
||||
|
||||
github-release:
|
||||
name: github-release
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||
needs: build
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Download artifacts
|
||||
uses: dawidd6/action-download-artifact@v2
|
||||
with:
|
||||
path: artifacts
|
||||
|
||||
- name: Package release assets
|
||||
run: scripts/package-release-assets.sh
|
||||
|
||||
- name: Create release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
draft: true
|
||||
files: release/ouch-*
|
162
.github/workflows/build-artifacts-and-run-tests.yml
vendored
Normal file
162
.github/workflows/build-artifacts-and-run-tests.yml
vendored
Normal file
@ -0,0 +1,162 @@
|
||||
# This is a reusable workflow
|
||||
|
||||
name: Build artifacts and run tests
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
matrix_all_combinations:
|
||||
description: "if matrix should have all combinations of targets and features"
|
||||
type: boolean
|
||||
required: true
|
||||
default: true
|
||||
artifact_upload_mode:
|
||||
description: "Control what artifacts to upload: 'none' for no uploads, 'with_default_features' to upload artifacts with default features (for releases), or 'all' for all feature combinations."
|
||||
type: choice
|
||||
options:
|
||||
- none
|
||||
- with_default_features
|
||||
- all
|
||||
required: true
|
||||
workflow_call:
|
||||
inputs:
|
||||
matrix_all_combinations:
|
||||
description: "if matrix should have all combinations of targets and features"
|
||||
type: boolean
|
||||
required: true
|
||||
artifact_upload_mode:
|
||||
description: "Control which artifacts to upload: 'none' for no uploads, 'with_default_features' to upload only artifacts with default features (use_zlib+use_zstd_thin+unrar+bzip3), or 'all' to upload all feature combinations."
|
||||
type: string
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
build-artifacts-and-run-tests:
|
||||
runs-on: ${{ matrix.os || 'ubuntu-latest' }}
|
||||
env:
|
||||
CARGO: cargo
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# TODO: avoid exploding the matrix by removing unrar and bzip3 from the all combinations runs
|
||||
# I can add a monthly run with all combinations
|
||||
feature-unrar: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
|
||||
feature-bzip3: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[true]')}}
|
||||
feature-use-zlib: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
|
||||
feature-use-zstd-thin: ${{ inputs.matrix_all_combinations && fromJSON('[true, false]') || fromJSON('[false]')}}
|
||||
target:
|
||||
# native
|
||||
- x86_64-unknown-linux-gnu
|
||||
- x86_64-pc-windows-gnu
|
||||
- x86_64-pc-windows-msvc
|
||||
- aarch64-pc-windows-msvc
|
||||
- x86_64-apple-darwin
|
||||
# cross
|
||||
- x86_64-unknown-linux-musl
|
||||
- aarch64-unknown-linux-gnu
|
||||
- aarch64-unknown-linux-musl
|
||||
- armv7-unknown-linux-gnueabihf
|
||||
- armv7-unknown-linux-musleabihf
|
||||
|
||||
include:
|
||||
# runner overrides
|
||||
- target: x86_64-pc-windows-gnu
|
||||
os: windows-latest
|
||||
- target: x86_64-pc-windows-msvc
|
||||
os: windows-latest
|
||||
- target: aarch64-pc-windows-msvc
|
||||
os: windows-latest
|
||||
- target: x86_64-apple-darwin
|
||||
os: macos-latest
|
||||
# targets that use cross
|
||||
- target: x86_64-unknown-linux-musl
|
||||
use-cross: true
|
||||
- target: aarch64-unknown-linux-gnu
|
||||
use-cross: true
|
||||
- target: aarch64-unknown-linux-musl
|
||||
use-cross: true
|
||||
- target: armv7-unknown-linux-gnueabihf
|
||||
use-cross: true
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
use-cross: true
|
||||
# features (unless `matrix_all_combinations` is true, we only run these on linux-gnu)
|
||||
- feature-unrar: false
|
||||
target: x86_64-unknown-linux-gnu
|
||||
- feature-use-zlib: true
|
||||
target: x86_64-unknown-linux-gnu
|
||||
- feature-use-zstd-thin: true
|
||||
target: x86_64-unknown-linux-gnu
|
||||
- feature-bzip3: false
|
||||
target: x86_64-unknown-linux-gnu
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install cross
|
||||
if: matrix.use-cross
|
||||
run: |
|
||||
pushd "$(mktemp -d)"
|
||||
wget https://github.com/cross-rs/cross/releases/download/v0.2.4/cross-x86_64-unknown-linux-musl.tar.gz
|
||||
tar xf cross-x86_64-unknown-linux-musl.tar.gz
|
||||
cp cross ~/.cargo/bin
|
||||
popd
|
||||
echo CARGO=cross >> $GITHUB_ENV
|
||||
|
||||
- name: Concatenate features
|
||||
id: concat-features
|
||||
shell: bash
|
||||
run: |
|
||||
FEATURES=(allow_piped_choice)
|
||||
if [[ "${{ matrix.feature-unrar }}" == true ]]; then FEATURES+=(unrar); fi
|
||||
if [[ "${{ matrix.feature-use-zlib }}" == true ]]; then FEATURES+=(use_zlib); fi
|
||||
if [[ "${{ matrix.feature-use-zstd-thin }}" == true ]]; then FEATURES+=(use_zstd_thin); fi
|
||||
if [[ "${{ matrix.feature-bzip3 }}" == true ]]; then FEATURES+=(bzip3); fi
|
||||
# Output plus-separated list for artifact names
|
||||
IFS='+'
|
||||
echo "FEATURES_PLUS=${FEATURES[*]}" >> $GITHUB_OUTPUT
|
||||
# Output comma-separated list for cargo flags
|
||||
IFS=','
|
||||
echo "FEATURES_COMMA=${FEATURES[*]}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set up extra cargo flags
|
||||
env:
|
||||
FEATURES: ${{steps.concat-features.outputs.FEATURES_COMMA}}
|
||||
shell: bash
|
||||
run: |
|
||||
FLAGS="--no-default-features"
|
||||
if [[ -n "$FEATURES" ]]; then FLAGS+=" --features $FEATURES"; fi
|
||||
echo "EXTRA_CARGO_FLAGS=$FLAGS" >> $GITHUB_ENV
|
||||
|
||||
- name: Install Rust
|
||||
run: |
|
||||
rustup toolchain install stable --profile minimal -t ${{ matrix.target }}
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
key: "${{ matrix.target }}-${{ matrix.feature-unrar }}-${{ matrix.feature-use-zlib }}-${{ matrix.feature-use-zstd-thin }}-${{ matrix.feature-bzip3 }}"
|
||||
|
||||
- name: Test on stable
|
||||
# there's no way to run tests for ARM64 Windows for now
|
||||
if: matrix.target != 'aarch64-pc-windows-msvc'
|
||||
run: |
|
||||
${{ env.CARGO }} +stable test --profile fast --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
|
||||
|
||||
- name: Build release artifacts (binary and completions)
|
||||
if: ${{ inputs.artifact_upload_mode != 'none' }}
|
||||
run: |
|
||||
${{ env.CARGO }} +stable build --release --target ${{ matrix.target }} $EXTRA_CARGO_FLAGS
|
||||
env:
|
||||
OUCH_ARTIFACTS_FOLDER: man-page-and-completions-artifacts
|
||||
|
||||
- name: Upload release artifacts
|
||||
if: |
|
||||
${{ inputs.artifact_upload_mode != 'none' &&
|
||||
(inputs.artifact_upload_mode == 'all' ||
|
||||
(matrix.feature-unrar && matrix.feature-use-zlib && matrix.feature-use-zstd-thin && matrix.feature-bzip3)) }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ouch-${{ matrix.target }}${{ steps.concat-features.outputs.FEATURES_PLUS != '' && format('-{0}', steps.concat-features.outputs.FEATURES_PLUS) || '' }}
|
||||
path: |
|
||||
target/${{ matrix.target }}/release/ouch
|
||||
target/${{ matrix.target }}/release/ouch.exe
|
||||
man-page-and-completions-artifacts/
|
36
.github/workflows/draft-release-automatic-trigger.yml
vendored
Normal file
36
.github/workflows/draft-release-automatic-trigger.yml
vendored
Normal file
@ -0,0 +1,36 @@
|
||||
name: Automatic trigger draft release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "[0-9]+.[0-9]+.[0-9]+-rc[0-9]+"
|
||||
|
||||
jobs:
|
||||
call-workflow-build-artifacts-and-run-tests:
|
||||
uses: ./.github/workflows/build-artifacts-and-run-tests.yml
|
||||
with:
|
||||
matrix_all_combinations: true
|
||||
artifact_upload_mode: with_default_features
|
||||
|
||||
automated-draft-release:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||
needs: call-workflow-build-artifacts-and-run-tests
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: downloaded_artifacts
|
||||
pattern: ouch-*
|
||||
|
||||
- name: Package release assets
|
||||
run: scripts/package-release-assets.sh
|
||||
|
||||
- name: Create release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: true
|
||||
files: output_assets/ouch-*
|
33
.github/workflows/manual-release.yml
vendored
33
.github/workflows/manual-release.yml
vendored
@ -1,33 +0,0 @@
|
||||
name: manual-release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
run_id:
|
||||
description: Run id of the action run to pull artifacts from
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
github-release:
|
||||
name: github-release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Download artifacts
|
||||
uses: dawidd6/action-download-artifact@v2
|
||||
with:
|
||||
path: artifacts
|
||||
workflow: build-and-test.yml
|
||||
run_id: ${{ github.event.inputs.run_id }}
|
||||
|
||||
- name: Package release assets
|
||||
run: scripts/package-release-assets.sh
|
||||
|
||||
- name: Create release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
draft: true
|
||||
name: manual release ${{ github.event.inputs.run_id }}
|
||||
files: release/ouch-*
|
35
.github/workflows/pr-workflow.yml
vendored
Normal file
35
.github/workflows/pr-workflow.yml
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
name: PR workflow
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- "**/*.md"
|
||||
|
||||
jobs:
|
||||
rustfmt-nightly-check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: "Cargo: fmt"
|
||||
run: |
|
||||
rustup toolchain install nightly --profile minimal -c rustfmt
|
||||
cargo +nightly fmt -- --check
|
||||
|
||||
clippy-checks:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: "Cargo: clippy"
|
||||
run: |
|
||||
rustup toolchain install stable --profile minimal -c clippy
|
||||
cargo +stable clippy -- -D warnings
|
||||
|
||||
build-and-test:
|
||||
uses: ./.github/workflows/build-artifacts-and-run-tests.yml
|
||||
with:
|
||||
matrix_all_combinations: false
|
||||
artifact_upload_mode: none
|
4
.gitignore
vendored
4
.gitignore
vendored
@ -16,3 +16,7 @@ artifacts/
|
||||
/benchmarks/input.*
|
||||
/benchmarks/*.md
|
||||
!/benchmarks/results.md
|
||||
|
||||
# IDE-specific setting
|
||||
.vscode
|
||||
.idea
|
||||
|
84
CHANGELOG.md
84
CHANGELOG.md
@ -5,7 +5,7 @@ All notable changes to this project will be documented in this file.
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
_This changelog was created after v0.3.1 was released. As a result, there may be slight inaccuracies with versions <= v0.3.1._
|
||||
_This changelog was created after v0.3.1. As a result, there may be slight inaccuracies with prior versions._
|
||||
|
||||
Categories Used:
|
||||
|
||||
@ -18,7 +18,87 @@ Categories Used:
|
||||
|
||||
**Bullet points in chronological order by PR**
|
||||
|
||||
## [Unreleased](https://github.com/ouch-org/ouch/compare/0.4.2...HEAD)
|
||||
## [Unreleased](https://github.com/ouch-org/ouch/compare/0.6.1...HEAD)
|
||||
|
||||
### New Features
|
||||
|
||||
- Merge folders in decompression [\#798](https://github.com/ouch-org/ouch/pull/798) ([tommady](https://github.com/tommady))
|
||||
- Add `--no-smart-unpack` flag to decompression command to disable smart unpack [\#809](https://github.com/ouch-org/ouch/pull/809) ([talis-fb](https://github.com/talis-fb))
|
||||
|
||||
### Improvements
|
||||
|
||||
- Give better error messages when archive extensions are invalid [\#817](https://github.com/ouch-org/ouch/pull/817) ([marcospb19](https://github.com/marcospb19))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Fix tar extraction count when --quiet [\#824](https://github.com/ouch-org/ouch/pull/824) ([marcospb19](https://github.com/marcospb19))
|
||||
- Fix 7z BadSignature error when compressing and then listing [\#819](https://github.com/ouch-org/ouch/pull/819) ([tommady](https://github.com/tommady))
|
||||
|
||||
### Tweaks
|
||||
|
||||
- Make `.bz3` opt-out [\#814](https://github.com/ouch-org/ouch/pull/814) ([amyspark](https://github.com/amyspark))
|
||||
|
||||
## [0.6.1](https://github.com/ouch-org/ouch/compare/0.6.0...0.6.1)
|
||||
|
||||
- Fix .zip crash when file mode isn't present [\#804](https://github.com/ouch-org/ouch/pull/804) ([marcospb19](https://github.com/marcospb19))
|
||||
|
||||
## [0.6.0](https://github.com/ouch-org/ouch/compare/0.5.1...0.6.0)
|
||||
|
||||
### New Features
|
||||
|
||||
- Add multithreading support for `zstd` compression [\#689](https://github.com/ouch-org/ouch/pull/689) ([nalabrie](https://github.com/nalabrie))
|
||||
- Add `bzip3` support [\#522](https://github.com/ouch-org/ouch/pull/522) ([freijon](https://github.com/freijon))
|
||||
- Add `--remove` flag for decompression subcommand to remove files after successful decompression [\#757](https://github.com/ouch-org/ouch/pull/757) ([ttys3](https://github.com/ttys3))
|
||||
- Add `br` (Brotli) support [\#765](https://github.com/ouch-org/ouch/pull/765) ([killercup](https://github.com/killercup))
|
||||
- Add rename option in overwrite menu [\#779](https://github.com/ouch-org/ouch/pull/779) ([talis-fb](https://github.com/talis-fb))
|
||||
- Store symlinks by default and add `--follow-symlinks` to store the target files [\#789](https://github.com/ouch-org/ouch/pull/789) ([tommady](https://github.com/tommady))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Fix output corrupted on parallel decompression [\#642](https://github.com/ouch-org/ouch/pull/642) ([AntoniosBarotsis](https://github.com/AntoniosBarotsis))
|
||||
|
||||
### Tweaks
|
||||
|
||||
- CI refactor [\#578](https://github.com/ouch-org/ouch/pull/578) ([cyqsimon](https://github.com/cyqsimon))
|
||||
- Use a prefix `tmp-ouch-` for temporary decompression path name to avoid conflicts [\#725](https://github.com/ouch-org/ouch/pull/725) ([valoq](https://github.com/valoq)) & [\#788](https://github.com/ouch-org/ouch/pull/788) ([talis-fb](https://github.com/talis-fb))
|
||||
- Ignore `.git/` when `-g/--gitignore` is set [\#507](https://github.com/ouch-org/ouch/pull/507) ([talis-fb](https://github.com/talis-fb))
|
||||
- Run clippy for tests too [\#738](https://github.com/ouch-org/ouch/pull/738) ([marcospb19](https://github.com/marcospb19))
|
||||
- Sevenz-rust is unmaintained, switch to sevenz-rust2 [\#796](https://github.com/ouch-org/ouch/pull/796) ([tommady](https://github.com/tommady))
|
||||
|
||||
### Improvements
|
||||
|
||||
- Fix logging IO bottleneck [\#642](https://github.com/ouch-org/ouch/pull/642) ([AntoniosBarotsis](https://github.com/AntoniosBarotsis))
|
||||
- Support decompression over stdin [\#692](https://github.com/ouch-org/ouch/pull/692) ([rcorre](https://github.com/rcorre))
|
||||
- Make `--format` more forgiving with the formatting of the provided format [\#519](https://github.com/ouch-org/ouch/pull/519) ([marcospb19](https://github.com/marcospb19))
|
||||
- Use buffered writer for list output [\#764](https://github.com/ouch-org/ouch/pull/764) ([killercup](https://github.com/killercup))
|
||||
- Disable smart unpack when `--dir` flag is provided in decompress command [\#782](https://github.com/ouch-org/ouch/pull/782) ([talis-fb](https://github.com/talis-fb))
|
||||
- Align file sizes at left for each extracted file to make output clearer [\#792](https://github.com/ouch-org/ouch/pull/792) ([talis-fb](https://github.com/talis-fb))
|
||||
|
||||
## [0.5.1](https://github.com/ouch-org/ouch/compare/0.5.0...0.5.1)
|
||||
|
||||
### Improvements
|
||||
|
||||
- Explicitly declare feature flags `use_zlib` & `use_zstd_thin` [\#564](https://github.com/ouch-org/ouch/pull/564) ([cyqsimon](https://github.com/cyqsimon))
|
||||
|
||||
### Tweaks
|
||||
|
||||
- Mention support for `7z` and `rar` in help message.
|
||||
|
||||
## [0.5.0](https://github.com/ouch-org/ouch/compare/0.4.2...0.5.0)
|
||||
|
||||
### New Features
|
||||
|
||||
- Add support for listing and decompressing `.rar` archives [\#529](https://github.com/ouch-org/ouch/pull/529) ([lmkra](https://github.com/lmkra))
|
||||
- Add support for 7z [\#555](https://github.com/ouch-org/ouch/pull/555) ([Flat](https://github.com/flat) & [MisileLab](https://github.com/MisileLab))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Fix mime type detection [\#529](https://github.com/ouch-org/ouch/pull/529) ([lmkra](https://github.com/lmkra))
|
||||
- Fix size unit inconsistency [\#502](https://github.com/ouch-org/ouch/pull/502) ([marcospb19](https://github.com/marcospb19))
|
||||
|
||||
### Improvements
|
||||
|
||||
- Hint completions generator to expand file paths [\#508](https://github.com/ouch-org/ouch/pull/508) ([marcospb19](https://github.com/marcospb19))
|
||||
|
||||
## [0.4.2](https://github.com/ouch-org/ouch/compare/0.4.1...0.4.2)
|
||||
|
||||
|
@ -1,19 +1,48 @@
|
||||
Thanks for your interest in contributing to `ouch`!
|
||||
|
||||
# Code of Conduct
|
||||
# Table of contents:
|
||||
|
||||
- [Code of Conduct](#code-of-conduct)
|
||||
- [I want to ask a question or provide feedback](#i-want-to-ask-a-question-or-provide-feedback)
|
||||
- [Adding a new feature](#adding-a-new-feature)
|
||||
- [PRs](#prs)
|
||||
- [Dealing with UI tests](#dealing-with-ui-tests)
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
We follow the [Rust Official Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct).
|
||||
|
||||
# I want to ask a question or provide feedback
|
||||
## I want to ask a question or provide feedback
|
||||
|
||||
Create [an issue](https://github.com/ouch-org/ouch/issues) or go to [Ouch Discussions](https://github.com/ouch-org/ouch/discussions).
|
||||
|
||||
# Adding a new feature
|
||||
## Adding a new feature
|
||||
|
||||
Before creating a PR with a new feature, please, open an issue to suggest your addition.
|
||||
Before opening the PR, open an issue to discuss your addition, this increases the chance of your PR being accepted.
|
||||
|
||||
This allows us to discuss the problem and solution, increasing the chance of your PR to be accepted.
|
||||
## PRs
|
||||
|
||||
# Don't forget to
|
||||
- Pass all CI checks.
|
||||
- After opening the PR, add a [CHANGELOG.md] entry.
|
||||
|
||||
- In your PR, add a CHANGELOG.md entry.
|
||||
[CHANGELOG.md]: https://github.com/ouch-org/ouch
|
||||
|
||||
## Dealing with UI tests
|
||||
|
||||
We use snapshots to do UI testing and guarantee a consistent output, this way, you can catch accidental changes or see what output changed in the PR diff.
|
||||
|
||||
- Run tests with `cargo` normally, or with a filter:
|
||||
|
||||
```sh
|
||||
cargo test
|
||||
# Only run UI tests
|
||||
cargo test -- ui
|
||||
```
|
||||
|
||||
- If some UI test failed, you should review it:
|
||||
|
||||
```sh
|
||||
cargo insta review
|
||||
```
|
||||
|
||||
- After addressing all, you should be able to `git add` and `commit` accordingly.
|
||||
|
1457
Cargo.lock
generated
1457
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
96
Cargo.toml
96
Cargo.toml
@ -1,7 +1,10 @@
|
||||
[package]
|
||||
name = "ouch"
|
||||
version = "0.4.2"
|
||||
authors = ["Vinícius Rodrigues Miguel <vrmiguel99@gmail.com>", "João M. Bezerra <marcospb19@hotmail.com>"]
|
||||
version = "0.6.1"
|
||||
authors = [
|
||||
"João Marcos <marcospb19@hotmail.com>",
|
||||
"Vinícius Rodrigues Miguel <vrmiguel99@gmail.com>",
|
||||
]
|
||||
edition = "2021"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/ouch-org/ouch"
|
||||
@ -12,49 +15,82 @@ description = "A command-line utility for easily compressing and decompressing f
|
||||
|
||||
[dependencies]
|
||||
atty = "0.2.14"
|
||||
bstr = { version = "1.6.0", default-features = false, features = ["std"] }
|
||||
brotli = "7.0.0"
|
||||
bstr = { version = "1.10.0", default-features = false, features = ["std"] }
|
||||
bytesize = "1.3.0"
|
||||
bzip2 = "0.4.4"
|
||||
clap = { version = "4.3.19", features = ["derive", "env"] }
|
||||
filetime = "0.2.22"
|
||||
flate2 = { version = "1.0.26", default-features = false }
|
||||
fs-err = "2.9.0"
|
||||
gzp = { version = "0.11.3", default-features = false, features = ["snappy_default"] }
|
||||
ignore = "0.4.20"
|
||||
libc = "0.2.147"
|
||||
bzip3 = { version = "0.9.0", features = ["bundled"], optional = true }
|
||||
clap = { version = "4.5.20", features = ["derive", "env"] }
|
||||
filetime_creation = "0.2"
|
||||
flate2 = { version = "1.0.30", default-features = false }
|
||||
fs-err = "2.11.0"
|
||||
gzp = { version = "0.11.3", default-features = false, features = [
|
||||
"snappy_default",
|
||||
] }
|
||||
ignore = "0.4.23"
|
||||
libc = "0.2.155"
|
||||
linked-hash-map = "0.5.6"
|
||||
lzzzz = "1.0.4"
|
||||
once_cell = "1.18.0"
|
||||
rayon = "1.7.0"
|
||||
lz4_flex = "0.11.3"
|
||||
num_cpus = "1.16.0"
|
||||
once_cell = "1.20.2"
|
||||
rayon = "1.10.0"
|
||||
same-file = "1.0.6"
|
||||
snap = "1.1.0"
|
||||
tar = "0.4.39"
|
||||
tempfile = "3.7.0"
|
||||
time = { version = "0.3.25", default-features = false }
|
||||
ubyte = { version = "0.10.3", default-features = false }
|
||||
sevenz-rust2 = { version = "0.13.1", features = ["compress", "aes256"] }
|
||||
snap = "1.1.1"
|
||||
tar = "0.4.42"
|
||||
tempfile = "3.10.1"
|
||||
time = { version = "0.3.36", default-features = false }
|
||||
unrar = { version = "0.5.7", optional = true }
|
||||
xz2 = "0.1.7"
|
||||
zip = { version = "0.6.6", default-features = false, features = ["time"] }
|
||||
zstd = { version = "0.12.4", default-features = false }
|
||||
zip = { version = "0.6.6", default-features = false, features = [
|
||||
"time",
|
||||
"aes-crypto",
|
||||
] }
|
||||
zstd = { version = "0.13.2", default-features = false, features = ["zstdmt"] }
|
||||
|
||||
[target.'cfg(not(unix))'.dependencies]
|
||||
is_executable = "1.0.1"
|
||||
|
||||
[build-dependencies]
|
||||
clap = { version = "4.3.19", features = ["derive", "env", "string"] }
|
||||
clap_complete = "4.3.2"
|
||||
clap_mangen = "0.2.12"
|
||||
clap = { version = "4.5.20", features = ["derive", "env", "string"] }
|
||||
clap_complete = "4.5.28"
|
||||
clap_mangen = "0.2.24"
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2.0.12"
|
||||
infer = "0.15.0"
|
||||
parse-display = "0.8.2"
|
||||
proptest = "1.2.0"
|
||||
rand = { version = "0.8.5", default-features = false, features = ["small_rng", "std"] }
|
||||
test-strategy = "0.3.1"
|
||||
assert_cmd = "2.0.14"
|
||||
glob = "0.3.2"
|
||||
infer = "0.16.0"
|
||||
insta = { version = "1.40.0", features = ["filters"] }
|
||||
itertools = "0.14.0"
|
||||
memchr = "2.7.4"
|
||||
parse-display = "0.9.1"
|
||||
pretty_assertions = "1.4.1"
|
||||
proptest = "1.5.0"
|
||||
rand = { version = "0.8.5", default-features = false, features = [
|
||||
"small_rng",
|
||||
"std",
|
||||
] }
|
||||
regex = "1.10.4"
|
||||
test-strategy = "0.4.0"
|
||||
|
||||
[features]
|
||||
default = ["flate2/zlib", "gzp/deflate_zlib", "zip/deflate-zlib", "zstd/thin"]
|
||||
default = ["unrar", "use_zlib", "use_zstd_thin", "bzip3"]
|
||||
use_zlib = ["flate2/zlib", "gzp/deflate_zlib", "zip/deflate-zlib"]
|
||||
use_zstd_thin = ["zstd/thin"]
|
||||
allow_piped_choice = []
|
||||
|
||||
# For generating binaries for releases
|
||||
[profile.release]
|
||||
lto = true
|
||||
codegen-units = 1
|
||||
opt-level = 3
|
||||
strip = true
|
||||
|
||||
# When we need a fast binary that compiles slightly faster `release` (useful for CI)
|
||||
[profile.fast]
|
||||
inherits = "release"
|
||||
lto = false
|
||||
opt-level = 2
|
||||
incremental = true
|
||||
codegen-units = 32
|
||||
strip = false
|
||||
|
@ -1,2 +1,8 @@
|
||||
[build.env]
|
||||
passthrough = ["RUSTFLAGS"]
|
||||
passthrough = ["RUSTFLAGS", "OUCH_ARTIFACTS_FOLDER"]
|
||||
|
||||
[target.aarch64-unknown-linux-gnu]
|
||||
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:edge"
|
||||
|
||||
[target.armv7-unknown-linux-gnueabihf]
|
||||
image = "ghcr.io/cross-rs/armv7-unknown-linux-gnueabihf:edge"
|
||||
|
15
LICENSE
15
LICENSE
@ -20,7 +20,18 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
Copyright notices from other projects:
|
||||
|
||||
Copyright (c) 2019 Bojan
|
||||
https://github.com/bojand/infer
|
||||
Infer crate (MIT LICENSE):
|
||||
> Copyright (c) 2019 Bojan
|
||||
> Code at https://github.com/bojand/infer
|
||||
|
||||
Bzip3-rs crate (LGPL 3.0):
|
||||
> Code for this crate is available at https://github.com/bczhc/bzip3-rs
|
||||
> See its license at https://github.com/bczhc/bzip3-rs/blob/master/LICENSE
|
||||
|
||||
Bzip3 library (LGPL 3.0):
|
||||
> Code for this library is available at https://github.com/kspalaiologos/bzip3
|
||||
> See its license at https://github.com/kspalaiologos/bzip3/blob/master/LICENSE
|
||||
|
38
README.md
38
README.md
@ -111,25 +111,28 @@ Output:
|
||||
|
||||
# Supported formats
|
||||
|
||||
| Format | `.tar` | `.zip` | `.gz` | `.xz`, `.lzma` | `.bz`, `.bz2` | `.lz4` | `.sz` | `.zst` |
|
||||
|:---------:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
|
||||
| Supported | ✓ | ✓¹ | ✓² | ✓ | ✓ | ✓ | ✓² | ✓ |
|
||||
| Format | `.tar` | `.zip` | `7z` | `.gz` | `.xz`, `.lzma` | `.bz`, `.bz2` | `.bz3` | `.lz4` | `.sz` (Snappy) | `.zst` | `.rar` | `.br` |
|
||||
|:---------:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
|
||||
| Supported | ✓ | ✓¹ | ✓¹ | ✓² | ✓ | ✓ | ✓ | ✓ | ✓² | ✓² | ✓³ | ✓ |
|
||||
|
||||
✓: Supports compression and decompression.
|
||||
|
||||
✓¹: Due to limitations of `.zip`, it doesn't support streaming (de)compression.
|
||||
✓¹: Due to limitations of the compression format itself, (de)compression can't be done with streaming.
|
||||
|
||||
✓²: Supported, and compression runs in parallel.
|
||||
|
||||
✓³: Due to RAR's restrictive license, only decompression and listing can be supported.
|
||||
If you wish to exclude non-free code from your build, you can disable RAR support
|
||||
by building without the `unrar` feature.
|
||||
|
||||
`tar` aliases are also supported: `tgz`, `tbz`, `tbz2`, `tlz4`, `txz`, `tlzma`, `tsz`, `tzst`.
|
||||
|
||||
Formats can be chained:
|
||||
|
||||
- `.zst.gz`
|
||||
- `.tar.gz.gz`
|
||||
- `.tar.gz.gz.gz.zst.xz.bz.lz4`
|
||||
- `.tar.gz`
|
||||
- `.tar.gz.xz.zst.gz.lz4.sz`
|
||||
|
||||
If the filename has no extensions, `Ouch` will try to infer the format by the [file signature](https://en.wikipedia.org/wiki/List_of_file_signatures).
|
||||
If the filename has no extensions, `Ouch` will try to infer the format by the [file signature](https://en.wikipedia.org/wiki/List_of_file_signatures) and ask the user for confirmation.
|
||||
|
||||
# Installation
|
||||
|
||||
@ -163,17 +166,20 @@ Check the [releases page](https://github.com/ouch-org/ouch/releases).
|
||||
|
||||
Check the [wiki guide on compiling](https://github.com/ouch-org/ouch/wiki/Compiling-and-installing-from-source-code).
|
||||
|
||||
# Dependencies
|
||||
# Runtime Dependencies
|
||||
|
||||
If you installed `ouch` using the download script, you will need no dependencies (static MUSL binary).
|
||||
If running `ouch` results in a linking error, it means you're missing a runtime dependency.
|
||||
|
||||
If you're downloading binaries from the [releases page](https://github.com/ouch-org/ouch/releases), try the `musl` variants, those are static binaries that require no runtime dependencies.
|
||||
|
||||
Otherwise, you'll need these libraries installed on your system:
|
||||
|
||||
* [liblzma](https://www.7-zip.org/sdk.html)
|
||||
* [libbz2](https://www.sourceware.org/bzip2/)
|
||||
* [libz](https://www.zlib.net/)
|
||||
* [libbz2](https://www.sourceware.org/bzip2)
|
||||
* [libbz3](https://github.com/kspalaiologos/bzip3)
|
||||
* [libz](https://www.zlib.net)
|
||||
|
||||
These are available on all mainstream _Linux_ distributions and on _macOS_.
|
||||
These should be available in your system's package manager.
|
||||
|
||||
# Benchmarks
|
||||
|
||||
@ -195,12 +201,14 @@ Versions used:
|
||||
|
||||
# Contributing
|
||||
|
||||
`ouch` is made out of voluntary work, contributors are very welcome! No contribution is too small and all contributions are valued.
|
||||
`ouch` is made out of voluntary work, contributors are very welcome! Contributions of all sizes are appreciated.
|
||||
|
||||
- Open an [issue](https://github.com/ouch-org/ouch/issues).
|
||||
- Package it for your favorite distribution or package manager.
|
||||
- Open a pull request.
|
||||
- Share it with a friend!
|
||||
- Open a pull request.
|
||||
|
||||
If you're creating a Pull Request, check [CONTRIBUTING.md](./CONTRIBUTING.md).
|
||||
|
||||
[`tar`]: https://www.gnu.org/software/tar/
|
||||
[infozip]: http://www.info-zip.org/
|
||||
|
@ -20,8 +20,8 @@
|
||||
|
||||
| Command | Mean [ms] | Min [ms] | Max [ms] | Relative |
|
||||
|:---|---:|---:|---:|---:|
|
||||
| `zip output.zip -r compiler` | 581.3 ± 9.1 | 573.2 | 600.9 | 1.06 ± 0.02 |
|
||||
| `ouch compress compiler output.zip` | 549.7 ± 4.3 | 543.6 | 558.6 | 1.00 |
|
||||
| `zip output.zip -r compiler` | 581.3 ± 9.1 | 573.2 | 600.9 | 1.06 ± 0.02 |
|
||||
|
||||
| Command | Mean [ms] | Min [ms] | Max [ms] | Relative |
|
||||
|:---|---:|---:|---:|---:|
|
||||
|
10
build.rs
10
build.rs
@ -5,18 +5,12 @@
|
||||
/// Set `OUCH_ARTIFACTS_FOLDER` to the name of the destination folder:
|
||||
///
|
||||
/// ```sh
|
||||
/// OUCH_ARTIFACTS_FOLDER=my-folder cargo build
|
||||
/// OUCH_ARTIFACTS_FOLDER=man-page-and-completions-artifacts cargo build
|
||||
/// ```
|
||||
///
|
||||
/// All completion files will be generated inside of the folder "my-folder".
|
||||
/// All completion files will be generated inside of the folder "man-page-and-completions-artifacts".
|
||||
///
|
||||
/// If the folder does not exist, it will be created.
|
||||
///
|
||||
/// We recommend you naming this folder "artifacts" for the sake of consistency.
|
||||
///
|
||||
/// ```sh
|
||||
/// OUCH_ARTIFACTS_FOLDER=artifacts cargo build
|
||||
/// ```
|
||||
use std::{
|
||||
env,
|
||||
fs::{create_dir_all, File},
|
||||
|
@ -1,22 +1,60 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
mkdir release
|
||||
cd artifacts
|
||||
mkdir output_assets
|
||||
echo "created folder 'output_assets/'"
|
||||
ls -lA -w 1
|
||||
cd downloaded_artifacts
|
||||
echo "entered 'downloaded_artifacts/'"
|
||||
ls -lA -w 1
|
||||
|
||||
for dir in ouch-*; do
|
||||
cp -r artifacts "$dir/completions"
|
||||
mkdir "$dir/man"
|
||||
mv "$dir"/completions/*.1 "$dir/man"
|
||||
cp ../{README.md,LICENSE,CHANGELOG.md} "$dir"
|
||||
PLATFORMS=(
|
||||
"aarch64-pc-windows-msvc"
|
||||
"aarch64-unknown-linux-gnu"
|
||||
"aarch64-unknown-linux-musl"
|
||||
"armv7-unknown-linux-gnueabihf"
|
||||
"armv7-unknown-linux-musleabihf"
|
||||
"x86_64-apple-darwin"
|
||||
"x86_64-pc-windows-gnu"
|
||||
"x86_64-pc-windows-msvc"
|
||||
"x86_64-unknown-linux-gnu"
|
||||
"x86_64-unknown-linux-musl"
|
||||
)
|
||||
# TODO: remove allow_piped_choice later
|
||||
DEFAULT_FEATURES="allow_piped_choice+unrar+use_zlib+use_zstd_thin+bzip3"
|
||||
|
||||
if [[ "$dir" = *.exe ]]; then
|
||||
target=${dir%.exe}
|
||||
mv "$dir" "$target"
|
||||
zip -r "../release/$target.zip" "$target"
|
||||
for platform in "${PLATFORMS[@]}"; do
|
||||
path="ouch-${platform}"
|
||||
echo "Processing $path"
|
||||
|
||||
if [ ! -d "${path}-${DEFAULT_FEATURES}" ]; then
|
||||
echo "ERROR: Could not find artifact directory for $platform with default features ($path)"
|
||||
exit 1
|
||||
fi
|
||||
mv "${path}-${DEFAULT_FEATURES}" "$path" # remove the annoying suffix
|
||||
|
||||
cp ../{README.md,LICENSE,CHANGELOG.md} "$path"
|
||||
mkdir -p "$path/man"
|
||||
mkdir -p "$path/completions"
|
||||
|
||||
mv "$path"/man-page-and-completions-artifacts/*.1 "$path/man"
|
||||
mv "$path"/man-page-and-completions-artifacts/* "$path/completions"
|
||||
rm -r "$path/man-page-and-completions-artifacts"
|
||||
|
||||
if [[ "$platform" == *"-windows-"* ]]; then
|
||||
mv "$path/target/$platform/release/ouch.exe" "$path"
|
||||
rm -rf "$path/target"
|
||||
|
||||
zip -r "../output_assets/${path}.zip" "$path"
|
||||
echo "Created output_assets/${path}.zip"
|
||||
else
|
||||
chmod +x "$dir/ouch"
|
||||
tar czf "../release/$dir.tar.gz" "$dir"
|
||||
mv "$path/target/$platform/release/ouch" "$path"
|
||||
rm -rf "$path/target"
|
||||
chmod +x "$path/ouch"
|
||||
|
||||
tar czf "../output_assets/${path}.tar.gz" "$path"
|
||||
echo "Created output_assets/${path}.tar.gz"
|
||||
fi
|
||||
done
|
||||
|
||||
echo "Done."
|
||||
|
@ -1,13 +1,51 @@
|
||||
//! Accessibility mode functions.
|
||||
//!
|
||||
//! # Problem
|
||||
//!
|
||||
//! `Ouch`'s default output contains symbols which make it visually easier to
|
||||
//! read, but harder for people who are visually impaired and rely on
|
||||
//! text-to-voice readers.
|
||||
//!
|
||||
//! On top of that, people who use text-to-voice tools can't easily skim
|
||||
//! through verbose lines of text, so they strongly benefit from fewer lines
|
||||
//! of output.
|
||||
//!
|
||||
//! # Solution
|
||||
//!
|
||||
//! To tackle that, `Ouch` has an accessibility mode that filters out most of
|
||||
//! the verbose logging, displaying only the most important pieces of
|
||||
//! information.
|
||||
//!
|
||||
//! Accessible mode also changes how logs are displayed, to remove symbols
|
||||
//! which are "noise" to text-to-voice tools and change formatting of error
|
||||
//! messages.
|
||||
//!
|
||||
//! # Are impaired people actually benefiting from this?
|
||||
//!
|
||||
//! So far we don't know. Most CLI tools aren't accessible, so we can't expect
|
||||
//! many impaired people to be using the terminal and CLI tools, including
|
||||
//! `Ouch`.
|
||||
//!
|
||||
//! I consider this to be an experiment, and a tiny step towards the right
|
||||
//! direction, `Ouch` shows that this is possible and easy to do, hopefully
|
||||
//! we can use our experience to later create guides or libraries for other
|
||||
//! developers.
|
||||
|
||||
use once_cell::sync::OnceCell;
|
||||
|
||||
/// Whether to enable accessible output (removes info output and reduces other
|
||||
/// output, removes visual markers like '[' and ']').
|
||||
/// Global flag for accessible mode.
|
||||
pub static ACCESSIBLE: OnceCell<bool> = OnceCell::new();
|
||||
|
||||
/// Check if `Ouch` is running in accessible mode.
|
||||
///
|
||||
/// Check the module-level documentation for more details.
|
||||
pub fn is_running_in_accessible_mode() -> bool {
|
||||
ACCESSIBLE.get().copied().unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Set the value of the global [`ACCESSIBLE`] flag.
|
||||
///
|
||||
/// Check the module-level documentation for more details.
|
||||
pub fn set_accessible(value: bool) {
|
||||
if ACCESSIBLE.get().is_none() {
|
||||
ACCESSIBLE.set(value).unwrap();
|
||||
|
7
src/archive/bzip3_stub.rs
Normal file
7
src/archive/bzip3_stub.rs
Normal file
@ -0,0 +1,7 @@
|
||||
use crate::Error;
|
||||
|
||||
pub fn no_support() -> Error {
|
||||
Error::UnsupportedFormat {
|
||||
reason: "BZip3 support is disabled for this build, possibly due to missing bindgen-cli dependency.".into(),
|
||||
}
|
||||
}
|
@ -1,4 +1,11 @@
|
||||
//! Archive compression algorithms
|
||||
|
||||
#[cfg(not(feature = "bzip3"))]
|
||||
pub mod bzip3_stub;
|
||||
#[cfg(feature = "unrar")]
|
||||
pub mod rar;
|
||||
#[cfg(not(feature = "unrar"))]
|
||||
pub mod rar_stub;
|
||||
pub mod sevenz;
|
||||
pub mod tar;
|
||||
pub mod zip;
|
||||
|
72
src/archive/rar.rs
Normal file
72
src/archive/rar.rs
Normal file
@ -0,0 +1,72 @@
|
||||
//! Contains RAR-specific building and unpacking functions
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use unrar::Archive;
|
||||
|
||||
use crate::{
|
||||
error::{Error, Result},
|
||||
list::FileInArchive,
|
||||
utils::{logger::info, Bytes},
|
||||
};
|
||||
|
||||
/// Unpacks the archive given by `archive_path` into the folder given by `output_folder`.
|
||||
/// Assumes that output_folder is empty
|
||||
pub fn unpack_archive(
|
||||
archive_path: &Path,
|
||||
output_folder: &Path,
|
||||
password: Option<&[u8]>,
|
||||
quiet: bool,
|
||||
) -> crate::Result<usize> {
|
||||
let archive = match password {
|
||||
Some(password) => Archive::with_password(archive_path, password),
|
||||
None => Archive::new(archive_path),
|
||||
};
|
||||
|
||||
let mut archive = archive.open_for_processing()?;
|
||||
let mut unpacked = 0;
|
||||
|
||||
while let Some(header) = archive.read_header()? {
|
||||
let entry = header.entry();
|
||||
archive = if entry.is_file() {
|
||||
if !quiet {
|
||||
info(format!(
|
||||
"extracted ({}) {}",
|
||||
Bytes::new(entry.unpacked_size),
|
||||
entry.filename.display(),
|
||||
));
|
||||
}
|
||||
unpacked += 1;
|
||||
header.extract_with_base(output_folder)?
|
||||
} else {
|
||||
header.skip()?
|
||||
};
|
||||
}
|
||||
|
||||
Ok(unpacked)
|
||||
}
|
||||
|
||||
/// List contents of `archive_path`, returning a vector of archive entries
|
||||
pub fn list_archive(
|
||||
archive_path: &Path,
|
||||
password: Option<&[u8]>,
|
||||
) -> Result<impl Iterator<Item = Result<FileInArchive>>> {
|
||||
let archive = match password {
|
||||
Some(password) => Archive::with_password(archive_path, password),
|
||||
None => Archive::new(archive_path),
|
||||
};
|
||||
|
||||
Ok(archive.open_for_listing()?.map(|item| {
|
||||
let item = item?;
|
||||
let is_dir = item.is_directory();
|
||||
let path = item.filename;
|
||||
|
||||
Ok(FileInArchive { path, is_dir })
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn no_compression() -> Error {
|
||||
Error::UnsupportedFormat {
|
||||
reason: "Creating RAR archives is not allowed due to licensing restrictions.".into(),
|
||||
}
|
||||
}
|
7
src/archive/rar_stub.rs
Normal file
7
src/archive/rar_stub.rs
Normal file
@ -0,0 +1,7 @@
|
||||
use crate::Error;
|
||||
|
||||
pub fn no_support() -> Error {
|
||||
Error::UnsupportedFormat {
|
||||
reason: "RAR support is disabled for this build, possibly due to licensing restrictions.".into(),
|
||||
}
|
||||
}
|
209
src/archive/sevenz.rs
Normal file
209
src/archive/sevenz.rs
Normal file
@ -0,0 +1,209 @@
|
||||
//! SevenZip archive format compress function
|
||||
|
||||
use std::{
|
||||
env,
|
||||
io::{self, Read, Seek, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use bstr::ByteSlice;
|
||||
use fs_err as fs;
|
||||
use same_file::Handle;
|
||||
use sevenz_rust2::SevenZArchiveEntry;
|
||||
|
||||
use crate::{
|
||||
error::{Error, FinalError, Result},
|
||||
list::FileInArchive,
|
||||
utils::{
|
||||
cd_into_same_dir_as,
|
||||
logger::{info, warning},
|
||||
Bytes, EscapedPathDisplay, FileVisibilityPolicy,
|
||||
},
|
||||
};
|
||||
|
||||
pub fn compress_sevenz<W>(
|
||||
files: &[PathBuf],
|
||||
output_path: &Path,
|
||||
writer: W,
|
||||
file_visibility_policy: FileVisibilityPolicy,
|
||||
quiet: bool,
|
||||
) -> crate::Result<W>
|
||||
where
|
||||
W: Write + Seek,
|
||||
{
|
||||
let mut writer = sevenz_rust2::SevenZWriter::new(writer)?;
|
||||
let output_handle = Handle::from_path(output_path);
|
||||
|
||||
for filename in files {
|
||||
let previous_location = cd_into_same_dir_as(filename)?;
|
||||
|
||||
// Unwrap safety:
|
||||
// paths should be canonicalized by now, and the root directory rejected.
|
||||
let filename = filename.file_name().unwrap();
|
||||
|
||||
for entry in file_visibility_policy.build_walker(filename) {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
// If the output_path is the same as the input file, warn the user and skip the input (in order to avoid compression recursion)
|
||||
if let Ok(handle) = &output_handle {
|
||||
if matches!(Handle::from_path(path), Ok(x) if &x == handle) {
|
||||
warning(format!(
|
||||
"Cannot compress `{}` into itself, skipping",
|
||||
output_path.display()
|
||||
));
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// This is printed for every file in `input_filenames` and has
|
||||
// little importance for most users, but would generate lots of
|
||||
// spoken text for users using screen readers, braille displays
|
||||
// and so on
|
||||
if !quiet {
|
||||
info(format!("Compressing '{}'", EscapedPathDisplay::new(path)));
|
||||
}
|
||||
|
||||
let metadata = match path.metadata() {
|
||||
Ok(metadata) => metadata,
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound && path.is_symlink() {
|
||||
// This path is for a broken symlink, ignore it
|
||||
continue;
|
||||
}
|
||||
return Err(e.into());
|
||||
}
|
||||
};
|
||||
|
||||
let entry_name = path.to_str().ok_or_else(|| {
|
||||
FinalError::with_title("7z requires that all entry names are valid UTF-8")
|
||||
.detail(format!("File at '{path:?}' has a non-UTF-8 name"))
|
||||
})?;
|
||||
|
||||
let entry = sevenz_rust2::SevenZArchiveEntry::from_path(path, entry_name.to_owned());
|
||||
let entry_data = if metadata.is_dir() {
|
||||
None
|
||||
} else {
|
||||
Some(fs::File::open(path)?)
|
||||
};
|
||||
|
||||
writer.push_archive_entry::<fs::File>(entry, entry_data)?;
|
||||
}
|
||||
|
||||
env::set_current_dir(previous_location)?;
|
||||
}
|
||||
|
||||
let bytes = writer.finish()?;
|
||||
Ok(bytes)
|
||||
}
|
||||
|
||||
pub fn decompress_sevenz<R>(reader: R, output_path: &Path, password: Option<&[u8]>, quiet: bool) -> crate::Result<usize>
|
||||
where
|
||||
R: Read + Seek,
|
||||
{
|
||||
let mut count: usize = 0;
|
||||
|
||||
let entry_extract_fn = |entry: &SevenZArchiveEntry, reader: &mut dyn Read, path: &PathBuf| {
|
||||
count += 1;
|
||||
// Manually handle writing all files from 7z archive, due to library exluding empty files
|
||||
use std::io::BufWriter;
|
||||
|
||||
use filetime_creation as ft;
|
||||
|
||||
let file_path = output_path.join(entry.name());
|
||||
|
||||
if entry.is_directory() {
|
||||
if !quiet {
|
||||
info(format!(
|
||||
"File {} extracted to \"{}\"",
|
||||
entry.name(),
|
||||
file_path.display()
|
||||
));
|
||||
}
|
||||
if !path.exists() {
|
||||
fs::create_dir_all(path)?;
|
||||
}
|
||||
} else {
|
||||
if !quiet {
|
||||
info(format!(
|
||||
"extracted ({}) {:?}",
|
||||
Bytes::new(entry.size()),
|
||||
file_path.display(),
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(parent) = path.parent() {
|
||||
if !parent.exists() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
}
|
||||
|
||||
let file = fs::File::create(path)?;
|
||||
let mut writer = BufWriter::new(file);
|
||||
io::copy(reader, &mut writer)?;
|
||||
|
||||
ft::set_file_handle_times(
|
||||
writer.get_ref().file(),
|
||||
Some(ft::FileTime::from_system_time(entry.access_date().into())),
|
||||
Some(ft::FileTime::from_system_time(entry.last_modified_date().into())),
|
||||
Some(ft::FileTime::from_system_time(entry.creation_date().into())),
|
||||
)
|
||||
.unwrap_or_default();
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
};
|
||||
|
||||
match password {
|
||||
Some(password) => sevenz_rust2::decompress_with_extract_fn_and_password(
|
||||
reader,
|
||||
output_path,
|
||||
sevenz_rust2::Password::from(password.to_str().map_err(|err| Error::InvalidPassword {
|
||||
reason: err.to_string(),
|
||||
})?),
|
||||
entry_extract_fn,
|
||||
)?,
|
||||
None => sevenz_rust2::decompress_with_extract_fn(reader, output_path, entry_extract_fn)?,
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// List contents of `archive_path`, returning a vector of archive entries
|
||||
pub fn list_archive<R>(reader: R, password: Option<&[u8]>) -> Result<impl Iterator<Item = crate::Result<FileInArchive>>>
|
||||
where
|
||||
R: Read + Seek,
|
||||
{
|
||||
let mut files = Vec::new();
|
||||
|
||||
let entry_extract_fn = |entry: &SevenZArchiveEntry, _: &mut dyn Read, _: &PathBuf| {
|
||||
files.push(Ok(FileInArchive {
|
||||
path: entry.name().into(),
|
||||
is_dir: entry.is_directory(),
|
||||
}));
|
||||
Ok(true)
|
||||
};
|
||||
|
||||
match password {
|
||||
Some(password) => {
|
||||
let password = match password.to_str() {
|
||||
Ok(p) => p,
|
||||
Err(err) => {
|
||||
return Err(Error::InvalidPassword {
|
||||
reason: err.to_string(),
|
||||
})
|
||||
}
|
||||
};
|
||||
sevenz_rust2::decompress_with_extract_fn_and_password(
|
||||
reader,
|
||||
".",
|
||||
sevenz_rust2::Password::from(password),
|
||||
entry_extract_fn,
|
||||
)?;
|
||||
}
|
||||
None => sevenz_rust2::decompress_with_extract_fn(reader, ".", entry_extract_fn)?,
|
||||
}
|
||||
|
||||
Ok(files.into_iter())
|
||||
}
|
@ -10,42 +10,57 @@ use std::{
|
||||
|
||||
use fs_err as fs;
|
||||
use same_file::Handle;
|
||||
use ubyte::ToByteUnit;
|
||||
|
||||
use crate::{
|
||||
error::FinalError,
|
||||
info,
|
||||
list::FileInArchive,
|
||||
utils::{self, EscapedPathDisplay, FileVisibilityPolicy},
|
||||
warning,
|
||||
utils::{
|
||||
self,
|
||||
logger::{info, warning},
|
||||
Bytes, EscapedPathDisplay, FileVisibilityPolicy,
|
||||
},
|
||||
};
|
||||
|
||||
/// Unpacks the archive given by `archive` into the folder given by `into`.
|
||||
/// Assumes that output_folder is empty
|
||||
pub fn unpack_archive(reader: Box<dyn Read>, output_folder: &Path, quiet: bool) -> crate::Result<usize> {
|
||||
assert!(output_folder.read_dir().expect("dir exists").count() == 0);
|
||||
let mut archive = tar::Archive::new(reader);
|
||||
|
||||
let mut files_unpacked = 0;
|
||||
for file in archive.entries()? {
|
||||
let mut file = file?;
|
||||
|
||||
file.unpack_in(output_folder)?;
|
||||
match file.header().entry_type() {
|
||||
tar::EntryType::Symlink => {
|
||||
let relative_path = file.path()?.to_path_buf();
|
||||
let full_path = output_folder.join(&relative_path);
|
||||
let target = file
|
||||
.link_name()?
|
||||
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::InvalidData, "Missing symlink target"))?;
|
||||
|
||||
#[cfg(unix)]
|
||||
std::os::unix::fs::symlink(&target, &full_path)?;
|
||||
#[cfg(windows)]
|
||||
std::os::windows::fs::symlink_file(&target, &full_path)?;
|
||||
}
|
||||
tar::EntryType::Regular | tar::EntryType::Directory => {
|
||||
file.unpack_in(output_folder)?;
|
||||
}
|
||||
_ => continue,
|
||||
}
|
||||
|
||||
// This is printed for every file in the archive and has little
|
||||
// importance for most users, but would generate lots of
|
||||
// spoken text for users using screen readers, braille displays
|
||||
// and so on
|
||||
if !quiet {
|
||||
info!(
|
||||
inaccessible,
|
||||
"{:?} extracted. ({})",
|
||||
info(format!(
|
||||
"extracted ({}) {:?}",
|
||||
Bytes::new(file.size()),
|
||||
utils::strip_cur_dir(&output_folder.join(file.path()?)),
|
||||
file.size().bytes(),
|
||||
);
|
||||
|
||||
files_unpacked += 1;
|
||||
));
|
||||
}
|
||||
files_unpacked += 1;
|
||||
}
|
||||
|
||||
Ok(files_unpacked)
|
||||
@ -87,6 +102,7 @@ pub fn build_archive_from_paths<W>(
|
||||
writer: W,
|
||||
file_visibility_policy: FileVisibilityPolicy,
|
||||
quiet: bool,
|
||||
follow_symlinks: bool,
|
||||
) -> crate::Result<W>
|
||||
where
|
||||
W: Write,
|
||||
@ -97,7 +113,8 @@ where
|
||||
for filename in input_filenames {
|
||||
let previous_location = utils::cd_into_same_dir_as(filename)?;
|
||||
|
||||
// Safe unwrap, input shall be treated before
|
||||
// Unwrap safety:
|
||||
// paths should be canonicalized by now, and the root directory rejected.
|
||||
let filename = filename.file_name().unwrap();
|
||||
|
||||
for entry in file_visibility_policy.build_walker(filename) {
|
||||
@ -105,12 +122,13 @@ where
|
||||
let path = entry.path();
|
||||
|
||||
// If the output_path is the same as the input file, warn the user and skip the input (in order to avoid compression recursion)
|
||||
if let Ok(ref handle) = output_handle {
|
||||
if let Ok(handle) = &output_handle {
|
||||
if matches!(Handle::from_path(path), Ok(x) if &x == handle) {
|
||||
warning!(
|
||||
"The output file and the input file are the same: `{}`, skipping...",
|
||||
warning(format!(
|
||||
"Cannot compress `{}` into itself, skipping",
|
||||
output_path.display()
|
||||
);
|
||||
));
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@ -120,18 +138,29 @@ where
|
||||
// spoken text for users using screen readers, braille displays
|
||||
// and so on
|
||||
if !quiet {
|
||||
info!(inaccessible, "Compressing '{}'.", EscapedPathDisplay::new(path));
|
||||
info(format!("Compressing '{}'", EscapedPathDisplay::new(path)));
|
||||
}
|
||||
|
||||
if path.is_dir() {
|
||||
builder.append_dir(path, path)?;
|
||||
} else if path.is_symlink() && !follow_symlinks {
|
||||
let target_path = path.read_link()?;
|
||||
|
||||
let mut header = tar::Header::new_gnu();
|
||||
header.set_entry_type(tar::EntryType::Symlink);
|
||||
header.set_size(0);
|
||||
|
||||
builder.append_link(&mut header, path, &target_path).map_err(|err| {
|
||||
FinalError::with_title("Could not create archive")
|
||||
.detail("Unexpected error while trying to read link")
|
||||
.detail(format!("Error: {err}."))
|
||||
})?;
|
||||
} else {
|
||||
let mut file = match fs::File::open(path) {
|
||||
Ok(f) => f,
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound && utils::is_symlink(path) {
|
||||
// This path is for a broken symlink
|
||||
// We just ignore it
|
||||
if e.kind() == std::io::ErrorKind::NotFound && path.is_symlink() {
|
||||
// This path is for a broken symlink, ignore it
|
||||
continue;
|
||||
}
|
||||
return Err(e.into());
|
||||
|
@ -10,36 +10,42 @@ use std::{
|
||||
thread,
|
||||
};
|
||||
|
||||
use filetime::{set_file_mtime, FileTime};
|
||||
use filetime_creation::{set_file_mtime, FileTime};
|
||||
use fs_err as fs;
|
||||
use same_file::Handle;
|
||||
use time::OffsetDateTime;
|
||||
use ubyte::ToByteUnit;
|
||||
use zip::{self, read::ZipFile, DateTime, ZipArchive};
|
||||
|
||||
use crate::{
|
||||
error::FinalError,
|
||||
info,
|
||||
list::FileInArchive,
|
||||
utils::{
|
||||
self, cd_into_same_dir_as, get_invalid_utf8_paths, pretty_format_list_of_paths, strip_cur_dir,
|
||||
EscapedPathDisplay, FileVisibilityPolicy,
|
||||
cd_into_same_dir_as, get_invalid_utf8_paths,
|
||||
logger::{info, info_accessible, warning},
|
||||
pretty_format_list_of_paths, strip_cur_dir, Bytes, EscapedPathDisplay, FileVisibilityPolicy,
|
||||
},
|
||||
warning,
|
||||
};
|
||||
|
||||
/// Unpacks the archive given by `archive` into the folder given by `output_folder`.
|
||||
/// Assumes that output_folder is empty
|
||||
pub fn unpack_archive<R>(mut archive: ZipArchive<R>, output_folder: &Path, quiet: bool) -> crate::Result<usize>
|
||||
pub fn unpack_archive<R>(
|
||||
mut archive: ZipArchive<R>,
|
||||
output_folder: &Path,
|
||||
password: Option<&[u8]>,
|
||||
quiet: bool,
|
||||
) -> crate::Result<usize>
|
||||
where
|
||||
R: Read + Seek,
|
||||
{
|
||||
assert!(output_folder.read_dir().expect("dir exists").count() == 0);
|
||||
|
||||
let mut unpacked_files = 0;
|
||||
|
||||
for idx in 0..archive.len() {
|
||||
let mut file = archive.by_index(idx)?;
|
||||
let mut file = match password {
|
||||
Some(password) => archive
|
||||
.by_index_decrypt(idx, password)?
|
||||
.map_err(|_| zip::result::ZipError::UnsupportedArchive("Password required to decrypt file"))?,
|
||||
None => archive.by_index(idx)?,
|
||||
};
|
||||
let file_path = match file.enclosed_name() {
|
||||
Some(path) => path.to_owned(),
|
||||
None => continue,
|
||||
@ -56,7 +62,7 @@ where
|
||||
// spoken text for users using screen readers, braille displays
|
||||
// and so on
|
||||
if !quiet {
|
||||
info!(inaccessible, "File {} extracted to \"{}\"", idx, file_path.display());
|
||||
info(format!("File {} extracted to \"{}\"", idx, file_path.display()));
|
||||
}
|
||||
fs::create_dir_all(&file_path)?;
|
||||
}
|
||||
@ -70,16 +76,28 @@ where
|
||||
|
||||
// same reason is in _is_dir: long, often not needed text
|
||||
if !quiet {
|
||||
info!(
|
||||
inaccessible,
|
||||
"{:?} extracted. ({})",
|
||||
info(format!(
|
||||
"extracted ({}) {:?}",
|
||||
Bytes::new(file.size()),
|
||||
file_path.display(),
|
||||
file.size().bytes()
|
||||
);
|
||||
));
|
||||
}
|
||||
|
||||
let mut output_file = fs::File::create(file_path)?;
|
||||
io::copy(&mut file, &mut output_file)?;
|
||||
let mode = file.unix_mode();
|
||||
let is_symlink = mode.is_some_and(|mode| mode & 0o170000 == 0o120000);
|
||||
|
||||
if is_symlink {
|
||||
let mut target = String::new();
|
||||
file.read_to_string(&mut target)?;
|
||||
|
||||
#[cfg(unix)]
|
||||
std::os::unix::fs::symlink(&target, file_path)?;
|
||||
#[cfg(windows)]
|
||||
std::os::windows::fs::symlink_file(&target, file_path)?;
|
||||
} else {
|
||||
let mut output_file = fs::File::create(file_path)?;
|
||||
io::copy(&mut file, &mut output_file)?;
|
||||
}
|
||||
|
||||
set_last_modified_time(&file, file_path)?;
|
||||
}
|
||||
@ -95,7 +113,10 @@ where
|
||||
}
|
||||
|
||||
/// List contents of `archive`, returning a vector of archive entries
|
||||
pub fn list_archive<R>(mut archive: ZipArchive<R>) -> impl Iterator<Item = crate::Result<FileInArchive>>
|
||||
pub fn list_archive<R>(
|
||||
mut archive: ZipArchive<R>,
|
||||
password: Option<&[u8]>,
|
||||
) -> impl Iterator<Item = crate::Result<FileInArchive>>
|
||||
where
|
||||
R: Read + Seek + Send + 'static,
|
||||
{
|
||||
@ -108,23 +129,30 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
let password = password.map(|p| p.to_owned());
|
||||
|
||||
let (tx, rx) = mpsc::channel();
|
||||
thread::spawn(move || {
|
||||
for idx in 0..archive.len() {
|
||||
let maybe_file_in_archive = (|| {
|
||||
let file = match archive.by_index(idx) {
|
||||
Ok(f) => f,
|
||||
Err(e) => return Some(Err(e.into())),
|
||||
let file_in_archive = (|| {
|
||||
let zip_result = match password.clone() {
|
||||
Some(password) => archive
|
||||
.by_index_decrypt(idx, &password)?
|
||||
.map_err(|_| zip::result::ZipError::UnsupportedArchive("Password required to decrypt file")),
|
||||
None => archive.by_index(idx),
|
||||
};
|
||||
|
||||
let path = file.enclosed_name()?.to_owned();
|
||||
let file = match zip_result {
|
||||
Ok(f) => f,
|
||||
Err(e) => return Err(e.into()),
|
||||
};
|
||||
|
||||
let path = file.enclosed_name().unwrap_or(&*file.mangled_name()).to_owned();
|
||||
let is_dir = file.is_dir();
|
||||
|
||||
Some(Ok(FileInArchive { path, is_dir }))
|
||||
Ok(FileInArchive { path, is_dir })
|
||||
})();
|
||||
if let Some(file_in_archive) = maybe_file_in_archive {
|
||||
tx.send(file_in_archive).unwrap();
|
||||
}
|
||||
tx.send(file_in_archive).unwrap();
|
||||
}
|
||||
});
|
||||
|
||||
@ -138,6 +166,7 @@ pub fn build_archive_from_paths<W>(
|
||||
writer: W,
|
||||
file_visibility_policy: FileVisibilityPolicy,
|
||||
quiet: bool,
|
||||
follow_symlinks: bool,
|
||||
) -> crate::Result<W>
|
||||
where
|
||||
W: Write + Seek,
|
||||
@ -168,7 +197,8 @@ where
|
||||
for filename in input_filenames {
|
||||
let previous_location = cd_into_same_dir_as(filename)?;
|
||||
|
||||
// Safe unwrap, input shall be treated before
|
||||
// Unwrap safety:
|
||||
// paths should be canonicalized by now, and the root directory rejected.
|
||||
let filename = filename.file_name().unwrap();
|
||||
|
||||
for entry in file_visibility_policy.build_walker(filename) {
|
||||
@ -176,13 +206,12 @@ where
|
||||
let path = entry.path();
|
||||
|
||||
// If the output_path is the same as the input file, warn the user and skip the input (in order to avoid compression recursion)
|
||||
if let Ok(ref handle) = output_handle {
|
||||
if let Ok(handle) = &output_handle {
|
||||
if matches!(Handle::from_path(path), Ok(x) if &x == handle) {
|
||||
warning!(
|
||||
"The output file and the input file are the same: `{}`, skipping...",
|
||||
warning(format!(
|
||||
"Cannot compress `{}` into itself, skipping",
|
||||
output_path.display()
|
||||
);
|
||||
continue;
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@ -191,15 +220,14 @@ where
|
||||
// spoken text for users using screen readers, braille displays
|
||||
// and so on
|
||||
if !quiet {
|
||||
info!(inaccessible, "Compressing '{}'.", EscapedPathDisplay::new(path));
|
||||
info(format!("Compressing '{}'", EscapedPathDisplay::new(path)));
|
||||
}
|
||||
|
||||
let metadata = match path.metadata() {
|
||||
Ok(metadata) => metadata,
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound && utils::is_symlink(path) {
|
||||
// This path is for a broken symlink
|
||||
// We just ignore it
|
||||
if e.kind() == std::io::ErrorKind::NotFound && path.is_symlink() {
|
||||
// This path is for a broken symlink, ignore it
|
||||
continue;
|
||||
}
|
||||
return Err(e.into());
|
||||
@ -207,10 +235,30 @@ where
|
||||
};
|
||||
|
||||
#[cfg(unix)]
|
||||
let options = options.unix_permissions(metadata.permissions().mode());
|
||||
let mode = metadata.permissions().mode();
|
||||
|
||||
let entry_name = path.to_str().ok_or_else(|| {
|
||||
FinalError::with_title("Zip requires that all directories names are valid UTF-8")
|
||||
.detail(format!("File at '{path:?}' has a non-UTF-8 name"))
|
||||
})?;
|
||||
|
||||
if metadata.is_dir() {
|
||||
writer.add_directory(path.to_str().unwrap().to_owned(), options)?;
|
||||
writer.add_directory(entry_name, options)?;
|
||||
} else if path.is_symlink() && !follow_symlinks {
|
||||
let target_path = path.read_link()?;
|
||||
let target_name = target_path.to_str().ok_or_else(|| {
|
||||
FinalError::with_title("Zip requires that all directories names are valid UTF-8")
|
||||
.detail(format!("File at '{target_path:?}' has a non-UTF-8 name"))
|
||||
})?;
|
||||
|
||||
// This approach writes the symlink target path as the content of the symlink entry.
|
||||
// We detect symlinks during extraction by checking for the Unix symlink mode (0o120000) in the entry's permissions.
|
||||
#[cfg(unix)]
|
||||
let symlink_options = options.unix_permissions(0o120000 | (mode & 0o777));
|
||||
#[cfg(windows)]
|
||||
let symlink_options = options.unix_permissions(0o120777);
|
||||
|
||||
writer.add_symlink(entry_name, target_name, symlink_options)?;
|
||||
} else {
|
||||
#[cfg(not(unix))]
|
||||
let options = if is_executable::is_executable(path) {
|
||||
@ -220,10 +268,13 @@ where
|
||||
};
|
||||
|
||||
let mut file = fs::File::open(path)?;
|
||||
writer.start_file(
|
||||
path.to_str().unwrap(),
|
||||
options.last_modified_time(get_last_modified_time(&file)),
|
||||
)?;
|
||||
|
||||
#[cfg(unix)]
|
||||
let options = options.unix_permissions(mode);
|
||||
// Updated last modified time
|
||||
let last_modified_time = options.last_modified_time(get_last_modified_time(&file));
|
||||
|
||||
writer.start_file(entry_name, last_modified_time)?;
|
||||
io::copy(&mut file, &mut writer)?;
|
||||
}
|
||||
}
|
||||
@ -248,7 +299,7 @@ fn display_zip_comment_if_exists(file: &ZipFile) {
|
||||
// the future, maybe asking the user if he wants to display the comment
|
||||
// (informing him of its size) would be sensible for both normal and
|
||||
// accessibility mode..
|
||||
info!(accessible, "Found comment in {}: {}", file.name(), comment);
|
||||
info_accessible(format!("Found comment in {}: {}", file.name(), comment));
|
||||
}
|
||||
}
|
||||
|
||||
|
81
src/check.rs
81
src/check.rs
@ -11,9 +11,11 @@ use std::{
|
||||
use crate::{
|
||||
error::FinalError,
|
||||
extension::{build_archive_file_suggestion, Extension},
|
||||
info,
|
||||
utils::{pretty_format_list_of_paths, try_infer_extension, user_wants_to_continue, EscapedPathDisplay},
|
||||
warning, QuestionAction, QuestionPolicy, Result,
|
||||
utils::{
|
||||
logger::{info_accessible, warning},
|
||||
pretty_format_list_of_paths, try_infer_extension, user_wants_to_continue, EscapedPathDisplay,
|
||||
},
|
||||
QuestionAction, QuestionPolicy, Result,
|
||||
};
|
||||
|
||||
/// Check if the mime type matches the detected extensions.
|
||||
@ -33,12 +35,11 @@ pub fn check_mime_type(
|
||||
if let Some(detected_format) = try_infer_extension(path) {
|
||||
// Inferring the file extension can have unpredicted consequences (e.g. the user just
|
||||
// mistyped, ...) which we should always inform the user about.
|
||||
info!(
|
||||
accessible,
|
||||
"Detected file: `{}` extension as `{}`",
|
||||
warning(format!(
|
||||
"We detected a file named `{}`, do you want to decompress it?",
|
||||
path.display(),
|
||||
detected_format
|
||||
);
|
||||
));
|
||||
|
||||
if user_wants_to_continue(path, question_policy, QuestionAction::Decompression)? {
|
||||
formats.push(detected_format);
|
||||
} else {
|
||||
@ -54,11 +55,11 @@ pub fn check_mime_type(
|
||||
.compression_formats
|
||||
.ends_with(detected_format.compression_formats)
|
||||
{
|
||||
warning!(
|
||||
warning(format!(
|
||||
"The file extension: `{}` differ from the detected extension: `{}`",
|
||||
outer_ext,
|
||||
detected_format
|
||||
);
|
||||
outer_ext, detected_format
|
||||
));
|
||||
|
||||
if !user_wants_to_continue(path, question_policy, QuestionAction::Decompression)? {
|
||||
return Ok(ControlFlow::Break(()));
|
||||
}
|
||||
@ -66,7 +67,10 @@ pub fn check_mime_type(
|
||||
} else {
|
||||
// NOTE: If this actually produces no false positives, we can upgrade it in the future
|
||||
// to a warning and ask the user if he wants to continue decompressing.
|
||||
info!(accessible, "Could not detect the extension of `{}`", path.display());
|
||||
info_accessible(format!(
|
||||
"Failed to confirm the format of `{}` by sniffing the contents, file might be misnamed",
|
||||
path.display()
|
||||
));
|
||||
}
|
||||
Ok(ControlFlow::Continue(()))
|
||||
}
|
||||
@ -123,32 +127,53 @@ pub fn check_archive_formats_position(formats: &[Extension], output_path: &Path)
|
||||
|
||||
/// Check if all provided files have formats to decompress.
|
||||
pub fn check_missing_formats_when_decompressing(files: &[PathBuf], formats: &[Vec<Extension>]) -> Result<()> {
|
||||
let files_missing_format: Vec<PathBuf> = files
|
||||
let files_with_broken_extension: Vec<&PathBuf> = files
|
||||
.iter()
|
||||
.zip(formats)
|
||||
.filter(|(_, format)| format.is_empty())
|
||||
.map(|(input_path, _)| PathBuf::from(input_path))
|
||||
.map(|(input_path, _)| input_path)
|
||||
.collect();
|
||||
|
||||
if let Some(path) = files_missing_format.first() {
|
||||
let error = FinalError::with_title("Cannot decompress files without extensions")
|
||||
.detail(format!(
|
||||
"Files without supported extensions: {}",
|
||||
pretty_format_list_of_paths(&files_missing_format)
|
||||
))
|
||||
.detail("Decompression formats are detected automatically by the file extension")
|
||||
.hint("Provide a file with a supported extension:")
|
||||
.hint(" ouch decompress example.tar.gz")
|
||||
if files_with_broken_extension.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let (files_with_unsupported_extensions, files_missing_extension): (Vec<&PathBuf>, Vec<&PathBuf>) =
|
||||
files_with_broken_extension
|
||||
.iter()
|
||||
.partition(|path| path.extension().is_some());
|
||||
|
||||
let mut error = FinalError::with_title("Cannot decompress files");
|
||||
|
||||
if !files_with_unsupported_extensions.is_empty() {
|
||||
error = error.detail(format!(
|
||||
"Files with unsupported extensions: {}",
|
||||
pretty_format_list_of_paths(&files_with_unsupported_extensions)
|
||||
));
|
||||
}
|
||||
|
||||
if !files_missing_extension.is_empty() {
|
||||
error = error.detail(format!(
|
||||
"Files with missing extensions: {}",
|
||||
pretty_format_list_of_paths(&files_missing_extension)
|
||||
));
|
||||
}
|
||||
|
||||
error = error.detail("Decompression formats are detected automatically from file extension");
|
||||
error = error.hint_all_supported_formats();
|
||||
|
||||
// If there's exactly one file, give a suggestion to use `--format`
|
||||
if let &[path] = files_with_broken_extension.as_slice() {
|
||||
error = error
|
||||
.hint("")
|
||||
.hint("Or overwrite this option with the '--format' flag:")
|
||||
.hint("Alternatively, you can pass an extension to the '--format' flag:")
|
||||
.hint(format!(
|
||||
" ouch decompress {} --format tar.gz",
|
||||
EscapedPathDisplay::new(path),
|
||||
));
|
||||
|
||||
return Err(error.into());
|
||||
}
|
||||
Ok(())
|
||||
|
||||
Err(error.into())
|
||||
}
|
||||
|
||||
/// Check if there is a first format when compressing, and returns it.
|
||||
|
212
src/cli/args.rs
212
src/cli/args.rs
@ -5,19 +5,19 @@ use clap::{Parser, ValueHint};
|
||||
// Ouch command line options (docstrings below are part of --help)
|
||||
/// A command-line utility for easily compressing and decompressing files and directories.
|
||||
///
|
||||
/// Supported formats: tar, zip, gz, xz/lzma, bz/bz2, lz4, sz, zst.
|
||||
/// Supported formats: tar, zip, gz, 7z, xz/lzma, bz/bz2, bz3, lz4, sz (Snappy), zst, rar and br.
|
||||
///
|
||||
/// Repository: https://github.com/ouch-org/ouch
|
||||
#[derive(Parser, Debug)]
|
||||
#[derive(Parser, Debug, PartialEq)]
|
||||
#[command(about, version)]
|
||||
// Disable rustdoc::bare_urls because rustdoc parses URLs differently than Clap
|
||||
#[allow(rustdoc::bare_urls)]
|
||||
pub struct CliArgs {
|
||||
/// Skip [Y/n] questions positively
|
||||
/// Skip [Y/n] questions, default to yes
|
||||
#[arg(short, long, conflicts_with = "no", global = true)]
|
||||
pub yes: bool,
|
||||
|
||||
/// Skip [Y/n] questions negatively
|
||||
/// Skip [Y/n] questions, default to no
|
||||
#[arg(short, long, global = true)]
|
||||
pub no: bool,
|
||||
|
||||
@ -25,15 +25,15 @@ pub struct CliArgs {
|
||||
#[arg(short = 'A', long, env = "ACCESSIBLE", global = true)]
|
||||
pub accessible: bool,
|
||||
|
||||
/// Ignores hidden files
|
||||
/// Ignore hidden files
|
||||
#[arg(short = 'H', long, global = true)]
|
||||
pub hidden: bool,
|
||||
|
||||
/// Silences output
|
||||
/// Silence output
|
||||
#[arg(short = 'q', long, global = true)]
|
||||
pub quiet: bool,
|
||||
|
||||
/// Ignores files matched by git's ignore files
|
||||
/// Ignore files matched by git's ignore files
|
||||
#[arg(short = 'g', long, global = true)]
|
||||
pub gitignore: bool,
|
||||
|
||||
@ -41,7 +41,15 @@ pub struct CliArgs {
|
||||
#[arg(short, long, global = true)]
|
||||
pub format: Option<OsString>,
|
||||
|
||||
/// Ouch and claps subcommands
|
||||
/// Decompress or list with password
|
||||
#[arg(short = 'p', long = "password", global = true)]
|
||||
pub password: Option<OsString>,
|
||||
|
||||
/// Concurrent working threads
|
||||
#[arg(short = 'c', long, global = true)]
|
||||
pub threads: Option<usize>,
|
||||
|
||||
// Ouch and claps subcommands
|
||||
#[command(subcommand)]
|
||||
pub cmd: Subcommand,
|
||||
}
|
||||
@ -53,7 +61,7 @@ pub enum Subcommand {
|
||||
#[command(visible_alias = "c")]
|
||||
Compress {
|
||||
/// Files to be compressed
|
||||
#[arg(required = true, num_args = 1..)]
|
||||
#[arg(required = true, value_hint = ValueHint::FilePath)]
|
||||
files: Vec<PathBuf>,
|
||||
|
||||
/// The resulting file. Its extensions can be used to specify the compression formats
|
||||
@ -73,23 +81,35 @@ pub enum Subcommand {
|
||||
/// conflicts with --level and --fast
|
||||
#[arg(long, group = "compression-level")]
|
||||
slow: bool,
|
||||
|
||||
/// Archive target files instead of storing symlinks (supported by `tar` and `zip`)
|
||||
#[arg(long, short = 'S')]
|
||||
follow_symlinks: bool,
|
||||
},
|
||||
/// Decompresses one or more files, optionally into another folder
|
||||
#[command(visible_alias = "d")]
|
||||
Decompress {
|
||||
/// Files to be decompressed
|
||||
#[arg(required = true, num_args = 1..)]
|
||||
/// Files to be decompressed, or "-" for stdin
|
||||
#[arg(required = true, num_args = 1.., value_hint = ValueHint::FilePath)]
|
||||
files: Vec<PathBuf>,
|
||||
|
||||
/// Place results in a directory other than the current one
|
||||
#[arg(short = 'd', long = "dir", value_hint = ValueHint::DirPath)]
|
||||
#[arg(short = 'd', long = "dir", value_hint = ValueHint::FilePath)]
|
||||
output_dir: Option<PathBuf>,
|
||||
|
||||
/// Remove the source file after successful decompression
|
||||
#[arg(short = 'r', long)]
|
||||
remove: bool,
|
||||
|
||||
/// Disable Smart Unpack
|
||||
#[arg(long)]
|
||||
no_smart_unpack: bool,
|
||||
},
|
||||
/// List contents of an archive
|
||||
#[command(visible_aliases = ["l", "ls"])]
|
||||
List {
|
||||
/// Archives whose contents should be listed
|
||||
#[arg(required = true, num_args = 1..)]
|
||||
#[arg(required = true, num_args = 1.., value_hint = ValueHint::FilePath)]
|
||||
archives: Vec<PathBuf>,
|
||||
|
||||
/// Show archive contents as a tree
|
||||
@ -97,3 +117,169 @@ pub enum Subcommand {
|
||||
tree: bool,
|
||||
},
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn args_splitter(input: &str) -> impl Iterator<Item = &str> {
|
||||
input.split_whitespace()
|
||||
}
|
||||
|
||||
fn to_paths(iter: impl IntoIterator<Item = &'static str>) -> Vec<PathBuf> {
|
||||
iter.into_iter().map(PathBuf::from).collect()
|
||||
}
|
||||
|
||||
macro_rules! test {
|
||||
($args:expr, $expected:expr) => {
|
||||
let result = match CliArgs::try_parse_from(args_splitter($args)) {
|
||||
Ok(result) => result,
|
||||
Err(err) => panic!(
|
||||
"CLI result is Err, expected Ok, input: '{}'.\nResult: '{err}'",
|
||||
$args
|
||||
),
|
||||
};
|
||||
assert_eq!(result, $expected, "CLI result mismatched, input: '{}'.", $args);
|
||||
};
|
||||
}
|
||||
|
||||
fn mock_cli_args() -> CliArgs {
|
||||
CliArgs {
|
||||
yes: false,
|
||||
no: false,
|
||||
accessible: false,
|
||||
hidden: false,
|
||||
quiet: false,
|
||||
gitignore: false,
|
||||
format: None,
|
||||
// This is usually replaced in assertion tests
|
||||
password: None,
|
||||
threads: None,
|
||||
cmd: Subcommand::Decompress {
|
||||
// Put a crazy value here so no test can assert it unintentionally
|
||||
files: vec!["\x00\x11\x22".into()],
|
||||
output_dir: None,
|
||||
remove: false,
|
||||
no_smart_unpack: false,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clap_cli_ok() {
|
||||
test!(
|
||||
"ouch decompress file.tar.gz",
|
||||
CliArgs {
|
||||
cmd: Subcommand::Decompress {
|
||||
files: to_paths(["file.tar.gz"]),
|
||||
output_dir: None,
|
||||
remove: false,
|
||||
no_smart_unpack: false,
|
||||
},
|
||||
..mock_cli_args()
|
||||
}
|
||||
);
|
||||
test!(
|
||||
"ouch d file.tar.gz",
|
||||
CliArgs {
|
||||
cmd: Subcommand::Decompress {
|
||||
files: to_paths(["file.tar.gz"]),
|
||||
output_dir: None,
|
||||
remove: false,
|
||||
no_smart_unpack: false,
|
||||
},
|
||||
..mock_cli_args()
|
||||
}
|
||||
);
|
||||
test!(
|
||||
"ouch d a b c",
|
||||
CliArgs {
|
||||
cmd: Subcommand::Decompress {
|
||||
files: to_paths(["a", "b", "c"]),
|
||||
output_dir: None,
|
||||
remove: false,
|
||||
no_smart_unpack: false,
|
||||
},
|
||||
..mock_cli_args()
|
||||
}
|
||||
);
|
||||
|
||||
test!(
|
||||
"ouch compress file file.tar.gz",
|
||||
CliArgs {
|
||||
cmd: Subcommand::Compress {
|
||||
files: to_paths(["file"]),
|
||||
output: PathBuf::from("file.tar.gz"),
|
||||
level: None,
|
||||
fast: false,
|
||||
slow: false,
|
||||
follow_symlinks: false,
|
||||
},
|
||||
..mock_cli_args()
|
||||
}
|
||||
);
|
||||
test!(
|
||||
"ouch compress a b c archive.tar.gz",
|
||||
CliArgs {
|
||||
cmd: Subcommand::Compress {
|
||||
files: to_paths(["a", "b", "c"]),
|
||||
output: PathBuf::from("archive.tar.gz"),
|
||||
level: None,
|
||||
fast: false,
|
||||
slow: false,
|
||||
follow_symlinks: false,
|
||||
},
|
||||
..mock_cli_args()
|
||||
}
|
||||
);
|
||||
test!(
|
||||
"ouch compress a b c archive.tar.gz",
|
||||
CliArgs {
|
||||
cmd: Subcommand::Compress {
|
||||
files: to_paths(["a", "b", "c"]),
|
||||
output: PathBuf::from("archive.tar.gz"),
|
||||
level: None,
|
||||
fast: false,
|
||||
slow: false,
|
||||
follow_symlinks: false,
|
||||
},
|
||||
..mock_cli_args()
|
||||
}
|
||||
);
|
||||
|
||||
let inputs = [
|
||||
"ouch compress a b c output --format tar.gz",
|
||||
// https://github.com/clap-rs/clap/issues/5115
|
||||
// "ouch compress a b c --format tar.gz output",
|
||||
// "ouch compress a b --format tar.gz c output",
|
||||
// "ouch compress a --format tar.gz b c output",
|
||||
"ouch compress --format tar.gz a b c output",
|
||||
"ouch --format tar.gz compress a b c output",
|
||||
];
|
||||
for input in inputs {
|
||||
test!(
|
||||
input,
|
||||
CliArgs {
|
||||
cmd: Subcommand::Compress {
|
||||
files: to_paths(["a", "b", "c"]),
|
||||
output: PathBuf::from("output"),
|
||||
level: None,
|
||||
fast: false,
|
||||
slow: false,
|
||||
follow_symlinks: false,
|
||||
},
|
||||
format: Some("tar.gz".into()),
|
||||
..mock_cli_args()
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clap_cli_err() {
|
||||
assert!(CliArgs::try_parse_from(args_splitter("ouch c")).is_err());
|
||||
assert!(CliArgs::try_parse_from(args_splitter("ouch c input")).is_err());
|
||||
assert!(CliArgs::try_parse_from(args_splitter("ouch d")).is_err());
|
||||
assert!(CliArgs::try_parse_from(args_splitter("ouch l")).is_err());
|
||||
}
|
||||
}
|
||||
|
@ -5,14 +5,17 @@ mod args;
|
||||
use std::{
|
||||
io,
|
||||
path::{Path, PathBuf},
|
||||
vec::Vec,
|
||||
};
|
||||
|
||||
use clap::Parser;
|
||||
use fs_err as fs;
|
||||
|
||||
pub use self::args::{CliArgs, Subcommand};
|
||||
use crate::{accessible::set_accessible, utils::FileVisibilityPolicy, QuestionPolicy};
|
||||
use crate::{
|
||||
accessible::set_accessible,
|
||||
utils::{is_path_stdin, FileVisibilityPolicy},
|
||||
QuestionPolicy,
|
||||
};
|
||||
|
||||
impl CliArgs {
|
||||
/// A helper method that calls `clap::Parser::parse`.
|
||||
@ -20,7 +23,7 @@ impl CliArgs {
|
||||
/// And:
|
||||
/// 1. Make paths absolute.
|
||||
/// 2. Checks the QuestionPolicy.
|
||||
pub fn parse_args() -> crate::Result<(Self, QuestionPolicy, FileVisibilityPolicy)> {
|
||||
pub fn parse_and_validate_args() -> crate::Result<(Self, QuestionPolicy, FileVisibilityPolicy)> {
|
||||
let mut args = Self::parse();
|
||||
|
||||
set_accessible(args.accessible);
|
||||
@ -48,5 +51,14 @@ impl CliArgs {
|
||||
}
|
||||
|
||||
fn canonicalize_files(files: &[impl AsRef<Path>]) -> io::Result<Vec<PathBuf>> {
|
||||
files.iter().map(fs::canonicalize).collect()
|
||||
files
|
||||
.iter()
|
||||
.map(|f| {
|
||||
if is_path_stdin(f.as_ref()) || f.as_ref().is_symlink() {
|
||||
Ok(f.as_ref().to_path_buf())
|
||||
} else {
|
||||
fs::canonicalize(f)
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
@ -5,11 +5,12 @@ use std::{
|
||||
|
||||
use fs_err as fs;
|
||||
|
||||
use super::warn_user_about_loading_sevenz_in_memory;
|
||||
use crate::{
|
||||
archive,
|
||||
commands::warn_user_about_loading_zip_in_memory,
|
||||
extension::{split_first_compression_format, CompressionFormat::*, Extension},
|
||||
utils::{user_wants_to_continue, FileVisibilityPolicy},
|
||||
utils::{io::lock_and_flush_output_stdio, user_wants_to_continue, FileVisibilityPolicy},
|
||||
QuestionAction, QuestionPolicy, BUFFER_CAPACITY,
|
||||
};
|
||||
|
||||
@ -30,6 +31,7 @@ pub fn compress_files(
|
||||
output_file: fs::File,
|
||||
output_path: &Path,
|
||||
quiet: bool,
|
||||
follow_symlinks: bool,
|
||||
question_policy: QuestionPolicy,
|
||||
file_visibility_policy: FileVisibilityPolicy,
|
||||
level: Option<i16>,
|
||||
@ -55,12 +57,17 @@ pub fn compress_files(
|
||||
encoder,
|
||||
level.map_or_else(Default::default, |l| bzip2::Compression::new((l as u32).clamp(1, 9))),
|
||||
)),
|
||||
Lz4 => Box::new(lzzzz::lz4f::WriteCompressor::new(
|
||||
encoder,
|
||||
lzzzz::lz4f::PreferencesBuilder::new()
|
||||
.compression_level(level.map_or(1, |l| (l as i32).clamp(1, lzzzz::lz4f::CLEVEL_MAX)))
|
||||
.build(),
|
||||
)?),
|
||||
Bzip3 => {
|
||||
#[cfg(not(feature = "bzip3"))]
|
||||
return Err(archive::bzip3_stub::no_support());
|
||||
|
||||
#[cfg(feature = "bzip3")]
|
||||
Box::new(
|
||||
// Use block size of 16 MiB
|
||||
bzip3::write::Bz3Encoder::new(encoder, 16 * 2_usize.pow(20))?,
|
||||
)
|
||||
}
|
||||
Lz4 => Box::new(lz4_flex::frame::FrameEncoder::new(encoder).auto_finish()),
|
||||
Lzma => Box::new(xz2::write::XzEncoder::new(
|
||||
encoder,
|
||||
level.map_or(6, |l| (l as u32).clamp(0, 9)),
|
||||
@ -73,18 +80,23 @@ pub fn compress_files(
|
||||
.from_writer(encoder),
|
||||
),
|
||||
Zstd => {
|
||||
let zstd_encoder = zstd::stream::write::Encoder::new(
|
||||
let mut zstd_encoder = zstd::stream::write::Encoder::new(
|
||||
encoder,
|
||||
level.map_or(zstd::DEFAULT_COMPRESSION_LEVEL, |l| {
|
||||
(l as i32).clamp(zstd::zstd_safe::min_c_level(), zstd::zstd_safe::max_c_level())
|
||||
}),
|
||||
);
|
||||
// Safety:
|
||||
// Encoder::new() can only fail if `level` is invalid, but the level
|
||||
// is `clamp`ed and therefore guaranteed to be valid
|
||||
Box::new(zstd_encoder.unwrap().auto_finish())
|
||||
)?;
|
||||
// Use all available PHYSICAL cores for compression
|
||||
zstd_encoder.multithread(num_cpus::get_physical() as u32)?;
|
||||
Box::new(zstd_encoder.auto_finish())
|
||||
}
|
||||
Tar | Zip => unreachable!(),
|
||||
Brotli => {
|
||||
let default_level = 11; // Same as brotli CLI, default to highest compression
|
||||
let level = level.unwrap_or(default_level).clamp(0, 11) as u32;
|
||||
let win_size = 22; // default to 2^22 = 4 MiB window size
|
||||
Box::new(brotli::CompressorWriter::new(encoder, BUFFER_CAPACITY, level, win_size))
|
||||
}
|
||||
Tar | Zip | Rar | SevenZip => unreachable!(),
|
||||
};
|
||||
Ok(encoder)
|
||||
};
|
||||
@ -96,20 +108,30 @@ pub fn compress_files(
|
||||
}
|
||||
|
||||
match first_format {
|
||||
Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
|
||||
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
|
||||
writer = chain_writer_encoder(&first_format, writer)?;
|
||||
let mut reader = fs::File::open(&files[0]).unwrap();
|
||||
let mut reader = fs::File::open(&files[0])?;
|
||||
|
||||
io::copy(&mut reader, &mut writer)?;
|
||||
}
|
||||
Tar => {
|
||||
archive::tar::build_archive_from_paths(&files, output_path, &mut writer, file_visibility_policy, quiet)?;
|
||||
archive::tar::build_archive_from_paths(
|
||||
&files,
|
||||
output_path,
|
||||
&mut writer,
|
||||
file_visibility_policy,
|
||||
quiet,
|
||||
follow_symlinks,
|
||||
)?;
|
||||
writer.flush()?;
|
||||
}
|
||||
Zip => {
|
||||
if !formats.is_empty() {
|
||||
warn_user_about_loading_zip_in_memory();
|
||||
// Locking necessary to guarantee that warning and question
|
||||
// messages stay adjacent
|
||||
let _locks = lock_and_flush_output_stdio();
|
||||
|
||||
warn_user_about_loading_zip_in_memory();
|
||||
if !user_wants_to_continue(output_path, question_policy, QuestionAction::Compression)? {
|
||||
return Ok(false);
|
||||
}
|
||||
@ -123,10 +145,35 @@ pub fn compress_files(
|
||||
&mut vec_buffer,
|
||||
file_visibility_policy,
|
||||
quiet,
|
||||
follow_symlinks,
|
||||
)?;
|
||||
vec_buffer.rewind()?;
|
||||
io::copy(&mut vec_buffer, &mut writer)?;
|
||||
}
|
||||
Rar => {
|
||||
#[cfg(feature = "unrar")]
|
||||
return Err(archive::rar::no_compression());
|
||||
|
||||
#[cfg(not(feature = "unrar"))]
|
||||
return Err(archive::rar_stub::no_support());
|
||||
}
|
||||
SevenZip => {
|
||||
if !formats.is_empty() {
|
||||
// Locking necessary to guarantee that warning and question
|
||||
// messages stay adjacent
|
||||
let _locks = lock_and_flush_output_stdio();
|
||||
|
||||
warn_user_about_loading_sevenz_in_memory();
|
||||
if !user_wants_to_continue(output_path, question_policy, QuestionAction::Compression)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
|
||||
let mut vec_buffer = Cursor::new(vec![]);
|
||||
archive::sevenz::compress_sevenz(&files, output_path, &mut vec_buffer, file_visibility_policy, quiet)?;
|
||||
vec_buffer.rewind()?;
|
||||
io::copy(&mut vec_buffer, &mut writer)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
|
@ -6,34 +6,50 @@ use std::{
|
||||
|
||||
use fs_err as fs;
|
||||
|
||||
#[cfg(not(feature = "bzip3"))]
|
||||
use crate::archive;
|
||||
use crate::{
|
||||
commands::warn_user_about_loading_zip_in_memory,
|
||||
commands::{warn_user_about_loading_sevenz_in_memory, warn_user_about_loading_zip_in_memory},
|
||||
extension::{
|
||||
split_first_compression_format,
|
||||
CompressionFormat::{self, *},
|
||||
Extension,
|
||||
},
|
||||
info,
|
||||
utils::{self, nice_directory_display, user_wants_to_continue},
|
||||
utils::{
|
||||
self,
|
||||
io::lock_and_flush_output_stdio,
|
||||
is_path_stdin,
|
||||
logger::{info, info_accessible},
|
||||
nice_directory_display, user_wants_to_continue,
|
||||
},
|
||||
QuestionAction, QuestionPolicy, BUFFER_CAPACITY,
|
||||
};
|
||||
|
||||
// Decompress a file
|
||||
//
|
||||
// File at input_file_path is opened for reading, example: "archive.tar.gz"
|
||||
// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
|
||||
// output_dir it's where the file will be decompressed to, this function assumes that the directory exists
|
||||
// output_file_path is only used when extracting single file formats, not archive formats like .tar or .zip
|
||||
pub fn decompress_file(
|
||||
input_file_path: &Path,
|
||||
formats: Vec<Extension>,
|
||||
output_dir: &Path,
|
||||
output_file_path: PathBuf,
|
||||
question_policy: QuestionPolicy,
|
||||
quiet: bool,
|
||||
) -> crate::Result<()> {
|
||||
assert!(output_dir.exists());
|
||||
let reader = fs::File::open(input_file_path)?;
|
||||
trait ReadSeek: Read + io::Seek {}
|
||||
impl<T: Read + io::Seek> ReadSeek for T {}
|
||||
|
||||
pub struct DecompressOptions<'a> {
|
||||
pub input_file_path: &'a Path,
|
||||
pub formats: Vec<Extension>,
|
||||
pub output_dir: &'a Path,
|
||||
pub output_file_path: PathBuf,
|
||||
pub is_output_dir_provided: bool,
|
||||
pub is_smart_unpack: bool,
|
||||
pub question_policy: QuestionPolicy,
|
||||
pub quiet: bool,
|
||||
pub password: Option<&'a [u8]>,
|
||||
pub remove: bool,
|
||||
}
|
||||
|
||||
/// Decompress a file
|
||||
///
|
||||
/// File at input_file_path is opened for reading, example: "archive.tar.gz"
|
||||
/// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
|
||||
/// output_dir it's where the file will be decompressed to, this function assumes that the directory exists
|
||||
/// output_file_path is only used when extracting single file formats, not archive formats like .tar or .zip
|
||||
pub fn decompress_file(options: DecompressOptions) -> crate::Result<()> {
|
||||
assert!(options.output_dir.exists());
|
||||
let input_is_stdin = is_path_stdin(options.input_file_path);
|
||||
|
||||
// Zip archives are special, because they require io::Seek, so it requires it's logic separated
|
||||
// from decoder chaining.
|
||||
@ -45,14 +61,24 @@ pub fn decompress_file(
|
||||
if let [Extension {
|
||||
compression_formats: [Zip],
|
||||
..
|
||||
}] = formats.as_slice()
|
||||
}] = options.formats.as_slice()
|
||||
{
|
||||
let mut vec = vec![];
|
||||
let reader: Box<dyn ReadSeek> = if input_is_stdin {
|
||||
warn_user_about_loading_zip_in_memory();
|
||||
io::copy(&mut io::stdin(), &mut vec)?;
|
||||
Box::new(io::Cursor::new(vec))
|
||||
} else {
|
||||
Box::new(fs::File::open(options.input_file_path)?)
|
||||
};
|
||||
let zip_archive = zip::ZipArchive::new(reader)?;
|
||||
let files_unpacked = if let ControlFlow::Continue(files) = smart_unpack(
|
||||
|output_dir| crate::archive::zip::unpack_archive(zip_archive, output_dir, quiet),
|
||||
output_dir,
|
||||
&output_file_path,
|
||||
question_policy,
|
||||
let files_unpacked = if let ControlFlow::Continue(files) = execute_decompression(
|
||||
|output_dir| crate::archive::zip::unpack_archive(zip_archive, output_dir, options.password, options.quiet),
|
||||
options.output_dir,
|
||||
&options.output_file_path,
|
||||
options.question_policy,
|
||||
options.is_output_dir_provided,
|
||||
options.is_smart_unpack,
|
||||
)? {
|
||||
files
|
||||
} else {
|
||||
@ -63,17 +89,29 @@ pub fn decompress_file(
|
||||
// having a final status message is important especially in an accessibility context
|
||||
// as screen readers may not read a commands exit code, making it hard to reason
|
||||
// about whether the command succeeded without such a message
|
||||
info!(
|
||||
accessible,
|
||||
"Successfully decompressed archive in {} ({} files).",
|
||||
nice_directory_display(output_dir),
|
||||
info_accessible(format!(
|
||||
"Successfully decompressed archive in {} ({} files)",
|
||||
nice_directory_display(options.output_dir),
|
||||
files_unpacked
|
||||
);
|
||||
));
|
||||
|
||||
if !input_is_stdin && options.remove {
|
||||
fs::remove_file(options.input_file_path)?;
|
||||
info(format!(
|
||||
"Removed input file {}",
|
||||
nice_directory_display(options.input_file_path)
|
||||
));
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Will be used in decoder chaining
|
||||
let reader: Box<dyn Read> = if input_is_stdin {
|
||||
Box::new(io::stdin())
|
||||
} else {
|
||||
Box::new(fs::File::open(options.input_file_path)?)
|
||||
};
|
||||
let reader = BufReader::with_capacity(BUFFER_CAPACITY, reader);
|
||||
let mut reader: Box<dyn Read> = Box::new(reader);
|
||||
|
||||
@ -82,26 +120,38 @@ pub fn decompress_file(
|
||||
let decoder: Box<dyn Read> = match format {
|
||||
Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
|
||||
Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
|
||||
Lz4 => Box::new(lzzzz::lz4f::ReadDecompressor::new(decoder)?),
|
||||
Bzip3 => {
|
||||
#[cfg(not(feature = "bzip3"))]
|
||||
return Err(archive::bzip3_stub::no_support());
|
||||
|
||||
#[cfg(feature = "bzip3")]
|
||||
Box::new(bzip3::read::Bz3Decoder::new(decoder)?)
|
||||
}
|
||||
Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
|
||||
Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
|
||||
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
|
||||
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
|
||||
Tar | Zip => unreachable!(),
|
||||
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
|
||||
Tar | Zip | Rar | SevenZip => decoder,
|
||||
};
|
||||
Ok(decoder)
|
||||
};
|
||||
|
||||
let (first_extension, extensions) = split_first_compression_format(&formats);
|
||||
let (first_extension, extensions) = split_first_compression_format(&options.formats);
|
||||
|
||||
for format in extensions.iter().rev() {
|
||||
reader = chain_reader_decoder(format, reader)?;
|
||||
}
|
||||
|
||||
let files_unpacked = match first_extension {
|
||||
Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
|
||||
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
|
||||
reader = chain_reader_decoder(&first_extension, reader)?;
|
||||
|
||||
let mut writer = match utils::ask_to_create_file(&output_file_path, question_policy)? {
|
||||
let mut writer = match utils::ask_to_create_file(
|
||||
&options.output_file_path,
|
||||
options.question_policy,
|
||||
QuestionAction::Decompression,
|
||||
)? {
|
||||
Some(file) => file,
|
||||
None => return Ok(()),
|
||||
};
|
||||
@ -111,11 +161,13 @@ pub fn decompress_file(
|
||||
1
|
||||
}
|
||||
Tar => {
|
||||
if let ControlFlow::Continue(files) = smart_unpack(
|
||||
|output_dir| crate::archive::tar::unpack_archive(reader, output_dir, quiet),
|
||||
output_dir,
|
||||
&output_file_path,
|
||||
question_policy,
|
||||
if let ControlFlow::Continue(files) = execute_decompression(
|
||||
|output_dir| crate::archive::tar::unpack_archive(reader, output_dir, options.quiet),
|
||||
options.output_dir,
|
||||
&options.output_file_path,
|
||||
options.question_policy,
|
||||
options.is_output_dir_provided,
|
||||
options.is_smart_unpack,
|
||||
)? {
|
||||
files
|
||||
} else {
|
||||
@ -123,10 +175,17 @@ pub fn decompress_file(
|
||||
}
|
||||
}
|
||||
Zip => {
|
||||
if formats.len() > 1 {
|
||||
warn_user_about_loading_zip_in_memory();
|
||||
if options.formats.len() > 1 {
|
||||
// Locking necessary to guarantee that warning and question
|
||||
// messages stay adjacent
|
||||
let _locks = lock_and_flush_output_stdio();
|
||||
|
||||
if !user_wants_to_continue(input_file_path, question_policy, QuestionAction::Decompression)? {
|
||||
warn_user_about_loading_zip_in_memory();
|
||||
if !user_wants_to_continue(
|
||||
options.input_file_path,
|
||||
options.question_policy,
|
||||
QuestionAction::Decompression,
|
||||
)? {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
@ -135,11 +194,91 @@ pub fn decompress_file(
|
||||
io::copy(&mut reader, &mut vec)?;
|
||||
let zip_archive = zip::ZipArchive::new(io::Cursor::new(vec))?;
|
||||
|
||||
if let ControlFlow::Continue(files) = smart_unpack(
|
||||
|output_dir| crate::archive::zip::unpack_archive(zip_archive, output_dir, quiet),
|
||||
output_dir,
|
||||
&output_file_path,
|
||||
question_policy,
|
||||
if let ControlFlow::Continue(files) = execute_decompression(
|
||||
|output_dir| {
|
||||
crate::archive::zip::unpack_archive(zip_archive, output_dir, options.password, options.quiet)
|
||||
},
|
||||
options.output_dir,
|
||||
&options.output_file_path,
|
||||
options.question_policy,
|
||||
options.is_output_dir_provided,
|
||||
options.is_smart_unpack,
|
||||
)? {
|
||||
files
|
||||
} else {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "unrar")]
|
||||
Rar => {
|
||||
type UnpackResult = crate::Result<usize>;
|
||||
let unpack_fn: Box<dyn FnOnce(&Path) -> UnpackResult> = if options.formats.len() > 1 || input_is_stdin {
|
||||
let mut temp_file = tempfile::NamedTempFile::new()?;
|
||||
io::copy(&mut reader, &mut temp_file)?;
|
||||
Box::new(move |output_dir| {
|
||||
crate::archive::rar::unpack_archive(temp_file.path(), output_dir, options.password, options.quiet)
|
||||
})
|
||||
} else {
|
||||
Box::new(|output_dir| {
|
||||
crate::archive::rar::unpack_archive(
|
||||
options.input_file_path,
|
||||
output_dir,
|
||||
options.password,
|
||||
options.quiet,
|
||||
)
|
||||
})
|
||||
};
|
||||
|
||||
if let ControlFlow::Continue(files) = execute_decompression(
|
||||
unpack_fn,
|
||||
options.output_dir,
|
||||
&options.output_file_path,
|
||||
options.question_policy,
|
||||
options.is_output_dir_provided,
|
||||
options.is_smart_unpack,
|
||||
)? {
|
||||
files
|
||||
} else {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
#[cfg(not(feature = "unrar"))]
|
||||
Rar => {
|
||||
return Err(crate::archive::rar_stub::no_support());
|
||||
}
|
||||
SevenZip => {
|
||||
if options.formats.len() > 1 {
|
||||
// Locking necessary to guarantee that warning and question
|
||||
// messages stay adjacent
|
||||
let _locks = lock_and_flush_output_stdio();
|
||||
|
||||
warn_user_about_loading_sevenz_in_memory();
|
||||
if !user_wants_to_continue(
|
||||
options.input_file_path,
|
||||
options.question_policy,
|
||||
QuestionAction::Decompression,
|
||||
)? {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
let mut vec = vec![];
|
||||
io::copy(&mut reader, &mut vec)?;
|
||||
|
||||
if let ControlFlow::Continue(files) = execute_decompression(
|
||||
|output_dir| {
|
||||
crate::archive::sevenz::decompress_sevenz(
|
||||
io::Cursor::new(vec),
|
||||
output_dir,
|
||||
options.password,
|
||||
options.quiet,
|
||||
)
|
||||
},
|
||||
options.output_dir,
|
||||
&options.output_file_path,
|
||||
options.question_policy,
|
||||
options.is_output_dir_provided,
|
||||
options.is_smart_unpack,
|
||||
)? {
|
||||
files
|
||||
} else {
|
||||
@ -152,20 +291,78 @@ pub fn decompress_file(
|
||||
// having a final status message is important especially in an accessibility context
|
||||
// as screen readers may not read a commands exit code, making it hard to reason
|
||||
// about whether the command succeeded without such a message
|
||||
info!(
|
||||
accessible,
|
||||
"Successfully decompressed archive in {}.",
|
||||
nice_directory_display(output_dir)
|
||||
);
|
||||
info!(accessible, "Files unpacked: {}", files_unpacked);
|
||||
info_accessible(format!(
|
||||
"Successfully decompressed archive in {}",
|
||||
nice_directory_display(options.output_dir)
|
||||
));
|
||||
info_accessible(format!("Files unpacked: {}", files_unpacked));
|
||||
|
||||
if !input_is_stdin && options.remove {
|
||||
fs::remove_file(options.input_file_path)?;
|
||||
info(format!(
|
||||
"Removed input file {}",
|
||||
nice_directory_display(options.input_file_path)
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn execute_decompression(
|
||||
unpack_fn: impl FnOnce(&Path) -> crate::Result<usize>,
|
||||
output_dir: &Path,
|
||||
output_file_path: &Path,
|
||||
question_policy: QuestionPolicy,
|
||||
is_output_dir_provided: bool,
|
||||
is_smart_unpack: bool,
|
||||
) -> crate::Result<ControlFlow<(), usize>> {
|
||||
if is_smart_unpack {
|
||||
return smart_unpack(unpack_fn, output_dir, output_file_path, question_policy);
|
||||
}
|
||||
|
||||
let target_output_dir = if is_output_dir_provided {
|
||||
output_dir
|
||||
} else {
|
||||
output_file_path
|
||||
};
|
||||
|
||||
unpack(unpack_fn, target_output_dir, question_policy)
|
||||
}
|
||||
|
||||
/// Unpacks an archive creating the output directory, this function will create the output_dir
|
||||
/// directory or replace it if it already exists. The `output_dir` needs to be empty
|
||||
/// - If `output_dir` does not exist OR is a empty directory, it will unpack there
|
||||
/// - If `output_dir` exist OR is a directory not empty, the user will be asked what to do
|
||||
fn unpack(
|
||||
unpack_fn: impl FnOnce(&Path) -> crate::Result<usize>,
|
||||
output_dir: &Path,
|
||||
question_policy: QuestionPolicy,
|
||||
) -> crate::Result<ControlFlow<(), usize>> {
|
||||
let is_valid_output_dir = !output_dir.exists() || (output_dir.is_dir() && output_dir.read_dir()?.next().is_none());
|
||||
|
||||
let output_dir_cleaned = if is_valid_output_dir {
|
||||
output_dir.to_owned()
|
||||
} else {
|
||||
match utils::resolve_path_conflict(output_dir, question_policy, QuestionAction::Decompression)? {
|
||||
Some(path) => path,
|
||||
None => return Ok(ControlFlow::Break(())),
|
||||
}
|
||||
};
|
||||
|
||||
if !output_dir_cleaned.exists() {
|
||||
fs::create_dir(&output_dir_cleaned)?;
|
||||
}
|
||||
|
||||
let files = unpack_fn(&output_dir_cleaned)?;
|
||||
|
||||
Ok(ControlFlow::Continue(files))
|
||||
}
|
||||
|
||||
/// Unpacks an archive with some heuristics
|
||||
/// - If the archive contains only one file, it will be extracted to the `output_dir`
|
||||
/// - If the archive contains multiple files, it will be extracted to a subdirectory of the
|
||||
/// output_dir named after the archive (given by `output_file_path`)
|
||||
///
|
||||
/// Note: This functions assumes that `output_dir` exists
|
||||
fn smart_unpack(
|
||||
unpack_fn: impl FnOnce(&Path) -> crate::Result<usize>,
|
||||
@ -174,18 +371,19 @@ fn smart_unpack(
|
||||
question_policy: QuestionPolicy,
|
||||
) -> crate::Result<ControlFlow<(), usize>> {
|
||||
assert!(output_dir.exists());
|
||||
let temp_dir = tempfile::tempdir_in(output_dir)?;
|
||||
let temp_dir = tempfile::Builder::new().prefix("tmp-ouch-").tempdir_in(output_dir)?;
|
||||
let temp_dir_path = temp_dir.path();
|
||||
info!(
|
||||
accessible,
|
||||
"Created temporary directory {} to hold decompressed elements.",
|
||||
|
||||
info_accessible(format!(
|
||||
"Created temporary directory {} to hold decompressed elements",
|
||||
nice_directory_display(temp_dir_path)
|
||||
);
|
||||
));
|
||||
|
||||
let files = unpack_fn(temp_dir_path)?;
|
||||
|
||||
let root_contains_only_one_element = fs::read_dir(temp_dir_path)?.count() == 1;
|
||||
if root_contains_only_one_element {
|
||||
let root_contains_only_one_element = fs::read_dir(temp_dir_path)?.take(2).count() == 1;
|
||||
|
||||
let (previous_path, mut new_path) = if root_contains_only_one_element {
|
||||
// Only one file in the root directory, so we can just move it to the output directory
|
||||
let file = fs::read_dir(temp_dir_path)?.next().expect("item exists")?;
|
||||
let file_path = file.path();
|
||||
@ -193,32 +391,26 @@ fn smart_unpack(
|
||||
.file_name()
|
||||
.expect("Should be safe because paths in archives should not end with '..'");
|
||||
let correct_path = output_dir.join(file_name);
|
||||
// Before moving, need to check if a file with the same name already exists
|
||||
if !utils::clear_path(&correct_path, question_policy)? {
|
||||
return Ok(ControlFlow::Break(()));
|
||||
}
|
||||
fs::rename(&file_path, &correct_path)?;
|
||||
info!(
|
||||
accessible,
|
||||
"Successfully moved {} to {}.",
|
||||
nice_directory_display(&file_path),
|
||||
nice_directory_display(&correct_path)
|
||||
);
|
||||
|
||||
(file_path, correct_path)
|
||||
} else {
|
||||
// Multiple files in the root directory, so:
|
||||
// Rename the temporary directory to the archive name, which is output_file_path
|
||||
// One case to handle tough is we need to check if a file with the same name already exists
|
||||
if !utils::clear_path(output_file_path, question_policy)? {
|
||||
return Ok(ControlFlow::Break(()));
|
||||
}
|
||||
fs::rename(temp_dir_path, output_file_path)?;
|
||||
info!(
|
||||
accessible,
|
||||
"Successfully moved {} to {}.",
|
||||
nice_directory_display(temp_dir_path),
|
||||
nice_directory_display(output_file_path)
|
||||
);
|
||||
}
|
||||
(temp_dir_path.to_owned(), output_file_path.to_owned())
|
||||
};
|
||||
|
||||
// Before moving, need to check if a file with the same name already exists
|
||||
// If it does, need to ask the user what to do
|
||||
new_path = match utils::resolve_path_conflict(&new_path, question_policy, QuestionAction::Decompression)? {
|
||||
Some(path) => path,
|
||||
None => return Ok(ControlFlow::Break(())),
|
||||
};
|
||||
|
||||
// Rename the temporary directory to the archive name, which is output_file_path
|
||||
fs::rename(&previous_path, &new_path)?;
|
||||
info_accessible(format!(
|
||||
"Successfully moved \"{}\" to \"{}\"",
|
||||
nice_directory_display(&previous_path),
|
||||
nice_directory_display(&new_path),
|
||||
));
|
||||
|
||||
Ok(ControlFlow::Continue(files))
|
||||
}
|
||||
|
@ -6,20 +6,22 @@ use std::{
|
||||
use fs_err as fs;
|
||||
|
||||
use crate::{
|
||||
archive,
|
||||
commands::warn_user_about_loading_zip_in_memory,
|
||||
extension::CompressionFormat::{self, *},
|
||||
list::{self, FileInArchive, ListOptions},
|
||||
utils::user_wants_to_continue,
|
||||
utils::{io::lock_and_flush_output_stdio, user_wants_to_continue},
|
||||
QuestionAction, QuestionPolicy, BUFFER_CAPACITY,
|
||||
};
|
||||
|
||||
// File at input_file_path is opened for reading, example: "archive.tar.gz"
|
||||
// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
|
||||
/// File at input_file_path is opened for reading, example: "archive.tar.gz"
|
||||
/// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
|
||||
pub fn list_archive_contents(
|
||||
archive_path: &Path,
|
||||
formats: Vec<CompressionFormat>,
|
||||
list_options: ListOptions,
|
||||
question_policy: QuestionPolicy,
|
||||
password: Option<&[u8]>,
|
||||
) -> crate::Result<()> {
|
||||
let reader = fs::File::open(archive_path)?;
|
||||
|
||||
@ -32,9 +34,8 @@ pub fn list_archive_contents(
|
||||
// Any other Zip decompression done can take up the whole RAM and freeze ouch.
|
||||
if let &[Zip] = formats.as_slice() {
|
||||
let zip_archive = zip::ZipArchive::new(reader)?;
|
||||
let files = crate::archive::zip::list_archive(zip_archive);
|
||||
let files = crate::archive::zip::list_archive(zip_archive, password);
|
||||
list::list_files(archive_path, files, list_options)?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
@ -44,29 +45,46 @@ pub fn list_archive_contents(
|
||||
|
||||
// Grab previous decoder and wrap it inside of a new one
|
||||
let chain_reader_decoder =
|
||||
|format: &CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
|
||||
|format: CompressionFormat, decoder: Box<dyn Read + Send>| -> crate::Result<Box<dyn Read + Send>> {
|
||||
let decoder: Box<dyn Read + Send> = match format {
|
||||
Gzip => Box::new(flate2::read::GzDecoder::new(decoder)),
|
||||
Bzip => Box::new(bzip2::read::BzDecoder::new(decoder)),
|
||||
Lz4 => Box::new(lzzzz::lz4f::ReadDecompressor::new(decoder)?),
|
||||
Bzip3 => {
|
||||
#[cfg(not(feature = "bzip3"))]
|
||||
return Err(archive::bzip3_stub::no_support());
|
||||
|
||||
#[cfg(feature = "bzip3")]
|
||||
Box::new(bzip3::read::Bz3Decoder::new(decoder).unwrap())
|
||||
}
|
||||
Lz4 => Box::new(lz4_flex::frame::FrameDecoder::new(decoder)),
|
||||
Lzma => Box::new(xz2::read::XzDecoder::new(decoder)),
|
||||
Snappy => Box::new(snap::read::FrameDecoder::new(decoder)),
|
||||
Zstd => Box::new(zstd::stream::Decoder::new(decoder)?),
|
||||
Tar | Zip => unreachable!(),
|
||||
Brotli => Box::new(brotli::Decompressor::new(decoder, BUFFER_CAPACITY)),
|
||||
Tar | Zip | Rar | SevenZip => unreachable!("should be treated by caller"),
|
||||
};
|
||||
Ok(decoder)
|
||||
};
|
||||
|
||||
for format in formats.iter().skip(1).rev() {
|
||||
let mut misplaced_archive_format = None;
|
||||
for &format in formats.iter().skip(1).rev() {
|
||||
if format.archive_format() {
|
||||
misplaced_archive_format = Some(format);
|
||||
break;
|
||||
}
|
||||
reader = chain_reader_decoder(format, reader)?;
|
||||
}
|
||||
|
||||
let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match formats[0] {
|
||||
let archive_format = misplaced_archive_format.unwrap_or(formats[0]);
|
||||
let files: Box<dyn Iterator<Item = crate::Result<FileInArchive>>> = match archive_format {
|
||||
Tar => Box::new(crate::archive::tar::list_archive(tar::Archive::new(reader))),
|
||||
Zip => {
|
||||
if formats.len() > 1 {
|
||||
warn_user_about_loading_zip_in_memory();
|
||||
// Locking necessary to guarantee that warning and question
|
||||
// messages stay adjacent
|
||||
let _locks = lock_and_flush_output_stdio();
|
||||
|
||||
warn_user_about_loading_zip_in_memory();
|
||||
if !user_wants_to_continue(archive_path, question_policy, QuestionAction::Decompression)? {
|
||||
return Ok(());
|
||||
}
|
||||
@ -76,12 +94,43 @@ pub fn list_archive_contents(
|
||||
io::copy(&mut reader, &mut vec)?;
|
||||
let zip_archive = zip::ZipArchive::new(io::Cursor::new(vec))?;
|
||||
|
||||
Box::new(crate::archive::zip::list_archive(zip_archive))
|
||||
Box::new(crate::archive::zip::list_archive(zip_archive, password))
|
||||
}
|
||||
Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
|
||||
panic!("Not an archive! This should never happen, if it does, something is wrong with `CompressionFormat::is_archive()`. Please report this error!");
|
||||
#[cfg(feature = "unrar")]
|
||||
Rar => {
|
||||
if formats.len() > 1 {
|
||||
let mut temp_file = tempfile::NamedTempFile::new()?;
|
||||
io::copy(&mut reader, &mut temp_file)?;
|
||||
Box::new(crate::archive::rar::list_archive(temp_file.path(), password)?)
|
||||
} else {
|
||||
Box::new(crate::archive::rar::list_archive(archive_path, password)?)
|
||||
}
|
||||
}
|
||||
#[cfg(not(feature = "unrar"))]
|
||||
Rar => {
|
||||
return Err(crate::archive::rar_stub::no_support());
|
||||
}
|
||||
SevenZip => {
|
||||
if formats.len() > 1 {
|
||||
// Locking necessary to guarantee that warning and question
|
||||
// messages stay adjacent
|
||||
let _locks = lock_and_flush_output_stdio();
|
||||
|
||||
warn_user_about_loading_zip_in_memory();
|
||||
if !user_wants_to_continue(archive_path, question_policy, QuestionAction::Decompression)? {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
let mut vec = vec![];
|
||||
io::copy(&mut reader, &mut vec)?;
|
||||
|
||||
Box::new(archive::sevenz::list_archive(io::Cursor::new(vec), password)?)
|
||||
}
|
||||
Gzip | Bzip | Bzip3 | Lz4 | Lzma | Snappy | Zstd | Brotli => {
|
||||
unreachable!("Not an archive, should be validated before calling this function.");
|
||||
}
|
||||
};
|
||||
list::list_files(archive_path, files, list_options)?;
|
||||
Ok(())
|
||||
|
||||
list::list_files(archive_path, files, list_options)
|
||||
}
|
||||
|
@ -6,6 +6,8 @@ mod list;
|
||||
|
||||
use std::{ops::ControlFlow, path::PathBuf};
|
||||
|
||||
use bstr::ByteSlice;
|
||||
use decompress::DecompressOptions;
|
||||
use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator};
|
||||
use utils::colors;
|
||||
|
||||
@ -14,21 +16,33 @@ use crate::{
|
||||
cli::Subcommand,
|
||||
commands::{compress::compress_files, decompress::decompress_file, list::list_archive_contents},
|
||||
error::{Error, FinalError},
|
||||
extension::{self, parse_format},
|
||||
info,
|
||||
extension::{self, parse_format_flag},
|
||||
list::ListOptions,
|
||||
utils::{self, to_utf, EscapedPathDisplay, FileVisibilityPolicy},
|
||||
warning, CliArgs, QuestionPolicy,
|
||||
utils::{
|
||||
self, colors::*, is_path_stdin, logger::info_accessible, path_to_str, EscapedPathDisplay, FileVisibilityPolicy,
|
||||
QuestionAction,
|
||||
},
|
||||
CliArgs, QuestionPolicy,
|
||||
};
|
||||
|
||||
/// Warn the user that (de)compressing this .zip archive might freeze their system.
|
||||
fn warn_user_about_loading_zip_in_memory() {
|
||||
const ZIP_IN_MEMORY_LIMITATION_WARNING: &str = "\n\
|
||||
\tThe format '.zip' is limited and cannot be (de)compressed using encoding streams.\n\
|
||||
\tWhen using '.zip' with other formats, (de)compression must be done in-memory\n\
|
||||
\tCareful, you might run out of RAM if the archive is too large!";
|
||||
const ZIP_IN_MEMORY_LIMITATION_WARNING: &str = "\n \
|
||||
The format '.zip' is limited by design and cannot be (de)compressed with encoding streams.\n \
|
||||
When chaining '.zip' with other formats, all (de)compression needs to be done in-memory\n \
|
||||
Careful, you might run out of RAM if the archive is too large!";
|
||||
|
||||
warning!("{}", ZIP_IN_MEMORY_LIMITATION_WARNING);
|
||||
eprintln!("{}[WARNING]{}: {ZIP_IN_MEMORY_LIMITATION_WARNING}", *ORANGE, *RESET);
|
||||
}
|
||||
|
||||
/// Warn the user that (de)compressing this .7z archive might freeze their system.
|
||||
fn warn_user_about_loading_sevenz_in_memory() {
|
||||
const SEVENZ_IN_MEMORY_LIMITATION_WARNING: &str = "\n \
|
||||
The format '.7z' is limited by design and cannot be (de)compressed with encoding streams.\n \
|
||||
When chaining '.7z' with other formats, all (de)compression needs to be done in-memory\n \
|
||||
Careful, you might run out of RAM if the archive is too large!";
|
||||
|
||||
eprintln!("{}[WARNING]{}: {SEVENZ_IN_MEMORY_LIMITATION_WARNING}", *ORANGE, *RESET);
|
||||
}
|
||||
|
||||
/// This function checks what command needs to be run and performs A LOT of ahead-of-time checks
|
||||
@ -40,6 +54,13 @@ pub fn run(
|
||||
question_policy: QuestionPolicy,
|
||||
file_visibility_policy: FileVisibilityPolicy,
|
||||
) -> crate::Result<()> {
|
||||
if let Some(threads) = args.threads {
|
||||
rayon::ThreadPoolBuilder::new()
|
||||
.num_threads(threads)
|
||||
.build_global()
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
match args.cmd {
|
||||
Subcommand::Compress {
|
||||
files,
|
||||
@ -47,6 +68,7 @@ pub fn run(
|
||||
level,
|
||||
fast,
|
||||
slow,
|
||||
follow_symlinks,
|
||||
} => {
|
||||
// After cleaning, if there are no input files left, exit
|
||||
if files.is_empty() {
|
||||
@ -56,10 +78,10 @@ pub fn run(
|
||||
// Formats from path extension, like "file.tar.gz.xz" -> vec![Tar, Gzip, Lzma]
|
||||
let (formats_from_flag, formats) = match args.format {
|
||||
Some(formats) => {
|
||||
let parsed_formats = parse_format(&formats)?;
|
||||
let parsed_formats = parse_format_flag(&formats)?;
|
||||
(Some(formats), parsed_formats)
|
||||
}
|
||||
None => (None, extension::extensions_from_path(&output_path)),
|
||||
None => (None, extension::extensions_from_path(&output_path)?),
|
||||
};
|
||||
|
||||
check::check_invalid_compression_with_non_archive_format(
|
||||
@ -70,10 +92,11 @@ pub fn run(
|
||||
)?;
|
||||
check::check_archive_formats_position(&formats, &output_path)?;
|
||||
|
||||
let output_file = match utils::ask_to_create_file(&output_path, question_policy)? {
|
||||
Some(writer) => writer,
|
||||
None => return Ok(()),
|
||||
};
|
||||
let output_file =
|
||||
match utils::ask_to_create_file(&output_path, question_policy, QuestionAction::Compression)? {
|
||||
Some(writer) => writer,
|
||||
None => return Ok(()),
|
||||
};
|
||||
|
||||
let level = if fast {
|
||||
Some(1) // Lowest level of compression
|
||||
@ -89,6 +112,7 @@ pub fn run(
|
||||
output_file,
|
||||
&output_path,
|
||||
args.quiet,
|
||||
follow_symlinks,
|
||||
question_policy,
|
||||
file_visibility_policy,
|
||||
level,
|
||||
@ -99,7 +123,7 @@ pub fn run(
|
||||
// having a final status message is important especially in an accessibility context
|
||||
// as screen readers may not read a commands exit code, making it hard to reason
|
||||
// about whether the command succeeded without such a message
|
||||
info!(accessible, "Successfully compressed '{}'.", to_utf(&output_path));
|
||||
info_accessible(format!("Successfully compressed '{}'", path_to_str(&output_path)));
|
||||
} else {
|
||||
// If Ok(false) or Err() occurred, delete incomplete file at `output_path`
|
||||
//
|
||||
@ -120,15 +144,21 @@ pub fn run(
|
||||
}
|
||||
}
|
||||
|
||||
compress_result?;
|
||||
compress_result.map(|_| ())
|
||||
}
|
||||
Subcommand::Decompress { files, output_dir } => {
|
||||
Subcommand::Decompress {
|
||||
files,
|
||||
output_dir,
|
||||
remove,
|
||||
no_smart_unpack,
|
||||
} => {
|
||||
let mut output_paths = vec![];
|
||||
let mut formats = vec![];
|
||||
|
||||
if let Some(format) = args.format {
|
||||
let format = parse_format(&format)?;
|
||||
let format = parse_format_flag(&format)?;
|
||||
for path in files.iter() {
|
||||
// TODO: use Error::Custom
|
||||
let file_name = path.file_name().ok_or_else(|| Error::NotFound {
|
||||
error_title: format!("{} does not have a file name", EscapedPathDisplay::new(path)),
|
||||
})?;
|
||||
@ -137,19 +167,22 @@ pub fn run(
|
||||
}
|
||||
} else {
|
||||
for path in files.iter() {
|
||||
let (path, mut file_formats) = extension::separate_known_extensions_from_name(path);
|
||||
let (pathbase, mut file_formats) = extension::separate_known_extensions_from_name(path)?;
|
||||
|
||||
if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
output_paths.push(path);
|
||||
output_paths.push(pathbase);
|
||||
formats.push(file_formats);
|
||||
}
|
||||
}
|
||||
|
||||
check::check_missing_formats_when_decompressing(&files, &formats)?;
|
||||
|
||||
let is_output_dir_provided = output_dir.is_some();
|
||||
let is_smart_unpack = !is_output_dir_provided && !no_smart_unpack;
|
||||
|
||||
// The directory that will contain the output files
|
||||
// We default to the current directory if the user didn't specify an output directory with --dir
|
||||
let output_dir = if let Some(dir) = output_dir {
|
||||
@ -164,28 +197,39 @@ pub fn run(
|
||||
.zip(formats)
|
||||
.zip(output_paths)
|
||||
.try_for_each(|((input_path, formats), file_name)| {
|
||||
let output_file_path = output_dir.join(file_name); // Path used by single file format archives
|
||||
decompress_file(
|
||||
input_path,
|
||||
// Path used by single file format archives
|
||||
let output_file_path = if is_path_stdin(file_name) {
|
||||
output_dir.join("stdin-output")
|
||||
} else {
|
||||
output_dir.join(file_name)
|
||||
};
|
||||
decompress_file(DecompressOptions {
|
||||
input_file_path: input_path,
|
||||
formats,
|
||||
&output_dir,
|
||||
is_output_dir_provided,
|
||||
output_dir: &output_dir,
|
||||
output_file_path,
|
||||
is_smart_unpack,
|
||||
question_policy,
|
||||
args.quiet,
|
||||
)
|
||||
})?;
|
||||
quiet: args.quiet,
|
||||
password: args.password.as_deref().map(|str| {
|
||||
<[u8] as ByteSlice>::from_os_str(str).expect("convert password to bytes failed")
|
||||
}),
|
||||
remove,
|
||||
})
|
||||
})
|
||||
}
|
||||
Subcommand::List { archives: files, tree } => {
|
||||
let mut formats = vec![];
|
||||
|
||||
if let Some(format) = args.format {
|
||||
let format = parse_format(&format)?;
|
||||
let format = parse_format_flag(&format)?;
|
||||
for _ in 0..files.len() {
|
||||
formats.push(format.clone());
|
||||
}
|
||||
} else {
|
||||
for path in files.iter() {
|
||||
let mut file_formats = extension::extensions_from_path(path);
|
||||
let mut file_formats = extension::extensions_from_path(path)?;
|
||||
|
||||
if let ControlFlow::Break(_) = check::check_mime_type(path, &mut file_formats, question_policy)? {
|
||||
return Ok(());
|
||||
@ -205,9 +249,18 @@ pub fn run(
|
||||
println!();
|
||||
}
|
||||
let formats = extension::flatten_compression_formats(&formats);
|
||||
list_archive_contents(archive_path, formats, list_options, question_policy)?;
|
||||
list_archive_contents(
|
||||
archive_path,
|
||||
formats,
|
||||
list_options,
|
||||
question_policy,
|
||||
args.password
|
||||
.as_deref()
|
||||
.map(|str| <[u8] as ByteSlice>::from_os_str(str).expect("convert password to bytes failed")),
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
132
src/error.rs
132
src/error.rs
@ -4,15 +4,21 @@
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
ffi::OsString,
|
||||
fmt::{self, Display},
|
||||
io,
|
||||
};
|
||||
|
||||
use crate::{accessible::is_running_in_accessible_mode, utils::colors::*};
|
||||
use crate::{
|
||||
accessible::is_running_in_accessible_mode,
|
||||
extension::{PRETTY_SUPPORTED_ALIASES, PRETTY_SUPPORTED_EXTENSIONS},
|
||||
utils::os_str_to_str,
|
||||
};
|
||||
|
||||
/// All errors that can be generated by `ouch`
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Error {
|
||||
/// Not every IoError, some of them get filtered by `From<io::Error>` into other variants
|
||||
/// An IoError that doesn't have a dedicated error variant
|
||||
IoError { reason: String },
|
||||
/// From lzzzz::lz4f::Error
|
||||
Lz4Error { reason: String },
|
||||
@ -26,14 +32,21 @@ pub enum Error {
|
||||
PermissionDenied { error_title: String },
|
||||
/// From zip::result::ZipError::UnsupportedArchive
|
||||
UnsupportedZipArchive(&'static str),
|
||||
/// TO BE REMOVED
|
||||
/// We don't support compressing the root folder.
|
||||
CompressingRootFolder,
|
||||
/// Specialized walkdir's io::Error wrapper with additional information on the error
|
||||
WalkdirError { reason: String },
|
||||
/// Custom and unique errors are reported in this variant
|
||||
Custom { reason: FinalError },
|
||||
/// Invalid format passed to `--format`
|
||||
InvalidFormat { reason: String },
|
||||
InvalidFormatFlag { text: OsString, reason: String },
|
||||
/// From sevenz_rust::Error
|
||||
SevenzipError { reason: String },
|
||||
/// Recognised but unsupported format
|
||||
// currently only RAR when built without the `unrar` feature
|
||||
UnsupportedFormat { reason: String },
|
||||
/// Invalid password provided
|
||||
InvalidPassword { reason: String },
|
||||
}
|
||||
|
||||
/// Alias to std's Result with ouch's Error
|
||||
@ -55,6 +68,8 @@ pub struct FinalError {
|
||||
|
||||
impl Display for FinalError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
use crate::utils::colors::*;
|
||||
|
||||
// Title
|
||||
//
|
||||
// When in ACCESSIBLE mode, the square brackets are suppressed
|
||||
@ -115,59 +130,85 @@ impl FinalError {
|
||||
self.hints.push(hint.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Adds all supported formats as hints.
|
||||
///
|
||||
/// This is what it looks like:
|
||||
/// ```
|
||||
/// hint: Supported extensions are: tar, zip, bz, bz2, gz, lz4, xz, lzma, sz, zst
|
||||
/// hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
/// ```
|
||||
pub fn hint_all_supported_formats(self) -> Self {
|
||||
self.hint(format!("Supported extensions are: {}", PRETTY_SUPPORTED_EXTENSIONS))
|
||||
.hint(format!("Supported aliases are: {}", PRETTY_SUPPORTED_ALIASES))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let err = match self {
|
||||
Error::WalkdirError { reason } => FinalError::with_title(reason.to_string()),
|
||||
Error::NotFound { error_title } => FinalError::with_title(error_title.to_string()).detail("File not found"),
|
||||
impl From<Error> for FinalError {
|
||||
fn from(err: Error) -> Self {
|
||||
match err {
|
||||
Error::WalkdirError { reason } => FinalError::with_title(reason),
|
||||
Error::NotFound { error_title } => FinalError::with_title(error_title).detail("File not found"),
|
||||
Error::CompressingRootFolder => {
|
||||
FinalError::with_title("It seems you're trying to compress the root folder.")
|
||||
.detail("This is unadvisable since ouch does compressions in-memory.")
|
||||
.hint("Use a more appropriate tool for this, such as rsync.")
|
||||
}
|
||||
Error::IoError { reason } => FinalError::with_title(reason.to_string()),
|
||||
Error::Lz4Error { reason } => FinalError::with_title(reason.to_string()),
|
||||
Error::AlreadyExists { error_title } => {
|
||||
FinalError::with_title(error_title.to_string()).detail("File already exists")
|
||||
Error::IoError { reason } => FinalError::with_title(reason),
|
||||
Error::Lz4Error { reason } => FinalError::with_title(reason),
|
||||
Error::AlreadyExists { error_title } => FinalError::with_title(error_title).detail("File already exists"),
|
||||
Error::InvalidZipArchive(reason) => FinalError::with_title("Invalid zip archive").detail(reason),
|
||||
Error::PermissionDenied { error_title } => FinalError::with_title(error_title).detail("Permission denied"),
|
||||
Error::UnsupportedZipArchive(reason) => FinalError::with_title("Unsupported zip archive").detail(reason),
|
||||
Error::InvalidFormatFlag { reason, text } => {
|
||||
FinalError::with_title(format!("Failed to parse `--format {}`", os_str_to_str(&text)))
|
||||
.detail(reason)
|
||||
.hint_all_supported_formats()
|
||||
.hint("")
|
||||
.hint("Examples:")
|
||||
.hint(" --format tar")
|
||||
.hint(" --format gz")
|
||||
.hint(" --format tar.gz")
|
||||
}
|
||||
Error::InvalidZipArchive(reason) => FinalError::with_title("Invalid zip archive").detail(*reason),
|
||||
Error::PermissionDenied { error_title } => {
|
||||
FinalError::with_title(error_title.to_string()).detail("Permission denied")
|
||||
}
|
||||
Error::UnsupportedZipArchive(reason) => FinalError::with_title("Unsupported zip archive").detail(*reason),
|
||||
Error::InvalidFormat { reason } => FinalError::with_title("Invalid archive format").detail(reason.clone()),
|
||||
Error::Custom { reason } => reason.clone(),
|
||||
};
|
||||
Error::SevenzipError { reason } => FinalError::with_title("7z error").detail(reason),
|
||||
Error::UnsupportedFormat { reason } => {
|
||||
FinalError::with_title("Recognised but unsupported format").detail(reason.clone())
|
||||
}
|
||||
Error::InvalidPassword { reason } => FinalError::with_title("Invalid password").detail(reason.clone()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let err = FinalError::from(self.clone());
|
||||
write!(f, "{err}")
|
||||
}
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for Error {
|
||||
fn from(err: std::io::Error) -> Self {
|
||||
let error_title = err.to_string();
|
||||
|
||||
match err.kind() {
|
||||
std::io::ErrorKind::NotFound => Self::NotFound {
|
||||
error_title: err.to_string(),
|
||||
},
|
||||
std::io::ErrorKind::PermissionDenied => Self::PermissionDenied {
|
||||
error_title: err.to_string(),
|
||||
},
|
||||
std::io::ErrorKind::AlreadyExists => Self::AlreadyExists {
|
||||
error_title: err.to_string(),
|
||||
},
|
||||
_other => Self::IoError {
|
||||
reason: err.to_string(),
|
||||
},
|
||||
io::ErrorKind::NotFound => Self::NotFound { error_title },
|
||||
io::ErrorKind::PermissionDenied => Self::PermissionDenied { error_title },
|
||||
io::ErrorKind::AlreadyExists => Self::AlreadyExists { error_title },
|
||||
_other => Self::IoError { reason: error_title },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lzzzz::lz4f::Error> for Error {
|
||||
fn from(err: lzzzz::lz4f::Error) -> Self {
|
||||
Self::Lz4Error {
|
||||
reason: err.to_string(),
|
||||
#[cfg(feature = "bzip3")]
|
||||
impl From<bzip3::Error> for Error {
|
||||
fn from(err: bzip3::Error) -> Self {
|
||||
use bzip3::Error as Bz3Error;
|
||||
match err {
|
||||
Bz3Error::Io(inner) => inner.into(),
|
||||
Bz3Error::BlockSize | Bz3Error::ProcessBlock(_) | Bz3Error::InvalidSignature => {
|
||||
FinalError::with_title("bzip3 error").detail(err.to_string()).into()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -186,6 +227,23 @@ impl From<zip::result::ZipError> for Error {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "unrar")]
|
||||
impl From<unrar::error::UnrarError> for Error {
|
||||
fn from(err: unrar::error::UnrarError) -> Self {
|
||||
Self::Custom {
|
||||
reason: FinalError::with_title("Unexpected error in rar archive").detail(format!("{:?}", err.code)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<sevenz_rust2::Error> for Error {
|
||||
fn from(err: sevenz_rust2::Error) -> Self {
|
||||
Self::SevenzipError {
|
||||
reason: err.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ignore::Error> for Error {
|
||||
fn from(err: ignore::Error) -> Self {
|
||||
Self::WalkdirError {
|
||||
|
239
src/extension.rs
239
src/extension.rs
@ -3,12 +3,45 @@
|
||||
use std::{ffi::OsStr, fmt, path::Path};
|
||||
|
||||
use bstr::ByteSlice;
|
||||
use CompressionFormat::*;
|
||||
|
||||
use self::CompressionFormat::*;
|
||||
use crate::{error::Error, warning};
|
||||
use crate::{
|
||||
error::{Error, FinalError, Result},
|
||||
utils::logger::warning,
|
||||
};
|
||||
|
||||
pub const SUPPORTED_EXTENSIONS: &[&str] = &[
|
||||
"tar",
|
||||
"zip",
|
||||
"bz",
|
||||
"bz2",
|
||||
"gz",
|
||||
"lz4",
|
||||
"xz",
|
||||
"lzma",
|
||||
"sz",
|
||||
"zst",
|
||||
#[cfg(feature = "unrar")]
|
||||
"rar",
|
||||
"7z",
|
||||
"br",
|
||||
];
|
||||
|
||||
pub const SUPPORTED_ALIASES: &[&str] = &["tgz", "tbz", "tlz4", "txz", "tzlma", "tsz", "tzst"];
|
||||
|
||||
#[cfg(not(feature = "unrar"))]
|
||||
pub const PRETTY_SUPPORTED_EXTENSIONS: &str = "tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z";
|
||||
#[cfg(feature = "unrar")]
|
||||
pub const PRETTY_SUPPORTED_EXTENSIONS: &str = "tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z";
|
||||
|
||||
pub const PRETTY_SUPPORTED_ALIASES: &str = "tgz, tbz, tlz4, txz, tzlma, tsz, tzst";
|
||||
|
||||
/// A wrapper around `CompressionFormat` that allows combinations like `tgz`
|
||||
#[derive(Debug, Clone, Eq)]
|
||||
#[derive(Debug, Clone)]
|
||||
// Keep `PartialEq` only for testing because two formats are the same even if
|
||||
// their `display_text` does not match (beware of aliases)
|
||||
#[cfg_attr(test, derive(PartialEq))]
|
||||
// Should only be built with constructors
|
||||
#[non_exhaustive]
|
||||
pub struct Extension {
|
||||
/// One extension like "tgz" can be made of multiple CompressionFormats ([Tar, Gz])
|
||||
@ -17,13 +50,6 @@ pub struct Extension {
|
||||
display_text: String,
|
||||
}
|
||||
|
||||
// The display_text should be ignored when comparing extensions
|
||||
impl PartialEq for Extension {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.compression_formats == other.compression_formats
|
||||
}
|
||||
}
|
||||
|
||||
impl Extension {
|
||||
/// # Panics:
|
||||
/// Will panic if `formats` is empty
|
||||
@ -37,8 +63,8 @@ impl Extension {
|
||||
|
||||
/// Checks if the first format in `compression_formats` is an archive
|
||||
pub fn is_archive(&self) -> bool {
|
||||
// Safety: we check that `compression_formats` is not empty in `Self::new`
|
||||
self.compression_formats[0].is_archive_format()
|
||||
// Index Safety: we check that `compression_formats` is not empty in `Self::new`
|
||||
self.compression_formats[0].archive_format()
|
||||
}
|
||||
}
|
||||
|
||||
@ -55,85 +81,108 @@ pub enum CompressionFormat {
|
||||
Gzip,
|
||||
/// .bz .bz2
|
||||
Bzip,
|
||||
/// .bz3
|
||||
Bzip3,
|
||||
/// .lz4
|
||||
Lz4,
|
||||
/// .xz .lzma
|
||||
Lzma,
|
||||
/// .sz
|
||||
Snappy,
|
||||
/// tar, tgz, tbz, tbz2, txz, tlz4, tlzma, tsz, tzst
|
||||
/// tar, tgz, tbz, tbz2, tbz3, txz, tlz4, tlzma, tsz, tzst
|
||||
Tar,
|
||||
/// .zst
|
||||
Zstd,
|
||||
/// .zip
|
||||
Zip,
|
||||
// even if built without RAR support, we still want to recognise the format
|
||||
/// .rar
|
||||
Rar,
|
||||
/// .7z
|
||||
SevenZip,
|
||||
/// .br
|
||||
Brotli,
|
||||
}
|
||||
|
||||
impl CompressionFormat {
|
||||
/// Currently supported archive formats are .tar (and aliases to it) and .zip
|
||||
fn is_archive_format(&self) -> bool {
|
||||
pub fn archive_format(&self) -> bool {
|
||||
// Keep this match like that without a wildcard `_` so we don't forget to update it
|
||||
match self {
|
||||
Tar | Zip => true,
|
||||
Tar | Zip | Rar | SevenZip => true,
|
||||
Gzip => false,
|
||||
Bzip => false,
|
||||
Bzip3 => false,
|
||||
Lz4 => false,
|
||||
Lzma => false,
|
||||
Snappy => false,
|
||||
Zstd => false,
|
||||
Brotli => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub const SUPPORTED_EXTENSIONS: &[&str] = &[
|
||||
"tar", "tgz", "tbz", "tlz4", "txz", "tzlma", "tsz", "tzst", "zip", "bz", "bz2", "gz", "lz4", "xz", "lzma", "sz",
|
||||
"zst",
|
||||
];
|
||||
|
||||
fn to_extension(ext: &[u8]) -> Option<Extension> {
|
||||
Some(Extension::new(
|
||||
match ext {
|
||||
b"tar" => &[Tar],
|
||||
b"tgz" => &[Tar, Gzip],
|
||||
b"tbz" | b"tbz2" => &[Tar, Bzip],
|
||||
b"tbz3" => &[Tar, Bzip3],
|
||||
b"tlz4" => &[Tar, Lz4],
|
||||
b"txz" | b"tlzma" => &[Tar, Lzma],
|
||||
b"tsz" => &[Tar, Snappy],
|
||||
b"tzst" => &[Tar, Zstd],
|
||||
b"zip" => &[Zip],
|
||||
b"bz" | b"bz2" => &[Bzip],
|
||||
b"bz3" => &[Bzip3],
|
||||
b"gz" => &[Gzip],
|
||||
b"lz4" => &[Lz4],
|
||||
b"xz" | b"lzma" => &[Lzma],
|
||||
b"sz" => &[Snappy],
|
||||
b"zst" => &[Zstd],
|
||||
b"rar" => &[Rar],
|
||||
b"7z" => &[SevenZip],
|
||||
b"br" => &[Brotli],
|
||||
_ => return None,
|
||||
},
|
||||
ext.to_str_lossy(),
|
||||
))
|
||||
}
|
||||
|
||||
fn split_extension(name: &mut &[u8]) -> Option<Extension> {
|
||||
fn split_extension_at_end(name: &[u8]) -> Option<(&[u8], Extension)> {
|
||||
let (new_name, ext) = name.rsplit_once_str(b".")?;
|
||||
if matches!(new_name, b"" | b"." | b"..") {
|
||||
return None;
|
||||
}
|
||||
let ext = to_extension(ext)?;
|
||||
*name = new_name;
|
||||
Some(ext)
|
||||
Some((new_name, ext))
|
||||
}
|
||||
|
||||
pub fn parse_format(fmt: &OsStr) -> crate::Result<Vec<Extension>> {
|
||||
let fmt = <[u8] as ByteSlice>::from_os_str(fmt).ok_or_else(|| Error::InvalidFormat {
|
||||
reason: "Invalid UTF-8".into(),
|
||||
pub fn parse_format_flag(input: &OsStr) -> crate::Result<Vec<Extension>> {
|
||||
let format = input.as_encoded_bytes();
|
||||
|
||||
let format = std::str::from_utf8(format).map_err(|_| Error::InvalidFormatFlag {
|
||||
text: input.to_owned(),
|
||||
reason: "Invalid UTF-8.".to_string(),
|
||||
})?;
|
||||
|
||||
let mut extensions = Vec::new();
|
||||
for extension in fmt.split_str(b".") {
|
||||
let extension = to_extension(extension).ok_or_else(|| Error::InvalidFormat {
|
||||
reason: format!("Unsupported extension: {}", extension.to_str_lossy()),
|
||||
})?;
|
||||
extensions.push(extension);
|
||||
let extensions: Vec<Extension> = format
|
||||
.split('.')
|
||||
.filter(|extension| !extension.is_empty())
|
||||
.map(|extension| {
|
||||
to_extension(extension.as_bytes()).ok_or_else(|| Error::InvalidFormatFlag {
|
||||
text: input.to_owned(),
|
||||
reason: format!("Unsupported extension '{}'", extension),
|
||||
})
|
||||
})
|
||||
.collect::<crate::Result<_>>()?;
|
||||
|
||||
if extensions.is_empty() {
|
||||
return Err(Error::InvalidFormatFlag {
|
||||
text: input.to_owned(),
|
||||
reason: "Parsing got an empty list of extensions.".to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(extensions)
|
||||
@ -141,36 +190,60 @@ pub fn parse_format(fmt: &OsStr) -> crate::Result<Vec<Extension>> {
|
||||
|
||||
/// Extracts extensions from a path.
|
||||
///
|
||||
/// Returns both the remaining path and the list of extension objects
|
||||
pub fn separate_known_extensions_from_name(path: &Path) -> (&Path, Vec<Extension>) {
|
||||
/// Returns both the remaining path and the list of extension objects.
|
||||
pub fn separate_known_extensions_from_name(path: &Path) -> Result<(&Path, Vec<Extension>)> {
|
||||
let mut extensions = vec![];
|
||||
|
||||
let Some(mut name) = path.file_name().and_then(<[u8] as ByteSlice>::from_os_str) else {
|
||||
return (path, extensions);
|
||||
return Ok((path, extensions));
|
||||
};
|
||||
|
||||
// While there is known extensions at the tail, grab them
|
||||
while let Some(extension) = split_extension(&mut name) {
|
||||
while let Some((new_name, extension)) = split_extension_at_end(name) {
|
||||
name = new_name;
|
||||
extensions.insert(0, extension);
|
||||
if extensions[0].is_archive() {
|
||||
if let Some((_, misplaced_extension)) = split_extension_at_end(name) {
|
||||
let mut error = FinalError::with_title("File extensions are invalid for operation").detail(format!(
|
||||
"The archive extension '.{}' can only be placed at the start of the extension list",
|
||||
extensions[0].display_text,
|
||||
));
|
||||
|
||||
if misplaced_extension.compression_formats == extensions[0].compression_formats {
|
||||
error = error.detail(format!(
|
||||
"File: '{path:?}' contains '.{}' and '.{}'",
|
||||
misplaced_extension.display_text, extensions[0].display_text,
|
||||
));
|
||||
}
|
||||
|
||||
return Err(error
|
||||
.hint("You can use `--format` to specify what format to use, examples:")
|
||||
.hint(" ouch compress file.zip.zip file --format zip")
|
||||
.hint(" ouch decompress file --format zst")
|
||||
.hint(" ouch list archive --format tar.gz")
|
||||
.into());
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if let Ok(name) = name.to_str() {
|
||||
let file_stem = name.trim_matches('.');
|
||||
if SUPPORTED_EXTENSIONS.contains(&file_stem) {
|
||||
warning!("Received a file with name '{file_stem}', but {file_stem} was expected as the extension.");
|
||||
if SUPPORTED_EXTENSIONS.contains(&file_stem) || SUPPORTED_ALIASES.contains(&file_stem) {
|
||||
warning(format!(
|
||||
"Received a file with name '{file_stem}', but {file_stem} was expected as the extension"
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
(name.to_path().unwrap(), extensions)
|
||||
Ok((name.to_path().unwrap(), extensions))
|
||||
}
|
||||
|
||||
/// Extracts extensions from a path, return only the list of extension objects
|
||||
pub fn extensions_from_path(path: &Path) -> Vec<Extension> {
|
||||
let (_, extensions) = separate_known_extensions_from_name(path);
|
||||
extensions
|
||||
pub fn extensions_from_path(path: &Path) -> Result<Vec<Extension>> {
|
||||
separate_known_extensions_from_name(path).map(|(_, extensions)| extensions)
|
||||
}
|
||||
|
||||
// Panics if formats has an empty list of compression formats
|
||||
/// Panics if formats has an empty list of compression formats
|
||||
pub fn split_first_compression_format(formats: &[Extension]) -> (CompressionFormat, Vec<CompressionFormat>) {
|
||||
let mut extensions: Vec<CompressionFormat> = flatten_compression_formats(formats);
|
||||
let first_extension = extensions.remove(0);
|
||||
@ -208,7 +281,7 @@ pub fn build_archive_file_suggestion(path: &Path, suggested_extension: &str) ->
|
||||
|
||||
// If the extension we got is a supported extension, generate the suggestion
|
||||
// at the position we found
|
||||
if SUPPORTED_EXTENSIONS.contains(&maybe_extension) {
|
||||
if SUPPORTED_EXTENSIONS.contains(&maybe_extension) || SUPPORTED_ALIASES.contains(&maybe_extension) {
|
||||
let mut path = path.to_string();
|
||||
path.insert_str(position_to_insert - 1, suggested_extension);
|
||||
|
||||
@ -221,21 +294,81 @@ pub fn build_archive_file_suggestion(path: &Path, suggested_extension: &str) ->
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_extensions_from_path() {
|
||||
use CompressionFormat::*;
|
||||
let path = Path::new("bolovo.tar.gz");
|
||||
|
||||
let extensions: Vec<Extension> = extensions_from_path(path);
|
||||
let formats: Vec<CompressionFormat> = flatten_compression_formats(&extensions);
|
||||
let extensions = extensions_from_path(path).unwrap();
|
||||
let formats = flatten_compression_formats(&extensions);
|
||||
|
||||
assert_eq!(formats, vec![Tar, Gzip]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// Test extension parsing for input/output files
|
||||
fn test_separate_known_extensions_from_name() {
|
||||
assert_eq!(
|
||||
separate_known_extensions_from_name("file".as_ref()).unwrap(),
|
||||
("file".as_ref(), vec![])
|
||||
);
|
||||
assert_eq!(
|
||||
separate_known_extensions_from_name("tar".as_ref()).unwrap(),
|
||||
("tar".as_ref(), vec![])
|
||||
);
|
||||
assert_eq!(
|
||||
separate_known_extensions_from_name(".tar".as_ref()).unwrap(),
|
||||
(".tar".as_ref(), vec![])
|
||||
);
|
||||
assert_eq!(
|
||||
separate_known_extensions_from_name("file.tar".as_ref()).unwrap(),
|
||||
("file".as_ref(), vec![Extension::new(&[Tar], "tar")])
|
||||
);
|
||||
assert_eq!(
|
||||
separate_known_extensions_from_name("file.tar.gz".as_ref()).unwrap(),
|
||||
(
|
||||
"file".as_ref(),
|
||||
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
separate_known_extensions_from_name(".tar.gz".as_ref()).unwrap(),
|
||||
(".tar".as_ref(), vec![Extension::new(&[Gzip], "gz")])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// Test extension parsing of `--format FORMAT`
|
||||
fn test_parse_of_format_flag() {
|
||||
assert_eq!(
|
||||
parse_format_flag(OsStr::new("tar")).unwrap(),
|
||||
vec![Extension::new(&[Tar], "tar")]
|
||||
);
|
||||
assert_eq!(
|
||||
parse_format_flag(OsStr::new(".tar")).unwrap(),
|
||||
vec![Extension::new(&[Tar], "tar")]
|
||||
);
|
||||
assert_eq!(
|
||||
parse_format_flag(OsStr::new("tar.gz")).unwrap(),
|
||||
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
|
||||
);
|
||||
assert_eq!(
|
||||
parse_format_flag(OsStr::new(".tar.gz")).unwrap(),
|
||||
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
|
||||
);
|
||||
assert_eq!(
|
||||
parse_format_flag(OsStr::new("..tar..gz.....")).unwrap(),
|
||||
vec![Extension::new(&[Tar], "tar"), Extension::new(&[Gzip], "gz")]
|
||||
);
|
||||
|
||||
assert!(parse_format_flag(OsStr::new("../tar.gz")).is_err());
|
||||
assert!(parse_format_flag(OsStr::new("targz")).is_err());
|
||||
assert!(parse_format_flag(OsStr::new("tar.gz.unknown")).is_err());
|
||||
assert!(parse_format_flag(OsStr::new(".tar.gz.unknown")).is_err());
|
||||
assert!(parse_format_flag(OsStr::new(".tar.!@#.gz")).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn builds_suggestion_correctly() {
|
||||
assert_eq!(build_archive_file_suggestion(Path::new("linux.png"), ".tar"), None);
|
||||
@ -256,4 +389,10 @@ mod tests {
|
||||
"linux.pkg.info.tar.zst"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extension_parsing_with_multiple_archive_formats() {
|
||||
assert!(separate_known_extensions_from_name("file.tar.zip".as_ref()).is_err());
|
||||
assert!(separate_known_extensions_from_name("file.7z.zst.zip.lz4".as_ref()).is_err());
|
||||
}
|
||||
}
|
||||
|
17
src/list.rs
17
src/list.rs
@ -1,7 +1,7 @@
|
||||
//! Some implementation helpers related to the 'list' command.
|
||||
|
||||
use std::{
|
||||
io::{stdout, Write},
|
||||
io::{stdout, BufWriter, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
@ -32,16 +32,16 @@ pub fn list_files(
|
||||
files: impl IntoIterator<Item = crate::Result<FileInArchive>>,
|
||||
list_options: ListOptions,
|
||||
) -> crate::Result<()> {
|
||||
let out = &mut stdout().lock();
|
||||
let mut out = BufWriter::new(stdout().lock());
|
||||
let _ = writeln!(out, "Archive: {}", EscapedPathDisplay::new(archive));
|
||||
|
||||
if list_options.tree {
|
||||
let tree = files.into_iter().collect::<crate::Result<Tree>>()?;
|
||||
tree.print(out);
|
||||
tree.print(&mut out);
|
||||
} else {
|
||||
for file in files {
|
||||
let FileInArchive { path, is_dir } = file?;
|
||||
print_entry(out, EscapedPathDisplay::new(&path), is_dir);
|
||||
print_entry(&mut out, EscapedPathDisplay::new(&path), is_dir);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
@ -78,7 +78,6 @@ mod tree {
|
||||
use std::{
|
||||
ffi::{OsStr, OsString},
|
||||
io::Write,
|
||||
iter::FromIterator,
|
||||
path,
|
||||
};
|
||||
|
||||
@ -86,7 +85,7 @@ mod tree {
|
||||
use linked_hash_map::LinkedHashMap;
|
||||
|
||||
use super::FileInArchive;
|
||||
use crate::{utils::EscapedPathDisplay, warning};
|
||||
use crate::utils::{logger::warning, EscapedPathDisplay};
|
||||
|
||||
/// Directory tree
|
||||
#[derive(Debug, Default)]
|
||||
@ -120,10 +119,10 @@ mod tree {
|
||||
match &self.file {
|
||||
None => self.file = Some(file),
|
||||
Some(file) => {
|
||||
warning!(
|
||||
warning(format!(
|
||||
"multiple files with the same name in a single directory ({})",
|
||||
EscapedPathDisplay::new(&file.path),
|
||||
);
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -144,7 +143,7 @@ mod tree {
|
||||
false => draw::FINAL_BRANCH,
|
||||
};
|
||||
|
||||
print!("{prefix}{final_part}");
|
||||
let _ = write!(out, "{prefix}{final_part}");
|
||||
let is_dir = match self.file {
|
||||
Some(FileInArchive { is_dir, .. }) => is_dir,
|
||||
None => true,
|
||||
|
@ -1,58 +0,0 @@
|
||||
//! Macros used on ouch.
|
||||
|
||||
/// Macro that prints \[INFO\] messages, wraps [`eprintln`].
|
||||
///
|
||||
/// There are essentially two different versions of the `info!()` macro:
|
||||
/// - `info!(accessible, ...)` should only be used for short, important
|
||||
/// information which is expected to be useful for e.g. blind users whose
|
||||
/// text-to-speech systems read out every output line, which is why we
|
||||
/// should reduce nonessential output to a minimum when running in
|
||||
/// ACCESSIBLE mode
|
||||
/// - `info!(inaccessible, ...)` can be used more carelessly / for less
|
||||
/// important information. A seeing user can easily skim through more lines
|
||||
/// of output, so e.g. reporting every single processed file can be helpful,
|
||||
/// while it would generate long and hard to navigate text for blind people
|
||||
/// who have to have each line of output read to them aloud, without to
|
||||
/// ability to skip some lines deemed not important like a seeing person would.
|
||||
#[macro_export]
|
||||
macro_rules! info {
|
||||
// Accessible (short/important) info message.
|
||||
// Show info message even in ACCESSIBLE mode
|
||||
(accessible, $($arg:tt)*) => {{
|
||||
use $crate::utils::colors::{YELLOW, RESET};
|
||||
|
||||
if $crate::accessible::is_running_in_accessible_mode() {
|
||||
eprint!("{}Info:{} ", *YELLOW, *RESET);
|
||||
} else {
|
||||
eprint!("{}[INFO]{} ", *YELLOW, *RESET);
|
||||
}
|
||||
|
||||
eprintln!($($arg)*);
|
||||
}};
|
||||
// Inccessible (long/no important) info message.
|
||||
// Print info message if ACCESSIBLE is not turned on
|
||||
(inaccessible, $($arg:tt)*) => {{
|
||||
use $crate::utils::colors::{YELLOW, RESET};
|
||||
|
||||
if !$crate::accessible::is_running_in_accessible_mode() {
|
||||
eprint!("{}[INFO]{} ", *YELLOW, *RESET);
|
||||
eprintln!($($arg)*);
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
/// Macro that prints WARNING messages, wraps [`eprintln`].
|
||||
#[macro_export]
|
||||
macro_rules! warning {
|
||||
($($arg:tt)*) => {{
|
||||
use $crate::utils::colors::{ORANGE, RESET};
|
||||
|
||||
if $crate::accessible::is_running_in_accessible_mode() {
|
||||
eprint!("{}Warning:{} ", *ORANGE, *RESET);
|
||||
} else {
|
||||
eprint!("{}[WARNING]{} ", *ORANGE, *RESET);
|
||||
}
|
||||
|
||||
eprintln!($($arg)*);
|
||||
}};
|
||||
}
|
21
src/main.rs
21
src/main.rs
@ -1,6 +1,3 @@
|
||||
// Macros should be declared first
|
||||
pub mod macros;
|
||||
|
||||
pub mod accessible;
|
||||
pub mod archive;
|
||||
pub mod check;
|
||||
@ -14,9 +11,15 @@ pub mod utils;
|
||||
use std::{env, path::PathBuf};
|
||||
|
||||
use cli::CliArgs;
|
||||
use error::{Error, Result};
|
||||
use once_cell::sync::Lazy;
|
||||
use utils::{QuestionAction, QuestionPolicy};
|
||||
|
||||
use self::{
|
||||
error::{Error, Result},
|
||||
utils::{
|
||||
logger::{shutdown_logger_and_wait, spawn_logger_thread},
|
||||
QuestionAction, QuestionPolicy,
|
||||
},
|
||||
};
|
||||
|
||||
// Used in BufReader and BufWriter to perform less syscalls
|
||||
const BUFFER_CAPACITY: usize = 1024 * 32;
|
||||
@ -28,13 +31,17 @@ static CURRENT_DIRECTORY: Lazy<PathBuf> = Lazy::new(|| env::current_dir().unwrap
|
||||
pub const EXIT_FAILURE: i32 = libc::EXIT_FAILURE;
|
||||
|
||||
fn main() {
|
||||
if let Err(err) = run() {
|
||||
spawn_logger_thread();
|
||||
let result = run();
|
||||
shutdown_logger_and_wait();
|
||||
|
||||
if let Err(err) = result {
|
||||
eprintln!("{err}");
|
||||
std::process::exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
fn run() -> Result<()> {
|
||||
let (args, skip_questions_positively, file_visibility_policy) = CliArgs::parse_args()?;
|
||||
let (args, skip_questions_positively, file_visibility_policy) = CliArgs::parse_and_validate_args()?;
|
||||
commands::run(args, skip_questions_positively, file_visibility_policy)
|
||||
}
|
||||
|
@ -69,11 +69,18 @@ impl FileVisibilityPolicy {
|
||||
|
||||
/// Walks through a directory using [`ignore::Walk`]
|
||||
pub fn build_walker(&self, path: impl AsRef<Path>) -> ignore::Walk {
|
||||
ignore::WalkBuilder::new(path)
|
||||
let mut builder = ignore::WalkBuilder::new(path);
|
||||
|
||||
builder
|
||||
.git_exclude(self.read_git_exclude)
|
||||
.git_ignore(self.read_git_ignore)
|
||||
.ignore(self.read_ignore)
|
||||
.hidden(self.read_hidden)
|
||||
.build()
|
||||
.hidden(self.read_hidden);
|
||||
|
||||
if self.read_git_ignore {
|
||||
builder.filter_entry(|p| p.path().file_name().is_some_and(|name| name != ".git"));
|
||||
}
|
||||
|
||||
builder.build()
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
use std::{borrow::Cow, fmt::Display, path::Path};
|
||||
use std::{borrow::Cow, cmp, ffi::OsStr, fmt::Display, path::Path};
|
||||
|
||||
use crate::CURRENT_DIRECTORY;
|
||||
|
||||
@ -45,7 +45,11 @@ impl Display for EscapedPathDisplay<'_> {
|
||||
/// This is different from [`Path::display`].
|
||||
///
|
||||
/// See <https://gist.github.com/marcospb19/ebce5572be26397cf08bbd0fd3b65ac1> for a comparison.
|
||||
pub fn to_utf(os_str: &Path) -> Cow<str> {
|
||||
pub fn path_to_str(path: &Path) -> Cow<str> {
|
||||
os_str_to_str(path.as_ref())
|
||||
}
|
||||
|
||||
pub fn os_str_to_str(os_str: &OsStr) -> Cow<str> {
|
||||
let format = || {
|
||||
let text = format!("{os_str:?}");
|
||||
Cow::Owned(text.trim_matches('"').to_string())
|
||||
@ -65,15 +69,15 @@ pub fn strip_cur_dir(source_path: &Path) -> &Path {
|
||||
/// Converts a slice of `AsRef<OsStr>` to comma separated String
|
||||
///
|
||||
/// Panics if the slice is empty.
|
||||
pub fn pretty_format_list_of_paths(os_strs: &[impl AsRef<Path>]) -> String {
|
||||
let mut iter = os_strs.iter().map(AsRef::as_ref);
|
||||
pub fn pretty_format_list_of_paths(paths: &[impl AsRef<Path>]) -> String {
|
||||
let mut iter = paths.iter().map(AsRef::as_ref);
|
||||
|
||||
let first_element = iter.next().unwrap();
|
||||
let mut string = to_utf(first_element).into_owned();
|
||||
let first_path = iter.next().unwrap();
|
||||
let mut string = path_to_str(first_path).into_owned();
|
||||
|
||||
for os_str in iter {
|
||||
for path in iter {
|
||||
string += ", ";
|
||||
string += &to_utf(os_str);
|
||||
string += &path_to_str(path);
|
||||
}
|
||||
string
|
||||
}
|
||||
@ -83,6 +87,84 @@ pub fn nice_directory_display(path: &Path) -> Cow<str> {
|
||||
if path == Path::new(".") {
|
||||
Cow::Borrowed("current directory")
|
||||
} else {
|
||||
to_utf(path)
|
||||
path_to_str(path)
|
||||
}
|
||||
}
|
||||
|
||||
/// Struct useful to printing bytes as kB, MB, GB, etc.
|
||||
pub struct Bytes(f64);
|
||||
|
||||
impl Bytes {
|
||||
const UNIT_PREFIXES: [&'static str; 6] = ["", "ki", "Mi", "Gi", "Ti", "Pi"];
|
||||
|
||||
/// Create a new Bytes.
|
||||
pub fn new(bytes: u64) -> Self {
|
||||
Self(bytes as f64)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Bytes {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let num = self.0;
|
||||
|
||||
debug_assert!(num >= 0.0);
|
||||
if num < 1_f64 {
|
||||
return write!(f, "{:>6.2} B", num);
|
||||
}
|
||||
|
||||
let delimiter = 1000_f64;
|
||||
let exponent = cmp::min((num.ln() / 6.90775).floor() as i32, 4);
|
||||
|
||||
write!(
|
||||
f,
|
||||
"{:>6.2} {:>2}B",
|
||||
num / delimiter.powi(exponent),
|
||||
Bytes::UNIT_PREFIXES[exponent as usize],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_pretty_bytes_formatting() {
|
||||
fn format_bytes(bytes: u64) -> String {
|
||||
format!("{}", Bytes::new(bytes))
|
||||
}
|
||||
let b = 1;
|
||||
let kb = b * 1000;
|
||||
let mb = kb * 1000;
|
||||
let gb = mb * 1000;
|
||||
|
||||
assert_eq!(" 0.00 B", format_bytes(0)); // This is weird
|
||||
assert_eq!(" 1.00 B", format_bytes(b));
|
||||
assert_eq!("999.00 B", format_bytes(b * 999));
|
||||
assert_eq!(" 12.00 MiB", format_bytes(mb * 12));
|
||||
assert_eq!("123.00 MiB", format_bytes(mb * 123));
|
||||
assert_eq!(" 5.50 MiB", format_bytes(mb * 5 + kb * 500));
|
||||
assert_eq!(" 7.54 GiB", format_bytes(gb * 7 + 540 * mb));
|
||||
assert_eq!(" 1.20 TiB", format_bytes(gb * 1200));
|
||||
|
||||
// bytes
|
||||
assert_eq!("234.00 B", format_bytes(234));
|
||||
assert_eq!("999.00 B", format_bytes(999));
|
||||
// kilobytes
|
||||
assert_eq!(" 2.23 kiB", format_bytes(2234));
|
||||
assert_eq!(" 62.50 kiB", format_bytes(62500));
|
||||
assert_eq!("329.99 kiB", format_bytes(329990));
|
||||
// megabytes
|
||||
assert_eq!(" 2.75 MiB", format_bytes(2750000));
|
||||
assert_eq!(" 55.00 MiB", format_bytes(55000000));
|
||||
assert_eq!("987.65 MiB", format_bytes(987654321));
|
||||
// gigabytes
|
||||
assert_eq!(" 5.28 GiB", format_bytes(5280000000));
|
||||
assert_eq!(" 95.20 GiB", format_bytes(95200000000));
|
||||
assert_eq!("302.00 GiB", format_bytes(302000000000));
|
||||
assert_eq!("302.99 GiB", format_bytes(302990000000));
|
||||
// Weird aproximation cases:
|
||||
assert_eq!("999.90 GiB", format_bytes(999900000000));
|
||||
assert_eq!(" 1.00 TiB", format_bytes(999990000000));
|
||||
}
|
||||
}
|
||||
|
113
src/utils/fs.rs
113
src/utils/fs.rs
@ -8,22 +8,45 @@ use std::{
|
||||
|
||||
use fs_err as fs;
|
||||
|
||||
use super::user_wants_to_overwrite;
|
||||
use crate::{extension::Extension, info, utils::EscapedPathDisplay, QuestionPolicy};
|
||||
use super::{question::FileConflitOperation, user_wants_to_overwrite};
|
||||
use crate::{
|
||||
extension::Extension,
|
||||
utils::{logger::info_accessible, EscapedPathDisplay, QuestionAction},
|
||||
QuestionPolicy,
|
||||
};
|
||||
|
||||
/// Remove `path` asking the user to overwrite if necessary.
|
||||
pub fn is_path_stdin(path: &Path) -> bool {
|
||||
path.as_os_str() == "-"
|
||||
}
|
||||
|
||||
/// Check if &Path exists, if it does then ask the user if they want to overwrite or rename it.
|
||||
/// If the user want to overwrite then the file or directory will be removed and returned the same input path
|
||||
/// If the user want to rename then nothing will be removed and a new path will be returned with a new name
|
||||
///
|
||||
/// * `Ok(true)` means the path is clear,
|
||||
/// * `Ok(false)` means the user doesn't want to overwrite
|
||||
/// * `Ok(None)` means the user wants to cancel the operation
|
||||
/// * `Ok(Some(path))` returns a valid PathBuf without any another file or directory with the same name
|
||||
/// * `Err(_)` is an error
|
||||
pub fn clear_path(path: &Path, question_policy: QuestionPolicy) -> crate::Result<bool> {
|
||||
if path.exists() && !user_wants_to_overwrite(path, question_policy)? {
|
||||
return Ok(false);
|
||||
pub fn resolve_path_conflict(
|
||||
path: &Path,
|
||||
question_policy: QuestionPolicy,
|
||||
question_action: QuestionAction,
|
||||
) -> crate::Result<Option<PathBuf>> {
|
||||
if path.exists() {
|
||||
match user_wants_to_overwrite(path, question_policy, question_action)? {
|
||||
FileConflitOperation::Cancel => Ok(None),
|
||||
FileConflitOperation::Overwrite => {
|
||||
remove_file_or_dir(path)?;
|
||||
Ok(Some(path.to_path_buf()))
|
||||
}
|
||||
FileConflitOperation::Rename => {
|
||||
let renamed_path = rename_for_available_filename(path);
|
||||
Ok(Some(renamed_path))
|
||||
}
|
||||
FileConflitOperation::Merge => Ok(Some(path.to_path_buf())),
|
||||
}
|
||||
} else {
|
||||
Ok(Some(path.to_path_buf()))
|
||||
}
|
||||
|
||||
remove_file_or_dir(path)?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub fn remove_file_or_dir(path: &Path) -> crate::Result<()> {
|
||||
@ -35,13 +58,48 @@ pub fn remove_file_or_dir(path: &Path) -> crate::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a new path renaming the "filename" from &Path for a available name in the same directory
|
||||
pub fn rename_for_available_filename(path: &Path) -> PathBuf {
|
||||
let mut renamed_path = rename_or_increment_filename(path);
|
||||
while renamed_path.exists() {
|
||||
renamed_path = rename_or_increment_filename(&renamed_path);
|
||||
}
|
||||
renamed_path
|
||||
}
|
||||
|
||||
/// Create a new path renaming the "filename" from &Path to `filename_1`
|
||||
/// if its name already ends with `_` and some number, then it increments the number
|
||||
/// Example:
|
||||
/// - `file.txt` -> `file_1.txt`
|
||||
/// - `file_1.txt` -> `file_2.txt`
|
||||
pub fn rename_or_increment_filename(path: &Path) -> PathBuf {
|
||||
let parent = path.parent().unwrap_or_else(|| Path::new(""));
|
||||
let filename = path.file_stem().and_then(|s| s.to_str()).unwrap_or("");
|
||||
let extension = path.extension().and_then(|s| s.to_str()).unwrap_or("");
|
||||
|
||||
let new_filename = match filename.rsplit_once('_') {
|
||||
Some((base, number_str)) if number_str.chars().all(char::is_numeric) => {
|
||||
let number = number_str.parse::<u32>().unwrap_or(0);
|
||||
format!("{}_{}", base, number + 1)
|
||||
}
|
||||
_ => format!("{}_1", filename),
|
||||
};
|
||||
|
||||
let mut new_path = parent.join(new_filename);
|
||||
if !extension.is_empty() {
|
||||
new_path.set_extension(extension);
|
||||
}
|
||||
|
||||
new_path
|
||||
}
|
||||
|
||||
/// Creates a directory at the path, if there is nothing there.
|
||||
pub fn create_dir_if_non_existent(path: &Path) -> crate::Result<()> {
|
||||
if !path.exists() {
|
||||
fs::create_dir_all(path)?;
|
||||
// creating a directory is an important change to the file system we
|
||||
// should always inform the user about
|
||||
info!(accessible, "directory {} created.", EscapedPathDisplay::new(path));
|
||||
info_accessible(format!("Directory {} created", EscapedPathDisplay::new(path)));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@ -74,6 +132,9 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
|
||||
fn is_bz2(buf: &[u8]) -> bool {
|
||||
buf.starts_with(&[0x42, 0x5A, 0x68])
|
||||
}
|
||||
fn is_bz3(buf: &[u8]) -> bool {
|
||||
buf.starts_with(b"BZ3v1")
|
||||
}
|
||||
fn is_xz(buf: &[u8]) -> bool {
|
||||
buf.starts_with(&[0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])
|
||||
}
|
||||
@ -86,6 +147,17 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
|
||||
fn is_zst(buf: &[u8]) -> bool {
|
||||
buf.starts_with(&[0x28, 0xB5, 0x2F, 0xFD])
|
||||
}
|
||||
fn is_rar(buf: &[u8]) -> bool {
|
||||
// ref https://www.rarlab.com/technote.htm#rarsign
|
||||
// RAR 5.0 8 bytes length signature: 0x52 0x61 0x72 0x21 0x1A 0x07 0x01 0x00
|
||||
// RAR 4.x 7 bytes length signature: 0x52 0x61 0x72 0x21 0x1A 0x07 0x00
|
||||
buf.len() >= 7
|
||||
&& buf.starts_with(&[0x52, 0x61, 0x72, 0x21, 0x1A, 0x07])
|
||||
&& (buf[6] == 0x00 || (buf.len() >= 8 && buf[6..=7] == [0x01, 0x00]))
|
||||
}
|
||||
fn is_sevenz(buf: &[u8]) -> bool {
|
||||
buf.starts_with(&[0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
|
||||
}
|
||||
|
||||
let buf = {
|
||||
let mut buf = [0; 270];
|
||||
@ -109,6 +181,8 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
|
||||
Some(Extension::new(&[Gzip], "gz"))
|
||||
} else if is_bz2(&buf) {
|
||||
Some(Extension::new(&[Bzip], "bz2"))
|
||||
} else if is_bz3(&buf) {
|
||||
Some(Extension::new(&[Bzip3], "bz3"))
|
||||
} else if is_xz(&buf) {
|
||||
Some(Extension::new(&[Lzma], "xz"))
|
||||
} else if is_lz4(&buf) {
|
||||
@ -117,16 +191,11 @@ pub fn try_infer_extension(path: &Path) -> Option<Extension> {
|
||||
Some(Extension::new(&[Snappy], "sz"))
|
||||
} else if is_zst(&buf) {
|
||||
Some(Extension::new(&[Zstd], "zst"))
|
||||
} else if is_rar(&buf) {
|
||||
Some(Extension::new(&[Rar], "rar"))
|
||||
} else if is_sevenz(&buf) {
|
||||
Some(Extension::new(&[SevenZip], "7z"))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if a path is a symlink.
|
||||
/// This is the same as the nightly <https://doc.rust-lang.org/std/path/struct.Path.html#method.is_symlink>
|
||||
/// Useful to detect broken symlinks when compressing. (So we can safely ignore them)
|
||||
pub fn is_symlink(path: &Path) -> bool {
|
||||
fs::symlink_metadata(path)
|
||||
.map(|m| m.file_type().is_symlink())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
16
src/utils/io.rs
Normal file
16
src/utils/io.rs
Normal file
@ -0,0 +1,16 @@
|
||||
use std::io::{self, stderr, stdout, StderrLock, StdoutLock, Write};
|
||||
|
||||
use crate::utils::logger;
|
||||
|
||||
type StdioOutputLocks = (StdoutLock<'static>, StderrLock<'static>);
|
||||
|
||||
pub fn lock_and_flush_output_stdio() -> io::Result<StdioOutputLocks> {
|
||||
logger::flush_messages();
|
||||
|
||||
let mut stdout = stdout().lock();
|
||||
stdout.flush()?;
|
||||
let mut stderr = stderr().lock();
|
||||
stderr.flush()?;
|
||||
|
||||
Ok((stdout, stderr))
|
||||
}
|
231
src/utils/logger.rs
Normal file
231
src/utils/logger.rs
Normal file
@ -0,0 +1,231 @@
|
||||
use std::{
|
||||
sync::{mpsc, Arc, Barrier, OnceLock},
|
||||
thread,
|
||||
};
|
||||
|
||||
pub use logger_thread::spawn_logger_thread;
|
||||
|
||||
use super::colors::{ORANGE, RESET, YELLOW};
|
||||
use crate::accessible::is_running_in_accessible_mode;
|
||||
|
||||
/// Asks logger to shutdown and waits till it flushes all pending messages.
|
||||
#[track_caller]
|
||||
pub fn shutdown_logger_and_wait() {
|
||||
logger_thread::send_shutdown_command_and_wait();
|
||||
}
|
||||
|
||||
/// Asks logger to flush all messages, useful before starting STDIN interaction.
|
||||
#[track_caller]
|
||||
pub fn flush_messages() {
|
||||
logger_thread::send_flush_command_and_wait();
|
||||
}
|
||||
|
||||
/// An `[INFO]` log to be displayed if we're not running accessibility mode.
|
||||
///
|
||||
/// Same as `.info_accessible()`, but only displayed if accessibility mode
|
||||
/// is turned off, which is detected by the function
|
||||
/// `is_running_in_accessible_mode`.
|
||||
///
|
||||
/// Read more about accessibility mode in `accessible.rs`.
|
||||
#[track_caller]
|
||||
pub fn info(contents: String) {
|
||||
info_with_accessibility(contents, false);
|
||||
}
|
||||
|
||||
/// An `[INFO]` log to be displayed.
|
||||
///
|
||||
/// Same as `.info()`, but also displays if `is_running_in_accessible_mode`
|
||||
/// returns `true`.
|
||||
///
|
||||
/// Read more about accessibility mode in `accessible.rs`.
|
||||
#[track_caller]
|
||||
pub fn info_accessible(contents: String) {
|
||||
info_with_accessibility(contents, true);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn info_with_accessibility(contents: String, accessible: bool) {
|
||||
logger_thread::send_print_command(PrintMessage {
|
||||
contents,
|
||||
accessible,
|
||||
level: MessageLevel::Info,
|
||||
});
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn warning(contents: String) {
|
||||
logger_thread::send_print_command(PrintMessage {
|
||||
contents,
|
||||
// Warnings are important and unlikely to flood, so they should be displayed
|
||||
accessible: true,
|
||||
level: MessageLevel::Warning,
|
||||
});
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum LoggerCommand {
|
||||
Print(PrintMessage),
|
||||
Flush { finished_barrier: Arc<Barrier> },
|
||||
FlushAndShutdown { finished_barrier: Arc<Barrier> },
|
||||
}
|
||||
|
||||
/// Message object used for sending logs from worker threads to a logging thread via channels.
|
||||
/// See <https://github.com/ouch-org/ouch/issues/643>
|
||||
#[derive(Debug)]
|
||||
struct PrintMessage {
|
||||
contents: String,
|
||||
accessible: bool,
|
||||
level: MessageLevel,
|
||||
}
|
||||
|
||||
impl PrintMessage {
|
||||
fn to_formatted_message(&self) -> Option<String> {
|
||||
match self.level {
|
||||
MessageLevel::Info => {
|
||||
if self.accessible {
|
||||
if is_running_in_accessible_mode() {
|
||||
Some(format!("{}Info:{} {}", *YELLOW, *RESET, self.contents))
|
||||
} else {
|
||||
Some(format!("{}[INFO]{} {}", *YELLOW, *RESET, self.contents))
|
||||
}
|
||||
} else if !is_running_in_accessible_mode() {
|
||||
Some(format!("{}[INFO]{} {}", *YELLOW, *RESET, self.contents))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
MessageLevel::Warning => {
|
||||
if is_running_in_accessible_mode() {
|
||||
Some(format!("{}Warning:{} {}", *ORANGE, *RESET, self.contents))
|
||||
} else {
|
||||
Some(format!("{}[WARNING]{} {}", *ORANGE, *RESET, self.contents))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
enum MessageLevel {
|
||||
Info,
|
||||
Warning,
|
||||
}
|
||||
|
||||
mod logger_thread {
|
||||
use std::{
|
||||
sync::{mpsc::RecvTimeoutError, Arc, Barrier},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
type LogReceiver = mpsc::Receiver<LoggerCommand>;
|
||||
type LogSender = mpsc::Sender<LoggerCommand>;
|
||||
|
||||
static SENDER: OnceLock<LogSender> = OnceLock::new();
|
||||
|
||||
#[track_caller]
|
||||
fn setup_channel() -> Option<LogReceiver> {
|
||||
let mut optional = None;
|
||||
SENDER.get_or_init(|| {
|
||||
let (tx, rx) = mpsc::channel();
|
||||
optional = Some(rx);
|
||||
tx
|
||||
});
|
||||
optional
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn get_sender() -> &'static LogSender {
|
||||
SENDER.get().expect("No sender, you need to call `setup_channel` first")
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub(super) fn send_print_command(msg: PrintMessage) {
|
||||
if cfg!(test) {
|
||||
spawn_logger_thread();
|
||||
}
|
||||
get_sender()
|
||||
.send(LoggerCommand::Print(msg))
|
||||
.expect("Failed to send print command");
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub(super) fn send_flush_command_and_wait() {
|
||||
let barrier = Arc::new(Barrier::new(2));
|
||||
|
||||
get_sender()
|
||||
.send(LoggerCommand::Flush {
|
||||
finished_barrier: barrier.clone(),
|
||||
})
|
||||
.expect("Failed to send flush command");
|
||||
|
||||
barrier.wait();
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub(super) fn send_shutdown_command_and_wait() {
|
||||
let barrier = Arc::new(Barrier::new(2));
|
||||
|
||||
get_sender()
|
||||
.send(LoggerCommand::FlushAndShutdown {
|
||||
finished_barrier: barrier.clone(),
|
||||
})
|
||||
.expect("Failed to send shutdown command");
|
||||
|
||||
barrier.wait();
|
||||
}
|
||||
|
||||
pub fn spawn_logger_thread() {
|
||||
if let Some(log_receiver) = setup_channel() {
|
||||
thread::spawn(move || run_logger(log_receiver));
|
||||
}
|
||||
}
|
||||
|
||||
fn run_logger(log_receiver: LogReceiver) {
|
||||
const FLUSH_TIMEOUT: Duration = Duration::from_millis(200);
|
||||
|
||||
let mut buffer = Vec::<String>::with_capacity(16);
|
||||
|
||||
loop {
|
||||
let msg = match log_receiver.recv_timeout(FLUSH_TIMEOUT) {
|
||||
Ok(msg) => msg,
|
||||
Err(RecvTimeoutError::Timeout) => {
|
||||
flush_logs_to_stderr(&mut buffer);
|
||||
continue;
|
||||
}
|
||||
Err(RecvTimeoutError::Disconnected) => unreachable!("sender is static"),
|
||||
};
|
||||
|
||||
match msg {
|
||||
LoggerCommand::Print(msg) => {
|
||||
// Append message to buffer
|
||||
if let Some(msg) = msg.to_formatted_message() {
|
||||
buffer.push(msg);
|
||||
}
|
||||
|
||||
if buffer.len() == buffer.capacity() {
|
||||
flush_logs_to_stderr(&mut buffer);
|
||||
}
|
||||
}
|
||||
LoggerCommand::Flush { finished_barrier } => {
|
||||
flush_logs_to_stderr(&mut buffer);
|
||||
finished_barrier.wait();
|
||||
}
|
||||
LoggerCommand::FlushAndShutdown { finished_barrier } => {
|
||||
flush_logs_to_stderr(&mut buffer);
|
||||
finished_barrier.wait();
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn flush_logs_to_stderr(buffer: &mut Vec<String>) {
|
||||
if !buffer.is_empty() {
|
||||
let text = buffer.join("\n");
|
||||
eprintln!("{text}");
|
||||
buffer.clear();
|
||||
}
|
||||
}
|
||||
}
|
@ -7,17 +7,26 @@ pub mod colors;
|
||||
mod file_visibility;
|
||||
mod formatting;
|
||||
mod fs;
|
||||
pub mod io;
|
||||
pub mod logger;
|
||||
mod question;
|
||||
|
||||
pub use file_visibility::FileVisibilityPolicy;
|
||||
pub use formatting::{nice_directory_display, pretty_format_list_of_paths, strip_cur_dir, to_utf, EscapedPathDisplay};
|
||||
pub use fs::{
|
||||
cd_into_same_dir_as, clear_path, create_dir_if_non_existent, is_symlink, remove_file_or_dir, try_infer_extension,
|
||||
pub use self::{
|
||||
file_visibility::FileVisibilityPolicy,
|
||||
formatting::{
|
||||
nice_directory_display, os_str_to_str, path_to_str, pretty_format_list_of_paths, strip_cur_dir, Bytes,
|
||||
EscapedPathDisplay,
|
||||
},
|
||||
fs::{
|
||||
cd_into_same_dir_as, create_dir_if_non_existent, is_path_stdin, remove_file_or_dir,
|
||||
rename_for_available_filename, resolve_path_conflict, try_infer_extension,
|
||||
},
|
||||
question::{
|
||||
ask_to_create_file, user_wants_to_continue, user_wants_to_overwrite, FileConflitOperation, QuestionAction,
|
||||
QuestionPolicy,
|
||||
},
|
||||
utf8::{get_invalid_utf8_paths, is_invalid_utf8},
|
||||
};
|
||||
pub use question::{
|
||||
ask_to_create_file, user_wants_to_continue, user_wants_to_overwrite, QuestionAction, QuestionPolicy,
|
||||
};
|
||||
pub use utf8::{get_invalid_utf8_paths, is_invalid_utf8};
|
||||
|
||||
mod utf8 {
|
||||
use std::{ffi::OsStr, path::PathBuf};
|
||||
@ -29,9 +38,6 @@ mod utf8 {
|
||||
|
||||
/// Filter out list of paths that are not utf8 valid
|
||||
pub fn get_invalid_utf8_paths(paths: &[PathBuf]) -> Vec<&PathBuf> {
|
||||
paths
|
||||
.iter()
|
||||
.filter_map(|path| is_invalid_utf8(path).then_some(path))
|
||||
.collect()
|
||||
paths.iter().filter(|path| is_invalid_utf8(path)).collect()
|
||||
}
|
||||
}
|
||||
|
@ -5,17 +5,16 @@
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
io::{self, Write},
|
||||
io::{stdin, BufRead, IsTerminal},
|
||||
path::Path,
|
||||
};
|
||||
|
||||
use fs_err as fs;
|
||||
|
||||
use super::{strip_cur_dir, to_utf};
|
||||
use crate::{
|
||||
accessible::is_running_in_accessible_mode,
|
||||
error::{Error, FinalError, Result},
|
||||
utils::{self, colors},
|
||||
utils::{self, colors, formatting::path_to_str, io::lock_and_flush_output_stdio, strip_cur_dir},
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
@ -38,31 +37,91 @@ pub enum QuestionAction {
|
||||
Decompression,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
/// Determines which action to do when there is a file conflict
|
||||
pub enum FileConflitOperation {
|
||||
#[default]
|
||||
/// Cancel the operation
|
||||
Cancel,
|
||||
/// Overwrite the existing file with the new one
|
||||
Overwrite,
|
||||
/// Rename the file
|
||||
/// It'll be put "_1" at the end of the filename or "_2","_3","_4".. if already exists
|
||||
Rename,
|
||||
/// Merge conflicting folders
|
||||
Merge,
|
||||
}
|
||||
|
||||
/// Check if QuestionPolicy flags were set, otherwise, ask user if they want to overwrite.
|
||||
pub fn user_wants_to_overwrite(path: &Path, question_policy: QuestionPolicy) -> crate::Result<bool> {
|
||||
pub fn user_wants_to_overwrite(
|
||||
path: &Path,
|
||||
question_policy: QuestionPolicy,
|
||||
question_action: QuestionAction,
|
||||
) -> crate::Result<FileConflitOperation> {
|
||||
use FileConflitOperation as Op;
|
||||
|
||||
match question_policy {
|
||||
QuestionPolicy::AlwaysYes => Ok(true),
|
||||
QuestionPolicy::AlwaysNo => Ok(false),
|
||||
QuestionPolicy::Ask => {
|
||||
let path = to_utf(strip_cur_dir(path));
|
||||
let path = Some(&*path);
|
||||
let placeholder = Some("FILE");
|
||||
Confirmation::new("Do you want to overwrite 'FILE'?", placeholder).ask(path)
|
||||
}
|
||||
QuestionPolicy::AlwaysYes => Ok(Op::Overwrite),
|
||||
QuestionPolicy::AlwaysNo => Ok(Op::Cancel),
|
||||
QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action),
|
||||
}
|
||||
}
|
||||
|
||||
/// Ask the user if they want to overwrite or rename the &Path
|
||||
pub fn ask_file_conflict_operation(path: &Path, question_action: QuestionAction) -> Result<FileConflitOperation> {
|
||||
use FileConflitOperation as Op;
|
||||
|
||||
let path = path_to_str(strip_cur_dir(path));
|
||||
match question_action {
|
||||
QuestionAction::Compression => ChoicePrompt::new(
|
||||
format!("Do you want to overwrite {path}?"),
|
||||
[
|
||||
("yes", Op::Overwrite, *colors::GREEN),
|
||||
("no", Op::Cancel, *colors::RED),
|
||||
("rename", Op::Rename, *colors::BLUE),
|
||||
],
|
||||
)
|
||||
.ask(),
|
||||
QuestionAction::Decompression => ChoicePrompt::new(
|
||||
format!("Do you want to overwrite {path}?"),
|
||||
[
|
||||
("yes", Op::Overwrite, *colors::GREEN),
|
||||
("no", Op::Cancel, *colors::RED),
|
||||
("rename", Op::Rename, *colors::BLUE),
|
||||
("merge", Op::Merge, *colors::ORANGE),
|
||||
],
|
||||
)
|
||||
.ask(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create the file if it doesn't exist and if it does then ask to overwrite it.
|
||||
/// If the user doesn't want to overwrite then we return [`Ok(None)`]
|
||||
pub fn ask_to_create_file(path: &Path, question_policy: QuestionPolicy) -> Result<Option<fs::File>> {
|
||||
pub fn ask_to_create_file(
|
||||
path: &Path,
|
||||
question_policy: QuestionPolicy,
|
||||
question_action: QuestionAction,
|
||||
) -> Result<Option<fs::File>> {
|
||||
match fs::OpenOptions::new().write(true).create_new(true).open(path) {
|
||||
Ok(w) => Ok(Some(w)),
|
||||
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {
|
||||
if user_wants_to_overwrite(path, question_policy)? {
|
||||
utils::remove_file_or_dir(path)?;
|
||||
Ok(Some(fs::File::create(path)?))
|
||||
} else {
|
||||
Ok(None)
|
||||
let action = match question_policy {
|
||||
QuestionPolicy::AlwaysYes => FileConflitOperation::Overwrite,
|
||||
QuestionPolicy::AlwaysNo => FileConflitOperation::Cancel,
|
||||
QuestionPolicy::Ask => ask_file_conflict_operation(path, question_action)?,
|
||||
};
|
||||
|
||||
match action {
|
||||
FileConflitOperation::Merge => Ok(Some(fs::File::create(path)?)),
|
||||
FileConflitOperation::Overwrite => {
|
||||
utils::remove_file_or_dir(path)?;
|
||||
Ok(Some(fs::File::create(path)?))
|
||||
}
|
||||
FileConflitOperation::Cancel => Ok(None),
|
||||
FileConflitOperation::Rename => {
|
||||
let renamed_file_path = utils::rename_for_available_filename(path);
|
||||
Ok(Some(fs::File::create(renamed_file_path)?))
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => Err(Error::from(e)),
|
||||
@ -83,7 +142,7 @@ pub fn user_wants_to_continue(
|
||||
QuestionAction::Compression => "compress",
|
||||
QuestionAction::Decompression => "decompress",
|
||||
};
|
||||
let path = to_utf(strip_cur_dir(path));
|
||||
let path = path_to_str(strip_cur_dir(path));
|
||||
let path = Some(&*path);
|
||||
let placeholder = Some("FILE");
|
||||
Confirmation::new(&format!("Do you want to {action} 'FILE'?"), placeholder).ask(path)
|
||||
@ -91,6 +150,108 @@ pub fn user_wants_to_continue(
|
||||
}
|
||||
}
|
||||
|
||||
/// Choise dialog for end user with [option1/option2/...] question.
|
||||
/// Each option is a [Choice] entity, holding a value "T" returned when that option is selected
|
||||
pub struct ChoicePrompt<'a, T: Default> {
|
||||
/// The message to be displayed before the options
|
||||
/// e.g.: "Do you want to overwrite 'FILE'?"
|
||||
pub prompt: String,
|
||||
|
||||
pub choises: Vec<Choice<'a, T>>,
|
||||
}
|
||||
|
||||
/// A single choice showed as a option to user in a [ChoicePrompt]
|
||||
/// It holds a label and a color to display to user and a real value to be returned
|
||||
pub struct Choice<'a, T: Default> {
|
||||
label: &'a str,
|
||||
value: T,
|
||||
color: &'a str,
|
||||
}
|
||||
|
||||
impl<'a, T: Default> ChoicePrompt<'a, T> {
|
||||
/// Creates a new Confirmation.
|
||||
pub fn new(prompt: impl Into<String>, choises: impl IntoIterator<Item = (&'a str, T, &'a str)>) -> Self {
|
||||
Self {
|
||||
prompt: prompt.into(),
|
||||
choises: choises
|
||||
.into_iter()
|
||||
.map(|(label, value, color)| Choice { label, value, color })
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates user message and receives a input to be compared with choises "label"
|
||||
/// and returning the real value of the choise selected
|
||||
pub fn ask(mut self) -> crate::Result<T> {
|
||||
let message = self.prompt;
|
||||
|
||||
#[cfg(not(feature = "allow_piped_choice"))]
|
||||
if !stdin().is_terminal() {
|
||||
eprintln!("{}", message);
|
||||
eprintln!("Pass --yes to proceed");
|
||||
return Ok(T::default());
|
||||
}
|
||||
|
||||
let _locks = lock_and_flush_output_stdio()?;
|
||||
let mut stdin_lock = stdin().lock();
|
||||
|
||||
// Ask the same question to end while no valid answers are given
|
||||
loop {
|
||||
let choice_prompt = if is_running_in_accessible_mode() {
|
||||
self.choises
|
||||
.iter()
|
||||
.map(|choise| format!("{}{}{}", choise.color, choise.label, *colors::RESET))
|
||||
.collect::<Vec<_>>()
|
||||
.join("/")
|
||||
} else {
|
||||
let choises = self
|
||||
.choises
|
||||
.iter()
|
||||
.map(|choise| {
|
||||
format!(
|
||||
"{}{}{}",
|
||||
choise.color,
|
||||
choise
|
||||
.label
|
||||
.chars()
|
||||
.nth(0)
|
||||
.expect("dev error, should be reported, we checked this won't happen"),
|
||||
*colors::RESET
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join("/");
|
||||
|
||||
format!("[{}]", choises)
|
||||
};
|
||||
|
||||
eprintln!("{} {}", message, choice_prompt);
|
||||
|
||||
let mut answer = String::new();
|
||||
let bytes_read = stdin_lock.read_line(&mut answer)?;
|
||||
|
||||
if bytes_read == 0 {
|
||||
let error = FinalError::with_title("Unexpected EOF when asking question.")
|
||||
.detail("When asking the user:")
|
||||
.detail(format!(" \"{message}\""))
|
||||
.detail("Expected one of the options as answer, but found EOF instead.")
|
||||
.hint("If using Ouch in scripting, consider using `--yes` and `--no`.");
|
||||
|
||||
return Err(error.into());
|
||||
}
|
||||
|
||||
answer.make_ascii_lowercase();
|
||||
let answer = answer.trim();
|
||||
|
||||
let chosen_index = self.choises.iter().position(|choise| choise.label.starts_with(answer));
|
||||
|
||||
if let Some(i) = chosen_index {
|
||||
return Ok(self.choises.remove(i).value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Confirmation dialog for end user with [Y/n] question.
|
||||
///
|
||||
/// If the placeholder is found in the prompt text, it will be replaced to form the final message.
|
||||
@ -121,10 +282,20 @@ impl<'a> Confirmation<'a> {
|
||||
(Some(placeholder), Some(subs)) => Cow::Owned(self.prompt.replace(placeholder, subs)),
|
||||
};
|
||||
|
||||
#[cfg(not(feature = "allow_piped_choice"))]
|
||||
if !stdin().is_terminal() {
|
||||
eprintln!("{}", message);
|
||||
eprintln!("Pass --yes to proceed");
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let _locks = lock_and_flush_output_stdio()?;
|
||||
let mut stdin_lock = stdin().lock();
|
||||
|
||||
// Ask the same question to end while no valid answers are given
|
||||
loop {
|
||||
if is_running_in_accessible_mode() {
|
||||
print!(
|
||||
eprintln!(
|
||||
"{} {}yes{}/{}no{}: ",
|
||||
message,
|
||||
*colors::GREEN,
|
||||
@ -133,7 +304,7 @@ impl<'a> Confirmation<'a> {
|
||||
*colors::RESET
|
||||
);
|
||||
} else {
|
||||
print!(
|
||||
eprintln!(
|
||||
"{} [{}Y{}/{}n{}] ",
|
||||
message,
|
||||
*colors::GREEN,
|
||||
@ -142,10 +313,9 @@ impl<'a> Confirmation<'a> {
|
||||
*colors::RESET
|
||||
);
|
||||
}
|
||||
io::stdout().flush()?;
|
||||
|
||||
let mut answer = String::new();
|
||||
let bytes_read = io::stdin().read_line(&mut answer)?;
|
||||
let bytes_read = stdin_lock.read_line(&mut answer)?;
|
||||
|
||||
if bytes_read == 0 {
|
||||
let error = FinalError::with_title("Unexpected EOF when asking question.")
|
||||
|
BIN
tests/data/testfile.rar3.rar.gz
Normal file
BIN
tests/data/testfile.rar3.rar.gz
Normal file
Binary file not shown.
BIN
tests/data/testfile.rar5.rar
Normal file
BIN
tests/data/testfile.rar5.rar
Normal file
Binary file not shown.
1010
tests/integration.rs
1010
tests/integration.rs
File diff suppressed because it is too large
Load Diff
@ -17,11 +17,12 @@ fn sanity_check_through_mime() {
|
||||
write_random_content(test_file, &mut SmallRng::from_entropy());
|
||||
|
||||
let formats = [
|
||||
"tar", "zip", "tar.gz", "tgz", "tbz", "tbz2", "txz", "tlzma", "tzst", "tar.bz", "tar.bz2", "tar.lzma",
|
||||
"7z", "tar", "zip", "tar.gz", "tgz", "tbz", "tbz2", "txz", "tlzma", "tzst", "tar.bz", "tar.bz2", "tar.lzma",
|
||||
"tar.xz", "tar.zst",
|
||||
];
|
||||
|
||||
let expected_mimes = [
|
||||
"application/x-7z-compressed",
|
||||
"application/x-tar",
|
||||
"application/zip",
|
||||
"application/gzip",
|
||||
|
@ -0,0 +1,13 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output\", dir)"
|
||||
---
|
||||
[ERROR] Cannot compress to 'output'.
|
||||
- You shall supply the compression format
|
||||
|
||||
hint: Try adding supported extensions (see --help):
|
||||
hint: ouch compress <FILES>... output.tar.gz
|
||||
hint: ouch compress <FILES>... output.zip
|
||||
hint:
|
||||
hint: Alternatively, you can overwrite this option by using the '--format' flag:
|
||||
hint: ouch compress <FILES>... output --format tar.gz
|
@ -0,0 +1,13 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch decompress a\", dir)"
|
||||
---
|
||||
[ERROR] Cannot decompress files
|
||||
- Files with missing extensions: <TMP_DIR>/a
|
||||
- Decompression formats are detected automatically from file extension
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Alternatively, you can pass an extension to the '--format' flag:
|
||||
hint: ouch decompress <TMP_DIR>/a --format tar.gz
|
@ -0,0 +1,11 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch decompress a b.unknown\", dir)"
|
||||
---
|
||||
[ERROR] Cannot decompress files
|
||||
- Files with unsupported extensions: <TMP_DIR>/b.unknown
|
||||
- Files with missing extensions: <TMP_DIR>/a
|
||||
- Decompression formats are detected automatically from file extension
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
@ -0,0 +1,13 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch decompress b.unknown\", dir)"
|
||||
---
|
||||
[ERROR] Cannot decompress files
|
||||
- Files with unsupported extensions: <TMP_DIR>/b.unknown
|
||||
- Decompression formats are detected automatically from file extension
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Alternatively, you can pass an extension to the '--format' flag:
|
||||
hint: ouch decompress <TMP_DIR>/b.unknown --format tar.gz
|
@ -0,0 +1,13 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch decompress a\", dir)"
|
||||
---
|
||||
[ERROR] Cannot decompress files
|
||||
- Files with missing extensions: <TMP_DIR>/a
|
||||
- Decompression formats are detected automatically from file extension
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Alternatively, you can pass an extension to the '--format' flag:
|
||||
hint: ouch decompress <TMP_DIR>/a --format tar.gz
|
@ -0,0 +1,11 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch decompress a b.unknown\", dir)"
|
||||
---
|
||||
[ERROR] Cannot decompress files
|
||||
- Files with unsupported extensions: <TMP_DIR>/b.unknown
|
||||
- Files with missing extensions: <TMP_DIR>/a
|
||||
- Decompression formats are detected automatically from file extension
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
@ -0,0 +1,13 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch decompress b.unknown\", dir)"
|
||||
---
|
||||
[ERROR] Cannot decompress files
|
||||
- Files with unsupported extensions: <TMP_DIR>/b.unknown
|
||||
- Decompression formats are detected automatically from file extension
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Alternatively, you can pass an extension to the '--format' flag:
|
||||
hint: ouch decompress <TMP_DIR>/b.unknown --format tar.gz
|
14
tests/snapshots/ui__ui_test_err_format_flag_with_rar-1.snap
Normal file
14
tests/snapshots/ui__ui_test_err_format_flag_with_rar-1.snap
Normal file
@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output --format tar.gz.unknown\", dir)"
|
||||
---
|
||||
[ERROR] Failed to parse `--format tar.gz.unknown`
|
||||
- Unsupported extension 'unknown'
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Examples:
|
||||
hint: --format tar
|
||||
hint: --format gz
|
||||
hint: --format tar.gz
|
14
tests/snapshots/ui__ui_test_err_format_flag_with_rar-2.snap
Normal file
14
tests/snapshots/ui__ui_test_err_format_flag_with_rar-2.snap
Normal file
@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output --format targz\", dir)"
|
||||
---
|
||||
[ERROR] Failed to parse `--format targz`
|
||||
- Unsupported extension 'targz'
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Examples:
|
||||
hint: --format tar
|
||||
hint: --format gz
|
||||
hint: --format tar.gz
|
14
tests/snapshots/ui__ui_test_err_format_flag_with_rar-3.snap
Normal file
14
tests/snapshots/ui__ui_test_err_format_flag_with_rar-3.snap
Normal file
@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output --format .tar.$#!@.rest\", dir)"
|
||||
---
|
||||
[ERROR] Failed to parse `--format .tar.$#!@.rest`
|
||||
- Unsupported extension '$#!@'
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, rar, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Examples:
|
||||
hint: --format tar
|
||||
hint: --format gz
|
||||
hint: --format tar.gz
|
@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output --format tar.gz.unknown\", dir)"
|
||||
---
|
||||
[ERROR] Failed to parse `--format tar.gz.unknown`
|
||||
- Unsupported extension 'unknown'
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Examples:
|
||||
hint: --format tar
|
||||
hint: --format gz
|
||||
hint: --format tar.gz
|
@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output --format targz\", dir)"
|
||||
---
|
||||
[ERROR] Failed to parse `--format targz`
|
||||
- Unsupported extension 'targz'
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Examples:
|
||||
hint: --format tar
|
||||
hint: --format gz
|
||||
hint: --format tar.gz
|
@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output --format .tar.$#!@.rest\", dir)"
|
||||
---
|
||||
[ERROR] Failed to parse `--format .tar.$#!@.rest`
|
||||
- Unsupported extension '$#!@'
|
||||
|
||||
hint: Supported extensions are: tar, zip, bz, bz2, bz3, gz, lz4, xz, lzma, sz, zst, 7z
|
||||
hint: Supported aliases are: tgz, tbz, tlz4, txz, tzlma, tsz, tzst
|
||||
hint:
|
||||
hint: Examples:
|
||||
hint: --format tar
|
||||
hint: --format gz
|
||||
hint: --format tar.gz
|
6
tests/snapshots/ui__ui_test_err_missing_files-2.snap
Normal file
6
tests/snapshots/ui__ui_test_err_missing_files-2.snap
Normal file
@ -0,0 +1,6 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch decompress a b\", dir)"
|
||||
---
|
||||
[ERROR] failed to canonicalize path `a`
|
||||
- File not found
|
6
tests/snapshots/ui__ui_test_err_missing_files-3.snap
Normal file
6
tests/snapshots/ui__ui_test_err_missing_files-3.snap
Normal file
@ -0,0 +1,6 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch list a b\", dir)"
|
||||
---
|
||||
[ERROR] failed to canonicalize path `a`
|
||||
- File not found
|
6
tests/snapshots/ui__ui_test_err_missing_files.snap
Normal file
6
tests/snapshots/ui__ui_test_err_missing_files.snap
Normal file
@ -0,0 +1,6 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress a b\", dir)"
|
||||
---
|
||||
[ERROR] failed to canonicalize path `a`
|
||||
- File not found
|
5
tests/snapshots/ui__ui_test_ok_compress-2.snap
Normal file
5
tests/snapshots/ui__ui_test_ok_compress-2.snap
Normal file
@ -0,0 +1,5 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output.gz\", dir)"
|
||||
---
|
||||
[INFO] Successfully compressed 'output.gz'
|
6
tests/snapshots/ui__ui_test_ok_compress.snap
Normal file
6
tests/snapshots/ui__ui_test_ok_compress.snap
Normal file
@ -0,0 +1,6 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output.zip\", dir)"
|
||||
---
|
||||
[INFO] Compressing 'input'
|
||||
[INFO] Successfully compressed 'output.zip'
|
6
tests/snapshots/ui__ui_test_ok_decompress.snap
Normal file
6
tests/snapshots/ui__ui_test_ok_decompress.snap
Normal file
@ -0,0 +1,6 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch decompress output.zst\", dir)"
|
||||
---
|
||||
[INFO] Successfully decompressed archive in current directory
|
||||
[INFO] Files unpacked: 1
|
@ -0,0 +1,13 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: stdout_lines
|
||||
---
|
||||
{
|
||||
"",
|
||||
"[INFO] Files unpacked: 4",
|
||||
"[INFO] Successfully decompressed archive in <TMP_DIR>/outputs",
|
||||
"[INFO] extracted ( 0.00 B) \"outputs/inputs\"",
|
||||
"[INFO] extracted ( 0.00 B) \"outputs/inputs/input\"",
|
||||
"[INFO] extracted ( 0.00 B) \"outputs/inputs/input2\"",
|
||||
"[INFO] extracted ( 0.00 B) \"outputs/inputs/input3\"",
|
||||
}
|
@ -0,0 +1,6 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output1 --format tar.gz\", dir)"
|
||||
---
|
||||
[INFO] Compressing 'input'
|
||||
[INFO] Successfully compressed 'output1'
|
@ -0,0 +1,6 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output2 --format .tar.gz\", dir)"
|
||||
---
|
||||
[INFO] Compressing 'input'
|
||||
[INFO] Successfully compressed 'output2'
|
@ -0,0 +1,6 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output1 --format tar.gz\", dir)"
|
||||
---
|
||||
[INFO] Compressing 'input'
|
||||
[INFO] Successfully compressed 'output1'
|
@ -0,0 +1,6 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "run_ouch(\"ouch compress input output2 --format .tar.gz\", dir)"
|
||||
---
|
||||
[INFO] Compressing 'input'
|
||||
[INFO] Successfully compressed 'output2'
|
27
tests/snapshots/ui__ui_test_usage_help_flag-2.snap
Normal file
27
tests/snapshots/ui__ui_test_usage_help_flag-2.snap
Normal file
@ -0,0 +1,27 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "output_to_string(ouch!(\"-h\"))"
|
||||
snapshot_kind: text
|
||||
---
|
||||
A command-line utility for easily compressing and decompressing files and directories.
|
||||
|
||||
Usage: <OUCH_BIN> [OPTIONS] <COMMAND>
|
||||
|
||||
Commands:
|
||||
compress Compress one or more files into one output file [aliases: c]
|
||||
decompress Decompresses one or more files, optionally into another folder [aliases: d]
|
||||
list List contents of an archive [aliases: l, ls]
|
||||
help Print this message or the help of the given subcommand(s)
|
||||
|
||||
Options:
|
||||
-y, --yes Skip [Y/n] questions, default to yes
|
||||
-n, --no Skip [Y/n] questions, default to no
|
||||
-A, --accessible Activate accessibility mode, reducing visual noise [env: ACCESSIBLE=]
|
||||
-H, --hidden Ignore hidden files
|
||||
-q, --quiet Silence output
|
||||
-g, --gitignore Ignore files matched by git's ignore files
|
||||
-f, --format <FORMAT> Specify the format of the archive
|
||||
-p, --password <PASSWORD> Decompress or list with password
|
||||
-c, --threads <THREADS> Concurrent working threads
|
||||
-h, --help Print help (see more with '--help')
|
||||
-V, --version Print version
|
54
tests/snapshots/ui__ui_test_usage_help_flag.snap
Normal file
54
tests/snapshots/ui__ui_test_usage_help_flag.snap
Normal file
@ -0,0 +1,54 @@
|
||||
---
|
||||
source: tests/ui.rs
|
||||
expression: "output_to_string(ouch!(\"--help\"))"
|
||||
snapshot_kind: text
|
||||
---
|
||||
A command-line utility for easily compressing and decompressing files and directories.
|
||||
|
||||
Supported formats: tar, zip, gz, 7z, xz/lzma, bz/bz2, bz3, lz4, sz (Snappy), zst, rar and br.
|
||||
|
||||
Repository: https://github.com/ouch-org/ouch
|
||||
|
||||
Usage: <OUCH_BIN> [OPTIONS] <COMMAND>
|
||||
|
||||
Commands:
|
||||
compress Compress one or more files into one output file [aliases: c]
|
||||
decompress Decompresses one or more files, optionally into another folder [aliases: d]
|
||||
list List contents of an archive [aliases: l, ls]
|
||||
help Print this message or the help of the given subcommand(s)
|
||||
|
||||
Options:
|
||||
-y, --yes
|
||||
Skip [Y/n] questions, default to yes
|
||||
|
||||
-n, --no
|
||||
Skip [Y/n] questions, default to no
|
||||
|
||||
-A, --accessible
|
||||
Activate accessibility mode, reducing visual noise
|
||||
|
||||
[env: ACCESSIBLE=]
|
||||
|
||||
-H, --hidden
|
||||
Ignore hidden files
|
||||
|
||||
-q, --quiet
|
||||
Silence output
|
||||
|
||||
-g, --gitignore
|
||||
Ignore files matched by git's ignore files
|
||||
|
||||
-f, --format <FORMAT>
|
||||
Specify the format of the archive
|
||||
|
||||
-p, --password <PASSWORD>
|
||||
Decompress or list with password
|
||||
|
||||
-c, --threads <THREADS>
|
||||
Concurrent working threads
|
||||
|
||||
-h, --help
|
||||
Print help (see a summary with '-h')
|
||||
|
||||
-V, --version
|
||||
Print version
|
187
tests/ui.rs
Normal file
187
tests/ui.rs
Normal file
@ -0,0 +1,187 @@
|
||||
/// Snapshot tests for Ouch's output.
|
||||
///
|
||||
/// See CONTRIBUTING.md for a brief guide on how to use [`insta`] for these tests.
|
||||
/// [`insta`]: https://docs.rs/insta
|
||||
#[macro_use]
|
||||
mod utils;
|
||||
|
||||
use std::{collections::BTreeSet, ffi::OsStr, io, path::Path, process::Output};
|
||||
|
||||
use insta::assert_snapshot as ui;
|
||||
use regex::Regex;
|
||||
|
||||
use crate::utils::create_files_in;
|
||||
|
||||
/// Creates a temporary directory and hands back a `'static` path to it.
///
/// The `PathBuf` is intentionally leaked (via `Box::leak`) so tests can treat
/// the path as `'static`; the `TempDir` guard is returned alongside it so the
/// directory is still removed when the guard is dropped.
fn testdir() -> io::Result<(tempfile::TempDir, &'static Path)> {
    let dir = tempfile::tempdir()?;
    let leaked: &'static Path = Box::leak(dir.path().to_path_buf().into_boxed_path());
    Ok((dir, leaked))
}
|
||||
|
||||
fn run_ouch(argv: &str, dir: &Path) -> String {
|
||||
let output = utils::cargo_bin()
|
||||
.args(argv.split_whitespace().skip(1))
|
||||
.current_dir(dir)
|
||||
.output()
|
||||
.unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"Failed to run command\n\
|
||||
argv: {argv}\n\
|
||||
path: {dir:?}\n\
|
||||
err: {err}"
|
||||
)
|
||||
});
|
||||
|
||||
redact_paths(&output_to_string(output), dir)
|
||||
}
|
||||
|
||||
/// Remove random tempdir paths from snapshots to make them deterministic.
|
||||
fn redact_paths(text: &str, dir: &Path) -> String {
|
||||
let dir_name = dir.file_name().and_then(OsStr::to_str).unwrap();
|
||||
|
||||
// this regex should be good as long as the path does not contain whitespace characters
|
||||
let re = Regex::new(&format!(r"\S*[/\\]{dir_name}[/\\]")).unwrap();
|
||||
re.replace_all(text, "<TMP_DIR>/").into()
|
||||
}
|
||||
|
||||
/// Concatenates a process's stdout and stderr into a single `String`.
///
/// Panics if either stream is not valid UTF-8 (acceptable for test output).
fn output_to_string(output: Output) -> String {
    let mut text = String::from_utf8(output.stdout).unwrap();
    text.push_str(std::str::from_utf8(&output.stderr).unwrap());
    text
}
|
||||
|
||||
/// `compress` with an output file that has no extension must fail with a
/// hint, since ouch cannot infer the target format.
#[test]
fn ui_test_err_compress_missing_extension() {
    let (_dropper, dir) = testdir().unwrap();

    // prepare
    create_files_in(dir, &["input"]);

    // `output` has no extension, so format detection fails
    ui!(run_ouch("ouch compress input output", dir));
}
|
||||
|
||||
/// `decompress` on files with missing or unknown extensions must fail with a
/// hint suggesting `--format`.
#[test]
fn ui_test_err_decompress_missing_extension() {
    let (_dropper, dir) = testdir().unwrap();

    // `a` has no extension; `b.unknown` has an unsupported one
    create_files_in(dir, &["a", "b.unknown"]);

    // Snapshot name varies with the `unrar` feature, because the error's
    // "Supported extensions" hint lists `rar` only when it is compiled in.
    let snapshot = concat_snapshot_filename_rar_feature("ui_test_err_decompress_missing_extension");
    ui!(format!("{snapshot}-1"), run_ouch("ouch decompress a", dir));
    ui!(format!("{snapshot}-2"), run_ouch("ouch decompress a b.unknown", dir));
    ui!(format!("{snapshot}-3"), run_ouch("ouch decompress b.unknown", dir));
}
|
||||
|
||||
/// Every subcommand must report a canonicalization error for paths that do
/// not exist.
#[test]
fn ui_test_err_missing_files() {
    let (_dropper, dir) = testdir().unwrap();

    // `a` and `b` are never created, so each command fails on path lookup
    ui!(run_ouch("ouch compress a b", dir));
    ui!(run_ouch("ouch decompress a b", dir));
    ui!(run_ouch("ouch list a b", dir));
}
|
||||
|
||||
/// Invalid `--format` values must produce a parse error with examples.
#[test]
fn ui_test_err_format_flag() {
    let (_dropper, dir) = testdir().unwrap();

    // prepare
    create_files_in(dir, &["input"]);

    // Snapshot name varies with the `unrar` feature, because the error's
    // "Supported extensions" hint lists `rar` only when it is compiled in.
    let snapshot = concat_snapshot_filename_rar_feature("ui_test_err_format_flag");
    // unknown trailing extension
    ui!(
        format!("{snapshot}-1"),
        run_ouch("ouch compress input output --format tar.gz.unknown", dir),
    );
    // missing dot separator ("targz" is not an alias)
    ui!(
        format!("{snapshot}-2"),
        run_ouch("ouch compress input output --format targz", dir),
    );
    // garbage segment in the middle of the extension chain
    ui!(
        format!("{snapshot}-3"),
        run_ouch("ouch compress input output --format .tar.$#!@.rest", dir),
    );
}
|
||||
|
||||
/// Valid `--format` values must compress successfully, with or without a
/// leading dot.
#[test]
fn ui_test_ok_format_flag() {
    let (_dropper, dir) = testdir().unwrap();

    // prepare
    create_files_in(dir, &["input"]);

    let snapshot = concat_snapshot_filename_rar_feature("ui_test_ok_format_flag");
    // plain form
    ui!(
        format!("{snapshot}-1"),
        run_ouch("ouch compress input output1 --format tar.gz", dir),
    );
    // leading-dot form is accepted too
    ui!(
        format!("{snapshot}-2"),
        run_ouch("ouch compress input output2 --format .tar.gz", dir),
    );
}
|
||||
|
||||
/// Successful compression output for an archive format (zip) and a
/// single-file format (gz).
#[test]
fn ui_test_ok_compress() {
    let (_dropper, dir) = testdir().unwrap();

    // prepare
    create_files_in(dir, &["input"]);

    ui!(run_ouch("ouch compress input output.zip", dir));
    ui!(run_ouch("ouch compress input output.gz", dir));
}
|
||||
|
||||
/// Round-trip: compress a file to zst, then snapshot the decompression output.
#[test]
fn ui_test_ok_decompress() {
    let (_dropper, dir) = testdir().unwrap();

    // prepare: create the archive to decompress (output not snapshotted)
    create_files_in(dir, &["input"]);
    run_ouch("ouch compress input output.zst", dir);

    ui!(run_ouch("ouch decompress output.zst", dir));
}
|
||||
|
||||
/// Decompressing a tar.zst archive of a directory into `--dir` must unpack
/// every file. Linux-only: the snapshot records exact extraction messages.
#[cfg(target_os = "linux")]
#[test]
fn ui_test_ok_decompress_multiple_files() {
    let (_dropper, dir) = testdir().unwrap();

    let inputs_dir = dir.join("inputs");
    std::fs::create_dir(&inputs_dir).unwrap();

    let outputs_dir = dir.join("outputs");
    std::fs::create_dir(&outputs_dir).unwrap();

    // prepare
    create_files_in(&inputs_dir, &["input", "input2", "input3"]);

    let compress_command = format!("ouch compress {} output.tar.zst", inputs_dir.to_str().unwrap());
    run_ouch(&compress_command, dir);

    let decompress_command = format!("ouch decompress output.tar.zst --dir {}", outputs_dir.to_str().unwrap());
    let stdout = run_ouch(&decompress_command, dir);
    // Collect lines into a BTreeSet so the snapshot does not depend on the
    // order in which entries are extracted.
    let stdout_lines = stdout.split('\n').collect::<BTreeSet<_>>();
    insta::assert_debug_snapshot!(stdout_lines);
}
|
||||
|
||||
/// Snapshots the long (`--help`) and short (`-h`) help output.
#[test]
fn ui_test_usage_help_flag() {
    insta::with_settings!({filters => vec![
        // binary name is `ouch.exe` on Windows and `ouch` on everywhere else
        (r"(Usage:.*\b)ouch(\.exe)?\b", "${1}<OUCH_BIN>"),
    ]}, {
        ui!(output_to_string(ouch!("--help")));
        ui!(output_to_string(ouch!("-h")));
    });
}
|
||||
|
||||
/// Concatenates `with_rar` or `without_rar` if the feature is toggled or not.
fn concat_snapshot_filename_rar_feature(name: &str) -> String {
    match cfg!(feature = "unrar") {
        true => format!("{name}_with_rar"),
        false => format!("{name}_without_rar"),
    }
}
|
@ -1,16 +1,24 @@
|
||||
use std::{env, io::Write, path::PathBuf};
|
||||
// This warning is unavoidable when reusing testing utils.
|
||||
#![allow(dead_code)]
|
||||
|
||||
use std::{
|
||||
env,
|
||||
io::Write,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use assert_cmd::Command;
|
||||
use fs_err as fs;
|
||||
use rand::{Rng, RngCore};
|
||||
|
||||
/// Run ouch with the provided arguments, returns [`assert_cmd::Output`]
///
/// The expansion is an expression (no trailing semicolon) so callers can use
/// the returned output directly, e.g. `output_to_string(ouch!("--help"))`.
/// `--yes` is always appended so a test never blocks on a [Y/n] prompt.
#[macro_export]
macro_rules! ouch {
    ($($e:expr),*) => {
        $crate::utils::cargo_bin()
            $(.arg($e))*
            .arg("--yes")
            .unwrap()
    }
}
|
||||
|
||||
@ -27,16 +35,32 @@ pub fn cargo_bin() -> Command {
|
||||
.unwrap_or_else(|| Command::cargo_bin("ouch").expect("Failed to find ouch executable"))
|
||||
}
|
||||
|
||||
// write random content to a file
|
||||
/// Creates files in the specified directory.
///
/// ## Example
///
/// ```no_run
/// let (_dropper, dir) = testdir().unwrap();
/// create_files_in(dir, &["file1.txt", "file2.txt"]);
/// ```
pub fn create_files_in(dir: &Path, files: &[&str]) {
    files.iter().for_each(|name| {
        // Create (or truncate) the file and immediately drop the handle.
        std::fs::File::create(dir.join(name)).unwrap();
    });
}
|
||||
|
||||
/// Write random content to a file
|
||||
pub fn write_random_content(file: &mut impl Write, rng: &mut impl RngCore) {
|
||||
let mut data = Vec::new();
|
||||
data.resize(rng.gen_range(0..8192), 0);
|
||||
let mut data = vec![0; rng.gen_range(0..8192)];
|
||||
|
||||
rng.fill_bytes(&mut data);
|
||||
file.write_all(&data).unwrap();
|
||||
}
|
||||
|
||||
// check that two directories have the exact same content recursively
|
||||
// checks equality of file types if preserve_permissions is true, ignored on non-unix
|
||||
/// Check that two directories have the exact same content recursively.
|
||||
/// Checks equality of file types if `preserve_permissions` is true; ignored on non-unix.
|
||||
// Silence clippy warning that triggers because of the `#[cfg(unix)]` on Windows.
|
||||
#[allow(clippy::only_used_in_recursion)]
|
||||
pub fn assert_same_directory(x: impl Into<PathBuf>, y: impl Into<PathBuf>, preserve_permissions: bool) {
|
||||
fn read_dir(dir: impl Into<PathBuf>) -> impl Iterator<Item = fs::DirEntry> {
|
||||
let mut dir: Vec<_> = fs::read_dir(dir).unwrap().map(|entry| entry.unwrap()).collect();
|
||||
@ -64,7 +88,7 @@ pub fn assert_same_directory(x: impl Into<PathBuf>, y: impl Into<PathBuf>, prese
|
||||
|
||||
if ft_x.is_dir() && ft_y.is_dir() {
|
||||
assert_same_directory(x.path(), y.path(), preserve_permissions);
|
||||
} else if ft_x.is_file() && ft_y.is_file() {
|
||||
} else if (ft_x.is_file() && ft_y.is_file()) || (ft_x.is_symlink() && ft_y.is_symlink()) {
|
||||
assert_eq!(meta_x.len(), meta_y.len());
|
||||
assert_eq!(fs::read(x.path()).unwrap(), fs::read(y.path()).unwrap());
|
||||
} else {
|
||||
|
Loading…
x
Reference in New Issue
Block a user