mirror of https://github.com/ouch-org/ouch.git, synced 2025-06-06 11:35:45 +00:00

Merge branch 'master' into master

This commit is contained in:
commit 9b7b863e94
20 .github/workflows/build.yml (vendored)
@@ -104,13 +104,15 @@ jobs:
- name: Install dependencies for musl libc
run: |
sudo apt-get update
sudo apt-get install musl-tools
sudo apt-get install help2man musl-tools

- name: Run cargo build
uses: actions-rs/cargo@v1
with:
command: build
args: --release --target x86_64-unknown-linux-musl
env:
GEN_COMPLETIONS: 1

- name: Run cargo test
uses: actions-rs/cargo@v1
@@ -118,6 +120,11 @@ jobs:
command: test
args: --target x86_64-unknown-linux-musl

- name: Build man page and find completions
run: |
help2man target/x86_64-unknown-linux-musl/release/ouch > ouch.1
cp -r target/x86_64-unknown-linux-musl/release/build/ouch-*/out/completions .

- name: Strip binary
run: strip target/x86_64-unknown-linux-musl/release/ouch

@@ -127,6 +134,17 @@ jobs:
name: 'ouch-x86_64-linux-musl'
path: target/x86_64-unknown-linux-musl/release/ouch

- name: Upload completions
uses: actions/upload-artifact@v2
with:
name: completions
path: completions

- name: Upload man page
uses: actions/upload-artifact@v2
with:
name: ouch.1
path: ouch.1

x86_64_glibc:
name: Ubuntu 20.04 (glibc)
@@ -4,12 +4,12 @@ Feel free to open an issue anytime you wish to ask a question, suggest a feature

# Requirements

1. Be kind, considerate and respectfull.
2. If editing .rs files, run `rustfmt` on them before commiting.
1. Be nice to other people.
2. If editing the Rust source code, remember to run `rustfmt` (otherwise, CI will warn you the code was not properly formatted).

Note that we are using `unstable` features of `rustfmt`, so you will need to change your toolchain to nightly.
Note: we are using `unstable` features of `rustfmt`! Nightly toolchain is required (will likely be installed automatically, cause the toolchain was specified in the project root).

# Suggestions

1. Ask for some guidance before solving an error if you feel like it.
2. If editing Rust code, run `clippy` before commiting.
1. If you wish to, you can ask for some guidance before solving an issue.
2. Run `cargo clippy` too.
17 Cargo.lock (generated)
@@ -114,6 +114,15 @@ dependencies = [
"syn",
]

[[package]]
name = "clap_generate"
version = "3.0.0-beta.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "097ab5db1c3417442270cd57c8dd39f6c3114d3ce09d595f9efddbb1fcfaa799"
dependencies = [
"clap",
]

[[package]]
name = "crc32fast"
version = "1.2.1"
@@ -148,6 +157,12 @@ dependencies = [
"miniz_oxide",
]

[[package]]
name = "fs-err"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ebd3504ad6116843b8375ad70df74e7bfe83cac77a1f3fe73200c844d43bfe0"

[[package]]
name = "getrandom"
version = "0.2.3"
@@ -283,7 +298,9 @@ dependencies = [
"atty",
"bzip2",
"clap",
"clap_generate",
"flate2",
"fs-err",
"infer",
"libc",
"once_cell",
@@ -15,6 +15,7 @@ description = "A command-line utility for easily compressing and decompressing f
[dependencies]
clap = "=3.0.0-beta.5" # Keep it pinned while in beta!
atty = "0.2.14"
fs-err = "2.6.0"
once_cell = "1.8.0"
walkdir = "2.3.2"
bzip2 = "0.4.3"
@@ -25,6 +26,10 @@ zip = { version = "0.5.13", default-features = false, features = ["defl
flate2 = { version = "1.0.22", default-features = false, features = ["zlib"] }
zstd = { version = "0.9.0", default-features = false, features = ["thin"] }

[build-dependencies]
clap = "=3.0.0-beta.5"
clap_generate = "=3.0.0-beta.5"

[dev-dependencies]
tempfile = "3.2.0"
infer = "0.5.0"
@@ -35,11 +35,11 @@ ouch decompress a.zip b.tar.gz c.tar
ouch d a.zip
```

You can redirect the decompression results to another folder with the `-o/--output` flag.
You can redirect the decompression results to another folder with the `-d/--dir` flag.

```sh
# Decompress 'summer_vacation.zip' inside of new folder 'pictures'
ouch decompress summer_vacation.zip -o pictures
ouch decompress summer_vacation.zip -d pictures
```

### Compressing
22 build.rs (new file)
@@ -0,0 +1,22 @@
use clap::{ArgEnum, IntoApp};
use clap_generate::{generate_to, Shell};

use std::{env, fs::create_dir_all, path::Path};

include!("src/opts.rs");

fn main() {
    println!("cargo:rerun-if-env-changed=GEN_COMPLETIONS");

    if env::var_os("GEN_COMPLETIONS") != Some("1".into()) {
        return;
    }

    let out = &Path::new(&env::var_os("OUT_DIR").unwrap()).join("completions");
    create_dir_all(out).unwrap();
    let app = &mut Opts::into_app();

    for shell in Shell::value_variants() {
        generate_to(*shell, app, "ouch", out).unwrap();
    }
}
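Side note on the build script above: the same clap_generate call can be made outside a build script, which is sometimes handy for checking what it emits. A hedged sketch that generates only the Bash completion into a local directory, under the same clap/clap_generate 3.0.0-beta.5 versions pinned in Cargo.toml; the `ouch::Opts` import relies on the re-export added to src/lib.rs further down, and the `completions` output directory is illustrative.

```rust
use clap::IntoApp;
use clap_generate::{generate_to, Shell};
use ouch::Opts;

fn main() {
    let out_dir = "completions"; // illustrative output directory
    std::fs::create_dir_all(out_dir).expect("could not create completions dir");

    // Same call the build script makes once per shell, restricted to Bash here.
    let mut app = Opts::into_app();
    generate_to(Shell::Bash, &mut app, "ouch", out_dir).expect("failed to generate completion");
}
```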
@@ -1,17 +1,20 @@
//! Contains Tar-specific building and unpacking functions

use std::{
env, fs,
env,
io::prelude::*,
path::{Path, PathBuf},
};

use fs_err as fs;
use tar;
use walkdir::WalkDir;

use crate::{
error::FinalError,
info,
utils::{self, Bytes, QuestionPolicy},
utils::{self, Bytes},
QuestionPolicy,
};

pub fn unpack_archive(
@@ -62,7 +65,12 @@ where
builder.append_dir(path, path)?;
} else {
let mut file = fs::File::open(path)?;
builder.append_file(path, &mut file)?;
builder.append_file(path, file.file_mut()).map_err(|err| {
FinalError::with_title("Could not create archive")
.detail("Unexpected error while trying to read file")
.detail(format!("Error: {}.", err))
.into_owned()
})?;
}
}
env::set_current_dir(previous_location)?;
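A note on the `append_file` change above: `tar::Builder::append_file` expects a `&mut std::fs::File`, while the file now comes from fs-err, whose `File` wrapper adds the offending path to I/O error messages, so the new code reaches the inner handle with `file_mut()`. A small, hedged sketch of that pattern on its own; the `append_one` helper and the in-memory archive are illustrative, not part of the commit.

```rust
use std::io::Write;
use std::path::Path;

use fs_err as fs;

// Open with fs-err so failures mention the path, then give tar the inner
// std::fs::File that its append_file signature requires.
fn append_one<W: Write>(builder: &mut tar::Builder<W>, path: &Path) -> std::io::Result<()> {
    let mut file = fs::File::open(path)?;
    builder.append_file(path, file.file_mut())
}

fn main() -> std::io::Result<()> {
    // Archive into a Vec<u8> purely for illustration.
    let mut builder = tar::Builder::new(Vec::new());
    append_one(&mut builder, Path::new("Cargo.toml"))?;
    builder.finish()
}
```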
@@ -1,17 +1,20 @@
//! Contains Zip-specific building and unpacking functions

use std::{
env, fs,
env,
io::{self, prelude::*},
path::{Path, PathBuf},
};

use fs_err as fs;

use walkdir::WalkDir;
use zip::{self, read::ZipFile, ZipArchive};

use crate::{
info,
utils::{self, dir_is_empty, strip_cur_dir, Bytes, QuestionPolicy},
utils::{self, dir_is_empty, strip_cur_dir, Bytes},
QuestionPolicy,
};

use self::utf8::get_invalid_utf8_paths;
@@ -126,10 +129,11 @@ fn check_for_comments(file: &ZipFile) {

#[cfg(unix)]
fn __unix_set_permissions(file_path: &Path, file: &ZipFile) -> crate::Result<()> {
use std::fs::Permissions;
use std::os::unix::fs::PermissionsExt;

if let Some(mode) = file.unix_mode() {
fs::set_permissions(file_path, fs::Permissions::from_mode(mode))?;
fs::set_permissions(file_path, Permissions::from_mode(mode))?;
}

Ok(())
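The last hunk above only swaps a fully qualified `fs::Permissions` for the imported `Permissions`; the mechanism underneath is `std::os::unix::fs::PermissionsExt`, which turns the raw mode bits a zip entry reports via `unix_mode()` into a `Permissions` value. A hedged, standalone sketch of that mechanism; the `make_executable` helper, the target path, and the 0o755 mode are illustrative.

```rust
#[cfg(unix)]
fn make_executable(path: &std::path::Path) -> std::io::Result<()> {
    use std::fs::Permissions;
    use std::os::unix::fs::PermissionsExt;

    // from_mode takes the same bit pattern ZipFile::unix_mode() yields.
    fs_err::set_permissions(path, Permissions::from_mode(0o755))
}

#[cfg(unix)]
fn main() -> std::io::Result<()> {
    make_executable(std::path::Path::new("ouch.sh")) // illustrative target path
}

#[cfg(not(unix))]
fn main() {}
```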
49 src/cli.rs
|
||||
vec::Vec,
|
||||
};
|
||||
|
||||
use clap::{Parser, ValueHint};
|
||||
use clap::Parser;
|
||||
use fs_err as fs;
|
||||
|
||||
pub use crate::utils::QuestionPolicy;
|
||||
use crate::Error;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[clap(version, about)]
|
||||
pub struct Opts {
|
||||
/// Skip overwrite questions positively.
|
||||
#[clap(short, long, conflicts_with = "no")]
|
||||
pub yes: bool,
|
||||
|
||||
/// Skip overwrite questions negatively.
|
||||
#[clap(short, long)]
|
||||
pub no: bool,
|
||||
|
||||
#[clap(subcommand)]
|
||||
pub cmd: Subcommand,
|
||||
}
|
||||
|
||||
#[derive(Parser, PartialEq, Eq, Debug)]
|
||||
pub enum Subcommand {
|
||||
/// Compress files. Alias: c
|
||||
#[clap(alias = "c")]
|
||||
Compress {
|
||||
/// Files to be compressed
|
||||
#[clap(required = true, min_values = 1)]
|
||||
files: Vec<PathBuf>,
|
||||
|
||||
/// The resulting file. Its extensions specify how the files will be compressed and they need to be supported
|
||||
#[clap(required = true, value_hint = ValueHint::FilePath)]
|
||||
output: PathBuf,
|
||||
},
|
||||
/// Compress files. Alias: d
|
||||
#[clap(alias = "d")]
|
||||
Decompress {
|
||||
/// Files to be decompressed
|
||||
#[clap(required = true, min_values = 1)]
|
||||
files: Vec<PathBuf>,
|
||||
|
||||
/// Decompress files in a directory other than the current
|
||||
#[clap(short, long, value_hint = ValueHint::DirPath)]
|
||||
output: Option<PathBuf>,
|
||||
},
|
||||
}
|
||||
use crate::{Error, Opts, QuestionPolicy, Subcommand};
|
||||
|
||||
impl Opts {
|
||||
/// A helper method that calls `clap::Parser::parse` and then translates relative paths to absolute.
|
||||
@ -73,7 +32,7 @@ impl Opts {
|
||||
}
|
||||
|
||||
fn canonicalize(path: impl AsRef<Path>) -> crate::Result<PathBuf> {
|
||||
match std::fs::canonicalize(&path.as_ref()) {
|
||||
match fs::canonicalize(&path.as_ref()) {
|
||||
Ok(abs_path) => Ok(abs_path),
|
||||
Err(io_err) => {
|
||||
if !path.as_ref().exists() {
|
||||
|
193 src/commands.rs
|
||||
//! Also, where correctly call functions based on the detected `Command`.
|
||||
|
||||
use std::{
|
||||
fs,
|
||||
io::{self, BufReader, BufWriter, Read, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use fs_err as fs;
|
||||
use utils::colors;
|
||||
|
||||
use crate::{
|
||||
archive,
|
||||
cli::{Opts, Subcommand},
|
||||
error::FinalError,
|
||||
extension::{
|
||||
self,
|
||||
CompressionFormat::{self, *},
|
||||
},
|
||||
info,
|
||||
utils::nice_directory_display,
|
||||
utils::to_utf,
|
||||
utils::{self, dir_is_empty, QuestionPolicy},
|
||||
Error,
|
||||
utils::{self, dir_is_empty, nice_directory_display, to_utf},
|
||||
Error, Opts, QuestionPolicy, Subcommand,
|
||||
};
|
||||
|
||||
// Used in BufReader and BufWriter to perform less syscalls
|
||||
@ -56,7 +53,9 @@ pub fn run(args: Opts, question_policy: QuestionPolicy) -> crate::Result<()> {
|
||||
return Err(Error::with_reason(reason));
|
||||
}
|
||||
|
||||
if matches!(&formats[0], Bzip | Gzip | Lzma) && represents_several_files(&files) {
|
||||
if !formats.get(0).map(CompressionFormat::is_archive_format).unwrap_or(false)
|
||||
&& represents_several_files(&files)
|
||||
{
|
||||
// This piece of code creates a suggestion for compressing multiple files
|
||||
// It says:
|
||||
// Change from file.bz.xz
|
||||
@ -84,7 +83,7 @@ pub fn run(args: Opts, question_policy: QuestionPolicy) -> crate::Result<()> {
|
||||
return Err(Error::with_reason(reason));
|
||||
}
|
||||
|
||||
if let Some(format) = formats.iter().skip(1).find(|format| matches!(format, Tar | Zip)) {
|
||||
if let Some(format) = formats.iter().skip(1).find(|format| format.is_archive_format()) {
|
||||
let reason = FinalError::with_title(format!("Cannot compress to '{}'.", to_utf(&output_path)))
|
||||
.detail(format!("Found the format '{}' in an incorrect position.", format))
|
||||
.detail(format!("'{}' can only be used at the start of the file extension.", format))
|
||||
@ -144,7 +143,7 @@ pub fn run(args: Opts, question_policy: QuestionPolicy) -> crate::Result<()> {
|
||||
|
||||
compress_result?;
|
||||
}
|
||||
Subcommand::Decompress { files, output: output_folder } => {
|
||||
Subcommand::Decompress { files, output_dir } => {
|
||||
let mut output_paths = vec![];
|
||||
let mut formats = vec![];
|
||||
|
||||
@ -173,10 +172,10 @@ pub fn run(args: Opts, question_policy: QuestionPolicy) -> crate::Result<()> {
|
||||
}
|
||||
|
||||
// From Option<PathBuf> to Option<&Path>
|
||||
let output_folder = output_folder.as_ref().map(|path| path.as_ref());
|
||||
let output_dir = output_dir.as_ref().map(|path| path.as_ref());
|
||||
|
||||
for ((input_path, formats), file_name) in files.iter().zip(formats).zip(output_paths) {
|
||||
decompress_file(input_path, formats, output_folder, file_name, question_policy)?;
|
||||
decompress_file(input_path, formats, output_dir, file_name, question_policy)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -186,94 +185,74 @@ pub fn run(args: Opts, question_policy: QuestionPolicy) -> crate::Result<()> {
|
||||
fn compress_files(files: Vec<PathBuf>, formats: Vec<CompressionFormat>, output_file: fs::File) -> crate::Result<()> {
|
||||
let file_writer = BufWriter::with_capacity(BUFFER_CAPACITY, output_file);
|
||||
|
||||
if let [Tar | Tgz | Zip] = *formats.as_slice() {
|
||||
match formats[0] {
|
||||
Tar => {
|
||||
let mut bufwriter = archive::tar::build_archive_from_paths(&files, file_writer)?;
|
||||
bufwriter.flush()?;
|
||||
}
|
||||
Tgz => {
|
||||
// Wrap it into an gz_decoder, and pass to the tar archive builder
|
||||
let gz_decoder = flate2::write::GzEncoder::new(file_writer, Default::default());
|
||||
let mut bufwriter = archive::tar::build_archive_from_paths(&files, gz_decoder)?;
|
||||
bufwriter.flush()?;
|
||||
}
|
||||
Zip => {
|
||||
let mut bufwriter = archive::zip::build_archive_from_paths(&files, file_writer)?;
|
||||
bufwriter.flush()?;
|
||||
let mut writer: Box<dyn Write> = Box::new(file_writer);
|
||||
|
||||
// Grab previous encoder and wrap it inside of a new one
|
||||
let chain_writer_encoder = |format: &CompressionFormat, encoder: Box<dyn Write>| {
|
||||
let encoder: Box<dyn Write> = match format {
|
||||
Gzip => Box::new(flate2::write::GzEncoder::new(encoder, Default::default())),
|
||||
Bzip => Box::new(bzip2::write::BzEncoder::new(encoder, Default::default())),
|
||||
Lzma => Box::new(xz2::write::XzEncoder::new(encoder, 6)),
|
||||
Zstd => {
|
||||
let zstd_encoder = zstd::stream::write::Encoder::new(encoder, Default::default());
|
||||
// Safety:
|
||||
// Encoder::new() can only fail if `level` is invalid, but Default::default()
|
||||
// is guaranteed to be valid
|
||||
Box::new(zstd_encoder.unwrap().auto_finish())
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
} else {
|
||||
let mut writer: Box<dyn Write> = Box::new(file_writer);
|
||||
encoder
|
||||
};
|
||||
|
||||
// Grab previous encoder and wrap it inside of a new one
|
||||
let chain_writer_encoder = |format: &CompressionFormat, encoder: Box<dyn Write>| {
|
||||
let encoder: Box<dyn Write> = match format {
|
||||
Gzip => Box::new(flate2::write::GzEncoder::new(encoder, Default::default())),
|
||||
Bzip => Box::new(bzip2::write::BzEncoder::new(encoder, Default::default())),
|
||||
Lzma => Box::new(xz2::write::XzEncoder::new(encoder, 6)),
|
||||
Zstd => {
|
||||
let zstd_encoder = zstd::stream::write::Encoder::new(encoder, Default::default());
|
||||
// Safety:
|
||||
// Encoder::new() can only fail if `level` is invalid, but Default::default()
|
||||
// is guaranteed to be valid
|
||||
Box::new(zstd_encoder.unwrap().auto_finish())
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
encoder
|
||||
};
|
||||
for format in formats.iter().skip(1).rev() {
|
||||
writer = chain_writer_encoder(format, writer);
|
||||
}
|
||||
|
||||
for format in formats.iter().skip(1).rev() {
|
||||
writer = chain_writer_encoder(format, writer);
|
||||
match formats[0] {
|
||||
Gzip | Bzip | Lzma | Zstd => {
|
||||
writer = chain_writer_encoder(&formats[0], writer);
|
||||
let mut reader = fs::File::open(&files[0]).unwrap();
|
||||
io::copy(&mut reader, &mut writer)?;
|
||||
}
|
||||
Tar => {
|
||||
let mut writer = archive::tar::build_archive_from_paths(&files, writer)?;
|
||||
writer.flush()?;
|
||||
}
|
||||
Tgz => {
|
||||
let encoder = flate2::write::GzEncoder::new(writer, Default::default());
|
||||
let writer = archive::tar::build_archive_from_paths(&files, encoder)?;
|
||||
writer.finish()?.flush()?;
|
||||
}
|
||||
Tbz => {
|
||||
let encoder = bzip2::write::BzEncoder::new(writer, Default::default());
|
||||
let writer = archive::tar::build_archive_from_paths(&files, encoder)?;
|
||||
writer.finish()?.flush()?;
|
||||
}
|
||||
Tlzma => {
|
||||
let encoder = xz2::write::XzEncoder::new(writer, 6);
|
||||
let writer = archive::tar::build_archive_from_paths(&files, encoder)?;
|
||||
writer.finish()?.flush()?;
|
||||
}
|
||||
Tzst => {
|
||||
let encoder = zstd::stream::write::Encoder::new(writer, Default::default())?;
|
||||
let writer = archive::tar::build_archive_from_paths(&files, encoder)?;
|
||||
writer.finish()?.flush()?;
|
||||
}
|
||||
Zip => {
|
||||
eprintln!("{yellow}Warning:{reset}", yellow = *colors::YELLOW, reset = *colors::RESET);
|
||||
eprintln!("\tCompressing .zip entirely in memory.");
|
||||
eprintln!("\tIf the file is too big, your PC might freeze!");
|
||||
eprintln!(
|
||||
"\tThis is a limitation for formats like '{}'.",
|
||||
formats.iter().map(|format| format.to_string()).collect::<String>()
|
||||
);
|
||||
eprintln!("\tThe design of .zip makes it impossible to compress via stream.");
|
||||
|
||||
match formats[0] {
|
||||
Gzip | Bzip | Lzma | Zstd => {
|
||||
writer = chain_writer_encoder(&formats[0], writer);
|
||||
let mut reader = fs::File::open(&files[0]).unwrap();
|
||||
io::copy(&mut reader, &mut writer)?;
|
||||
}
|
||||
Tar => {
|
||||
let mut writer = archive::tar::build_archive_from_paths(&files, writer)?;
|
||||
writer.flush()?;
|
||||
}
|
||||
Tgz => {
|
||||
let encoder = flate2::write::GzEncoder::new(writer, Default::default());
|
||||
let writer = archive::tar::build_archive_from_paths(&files, encoder)?;
|
||||
writer.finish()?.flush()?;
|
||||
}
|
||||
Tbz => {
|
||||
let encoder = bzip2::write::BzEncoder::new(writer, Default::default());
|
||||
let writer = archive::tar::build_archive_from_paths(&files, encoder)?;
|
||||
writer.finish()?.flush()?;
|
||||
}
|
||||
Tlzma => {
|
||||
let encoder = xz2::write::XzEncoder::new(writer, 6);
|
||||
let writer = archive::tar::build_archive_from_paths(&files, encoder)?;
|
||||
writer.finish()?.flush()?;
|
||||
}
|
||||
Tzst => {
|
||||
let encoder = zstd::stream::write::Encoder::new(writer, Default::default())?;
|
||||
let writer = archive::tar::build_archive_from_paths(&files, encoder)?;
|
||||
writer.finish()?.flush()?;
|
||||
}
|
||||
Zip => {
|
||||
eprintln!("{yellow}Warning:{reset}", yellow = *colors::YELLOW, reset = *colors::RESET);
|
||||
eprintln!("\tCompressing .zip entirely in memory.");
|
||||
eprintln!("\tIf the file is too big, your PC might freeze!");
|
||||
eprintln!(
|
||||
"\tThis is a limitation for formats like '{}'.",
|
||||
formats.iter().map(|format| format.to_string()).collect::<String>()
|
||||
);
|
||||
eprintln!("\tThe design of .zip makes it impossible to compress via stream.");
|
||||
|
||||
let mut vec_buffer = io::Cursor::new(vec![]);
|
||||
archive::zip::build_archive_from_paths(&files, &mut vec_buffer)?;
|
||||
let vec_buffer = vec_buffer.into_inner();
|
||||
io::copy(&mut vec_buffer.as_slice(), &mut writer)?;
|
||||
}
|
||||
let mut vec_buffer = io::Cursor::new(vec![]);
|
||||
archive::zip::build_archive_from_paths(&files, &mut vec_buffer)?;
|
||||
let vec_buffer = vec_buffer.into_inner();
|
||||
io::copy(&mut vec_buffer.as_slice(), &mut writer)?;
|
||||
}
|
||||
}
|
||||
|
||||
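An aside on the rewritten compress_files above: everything now flows through a single Box<dyn Write>, and chained extensions are just encoder layers that chain_writer_encoder wraps around the output file. A minimal, hedged sketch of that layering in isolation, using the same flate2/bzip2 write-side encoders the code uses; the file name and payload are illustrative.

```rust
use std::io::Write;

fn main() -> std::io::Result<()> {
    // Illustrative output name: gzip is applied last, so it is the outermost extension.
    let file = std::fs::File::create("notes.txt.bz2.gz")?;
    let mut writer: Box<dyn Write> = Box::new(file);

    // Wrap from the outside in, mirroring formats.iter().skip(1).rev() above:
    // the last extension's encoder is the first layer around the file writer.
    writer = Box::new(flate2::write::GzEncoder::new(writer, Default::default()));
    writer = Box::new(bzip2::write::BzEncoder::new(writer, Default::default()));

    writer.write_all(b"illustrative payload")?;
    writer.flush()?;
    // Like the refactored code, this relies on the encoders attempting to finish
    // their streams when the boxed writer is dropped at the end of main.
    Ok(())
}
```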
@@ -282,12 +261,12 @@ fn compress_files(files: Vec<PathBuf>, formats: Vec<CompressionFormat>, output_f

// File at input_file_path is opened for reading, example: "archive.tar.gz"
// formats contains each format necessary for decompression, example: [Gz, Tar] (in decompression order)
// output_folder it's where the file will be decompressed to
// output_dir it's where the file will be decompressed to
// file_name is only used when extracting single file formats, no archive formats like .tar or .zip
fn decompress_file(
input_file_path: &Path,
formats: Vec<extension::CompressionFormat>,
output_folder: Option<&Path>,
output_dir: Option<&Path>,
file_name: &Path,
question_policy: QuestionPolicy,
) -> crate::Result<()> {
@@ -296,10 +275,10 @@ fn decompress_file(

// Output path is used by single file formats
let output_path =
if let Some(output_folder) = output_folder { output_folder.join(file_name) } else { file_name.to_path_buf() };
if let Some(output_dir) = output_dir { output_dir.join(file_name) } else { file_name.to_path_buf() };

// Output folder is used by archive file formats (zip and tar)
let output_folder = output_folder.unwrap_or_else(|| Path::new("."));
let output_dir = output_dir.unwrap_or_else(|| Path::new("."));

// Zip archives are special, because they require io::Seek, so it requires it's logic separated
// from decoder chaining.
@@ -309,10 +288,10 @@ fn decompress_file(
//
// Any other Zip decompression done can take up the whole RAM and freeze ouch.
if let [Zip] = *formats.as_slice() {
utils::create_dir_if_non_existent(output_folder)?;
utils::create_dir_if_non_existent(output_dir)?;
let zip_archive = zip::ZipArchive::new(reader)?;
let _files = crate::archive::zip::unpack_archive(zip_archive, output_folder, question_policy)?;
info!("Successfully decompressed archive in {}.", nice_directory_display(output_folder));
let _files = crate::archive::zip::unpack_archive(zip_archive, output_dir, question_policy)?;
info!("Successfully decompressed archive in {}.", nice_directory_display(output_dir));
return Ok(());
}

@@ -336,7 +315,7 @@ fn decompress_file(
reader = chain_reader_decoder(format, reader)?;
}

utils::create_dir_if_non_existent(output_folder)?;
utils::create_dir_if_non_existent(output_dir)?;

let files_unpacked;

@@ -351,23 +330,23 @@ fn decompress_file(
files_unpacked = vec![output_path];
}
Tar => {
files_unpacked = crate::archive::tar::unpack_archive(reader, output_folder, question_policy)?;
files_unpacked = crate::archive::tar::unpack_archive(reader, output_dir, question_policy)?;
}
Tgz => {
let reader = chain_reader_decoder(&Gzip, reader)?;
files_unpacked = crate::archive::tar::unpack_archive(reader, output_folder, question_policy)?;
files_unpacked = crate::archive::tar::unpack_archive(reader, output_dir, question_policy)?;
}
Tbz => {
let reader = chain_reader_decoder(&Bzip, reader)?;
files_unpacked = crate::archive::tar::unpack_archive(reader, output_folder, question_policy)?;
files_unpacked = crate::archive::tar::unpack_archive(reader, output_dir, question_policy)?;
}
Tlzma => {
let reader = chain_reader_decoder(&Lzma, reader)?;
files_unpacked = crate::archive::tar::unpack_archive(reader, output_folder, question_policy)?;
files_unpacked = crate::archive::tar::unpack_archive(reader, output_dir, question_policy)?;
}
Tzst => {
let reader = chain_reader_decoder(&Zstd, reader)?;
files_unpacked = crate::archive::tar::unpack_archive(reader, output_folder, question_policy)?;
files_unpacked = crate::archive::tar::unpack_archive(reader, output_dir, question_policy)?;
}
Zip => {
eprintln!("Compressing first into .zip.");
@@ -381,11 +360,11 @@ fn decompress_file(
io::copy(&mut reader, &mut vec)?;
let zip_archive = zip::ZipArchive::new(io::Cursor::new(vec))?;

files_unpacked = crate::archive::zip::unpack_archive(zip_archive, output_folder, question_policy)?;
files_unpacked = crate::archive::zip::unpack_archive(zip_archive, output_dir, question_policy)?;
}
}

info!("Successfully decompressed archive in {}.", nice_directory_display(output_folder));
info!("Successfully decompressed archive in {}.", nice_directory_display(output_dir));
info!("Files unpacked: {}", files_unpacked.len());

Ok(())
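On the "Zip archives are special, because they require io::Seek" comment above: when a zip sits behind other decoders, the reader cannot seek, so the code drains it into memory and wraps the bytes in an io::Cursor, which does implement Seek. A hedged sketch of just that step, assuming the zip 0.5 API pinned in Cargo.toml; `zip_from_stream` is an illustrative helper, not part of the commit.

```rust
use std::io::{self, Read};

// Drain a non-seekable reader into memory, then hand zip a Cursor, which
// provides the io::Seek that zip::ZipArchive::new requires.
fn zip_from_stream(mut reader: impl Read) -> zip::result::ZipResult<zip::ZipArchive<io::Cursor<Vec<u8>>>> {
    let mut bytes = Vec::new();
    reader.read_to_end(&mut bytes).map_err(zip::result::ZipError::Io)?;
    zip::ZipArchive::new(io::Cursor::new(bytes))
}

fn main() {
    // With empty input this returns Err (not a valid zip); shown only to exercise the helper.
    assert!(zip_from_stream(io::empty()).is_err());
}
```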
18 src/error.rs
|
||||
FileNotFound(PathBuf),
|
||||
AlreadyExists,
|
||||
InvalidZipArchive(&'static str),
|
||||
PermissionDenied,
|
||||
PermissionDenied { error_title: String },
|
||||
UnsupportedZipArchive(&'static str),
|
||||
InternalError,
|
||||
CompressingRootFolder,
|
||||
@ -78,6 +78,10 @@ impl FinalError {
|
||||
self.hints.push(hint.to_string());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn into_owned(&mut self) -> Self {
|
||||
std::mem::take(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
@ -134,7 +138,7 @@ impl fmt::Display for Error {
|
||||
Error::UnknownExtensionError(_) => todo!(),
|
||||
Error::AlreadyExists => todo!(),
|
||||
Error::InvalidZipArchive(_) => todo!(),
|
||||
Error::PermissionDenied => todo!(),
|
||||
Error::PermissionDenied { error_title } => FinalError::with_title(error_title).detail("Permission denied"),
|
||||
Error::UnsupportedZipArchive(_) => todo!(),
|
||||
Error::Custom { reason } => reason.clone(),
|
||||
};
|
||||
@ -152,8 +156,8 @@ impl Error {
|
||||
impl From<std::io::Error> for Error {
|
||||
fn from(err: std::io::Error) -> Self {
|
||||
match err.kind() {
|
||||
std::io::ErrorKind::NotFound => panic!("{}", err),
|
||||
std::io::ErrorKind::PermissionDenied => Self::PermissionDenied,
|
||||
std::io::ErrorKind::NotFound => todo!(),
|
||||
std::io::ErrorKind::PermissionDenied => Self::PermissionDenied { error_title: err.to_string() },
|
||||
std::io::ErrorKind::AlreadyExists => Self::AlreadyExists,
|
||||
_other => Self::IoError { reason: err.to_string() },
|
||||
}
|
||||
@ -177,3 +181,9 @@ impl From<walkdir::Error> for Error {
|
||||
Self::WalkdirError { reason: err.to_string() }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FinalError> for Error {
|
||||
fn from(err: FinalError) -> Self {
|
||||
Self::Custom { reason: err }
|
||||
}
|
||||
}
|
||||
|
@@ -19,6 +19,12 @@ pub enum CompressionFormat {
Zip, // .zip
}

impl CompressionFormat {
pub fn is_archive_format(&self) -> bool {
matches!(self, Tar | Tgz | Tbz | Tlzma | Tzst | Zip)
}
}

impl fmt::Display for CompressionFormat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
@@ -5,18 +5,21 @@
//! 2. It's required by some integration tests at tests/ folder.

// Public modules
pub mod cli;
pub mod archive;
pub mod commands;

// Private modules
pub mod archive;
mod cli;
mod dialogs;
mod error;
mod extension;
mod macros;
mod opts;
mod utils;

pub use error::{Error, Result};
pub use opts::{Opts, Subcommand};
pub use utils::QuestionPolicy;

/// The status code ouch has when an error is encountered
pub const EXIT_FAILURE: i32 = libc::EXIT_FAILURE;
@@ -1,4 +1,4 @@
use ouch::{cli::Opts, commands, Result};
use ouch::{commands, Opts, Result};

fn main() {
if let Err(err) = run() {
44 src/opts.rs (new file)
@@ -0,0 +1,44 @@
use clap::{Parser, ValueHint};

use std::path::PathBuf;

#[derive(Parser, Debug)]
#[clap(version, about)]
pub struct Opts {
    /// Skip overwrite questions positively.
    #[clap(short, long, conflicts_with = "no")]
    pub yes: bool,

    /// Skip overwrite questions negatively.
    #[clap(short, long)]
    pub no: bool,

    #[clap(subcommand)]
    pub cmd: Subcommand,
}

#[derive(Parser, PartialEq, Eq, Debug)]
pub enum Subcommand {
    /// Compress files. Alias: c
    #[clap(alias = "c")]
    Compress {
        /// Files to be compressed
        #[clap(required = true, min_values = 1)]
        files: Vec<PathBuf>,

        /// The resulting file. Its extensions specify how the files will be compressed and they need to be supported
        #[clap(required = true, value_hint = ValueHint::FilePath)]
        output: PathBuf,
    },
    /// Compress files. Alias: d
    #[clap(alias = "d")]
    Decompress {
        /// Files to be decompressed
        #[clap(required = true, min_values = 1)]
        files: Vec<PathBuf>,

        /// Decompress files in a directory other than the current
        #[clap(short, long = "dir", value_hint = ValueHint::DirPath)]
        output_dir: Option<PathBuf>,
    },
}
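For a feel of the new flag wiring above: the `-d/--dir` option described in the README change now feeds `output_dir` on the Decompress variant. A hedged sketch using the derived `Parser::parse_from` from clap 3.0.0-beta.5; the argument values are illustrative, and `Opts`/`Subcommand` are the re-exports added to src/lib.rs earlier in this commit.

```rust
use std::path::PathBuf;

use clap::Parser;
use ouch::{Opts, Subcommand};

fn main() {
    // Illustrative argv; `decompress` also answers to its alias `d`.
    let opts = Opts::parse_from(["ouch", "decompress", "a.zip", "--dir", "pictures"]);

    if let Subcommand::Decompress { files, output_dir } = opts.cmd {
        assert_eq!(files, vec![PathBuf::from("a.zip")]);
        assert_eq!(output_dir, Some(PathBuf::from("pictures")));
    }
}
```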
@@ -1,16 +1,17 @@
use std::{
cmp, env,
ffi::OsStr,
fs::{self, ReadDir},
path::Component,
path::{Path, PathBuf},
};

use fs_err as fs;

use crate::{dialogs::Confirmation, info};

/// Checks if the given path represents an empty directory.
pub fn dir_is_empty(dir_path: &Path) -> bool {
let is_empty = |mut rd: ReadDir| rd.next().is_none();
let is_empty = |mut rd: std::fs::ReadDir| rd.next().is_none();

dir_path.read_dir().map(is_empty).unwrap_or_default()
}
@@ -1,16 +1,15 @@
mod utils;

use std::{
env, fs,
env,
io::prelude::*,
path::{Path, PathBuf},
time::Duration,
};

use ouch::{
cli::{Opts, QuestionPolicy, Subcommand},
commands::run,
};
use ouch::{commands::run, Opts, QuestionPolicy, Subcommand};

use fs_err as fs;
use rand::{rngs::SmallRng, RngCore, SeedableRng};
use tempfile::NamedTempFile;
use utils::*;
@@ -180,7 +179,7 @@ fn extract_files(archive_path: &Path) -> Vec<PathBuf> {
no: false,
cmd: Subcommand::Decompress {
files: vec![archive_path.to_owned()],
output: Some(extraction_output_folder.clone()),
output_dir: Some(extraction_output_folder.clone()),
},
};
run(command, QuestionPolicy::Ask).expect("Failed to extract");
@@ -2,15 +2,11 @@

#![allow(dead_code)]

use std::{
fs,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};

use ouch::{
cli::{Opts, QuestionPolicy, Subcommand},
commands::run,
};
use fs_err as fs;

use ouch::{commands::run, Opts, QuestionPolicy, Subcommand};

pub fn create_empty_dir(at: &Path, filename: &str) -> PathBuf {
let dirname = Path::new(filename);
@@ -52,7 +48,7 @@ pub fn extract_files(archive_path: &Path) -> Vec<PathBuf> {
no: false,
cmd: Subcommand::Decompress {
files: vec![archive_path.to_owned()],
output: Some(extraction_output_folder.clone()),
output_dir: Some(extraction_output_folder.clone()),
},
};
run(command, QuestionPolicy::Ask).expect("Failed to extract");