mirror of https://github.com/ouch-org/ouch.git (synced 2025-06-07 12:05:46 +00:00)
create helper function split_first_extension
commit 19769223c8
parent 4d518b7056
@@ -9,6 +9,7 @@ use crate::{
     archive,
     commands::warn_user_about_in_memory_zip_compression,
     extension::{
+        split_first_extension,
         CompressionFormat::{self, *},
         Extension,
     },
@@ -69,11 +70,13 @@ pub fn compress_files(
         Ok(encoder)
     };
 
-    for format in formats.iter().flat_map(Extension::iter).skip(1).collect::<Vec<_>>().iter().rev() {
+    let (first_extension, extensions) = split_first_extension(&formats);
+
+    for format in extensions.iter().rev() {
         writer = chain_writer_encoder(format, writer)?;
     }
 
-    match formats[0].compression_formats[0] {
+    match first_extension {
         Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
             let _progress = Progress::new_accessible_aware(
                 total_input_size,
@@ -81,7 +84,7 @@ pub fn compress_files(
                 Some(Box::new(move || output_file_path.metadata().expect("file exists").len())),
             );
 
-            writer = chain_writer_encoder(&formats[0].compression_formats[0], writer)?;
+            writer = chain_writer_encoder(&first_extension, writer)?;
             let mut reader = fs::File::open(&files[0]).unwrap();
             io::copy(&mut reader, &mut writer)?;
         }
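
The loop above chains the remaining extensions in reverse so that the last extension in the output name ends up wrapping the file directly. A minimal, self-contained sketch of that ordering follows; the Layer type and its "label(...)" framing are invented stand-ins for illustration, not ouch's real encoders or its chain_writer_encoder API.

use std::io::{self, Write};

// Toy stand-in for one encoder in the chain: it frames every chunk written
// through it as "label(...)" so the final nesting is easy to see.
struct Layer {
    label: &'static str,
    inner: Box<dyn Write>,
}

impl Write for Layer {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let mut framed = Vec::with_capacity(buf.len() + self.label.len() + 2);
        framed.extend_from_slice(self.label.as_bytes());
        framed.push(b'(');
        framed.extend_from_slice(buf);
        framed.push(b')');
        self.inner.write_all(&framed)?;
        Ok(buf.len())
    }

    fn flush(&mut self) -> io::Result<()> {
        self.inner.flush()
    }
}

fn main() -> io::Result<()> {
    // Pretend the output name was "archive.tar.gz.zst": the first extension
    // (tar) is handled separately, the remaining ones are chained as encoders.
    let (first, rest) = ("tar", ["gz", "zst"]);

    // The innermost writer is the output file (stdout here).
    let mut writer: Box<dyn Write> = Box::new(io::stdout());

    // Reverse order: the last extension wraps the file directly, so its
    // container ends up outermost on disk, matching ".gz.zst".
    for &label in rest.iter().rev() {
        writer = Box::new(Layer { label, inner: writer });
    }

    // Stand-in for writing the tar archive / copying the single input file.
    writer.write_all(format!("{first} payload").as_bytes())?;
    writer.flush()?; // output: zst(gz(tar payload))
    Ok(())
}
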
@@ -9,6 +9,7 @@ use fs_err as fs;
 use crate::{
     commands::warn_user_about_in_memory_zip_decompression,
     extension::{
+        split_first_extension,
         CompressionFormat::{self, *},
         Extension,
     },
@@ -34,6 +35,7 @@ pub fn decompress_file(
     assert!(output_dir.exists());
     let total_input_size = input_file_path.metadata().expect("file exists").len();
     let reader = fs::File::open(&input_file_path)?;
+
     // Zip archives are special, because they require io::Seek, so it requires it's logic separated
     // from decoder chaining.
     //
@@ -41,7 +43,7 @@ pub fn decompress_file(
     // in-memory decompression/copying first.
     //
     // Any other Zip decompression done can take up the whole RAM and freeze ouch.
-    if formats.len() == 1 && *formats[0].compression_formats == [Zip] {
+    if let [Extension { compression_formats: [Zip], .. }] = formats.as_slice() {
         let zip_archive = zip::ZipArchive::new(reader)?;
         let files = if let ControlFlow::Continue(files) = smart_unpack(
             Box::new(move |output_dir| {
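
A small aside on the condition rewritten in the hunk above: the slice-plus-struct pattern matches only when there is exactly one extension and its compression_formats is exactly [Zip], replacing the length check and indexing with a single pattern. The sketch below uses invented stand-in types, not ouch's real Extension and CompressionFormat definitions.

// Stand-ins for illustration only.
enum CompressionFormat {
    Zip,
    Gzip,
    Tar,
}

struct Extension {
    compression_formats: &'static [CompressionFormat],
}

// Same idea as the old `formats.len() == 1 && *formats[0].compression_formats == [Zip]`,
// expressed as one slice pattern.
fn is_single_zip(formats: &[Extension]) -> bool {
    matches!(
        formats,
        [Extension { compression_formats: [CompressionFormat::Zip], .. }]
    )
}

fn main() {
    let zip_only = [Extension { compression_formats: &[CompressionFormat::Zip] }];
    let tar_gz = [
        Extension { compression_formats: &[CompressionFormat::Tar] },
        Extension { compression_formats: &[CompressionFormat::Gzip] },
    ];

    assert!(is_single_zip(&zip_only));
    assert!(!is_single_zip(&tar_gz));
    println!("only the zip-only case matches the slice pattern");
}
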
@@ -93,13 +95,15 @@ pub fn decompress_file(
         Ok(decoder)
     };
 
-    for format in formats.iter().flat_map(Extension::iter).skip(1).collect::<Vec<_>>().iter().rev() {
+    let (first_extension, extensions) = split_first_extension(&formats);
+
+    for format in extensions.iter().rev() {
         reader = chain_reader_decoder(format, reader)?;
     }
 
-    let files_unpacked = match formats[0].compression_formats[0] {
+    let files_unpacked = match first_extension {
         Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
-            reader = chain_reader_decoder(&formats[0].compression_formats[0], reader)?;
+            reader = chain_reader_decoder(&first_extension, reader)?;
 
             let writer = utils::create_or_ask_overwrite(&output_file_path, question_policy)?;
             if writer.is_none() {
@@ -171,3 +171,10 @@ mod tests {
         assert_eq!(formats, vec![&Tar, &Gzip]);
     }
 }
+
+// Panics if formats has an empty list of compression formats
+pub fn split_first_extension(formats: &[Extension]) -> (CompressionFormat, Vec<CompressionFormat>) {
+    let mut extensions: Vec<CompressionFormat> = formats.iter().flat_map(Extension::iter).copied().collect();
+    let first_extension = extensions.remove(0);
+    (first_extension, extensions)
+}
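
For reference, a self-contained analogue of the new helper and how its callers consume the split; Format and split_first_format are hypothetical names that only mirror the flatten-then-split behaviour (including the documented panic when there are no compression formats at all).

// Minimal analogue of split_first_extension over a plain enum; the real
// helper flattens ouch's Extension values the same way via Extension::iter.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Format {
    Tar,
    Gzip,
    Zstd,
}

fn split_first_format(parsed: &[Vec<Format>]) -> (Format, Vec<Format>) {
    let mut all: Vec<Format> = parsed.iter().flatten().copied().collect();
    // Like the real helper, this panics if no compression formats were parsed.
    let first = all.remove(0);
    (first, all)
}

fn main() {
    // Something like "archive.tar.gz.zst" parses into three extensions.
    let parsed = vec![vec![Format::Tar], vec![Format::Gzip], vec![Format::Zstd]];

    let (first, rest) = split_first_format(&parsed);
    assert_eq!(first, Format::Tar);
    assert_eq!(rest, vec![Format::Gzip, Format::Zstd]);

    // Callers handle `first` separately (archive or single-file case) and
    // chain `rest` in reverse as encoder/decoder layers.
    println!("first = {first:?}, rest = {rest:?}");
}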