mirror of https://github.com/ouch-org/ouch.git

commit 19769223c8 (parent 4d518b7056)

    create helper function split_first_extension
@@ -9,6 +9,7 @@ use crate::{
     archive,
     commands::warn_user_about_in_memory_zip_compression,
     extension::{
+        split_first_extension,
         CompressionFormat::{self, *},
         Extension,
     },
@@ -69,11 +70,13 @@ pub fn compress_files(
         Ok(encoder)
     };

-    for format in formats.iter().flat_map(Extension::iter).skip(1).collect::<Vec<_>>().iter().rev() {
+    let (first_extension, extensions) = split_first_extension(&formats);
+
+    for format in extensions.iter().rev() {
         writer = chain_writer_encoder(format, writer)?;
     }

-    match formats[0].compression_formats[0] {
+    match first_extension {
         Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
             let _progress = Progress::new_accessible_aware(
                 total_input_size,
@@ -81,7 +84,7 @@ pub fn compress_files(
                 Some(Box::new(move || output_file_path.metadata().expect("file exists").len())),
             );

-            writer = chain_writer_encoder(&formats[0].compression_formats[0], writer)?;
+            writer = chain_writer_encoder(&first_extension, writer)?;
             let mut reader = fs::File::open(&files[0]).unwrap();
             io::copy(&mut reader, &mut writer)?;
         }
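Note on the compress hunks: after splitting off the first extension, the loop wraps the output writer in one encoder per remaining format, iterating in reverse so the rightmost extension in the file name ends up closest to the file. A minimal std-only sketch of that writer-wrapping pattern; NoopEncoder and the format list are illustrative stand-ins, not types from ouch:

    use std::io::{self, Write};

    // Hypothetical stand-in for a real compressing writer (gzip, zstd, ...).
    struct NoopEncoder<W: Write>(W);

    impl<W: Write> Write for NoopEncoder<W> {
        fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
            self.0.write(buf) // a real encoder would compress before forwarding
        }
        fn flush(&mut self) -> io::Result<()> {
            self.0.flush()
        }
    }

    fn main() -> io::Result<()> {
        // Stand-in for `extensions` after the split, e.g. ["gz", "xz"] for "foo.tar.gz.xz".
        let remaining = ["gz", "xz"];

        // Innermost sink first; each iteration adds another layer,
        // so the last extension sits closest to the sink.
        let mut writer: Box<dyn Write> = Box::new(Vec::<u8>::new());
        for format in remaining.iter().rev() {
            let _ = format; // the real code selects an encoder based on the format
            writer = Box::new(NoopEncoder(writer));
        }

        writer.write_all(b"archive contents")?;
        writer.flush()
    }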
@@ -9,6 +9,7 @@ use fs_err as fs;
 use crate::{
     commands::warn_user_about_in_memory_zip_decompression,
     extension::{
+        split_first_extension,
         CompressionFormat::{self, *},
         Extension,
     },
@@ -34,6 +35,7 @@ pub fn decompress_file(
     assert!(output_dir.exists());
     let total_input_size = input_file_path.metadata().expect("file exists").len();
     let reader = fs::File::open(&input_file_path)?;
+
     // Zip archives are special, because they require io::Seek, so it requires it's logic separated
     // from decoder chaining.
     //
@@ -41,7 +43,7 @@ pub fn decompress_file(
     // in-memory decompression/copying first.
     //
     // Any other Zip decompression done can take up the whole RAM and freeze ouch.
-    if formats.len() == 1 && *formats[0].compression_formats == [Zip] {
+    if let [Extension { compression_formats: [Zip], .. }] = formats.as_slice() {
         let zip_archive = zip::ZipArchive::new(reader)?;
         let files = if let ControlFlow::Continue(files) = smart_unpack(
             Box::new(move |output_dir| {
@@ -93,13 +95,15 @@ pub fn decompress_file(
         Ok(decoder)
     };

-    for format in formats.iter().flat_map(Extension::iter).skip(1).collect::<Vec<_>>().iter().rev() {
+    let (first_extension, extensions) = split_first_extension(&formats);
+
+    for format in extensions.iter().rev() {
         reader = chain_reader_decoder(format, reader)?;
     }

-    let files_unpacked = match formats[0].compression_formats[0] {
+    let files_unpacked = match first_extension {
         Gzip | Bzip | Lz4 | Lzma | Snappy | Zstd => {
-            reader = chain_reader_decoder(&formats[0].compression_formats[0], reader)?;
+            reader = chain_reader_decoder(&first_extension, reader)?;

             let writer = utils::create_or_ask_overwrite(&output_file_path, question_policy)?;
             if writer.is_none() {
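Note on the rewritten Zip check: matching formats.as_slice() against a one-element slice pattern replaces the earlier length test plus indexing, and it only succeeds when there is exactly one extension whose compression formats are exactly [Zip]. A self-contained sketch of the same slice-pattern technique, using simplified stand-in types rather than ouch's real Extension:

    // Simplified stand-ins; ouch's real Extension/CompressionFormat are richer.
    enum Format {
        Tar,
        Zip,
    }

    struct Ext {
        compression_formats: &'static [Format],
    }

    // True only for a single extension whose formats are exactly [Zip].
    fn is_zip_only(formats: &[Ext]) -> bool {
        matches!(formats, [Ext { compression_formats: [Format::Zip], .. }])
    }

    fn main() {
        let zip_only = [Ext { compression_formats: &[Format::Zip] }];
        let tar_zip = [
            Ext { compression_formats: &[Format::Tar] },
            Ext { compression_formats: &[Format::Zip] },
        ];

        assert!(is_zip_only(&zip_only));
        assert!(!is_zip_only(&tar_zip));
        println!("slice pattern matched only the single-Zip case");
    }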
@@ -171,3 +171,10 @@ mod tests {
         assert_eq!(formats, vec![&Tar, &Gzip]);
     }
 }
+
+// Panics if formats has an empty list of compression formats
+pub fn split_first_extension(formats: &[Extension]) -> (CompressionFormat, Vec<CompressionFormat>) {
+    let mut extensions: Vec<CompressionFormat> = formats.iter().flat_map(Extension::iter).copied().collect();
+    let first_extension = extensions.remove(0);
+    (first_extension, extensions)
+}
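For illustration, the new helper flattens every extension into its compression formats, pops the first one, and returns the rest for encoder/decoder chaining; per the comment, it panics when there is nothing to pop. A rough stand-alone sketch of that behaviour with a plain enum in place of ouch's Extension and CompressionFormat types:

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum Format {
        Tar,
        Gzip,
    }

    // Simplified analogue of split_first_extension: flatten, then pop the head.
    // Panics if `formats` flattens to an empty list, mirroring the comment above.
    fn split_first(formats: &[Vec<Format>]) -> (Format, Vec<Format>) {
        let mut flat: Vec<Format> = formats.iter().flatten().copied().collect();
        let first = flat.remove(0); // panics when the list is empty
        (first, flat)
    }

    fn main() {
        // e.g. "archive.tar.gz" yields the extensions [Tar] and [Gzip]
        let formats = vec![vec![Format::Tar], vec![Format::Gzip]];
        let (first, rest) = split_first(&formats);

        assert_eq!(first, Format::Tar);
        assert_eq!(rest, vec![Format::Gzip]);
        println!("first = {first:?}, rest = {rest:?}");
    }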