From 12ab601341d94e60c264d2512bd5caf79d17d480 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Mikrut?= <41945903+qarmin@users.noreply.github.com> Date: Fri, 27 Sep 2024 14:11:22 +0200 Subject: [PATCH] Fix CI, print results also in CLI, show errors when image cannot be read (#1355) --- .github/workflows/linux_gui.yml | 4 +- Cargo.lock | 49 +++---- Changelog.md | 28 ++-- ci_tester/src/main.rs | 11 +- czkawka_cli/src/main.rs | 11 +- czkawka_core/Cargo.toml | 3 +- czkawka_core/src/bad_extensions.rs | 8 +- czkawka_core/src/big_file.rs | 14 +- czkawka_core/src/broken_files.rs | 14 +- czkawka_core/src/common.rs | 91 ++++++++----- czkawka_core/src/common_cache.rs | 17 +++ czkawka_core/src/common_dir_traversal.rs | 24 +--- czkawka_core/src/common_tool.rs | 18 +-- czkawka_core/src/duplicate.rs | 126 +++++++++--------- czkawka_core/src/empty_files.rs | 2 +- czkawka_core/src/invalid_symlinks.rs | 6 +- czkawka_core/src/same_music.rs | 74 +++++----- czkawka_core/src/similar_images.rs | 90 ++++++------- czkawka_core/src/similar_videos.rs | 21 ++- czkawka_core/src/temporary.rs | 2 +- czkawka_gui/README.md | 2 +- .../connect_things/connect_button_compare.rs | 4 +- czkawka_gui/src/initialize_gui.rs | 4 +- justfile | 5 +- krokiet/src/connect_show_preview.rs | 11 +- 25 files changed, 338 insertions(+), 301 deletions(-) diff --git a/.github/workflows/linux_gui.yml b/.github/workflows/linux_gui.yml index 96fb4d47c..89a228909 100644 --- a/.github/workflows/linux_gui.yml +++ b/.github/workflows/linux_gui.yml @@ -118,7 +118,7 @@ jobs: - uses: actions/checkout@v4 - name: Install Dependencies - run: sudo apt update || true; sudo apt install libgtk-4-dev libheif-dev librsvg2-dev wget fuse libfuse2 -y + run: sudo apt update || true; sudo apt install libgtk-4-dev libheif-dev librsvg2-dev wget fuse libfuse2 desktop-file-utils -y - name: Setup rust version run: rustup default ${{ matrix.toolchain }} @@ -143,7 +143,7 @@ jobs: pwd cp target/release/czkawka_gui AppDir/usr/bin ./linuxdeploy-x86_64.AppImage --appdir AppDir --plugin gtk --icon-file data/icons/com.github.qarmin.czkawka.svg --desktop-file data/com.github.qarmin.czkawka.desktop - ./appimagetool --comp zstd --mksquashfs-opt -Xcompression-level --mksquashfs-opt 20 \ + ./appimagetool-x86_64.AppImage --comp zstd --mksquashfs-opt -Xcompression-level --mksquashfs-opt 20 \ -u "gh-releases-zsync|$GITHUB_REPOSITORY_OWNER|czkawka|latest|*.AppImage.zsync" \ ./AppDir diff --git a/Cargo.lock b/Cargo.lock index 5d39629ac..d74acdbf5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1318,6 +1318,7 @@ dependencies = [ "image_hasher", "imagepipe", "infer", + "itertools 0.13.0", "libheif-rs", "libheif-sys", "libraw-rs", @@ -2168,9 +2169,9 @@ dependencies = [ [[package]] name = "gdk-pixbuf" -version = "0.20.1" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8730751991b97419fc3f0c2dca2c9e45b48edf46e48e0f965964ecf33889812f" +checksum = "c4c29071a9e92337d8270a85cb0510cda4ac478be26d09ad027cc1d081911b19" dependencies = [ "gdk-pixbuf-sys", "gio", @@ -2180,9 +2181,9 @@ dependencies = [ [[package]] name = "gdk-pixbuf-sys" -version = "0.20.1" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ffbf649fd5b1c8c0f0feeb015b7533c3ef92da2887fb95ddd338bc2b1644a7c" +checksum = "687343b059b91df5f3fbd87b4307038fa9e647fcc0461d0d3f93e94fee20bf3d" dependencies = [ "gio-sys", "glib-sys", @@ -2279,9 +2280,9 @@ dependencies = [ [[package]] name = "gio" -version = "0.20.1" +version = "0.20.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcacaa37401cad0a95aadd266bc39c72a131d454fc012f6dfd217f891d76cc52" +checksum = "b8d999e8fb09583e96080867e364bc1e701284ad206c76a5af480d63833ad43c" dependencies = [ "futures-channel", "futures-core", @@ -2296,9 +2297,9 @@ dependencies = [ [[package]] name = "gio-sys" -version = "0.20.1" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5237611e97e9b86ab5768adc3eef853ae713ea797aa3835404acdfacffc9fb38" +checksum = "4f7efc368de04755344f0084104835b6bb71df2c1d41e37d863947392a894779" dependencies = [ "glib-sys", "gobject-sys", @@ -2320,9 +2321,9 @@ dependencies = [ [[package]] name = "glib" -version = "0.20.3" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95648aac01b75503000bb3bcaa5ec7a7a2dd61e43636b8b1814854de94dd80e4" +checksum = "adcf1ec6d3650bf9fdbc6cee242d4fcebc6f6bfd9bea5b929b6a8b7344eb85ff" dependencies = [ "bitflags 2.6.0", "futures-channel", @@ -2341,9 +2342,9 @@ dependencies = [ [[package]] name = "glib-macros" -version = "0.20.3" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302f1d633c9cdef4350330e7b68fd8016e2834bb106c93fdf9789fcde753c1ab" +checksum = "a6bf88f70cd5720a6197639dcabcb378dd528d0cb68cb1f45e3b358bcb841cd7" dependencies = [ "heck 0.5.0", "proc-macro-crate", @@ -2354,9 +2355,9 @@ dependencies = [ [[package]] name = "glib-sys" -version = "0.20.2" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92eee4531c1c9abba945d19378b205031b5890e1f99c319ba0503b6e0c06a163" +checksum = "5f9eca5d88cfa6a453b00d203287c34a2b7cac3a7831779aa2bb0b3c7233752b" dependencies = [ "libc", "system-deps 7.0.3", @@ -2459,9 +2460,9 @@ dependencies = [ [[package]] name = "gobject-sys" -version = "0.20.1" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa3d1dcd8a1eb2e7c22be3d5e792b14b186f3524f79b25631730f9a8c169d49a" +checksum = "a4c674d2ff8478cf0ec29d2be730ed779fef54415a2fb4b565c52def62696462" dependencies = [ "glib-sys", "libc", @@ -2470,9 +2471,9 @@ dependencies = [ [[package]] name = "graphene-rs" -version = "0.20.1" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80aac87f74e81c0e13433e892a047237abdc37945c86887f5eed905038356e69" +checksum = "1f53144c7fe78292705ff23935f1477d511366fb2f73c43d63b37be89076d2fe" dependencies = [ "glib", "graphene-sys", @@ -2481,9 +2482,9 @@ dependencies = [ [[package]] name = "graphene-sys" -version = "0.20.1" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2f91ecd32989efad60326cc20a8fb252bd2852239a08e4e70cde8c100de9ca" +checksum = "e741797dc5081e59877a4d72c442c72d61efdd99161a0b1c1b29b6b988934b99" dependencies = [ "glib-sys", "libc", @@ -4359,9 +4360,9 @@ dependencies = [ [[package]] name = "pango" -version = "0.20.1" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5764e5a174a5a0ec054fe5962ce6d4fc7052e2d0dcc23bbc77202b40a4a403d3" +checksum = "aa26aa54b11094d72141a754901cd71d9356432bb8147f9cace8d9c7ba95f356" dependencies = [ "gio", "glib", @@ -4371,9 +4372,9 @@ dependencies = [ [[package]] name = "pango-sys" -version = "0.20.1" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd317e1de76b14b3d3efe05518c08b360327f1ab7fec150473a89ffcad4b072d" +checksum = 
"84fd65917bf12f06544ae2bbc200abf9fc0a513a5a88a0fa81013893aef2b838" dependencies = [ "glib-sys", "gobject-sys", diff --git a/Changelog.md b/Changelog.md index d476bf87b..f3ff8ee2a 100644 --- a/Changelog.md +++ b/Changelog.md @@ -2,25 +2,33 @@ ### Core -- Removed some unnecessary panics -- Simplified usage of structures when sending/receiving progress information -- Added Median hash algorithm -- Fixed compilation with Rust >=1.80 -- Extracted tool input parameters, that helped to find not used parameters -- Added new mod to find similar music only in groups with similar title tag +- Removed some unnecessary panics - [#1354](https://github.com/qarmin/czkawka/pull/1354) +- Simplified usage of structures when sending/receiving progress + information - [#1354](https://github.com/qarmin/czkawka/pull/1354) +- Added Median hash algorithm - [#1354](https://github.com/qarmin/czkawka/pull/1354) +- Fixed compilation with Rust >=1.80 - [#1354](https://github.com/qarmin/czkawka/pull/1354) +- Extracted tool input parameters, that helped to find not used + parameters - [#1354](https://github.com/qarmin/czkawka/pull/1354) +- Added new mod to find similar music only in groups with similar title + tag - [#1354](https://github.com/qarmin/czkawka/pull/1354) +- Printing to file/console no longer uses two backslashes in windows + paths - [#1354](https://github.com/qarmin/czkawka/pull/1354) +- Fixed panic when failed to decode raw picture - [#1355](https://github.com/qarmin/czkawka/pull/1355) ### Krokiet -- Fixed invalid default hash size in similar images -- Fixed and added more input parameters to the application +- Fixed invalid default hash size in similar images - [#1354](https://github.com/qarmin/czkawka/pull/1354) +- Fixed and added more input parameters to the application - [#1354](https://github.com/qarmin/czkawka/pull/1354) ### GTK GUI -- Fixed and added more input parameters to the application +- Fixed and added more input parameters to the application - [#1355](https://github.com/qarmin/czkawka/pull/1355) ### CLI -- Fixed and added more input parameters to the application +- Fixed and added more input parameters to the application - [#1354](https://github.com/qarmin/czkawka/pull/1354) +- Fixed crash when stopping scan mutliple times - [#1355](https://github.com/qarmin/czkawka/pull/1355) +- Print results also in debug build - [#1355](https://github.com/qarmin/czkawka/pull/1355) ## Version 7.0.0 - 19.02.2024r diff --git a/ci_tester/src/main.rs b/ci_tester/src/main.rs index 456ded953..b0959d95f 100644 --- a/ci_tester/src/main.rs +++ b/ci_tester/src/main.rs @@ -33,6 +33,7 @@ fn main() { for _ in 0..ATTEMPTS { test_empty_files(); + test_big_files(); test_smallest_files(); test_biggest_files(); test_empty_folders(); @@ -373,6 +374,11 @@ fn test_empty_files() { run_test(&["empty-files", "-d", "TestFiles", "-D"], vec!["EmptyFile"], vec![], vec![]); } +fn test_big_files() { + info!("test_big_files"); + run_test(&["big", "-d", "TestFiles", "-n", "2", "-D"], vec!["Music/M4.mp3", "Videos/V3.webm"], vec![], vec![]); +} + //////////////////////////////////// //////////////////////////////////// /////////HELPER FUNCTIONS/////////// @@ -454,8 +460,7 @@ fn collect_all_files_and_dirs(dir: &str) -> std::io::Result { let mut symlinks = BTreeSet::new(); let mut folders_to_check = vec![dir.to_string()]; - while !folders_to_check.is_empty() { - let folder = folders_to_check.pop().expect("Should not fail in tests"); + while let Some(folder) = folders_to_check.pop() { let rd = fs::read_dir(folder)?; for entry in rd { let 
entry = entry?; @@ -470,7 +475,7 @@ fn collect_all_files_and_dirs(dir: &str) -> std::io::Result { } else if file_type.is_file() { files.insert(path_str); } else { - panic!("Unknown type of file {path_str:?}"); + panic!("Unknown type of file {path_str}"); } } } diff --git a/czkawka_cli/src/main.rs b/czkawka_cli/src/main.rs index 644d417d4..39320d9ff 100644 --- a/czkawka_cli/src/main.rs +++ b/czkawka_cli/src/main.rs @@ -65,8 +65,10 @@ fn main() { }) .expect("Failed to spawn calculation thread"); ctrlc::set_handler(move || { - println!("Get Sender"); - stop_sender.send(()).expect("Could not send signal on channel."); + println!("Got Ctrl+C signal, stopping..."); + if let Err(e) = stop_sender.send(()) { + eprintln!("Failed to send stop signal: {e} (it is possible that the program is already stopped)"); + }; }) .expect("Error setting Ctrl-C handler"); @@ -348,9 +350,8 @@ fn save_and_print_results(component: &mut T, commo } } - if !cfg!(debug_assertions) { - component.print_results_to_output(); - } + component.print_results_to_output(); + component.get_text_messages().print_messages(); } diff --git a/czkawka_core/Cargo.toml b/czkawka_core/Cargo.toml index 32f0f80c2..eeb0c9617 100644 --- a/czkawka_core/Cargo.toml +++ b/czkawka_core/Cargo.toml @@ -81,10 +81,11 @@ os_info = { version = "3", default-features = false } log = "0.4.22" handsome_logger = "0.8" fun_time = { version = "0.3", features = ["log"] } +itertools = "0.13" -[target.'cfg(windows)'.dependencies] # Don't update anymore! This crate has a bug. I've submitted a patch upstream, but the change is breaking. The current code relies on the bug to work correctly! # Warning by CalunVier 2024.7.15 +[target.'cfg(windows)'.dependencies] file-id = "=0.2.1" [build-dependencies] diff --git a/czkawka_core/src/bad_extensions.rs b/czkawka_core/src/bad_extensions.rs index 144181930..1ead2f571 100644 --- a/czkawka_core/src/bad_extensions.rs +++ b/czkawka_core/src/bad_extensions.rs @@ -103,6 +103,8 @@ const WORKAROUNDS: &[(&str, &str)] = &[ ("pptx", "ppsx"), // Powerpoint ("sh", "bash"), // Linux ("sh", "guess"), // GNU + ("sh", "lua"), // Lua + ("sh", "js"), // Javascript ("sh", "pl"), // Gnome/Linux ("sh", "pm"), // Gnome/Linux ("sh", "py"), // Python @@ -268,10 +270,6 @@ impl BadExtensions { let mut hashmap_workarounds: HashMap<&str, Vec<&str>> = Default::default(); for (proper, found) in WORKAROUNDS { - // This should be enabled when items will have only 1 possible workaround items, but looks that some have 2 or even more, so at least for now this is disabled - // if hashmap_workarounds.contains_key(found) { - // panic!("Already have {} key", found); - // } hashmap_workarounds.entry(found).or_default().push(proper); } @@ -441,7 +439,7 @@ impl PrintResults for BadExtensions { writeln!(writer, "Found {} files with invalid extension.\n", self.information.number_of_files_with_bad_extension)?; for file_entry in &self.bad_extensions_files { - writeln!(writer, "{:?} ----- {}", file_entry.path, file_entry.proper_extensions)?; + writeln!(writer, "\"{}\" ----- {}", file_entry.path.to_string_lossy(), file_entry.proper_extensions)?; } Ok(()) diff --git a/czkawka_core/src/big_file.rs b/czkawka_core/src/big_file.rs index 9c221e153..addbaf4d3 100644 --- a/czkawka_core/src/big_file.rs +++ b/czkawka_core/src/big_file.rs @@ -17,7 +17,7 @@ pub enum SearchMode { SmallestFiles, } -#[derive(Default)] +#[derive(Debug, Default)] pub struct Info { pub number_of_real_files: usize, } @@ -126,8 +126,8 @@ impl DebugPrint for BigFile { } println!("### INDIVIDUAL DEBUG PRINT ###"); 
- println!("Big files size {} in {} groups", self.information.number_of_real_files, self.big_files.len()); - println!("Number of files to check - {:?}", self.get_params().number_of_files_to_check); + println!("Info: {:?}", self.information); + println!("Number of files to check - {}", self.get_params().number_of_files_to_check); self.debug_print_common(); println!("-----------------------------------------"); } @@ -150,7 +150,13 @@ impl PrintResults for BigFile { writeln!(writer, "{} the smallest files.\n\n", self.information.number_of_real_files)?; } for file_entry in &self.big_files { - writeln!(writer, "{} ({}) - {:?}", format_size(file_entry.size, BINARY), file_entry.size, file_entry.path)?; + writeln!( + writer, + "{} ({}) - \"{}\"", + format_size(file_entry.size, BINARY), + file_entry.size, + file_entry.path.to_string_lossy() + )?; } } else { writeln!(writer, "Not found any files.")?; diff --git a/czkawka_core/src/broken_files.rs b/czkawka_core/src/broken_files.rs index 64755eff6..72cd15266 100644 --- a/czkawka_core/src/broken_files.rs +++ b/czkawka_core/src/broken_files.rs @@ -9,7 +9,7 @@ use crate::common::{ check_if_stop_received, create_crash_message, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, AUDIO_FILES_EXTENSIONS, IMAGE_RS_BROKEN_FILES_EXTENSIONS, PDF_FILES_EXTENSIONS, ZIP_FILES_EXTENSIONS, }; -use crate::common_cache::{get_broken_files_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; +use crate::common_cache::{extract_loaded_cache, get_broken_files_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; use crate::common_dir_traversal::{DirTraversalBuilder, DirTraversalResult, FileEntry, ToolType}; use crate::common_tool::{CommonData, CommonToolData, DeleteMethod}; use crate::common_traits::*; @@ -284,13 +284,7 @@ impl BrokenFiles { self.get_text_messages_mut().extend_with_another_messages(messages); loaded_hash_map = loaded_items.unwrap_or_default(); - for (name, file_entry) in files_to_check { - if let Some(cached_file_entry) = loaded_hash_map.get(&name) { - records_already_cached.insert(name, cached_file_entry.clone()); - } else { - non_cached_files_to_check.insert(name, file_entry); - } - } + extract_loaded_cache(&loaded_hash_map, files_to_check, &mut records_already_cached, &mut non_cached_files_to_check); } else { loaded_hash_map = Default::default(); non_cached_files_to_check = files_to_check; @@ -421,7 +415,7 @@ impl PrintResults for BrokenFiles { if !self.broken_files.is_empty() { writeln!(writer, "Found {} broken files.", self.information.number_of_broken_files)?; for file_entry in &self.broken_files { - writeln!(writer, "{:?} - {}", file_entry.path, file_entry.error_string)?; + writeln!(writer, "\"{}\" - {}", file_entry.path.to_string_lossy(), file_entry.error_string)?; } } else { write!(writer, "Not found any broken files.")?; @@ -462,7 +456,7 @@ fn check_extension_availability( } else if pdf_extensions.contains(&extension_lowercase.as_str()) { TypeOfFile::PDF } else { - eprintln!("File with unknown extension: {full_name:?} - {extension_lowercase}"); + eprintln!("File with unknown extension: \"{}\" - {extension_lowercase}", full_name.to_string_lossy()); debug_assert!(false, "File with unknown extension"); TypeOfFile::Unknown } diff --git a/czkawka_core/src/common.rs b/czkawka_core/src/common.rs index 7d08746a7..807265238 100644 --- a/czkawka_core/src/common.rs +++ b/czkawka_core/src/common.rs @@ -150,18 +150,18 @@ pub const SEND_PROGRESS_DATA_TIME_BETWEEN: u32 = 
200; //ms pub fn remove_folder_if_contains_only_empty_folders(path: impl AsRef, remove_to_trash: bool) -> Result<(), String> { let path = path.as_ref(); if !path.is_dir() { - return Err(format!("Trying to remove folder {path:?} which is not a directory",)); + return Err(format!("Trying to remove folder \"{}\" which is not a directory", path.to_string_lossy())); } let mut entries_to_check = Vec::new(); let Ok(initial_entry) = path.read_dir() else { - return Err(format!("Cannot read directory {path:?}",)); + return Err(format!("Cannot read directory \"{}\"", path.to_string_lossy())); }; for entry in initial_entry { if let Ok(entry) = entry { entries_to_check.push(entry); } else { - return Err(format!("Cannot read entry from directory {path:?}")); + return Err(format!("Cannot read entry from directory \"{}\"", path.to_string_lossy())); } } loop { @@ -169,28 +169,40 @@ pub fn remove_folder_if_contains_only_empty_folders(path: impl AsRef, remo break; }; let Some(file_type) = entry.file_type().ok() else { - return Err(format!("Folder contains file with unknown type {:?} inside {path:?}", entry.path())); + return Err(format!( + "Folder contains file with unknown type \"{}\" inside \"{}\"", + entry.path().to_string_lossy(), + path.to_string_lossy() + )); }; if !file_type.is_dir() { - return Err(format!("Folder contains file {:?} inside {path:?}", entry.path(),)); + return Err(format!("Folder contains file \"{}\" inside \"{}\"", entry.path().to_string_lossy(), path.to_string_lossy())); } let Ok(internal_read_dir) = entry.path().read_dir() else { - return Err(format!("Cannot read directory {:?} inside {path:?}", entry.path())); + return Err(format!( + "Cannot read directory \"{}\" inside \"{}\"", + entry.path().to_string_lossy(), + path.to_string_lossy() + )); }; for internal_elements in internal_read_dir { if let Ok(internal_element) = internal_elements { entries_to_check.push(internal_element); } else { - return Err(format!("Cannot read entry from directory {:?} inside {path:?}", entry.path())); + return Err(format!( + "Cannot read entry from directory \"{}\" inside \"{}\"", + entry.path().to_string_lossy(), + path.to_string_lossy() + )); } } } if remove_to_trash { - trash::delete(path).map_err(|e| format!("Cannot move folder {path:?} to trash, reason {e}")) + trash::delete(path).map_err(|e| format!("Cannot move folder \"{}\" to trash, reason {e}", path.to_string_lossy())) } else { - fs::remove_dir_all(path).map_err(|e| format!("Cannot remove directory {path:?}, reason {e}")) + fs::remove_dir_all(path).map_err(|e| format!("Cannot remove directory \"{}\", reason {e}", path.to_string_lossy())) } } @@ -206,18 +218,18 @@ pub fn open_cache_folder(cache_file_name: &str, save_to_cache: bool, use_json: b if save_to_cache { if cache_dir.exists() { if !cache_dir.is_dir() { - warnings.push(format!("Config dir {cache_dir:?} is a file!")); + warnings.push(format!("Config dir \"{}\" is a file!", cache_dir.to_string_lossy())); return None; } } else if let Err(e) = fs::create_dir_all(&cache_dir) { - warnings.push(format!("Cannot create config dir {cache_dir:?}, reason {e}")); + warnings.push(format!("Cannot create config dir \"{}\", reason {e}", cache_dir.to_string_lossy())); return None; } file_handler_default = Some(match OpenOptions::new().truncate(true).write(true).create(true).open(&cache_file) { Ok(t) => t, Err(e) => { - warnings.push(format!("Cannot create or open cache file {cache_file:?}, reason {e}")); + warnings.push(format!("Cannot create or open cache file \"{}\", reason {e}", 
cache_file.to_string_lossy())); return None; } }); @@ -225,7 +237,7 @@ pub fn open_cache_folder(cache_file_name: &str, save_to_cache: bool, use_json: b file_handler_json = Some(match OpenOptions::new().truncate(true).write(true).create(true).open(&cache_file_json) { Ok(t) => t, Err(e) => { - warnings.push(format!("Cannot create or open cache file {cache_file_json:?}, reason {e}")); + warnings.push(format!("Cannot create or open cache file \"{}\", reason {e}", cache_file_json.to_string_lossy())); return None; } }); @@ -264,31 +276,34 @@ pub fn get_dynamic_image_from_heic(path: &str) -> Result { } #[cfg(feature = "libraw")] -pub fn get_dynamic_image_from_raw_image(path: impl AsRef) -> Option { - let buf = fs::read(path.as_ref()).ok()?; +pub fn get_dynamic_image_from_raw_image(path: impl AsRef) -> Result { + let buf = fs::read(path.as_ref())?; let processor = Processor::new(); let start_timer = Instant::now(); - let processed = processor.process_8bit(&buf).expect("processing successful"); + let processed = processor.process_8bit(&buf)?; println!("Processing took {:?}", start_timer.elapsed()); let width = processed.width(); let height = processed.height(); let data = processed.to_vec(); + let data_len = data.len(); - let buffer = ImageBuffer::from_raw(width, height, data)?; - // Utwórz DynamicImage z ImageBuffer - Some(DynamicImage::ImageRgb8(buffer)) + let buffer = ImageBuffer::from_raw(width, height, data).ok_or(anyhow::anyhow!(format!( + "Cannot create ImageBuffer from raw image with width: {width} and height: {height} and data length: {data_len}", + )))?; + + Ok(DynamicImage::ImageRgb8(buffer)) } #[cfg(not(feature = "libraw"))] -pub fn get_dynamic_image_from_raw_image(path: impl AsRef + std::fmt::Debug) -> Option { +pub fn get_dynamic_image_from_raw_image(path: impl AsRef + std::fmt::Debug) -> Result { let mut start_timer = Instant::now(); let mut times = Vec::new(); let loader = RawLoader::new(); - let raw = loader.decode_file(path.as_ref()).ok()?; + let raw = loader.decode_file(path.as_ref()).map_err(|e| format!("Error decoding file: {e:?}"))?; times.push(("After decoding", start_timer.elapsed())); start_timer = Instant::now(); @@ -298,28 +313,27 @@ pub fn get_dynamic_image_from_raw_image(path: impl AsRef + std::fmt::Debug times.push(("After creating source", start_timer.elapsed())); start_timer = Instant::now(); - let mut pipeline = Pipeline::new_from_source(source).ok()?; + let mut pipeline = Pipeline::new_from_source(source).map_err(|e| format!("Error creating pipeline: {e:?}"))?; times.push(("After creating pipeline", start_timer.elapsed())); start_timer = Instant::now(); pipeline.run(None); - let image = pipeline.output_8bit(None).ok()?; + let image = pipeline.output_8bit(None).map_err(|e| format!("Error running pipeline: {e:?}"))?; times.push(("After creating image", start_timer.elapsed())); start_timer = Instant::now(); - let image = ImageBuffer::, Vec>::from_raw(image.width as u32, image.height as u32, image.data)?; + let image = ImageBuffer::, Vec>::from_raw(image.width as u32, image.height as u32, image.data).ok_or_else(|| "Failed to create image buffer".to_string())?; times.push(("After creating image buffer", start_timer.elapsed())); start_timer = Instant::now(); - // println!("Properly hashed {:?}", path); - let res = Some(DynamicImage::ImageRgb8(image)); + let res = DynamicImage::ImageRgb8(image); times.push(("After creating dynamic image", start_timer.elapsed())); let str_timer = times.into_iter().map(|(name, time)| format!("{name}: {time:?}")).collect::>().join(", "); 
debug!("Loading raw image --- {str_timer}"); - res + Ok(res) } pub fn split_path(path: &Path) -> (String, String) { @@ -470,20 +484,23 @@ where for file_entry in &all_values[1..] { if dry_run { infos.push(format!( - "dry_run - would create hardlink from {:?} to {:?}", - original_file.get_path(), - file_entry.get_path() + "dry_run - would create hardlink from \"{}\" to \"{}\"", + original_file.get_path().to_string_lossy(), + file_entry.get_path().to_string_lossy() )); } else { if dry_run { - infos.push(format!("Replace file {:?} with hard link to {:?}", original_file.get_path(), file_entry.get_path())); + infos.push(format!( + "Replace file \"{}\" with hard link to \"{}\"", + original_file.get_path().to_string_lossy(), + file_entry.get_path().to_string_lossy() + )); } else { if let Err(e) = make_hard_link(original_file.get_path(), file_entry.get_path()) { errors.push(format!( - "Cannot create hard link from {:?} to {:?} - {}", - file_entry.get_path(), - original_file.get_path(), - e + "Cannot create hard link from \"{}\" to \"{}\" - {e}", + file_entry.get_path().to_string_lossy(), + original_file.get_path().to_string_lossy() )); failed_to_remove_files += 1; } else { @@ -508,10 +525,10 @@ where for i in items { if dry_run { - infos.push(format!("dry_run - would delete file: {:?}", i.get_path())); + infos.push(format!("dry_run - would delete file: \"{}\"", i.get_path().to_string_lossy())); } else { if let Err(e) = fs::remove_file(i.get_path()) { - errors.push(format!("Cannot delete file: {:?} - {e}", i.get_path())); + errors.push(format!("Cannot delete file: \"{}\" - {e}", i.get_path().to_string_lossy())); failed_to_remove_files += 1; } else { removed_files += 1; diff --git a/czkawka_core/src/common_cache.rs b/czkawka_core/src/common_cache.rs index e78aac69a..d4367d57b 100644 --- a/czkawka_core/src/common_cache.rs +++ b/czkawka_core/src/common_cache.rs @@ -84,6 +84,23 @@ where text_messages } +pub fn extract_loaded_cache( + loaded_hash_map: &BTreeMap, + files_to_check: BTreeMap, + records_already_cached: &mut BTreeMap, + non_cached_files_to_check: &mut BTreeMap, +) where + T: Clone, +{ + for (name, file_entry) in files_to_check { + if let Some(cached_file_entry) = loaded_hash_map.get(&name) { + records_already_cached.insert(name, cached_file_entry.clone()); + } else { + non_cached_files_to_check.insert(name, file_entry); + } + } +} + #[fun_time(message = "load_cache_from_file_generalized_by_path", level = "debug")] pub fn load_cache_from_file_generalized_by_path(cache_file_name: &str, delete_outdated_cache: bool, used_files: &BTreeMap) -> (Messages, Option>) where diff --git a/czkawka_core/src/common_dir_traversal.rs b/czkawka_core/src/common_dir_traversal.rs index dcc53c0c3..b07f16001 100644 --- a/czkawka_core/src/common_dir_traversal.rs +++ b/czkawka_core/src/common_dir_traversal.rs @@ -585,28 +585,8 @@ pub fn common_get_metadata_dir(entry_data: &DirEntry, warnings: &mut Vec } pub fn common_get_entry_data_metadata<'a>(entry: &'a Result, warnings: &mut Vec, current_folder: &Path) -> Option<(&'a DirEntry, Metadata)> { - let entry_data = match entry { - Ok(t) => t, - Err(e) => { - warnings.push(flc!( - "core_cannot_read_entry_dir", - dir = current_folder.to_string_lossy().to_string(), - reason = e.to_string() - )); - return None; - } - }; - let metadata: Metadata = match entry_data.metadata() { - Ok(t) => t, - Err(e) => { - warnings.push(flc!( - "core_cannot_read_metadata_dir", - dir = current_folder.to_string_lossy().to_string(), - reason = e.to_string() - )); - return None; - } - }; + let 
entry_data = common_get_entry_data(entry, warnings, current_folder)?; + let metadata = common_get_metadata_dir(entry_data, warnings, current_folder)?; Some((entry_data, metadata)) } diff --git a/czkawka_core/src/common_tool.rs b/czkawka_core/src/common_tool.rs index bf06ebd70..b74604181 100644 --- a/czkawka_core/src/common_tool.rs +++ b/czkawka_core/src/common_tool.rs @@ -208,16 +208,16 @@ pub trait CommonData { println!("Directories: {:?}", self.get_cd().directories); println!("Extensions: {:?}", self.get_cd().extensions); println!("Excluded items: {:?}", self.get_cd().excluded_items); - println!("Recursive search: {:?}", self.get_cd().recursive_search); - println!("Maximal file size: {:?}", self.get_cd().maximal_file_size); - println!("Minimal file size: {:?}", self.get_cd().minimal_file_size); - println!("Stopped search: {:?}", self.get_cd().stopped_search); - println!("Use cache: {:?}", self.get_cd().use_cache); - println!("Delete outdated cache: {:?}", self.get_cd().delete_outdated_cache); - println!("Save also as json: {:?}", self.get_cd().save_also_as_json); + println!("Recursive search: {}", self.get_cd().recursive_search); + println!("Maximal file size: {}", self.get_cd().maximal_file_size); + println!("Minimal file size: {}", self.get_cd().minimal_file_size); + println!("Stopped search: {}", self.get_cd().stopped_search); + println!("Use cache: {}", self.get_cd().use_cache); + println!("Delete outdated cache: {}", self.get_cd().delete_outdated_cache); + println!("Save also as json: {}", self.get_cd().save_also_as_json); println!("Delete method: {:?}", self.get_cd().delete_method); - println!("Use reference folders: {:?}", self.get_cd().use_reference_folders); - println!("Dry run: {:?}", self.get_cd().dry_run); + println!("Use reference folders: {}", self.get_cd().use_reference_folders); + println!("Dry run: {}", self.get_cd().dry_run); println!("---------------DEBUG PRINT MESSAGES---------------"); println!("Errors size - {}", self.get_cd().text_messages.errors.len()); diff --git a/czkawka_core/src/duplicate.rs b/czkawka_core/src/duplicate.rs index d07efd227..bc755ac51 100644 --- a/czkawka_core/src/duplicate.rs +++ b/czkawka_core/src/duplicate.rs @@ -205,7 +205,7 @@ impl DuplicateFinder { |fe: &FileEntry| { fe.path .file_name() - .unwrap_or_else(|| panic!("Found invalid file_name {:?}", fe.path)) + .unwrap_or_else(|| panic!("Found invalid file_name \"{}\"", fe.path.to_string_lossy())) .to_string_lossy() .to_string() } @@ -213,7 +213,7 @@ impl DuplicateFinder { |fe: &FileEntry| { fe.path .file_name() - .unwrap_or_else(|| panic!("Found invalid file_name {:?}", fe.path)) + .unwrap_or_else(|| panic!("Found invalid file_name \"{}\"", fe.path.to_string_lossy())) .to_string_lossy() .to_lowercase() } @@ -294,7 +294,7 @@ impl DuplicateFinder { fe.size, fe.path .file_name() - .unwrap_or_else(|| panic!("Found invalid file_name {:?}", fe.path)) + .unwrap_or_else(|| panic!("Found invalid file_name \"{}\"", fe.path.to_string_lossy())) .to_string_lossy() .to_string(), ) @@ -305,7 +305,7 @@ impl DuplicateFinder { fe.size, fe.path .file_name() - .unwrap_or_else(|| panic!("Found invalid file_name {:?}", fe.path)) + .unwrap_or_else(|| panic!("Found invalid file_name \"{}\"", fe.path.to_string_lossy())) .to_string_lossy() .to_lowercase(), ) @@ -486,32 +486,12 @@ impl DuplicateFinder { self.get_text_messages_mut().extend_with_another_messages(messages); loaded_hash_map = loaded_items.unwrap_or_default(); - debug!("prehash_load_cache_at_start - started diff between loaded and prechecked files"); - for 
(size, mut vec_file_entry) in mem::take(&mut self.files_with_identical_size) { - if let Some(cached_vec_file_entry) = loaded_hash_map.get(&size) { - // TODO maybe hashmap is not needed when using < 4 elements - let mut cached_path_entries: HashMap<&Path, DuplicateEntry> = HashMap::new(); - for file_entry in cached_vec_file_entry { - cached_path_entries.insert(&file_entry.path, file_entry.clone()); - } - for file_entry in vec_file_entry { - if let Some(cached_file_entry) = cached_path_entries.remove(file_entry.path.as_path()) { - records_already_cached.entry(size).or_default().push(cached_file_entry); - } else { - non_cached_files_to_check.entry(size).or_default().push(file_entry); - } - } - } else { - non_cached_files_to_check.entry(size).or_default().append(&mut vec_file_entry); - } - } - - debug!( - "prehash_load_cache_at_start - completed diff between loaded and prechecked files, {}({}) - non cached, {}({}) - already cached", - non_cached_files_to_check.values().map(Vec::len).sum::(), - format_size(non_cached_files_to_check.values().map(|v| v.iter().map(|e| e.size).sum::()).sum::(), BINARY), - records_already_cached.values().map(Vec::len).sum::(), - format_size(records_already_cached.values().map(|v| v.iter().map(|e| e.size).sum::()).sum::(), BINARY), + Self::diff_loaded_and_prechecked_files( + "prehash_load_cache_at_start", + mem::take(&mut self.files_with_identical_size), + &loaded_hash_map, + &mut records_already_cached, + &mut non_cached_files_to_check, ); } else { loaded_hash_map = Default::default(); @@ -644,6 +624,42 @@ impl DuplicateFinder { Some(()) } + fn diff_loaded_and_prechecked_files( + function_name: &str, + used_map: BTreeMap>, + loaded_hash_map: &BTreeMap>, + records_already_cached: &mut BTreeMap>, + non_cached_files_to_check: &mut BTreeMap>, + ) { + debug!("{function_name} - started diff between loaded and prechecked files"); + + for (size, mut vec_file_entry) in used_map { + if let Some(cached_vec_file_entry) = loaded_hash_map.get(&size) { + // TODO maybe hashmap is not needed when using < 4 elements + let mut cached_path_entries: HashMap<&Path, DuplicateEntry> = HashMap::new(); + for file_entry in cached_vec_file_entry { + cached_path_entries.insert(&file_entry.path, file_entry.clone()); + } + for file_entry in vec_file_entry { + if let Some(cached_file_entry) = cached_path_entries.remove(file_entry.path.as_path()) { + records_already_cached.entry(size).or_default().push(cached_file_entry); + } else { + non_cached_files_to_check.entry(size).or_default().push(file_entry); + } + } + } else { + non_cached_files_to_check.entry(size).or_default().append(&mut vec_file_entry); + } + } + debug!( + "{function_name} - completed diff between loaded and prechecked files - {}({}) non cached, {}({}) already cached", + non_cached_files_to_check.len(), + format_size(non_cached_files_to_check.values().map(|v| v.iter().map(|e| e.size).sum::()).sum::(), BINARY), + records_already_cached.len(), + format_size(records_already_cached.values().map(|v| v.iter().map(|e| e.size).sum::()).sum::(), BINARY), + ); + } + #[fun_time(message = "full_hashing_load_cache_at_start", level = "debug")] fn full_hashing_load_cache_at_start( &mut self, @@ -663,32 +679,12 @@ impl DuplicateFinder { self.get_text_messages_mut().extend_with_another_messages(messages); loaded_hash_map = loaded_items.unwrap_or_default(); - debug!("full_hashing_load_cache_at_start - started diff between loaded and prechecked files"); - for (size, mut vec_file_entry) in pre_checked_map { - if let Some(cached_vec_file_entry) = 
loaded_hash_map.get(&size) { - // TODO maybe hashmap is not needed when using < 4 elements - let mut cached_path_entries: HashMap<&Path, DuplicateEntry> = HashMap::new(); - for file_entry in cached_vec_file_entry { - cached_path_entries.insert(&file_entry.path, file_entry.clone()); - } - for file_entry in vec_file_entry { - if let Some(cached_file_entry) = cached_path_entries.remove(file_entry.path.as_path()) { - records_already_cached.entry(size).or_default().push(cached_file_entry); - } else { - non_cached_files_to_check.entry(size).or_default().push(file_entry); - } - } - } else { - non_cached_files_to_check.entry(size).or_default().append(&mut vec_file_entry); - } - } - - debug!( - "full_hashing_load_cache_at_start - completed diff between loaded and prechecked files - {}({}) non cached, {}({}) already cached", - non_cached_files_to_check.len(), - format_size(non_cached_files_to_check.values().map(|v| v.iter().map(|e| e.size).sum::()).sum::(), BINARY), - records_already_cached.len(), - format_size(records_already_cached.values().map(|v| v.iter().map(|e| e.size).sum::()).sum::(), BINARY), + Self::diff_loaded_and_prechecked_files( + "full_hashing_load_cache_at_start", + pre_checked_map, + &loaded_hash_map, + &mut records_already_cached, + &mut non_cached_files_to_check, ); } else { debug!("full_hashing_load_cache_at_start - not using cache"); @@ -1043,7 +1039,7 @@ impl PrintResults for DuplicateFinder { for (name, vector) in self.files_with_identical_names.iter().rev() { writeln!(writer, "Name - {} - {} files ", name, vector.len())?; for j in vector { - writeln!(writer, "{:?}", j.path)?; + writeln!(writer, "\"{}\"", j.path.to_string_lossy())?; } writeln!(writer)?; } @@ -1061,7 +1057,7 @@ impl PrintResults for DuplicateFinder { writeln!(writer, "Name - {} - {} files ", name, vector.len())?; writeln!(writer, "Reference file - {:?}", file_entry.path)?; for j in vector { - writeln!(writer, "{:?}", j.path)?; + writeln!(writer, "\"{}\"", j.path.to_string_lossy())?; } writeln!(writer)?; } @@ -1083,7 +1079,7 @@ impl PrintResults for DuplicateFinder { for ((size, name), vector) in self.files_with_identical_size_names.iter().rev() { writeln!(writer, "Name - {}, {} - {} files ", name, format_size(*size, BINARY), vector.len())?; for j in vector { - writeln!(writer, "{:?}", j.path)?; + writeln!(writer, "\"{}\"", j.path.to_string_lossy())?; } writeln!(writer)?; } @@ -1101,7 +1097,7 @@ impl PrintResults for DuplicateFinder { writeln!(writer, "Name - {}, {} - {} files ", name, format_size(*size, BINARY), vector.len())?; writeln!(writer, "Reference file - {:?}", file_entry.path)?; for j in vector { - writeln!(writer, "{:?}", j.path)?; + writeln!(writer, "\"{}\"", j.path.to_string_lossy())?; } writeln!(writer)?; } @@ -1125,7 +1121,7 @@ impl PrintResults for DuplicateFinder { for (size, vector) in self.files_with_identical_size.iter().rev() { write!(writer, "\n---- Size {} ({}) - {} files \n", format_size(*size, BINARY), size, vector.len())?; for file_entry in vector { - writeln!(writer, "{:?}", file_entry.path)?; + writeln!(writer, "\"{}\"", file_entry.path.to_string_lossy())?; } } } else if !self.files_with_identical_size_referenced.is_empty() { @@ -1144,7 +1140,7 @@ impl PrintResults for DuplicateFinder { writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?; writeln!(writer, "Reference file - {:?}", file_entry.path)?; for file_entry in vector { - writeln!(writer, "{:?}", file_entry.path)?; + writeln!(writer, "\"{}\"", file_entry.path.to_string_lossy())?; } 
} } else { @@ -1168,7 +1164,7 @@ impl PrintResults for DuplicateFinder { for vector in vectors_vector { writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?; for file_entry in vector { - writeln!(writer, "{:?}", file_entry.path)?; + writeln!(writer, "\"{}\"", file_entry.path.to_string_lossy())?; } } } @@ -1189,7 +1185,7 @@ impl PrintResults for DuplicateFinder { writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?; writeln!(writer, "Reference file - {:?}", file_entry.path)?; for file_entry in vector { - writeln!(writer, "{:?}", file_entry.path)?; + writeln!(writer, "\"{}\"", file_entry.path.to_string_lossy())?; } } } @@ -1233,7 +1229,7 @@ fn filter_hard_links(vec_file_entry: &[FileEntry]) -> Vec { let mut identical: Vec = Vec::with_capacity(vec_file_entry.len()); for f in vec_file_entry { if let Ok(meta) = file_id::get_low_res_file_id(&f.path) { - if let file_id::FileId::HighRes {file_id, ..} = meta { + if let file_id::FileId::HighRes { file_id, .. } = meta { if !inodes.insert(file_id) { continue; } diff --git a/czkawka_core/src/empty_files.rs b/czkawka_core/src/empty_files.rs index d43a03987..abb7f2017 100644 --- a/czkawka_core/src/empty_files.rs +++ b/czkawka_core/src/empty_files.rs @@ -128,7 +128,7 @@ impl PrintResults for EmptyFiles { if !self.empty_files.is_empty() { writeln!(writer, "Found {} empty files.", self.information.number_of_empty_files)?; for file_entry in &self.empty_files { - writeln!(writer, "{:?}", file_entry.path)?; + writeln!(writer, "\"{}\"", file_entry.path.to_string_lossy())?; } } else { write!(writer, "Not found any empty files.")?; diff --git a/czkawka_core/src/invalid_symlinks.rs b/czkawka_core/src/invalid_symlinks.rs index 02fae285e..6121bdbff 100644 --- a/czkawka_core/src/invalid_symlinks.rs +++ b/czkawka_core/src/invalid_symlinks.rs @@ -193,9 +193,9 @@ impl PrintResults for InvalidSymlinks { for file_entry in &self.invalid_symlinks { writeln!( writer, - "{:?}\t\t{:?}\t\t{}", - file_entry.path, - file_entry.symlink_info.destination_path, + "\"{}\"\t\t\"{}\"\t\t{}", + file_entry.path.to_string_lossy(), + file_entry.symlink_info.destination_path.to_string_lossy(), match file_entry.symlink_info.type_of_error { ErrorType::InfiniteRecursion => "Infinite Recursion", ErrorType::NonExistentFile => "Non Existent File", diff --git a/czkawka_core/src/same_music.rs b/czkawka_core/src/same_music.rs index d109938fc..599fc9ba5 100644 --- a/czkawka_core/src/same_music.rs +++ b/czkawka_core/src/same_music.rs @@ -28,7 +28,7 @@ use crate::common::{ check_if_stop_received, create_crash_message, delete_files_custom, filter_reference_folders_generic, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, AUDIO_FILES_EXTENSIONS, }; -use crate::common_cache::{get_similar_music_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; +use crate::common_cache::{extract_loaded_cache, get_similar_music_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; use crate::common_dir_traversal::{CheckingMethod, DirTraversalBuilder, DirTraversalResult, FileEntry, ToolType}; use crate::common_tool::{CommonData, CommonToolData, DeleteMethod}; use crate::common_traits::*; @@ -246,13 +246,12 @@ impl SameMusic { loaded_hash_map = loaded_items.unwrap_or_default(); debug!("load_cache - Starting to check for differences"); - for (name, file_entry) in mem::take(&mut self.music_to_check) { - if let Some(cached_file_entry) = 
loaded_hash_map.get(&name) { - records_already_cached.insert(name, cached_file_entry.clone()); - } else { - non_cached_files_to_check.insert(name, file_entry); - } - } + extract_loaded_cache( + &loaded_hash_map, + mem::take(&mut self.music_to_check), + &mut records_already_cached, + &mut non_cached_files_to_check, + ); debug!( "load_cache - completed diff between loaded and prechecked files, {}({}) - non cached, {}({}) - already cached", non_cached_files_to_check.len(), @@ -586,38 +585,31 @@ impl SameMusic { continue; } - let temp_collected_similar_items = files_to_compare + let (mut collected_similar_items, errors): (Vec<_>, Vec<_>) = files_to_compare .par_iter() .map(|e_entry| { let e_string = e_entry.path.to_string_lossy().to_string(); if used_paths.contains(&e_string) || e_string == f_string { - return Ok(None); + return None; } let mut segments = match match_fingerprints(&f_entry.fingerprint, &e_entry.fingerprint, configuration) { Ok(segments) => segments, - Err(e) => return Err(format!("Error while comparing fingerprints: {e}")), + Err(e) => return Some(Err(format!("Error while comparing fingerprints: {e}"))), }; segments.retain(|s| s.duration(configuration) > minimum_segment_duration && s.score < maximum_difference); if segments.is_empty() { - Ok(None) + None } else { - Ok(Some((e_string, e_entry))) + Some(Ok((e_string, e_entry))) } }) - .collect::>(); + .flatten() + .partition_map(|res| match res { + Ok(entry) => itertools::Either::Left(entry), + Err(err) => itertools::Either::Right(err), + }); - let mut collected_similar_items = Vec::with_capacity(temp_collected_similar_items.len()); - for result in temp_collected_similar_items { - match result { - Ok(Some(data)) => { - collected_similar_items.push(data); - } - Ok(None) => (), - Err(e) => { - self.common_data.text_messages.errors.push(e); - } - } - } + self.common_data.text_messages.errors.extend(errors); collected_similar_items.retain(|(path, _entry)| !used_paths.contains(path)); if !collected_similar_items.is_empty() { @@ -930,8 +922,14 @@ impl PrintResults for SameMusic { for file_entry in vec_file_entry { writeln!( writer, - "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {:?}", - file_entry.track_title, file_entry.track_artist, file_entry.year, file_entry.length, file_entry.genre, file_entry.bitrate, file_entry.path + "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: \"{}\"", + file_entry.track_title, + file_entry.track_artist, + file_entry.year, + file_entry.length, + file_entry.genre, + file_entry.bitrate, + file_entry.path.to_string_lossy() )?; } writeln!(writer)?; @@ -943,14 +941,26 @@ impl PrintResults for SameMusic { writeln!(writer)?; writeln!( writer, - "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {:?}", - file_entry.track_title, file_entry.track_artist, file_entry.year, file_entry.length, file_entry.genre, file_entry.bitrate, file_entry.path + "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: \"{}\"", + file_entry.track_title, + file_entry.track_artist, + file_entry.year, + file_entry.length, + file_entry.genre, + file_entry.bitrate, + file_entry.path.to_string_lossy() )?; for file_entry in vec_file_entry { writeln!( writer, - "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {:?}", - file_entry.track_title, file_entry.track_artist, file_entry.year, file_entry.length, file_entry.genre, file_entry.bitrate, file_entry.path + "TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: \"{}\"", + file_entry.track_title, + file_entry.track_artist, + file_entry.year, + file_entry.length, + 
file_entry.genre, + file_entry.bitrate, + file_entry.path.to_string_lossy() )?; } writeln!(writer)?; diff --git a/czkawka_core/src/similar_images.rs b/czkawka_core/src/similar_images.rs index d3b1c43b9..6aeaa3c62 100644 --- a/czkawka_core/src/similar_images.rs +++ b/czkawka_core/src/similar_images.rs @@ -9,7 +9,7 @@ use bk_tree::BKTree; use crossbeam_channel::{Receiver, Sender}; use fun_time::fun_time; use humansize::{format_size, BINARY}; -use image::GenericImageView; +use image::{DynamicImage, GenericImageView}; use image_hasher::{FilterType, HashAlg, HasherConfig}; use log::debug; use rayon::prelude::*; @@ -21,7 +21,7 @@ use crate::common::{ check_if_stop_received, create_crash_message, delete_files_custom, get_dynamic_image_from_raw_image, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, HEIC_EXTENSIONS, IMAGE_RS_SIMILAR_IMAGES_EXTENSIONS, RAW_IMAGE_EXTENSIONS, }; -use crate::common_cache::{get_similar_images_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; +use crate::common_cache::{extract_loaded_cache, get_similar_images_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; use crate::common_dir_traversal::{inode, take_1_per_inode, DirTraversalBuilder, DirTraversalResult, FileEntry, ToolType}; use crate::common_tool::{CommonData, CommonToolData, DeleteMethod}; use crate::common_traits::{DebugPrint, PrintResults, ResultEntry}; @@ -257,13 +257,12 @@ impl SimilarImages { loaded_hash_map = loaded_items.unwrap_or_default(); debug!("hash_images-load_cache - starting calculating diff"); - for (name, file_entry) in mem::take(&mut self.images_to_check) { - if let Some(cached_file_entry) = loaded_hash_map.get(&name) { - records_already_cached.insert(name, cached_file_entry.clone()); - } else { - non_cached_files_to_check.insert(name, file_entry); - } - } + extract_loaded_cache( + &loaded_hash_map, + mem::take(&mut self.images_to_check), + &mut records_already_cached, + &mut non_cached_files_to_check, + ); debug!( "hash_images_load_cache - completed diff between loaded and prechecked files, {}({}) - non cached, {}({}) - already cached", non_cached_files_to_check.len(), @@ -303,7 +302,7 @@ impl SimilarImages { .collect::>(); debug!("hash_images - start hashing images"); - let mut vec_file_entry: Vec = non_cached_files_to_check + let (mut vec_file_entry, errors): (Vec, Vec) = non_cached_files_to_check .into_par_iter() .map(|(_s, mut file_entry)| { atomic_counter.fetch_add(1, Ordering::Relaxed); @@ -311,13 +310,19 @@ impl SimilarImages { check_was_stopped.store(true, Ordering::Relaxed); return None; } - self.collect_image_file_entry(&mut file_entry); + if let Err(e) = self.collect_image_file_entry(&mut file_entry) { + return Some(Err(e)); + } - Some(Some(file_entry)) + Some(Ok(file_entry)) }) .while_some() - .filter_map(|e| e) - .collect::>(); + .partition_map(|res| match res { + Ok(entry) => itertools::Either::Left(entry), + Err(err) => itertools::Either::Right(err), + }); + + self.common_data.text_messages.errors.extend(errors); debug!("hash_images - end hashing {} images", vec_file_entry.len()); send_info_and_wait_for_ending_all_threads(&progress_thread_run, progress_thread_handle); @@ -364,7 +369,7 @@ impl SimilarImages { } } - fn collect_image_file_entry(&self, file_entry: &mut ImagesEntry) { + fn collect_image_file_entry(&self, file_entry: &mut ImagesEntry) -> Result<(), String> { let img; if file_entry.image_type == ImageType::Heic { @@ -372,44 +377,24 @@ impl SimilarImages { { img = match 
get_dynamic_image_from_heic(&file_entry.path.to_string_lossy()) { Ok(t) => t, - Err(_) => { - return; + Err(e) => { + return Err(format!("Cannot open HEIC file \"{}\": {}", file_entry.path.to_string_lossy(), e)); } }; } #[cfg(not(feature = "heif"))] { - if let Ok(image_result) = panic::catch_unwind(|| image::open(&file_entry.path)) { - if let Ok(image2) = image_result { - img = image2; - } else { - return; - } - } else { - let message = crate::common::create_crash_message("Image-rs", &file_entry.path.to_string_lossy(), "https://github.com/image-rs/image/issues"); - println!("{message}"); - return; - } + img = Self::get_normal_heif_image(file_entry)?; } } else { match file_entry.image_type { ImageType::Normal | ImageType::Heic => { - if let Ok(image_result) = panic::catch_unwind(|| image::open(&file_entry.path)) { - if let Ok(image2) = image_result { - img = image2; - } else { - return; - } - } else { - let message = create_crash_message("Image-rs", &file_entry.path.to_string_lossy(), "https://github.com/image-rs/image/issues"); - println!("{message}"); - return; - } + img = Self::get_normal_heif_image(file_entry)?; } ImageType::Raw => { img = match get_dynamic_image_from_raw_image(&file_entry.path) { - Some(t) => t, - None => return, + Ok(t) => t, + Err(e) => return Err(format!("Cannot open RAW file \"{}\": {}", file_entry.path.to_string_lossy(), e)), }; } _ => { @@ -431,6 +416,21 @@ impl SimilarImages { let hash = hasher.hash_image(&img); file_entry.hash = hash.as_bytes().to_vec(); + + Ok(()) + } + + fn get_normal_heif_image(file_entry: &ImagesEntry) -> Result { + if let Ok(image_result) = panic::catch_unwind(|| image::open(&file_entry.path)) { + match image_result { + Ok(image) => Ok(image), + Err(e) => Err(format!("Cannot open image file \"{}\": {}", file_entry.path.to_string_lossy(), e)), + } + } else { + let message = create_crash_message("Image-rs", &file_entry.path.to_string_lossy(), "https://github.com/image-rs/image/issues"); + println!("{message}"); + Err(message) + } } // Split hashes at 2 parts, base hashes and hashes to compare, 3 argument is set of hashes with multiple images @@ -809,8 +809,8 @@ impl PrintResults for SimilarImages { for file_entry in struct_similar { writeln!( writer, - "{:?} - {}x{} - {} - {}", - file_entry.path, + "\"{}\" - {}x{} - {} - {}", + file_entry.path.to_string_lossy(), file_entry.width, file_entry.height, format_size(file_entry.size, BINARY), @@ -827,8 +827,8 @@ impl PrintResults for SimilarImages { writeln!(writer)?; writeln!( writer, - "{:?} - {}x{} - {} - {}", - file_entry.path, + "\"{}\" - {}x{} - {} - {}", + file_entry.path.to_string_lossy(), file_entry.width, file_entry.height, format_size(file_entry.size, BINARY), diff --git a/czkawka_core/src/similar_videos.rs b/czkawka_core/src/similar_videos.rs index 49dfbd299..e366ecf66 100644 --- a/czkawka_core/src/similar_videos.rs +++ b/czkawka_core/src/similar_videos.rs @@ -15,7 +15,7 @@ use vid_dup_finder_lib::HashCreationErrorKind::DetermineVideo; use vid_dup_finder_lib::{NormalizedTolerance, VideoHash}; use crate::common::{check_if_stop_received, delete_files_custom, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, VIDEO_FILES_EXTENSIONS}; -use crate::common_cache::{get_similar_videos_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; +use crate::common_cache::{extract_loaded_cache, get_similar_videos_cache_file, load_cache_from_file_generalized_by_path, save_cache_to_file_generalized}; use crate::common_dir_traversal::{inode, 
take_1_per_inode, DirTraversalBuilder, DirTraversalResult, FileEntry, ToolType}; use crate::common_tool::{CommonData, CommonToolData, DeleteMethod}; use crate::common_traits::{DebugPrint, PrintResults, ResultEntry}; @@ -183,13 +183,12 @@ impl SimilarVideos { self.get_text_messages_mut().extend_with_another_messages(messages); loaded_hash_map = loaded_items.unwrap_or_default(); - for (name, file_entry) in mem::take(&mut self.videos_to_check) { - if let Some(cached_file_entry) = loaded_hash_map.get(&name) { - records_already_cached.insert(name, cached_file_entry.clone()); - } else { - non_cached_files_to_check.insert(name, file_entry); - } - } + extract_loaded_cache( + &loaded_hash_map, + mem::take(&mut self.videos_to_check), + &mut records_already_cached, + &mut non_cached_files_to_check, + ); } else { loaded_hash_map = Default::default(); mem::swap(&mut self.videos_to_check, &mut non_cached_files_to_check); @@ -373,7 +372,7 @@ impl PrintResults for SimilarVideos { for struct_similar in &self.similar_vectors { writeln!(writer, "Found {} videos which have similar friends", struct_similar.len())?; for file_entry in struct_similar { - writeln!(writer, "{:?} - {}", file_entry.path, format_size(file_entry.size, BINARY))?; + writeln!(writer, "\"{}\" - {}", file_entry.path.to_string_lossy(), format_size(file_entry.size, BINARY))?; } writeln!(writer)?; } @@ -383,9 +382,9 @@ impl PrintResults for SimilarVideos { for (fe, struct_similar) in &self.similar_referenced_vectors { writeln!(writer, "Found {} videos which have similar friends", struct_similar.len())?; writeln!(writer)?; - writeln!(writer, "{:?} - {}", fe.path, format_size(fe.size, BINARY))?; + writeln!(writer, "\"{}\" - {}", fe.path.to_string_lossy(), format_size(fe.size, BINARY))?; for file_entry in struct_similar { - writeln!(writer, "{:?} - {}", file_entry.path, format_size(file_entry.size, BINARY))?; + writeln!(writer, "\"{}\" - {}", file_entry.path.to_string_lossy(), format_size(file_entry.size, BINARY))?; } writeln!(writer)?; } diff --git a/czkawka_core/src/temporary.rs b/czkawka_core/src/temporary.rs index bcea554a8..e4c5ab3f3 100644 --- a/czkawka_core/src/temporary.rs +++ b/czkawka_core/src/temporary.rs @@ -206,7 +206,7 @@ impl PrintResults for Temporary { writeln!(writer, "Found {} temporary files.\n", self.information.number_of_temporary_files)?; for file_entry in &self.temporary_files { - writeln!(writer, "{:?}", file_entry.path)?; + writeln!(writer, "\"{}\"", file_entry.path.to_string_lossy())?; } Ok(()) diff --git a/czkawka_gui/README.md b/czkawka_gui/README.md index fbbdc44c2..5db5f1487 100644 --- a/czkawka_gui/README.md +++ b/czkawka_gui/README.md @@ -14,7 +14,7 @@ Prebuilt binaries are available here - https://github.com/qarmin/czkawka/release #### Prebuild binaries -Ubuntu - `sudo apt install libgtk-4 libheif libraw ffmpeg -y` +Ubuntu - `sudo apt install libgtk-4-bin libheif1 libraw-bin ffmpeg -y` #### Snap - diff --git a/czkawka_gui/src/connect_things/connect_button_compare.rs b/czkawka_gui/src/connect_things/connect_button_compare.rs index 4b0e4f8b0..d1c8fb976 100644 --- a/czkawka_gui/src/connect_things/connect_button_compare.rs +++ b/czkawka_gui/src/connect_things/connect_button_compare.rs @@ -619,8 +619,8 @@ fn update_bottom_buttons( shared_using_for_preview: &Rc, Option)>>, image_cache: &Rc>>, ) { - let left_tree_view = (shared_using_for_preview.borrow()).0.clone().expect("Left tree_view not set"); - let right_tree_view = (shared_using_for_preview.borrow()).1.clone().expect("Right tree_view not set"); + let 
left_tree_view = shared_using_for_preview.borrow().0.clone().expect("Left tree_view not set"); + let right_tree_view = shared_using_for_preview.borrow().1.clone().expect("Right tree_view not set"); for (number, i) in get_all_direct_children(all_gtk_box).into_iter().enumerate() { let cache_tree_path = (*image_cache.borrow())[number].4.clone(); diff --git a/czkawka_gui/src/initialize_gui.rs b/czkawka_gui/src/initialize_gui.rs index 41f877023..cd96a58d9 100644 --- a/czkawka_gui/src/initialize_gui.rs +++ b/czkawka_gui/src/initialize_gui.rs @@ -425,7 +425,7 @@ fn connect_event_buttons(gui_data: &GuiData) { &event_controller_key .widget() .expect("Item has no widget") - .downcast::() + .downcast::() .expect("Widget is not TreeView"), &text_view_errors, &check_button_settings_show_preview, @@ -457,7 +457,7 @@ fn connect_event_buttons(gui_data: &GuiData) { &event_controller_key .widget() .expect("Item has no widget") - .downcast::() + .downcast::() .expect("Widget is not TreeView"), &text_view_errors, &check_button_settings_show_preview_similar_images, diff --git a/justfile b/justfile index 9c41a0043..85730d149 100644 --- a/justfile +++ b/justfile @@ -25,6 +25,9 @@ czkawka: krokiet_r: cargo run --bin krokiet --release +krokiet_dark: + SLINT_STYLE=fluent-dark cargo run --bin krokiet + czkawka_r: cargo run --bin czkawka_gui --release @@ -46,7 +49,7 @@ upgrade: fix: cargo +nightly fmt - cargo clippy --fix --allow-dirty -- -Wclippy::pedantic -Aclippy::default_trait_access -Aclippy::cast_possible_truncation -Aclippy::must_use_candidate -Aclippy::missing_panics_doc -Aclippy::too_many_lines -Aclippy::cast_precision_loss -Aclippy::cast_sign_loss -Aclippy::module_name_repetitions -Aclippy::struct_excessive_bools -Aclippy::cast_possible_wrap -Aclippy::cast_lossless -Aclippy::if_not_else -Aclippy::wildcard_imports -Aclippy::return_self_not_must_use -Aclippy::missing_errors_doc -Aclippy::match_wildcard_for_single_variants -Aclippy::assigning_clones -Aclippy::unused_self -Aclippy::manual_is_variant_and -Aclippy::new_without_default + cargo clippy --fix --allow-dirty --allow-staged -- -Wclippy::pedantic -Aclippy::default_trait_access -Aclippy::cast_possible_truncation -Aclippy::must_use_candidate -Aclippy::missing_panics_doc -Aclippy::too_many_lines -Aclippy::cast_precision_loss -Aclippy::cast_sign_loss -Aclippy::module_name_repetitions -Aclippy::struct_excessive_bools -Aclippy::cast_possible_wrap -Aclippy::cast_lossless -Aclippy::if_not_else -Aclippy::wildcard_imports -Aclippy::return_self_not_must_use -Aclippy::missing_errors_doc -Aclippy::match_wildcard_for_single_variants -Aclippy::assigning_clones -Aclippy::unused_self -Aclippy::manual_is_variant_and -Aclippy::new_without_default cargo +nightly fmt clippy_nightly: diff --git a/krokiet/src/connect_show_preview.rs b/krokiet/src/connect_show_preview.rs index 69626ffb9..b96b54fac 100644 --- a/krokiet/src/connect_show_preview.rs +++ b/krokiet/src/connect_show_preview.rs @@ -95,11 +95,12 @@ fn load_image(image_path: &Path) -> Option<(Duration, DynamicImage)> { } } } else if is_raw_image { - if let Some(img) = get_dynamic_image_from_raw_image(image_name) { - img - } else { - error!("Error while loading raw image - not sure why - try to guess"); - return None; + match get_dynamic_image_from_raw_image(image_name) { + Ok(img) => img, + Err(e) => { + error!("Error while loading raw image: {}", e); + return None; + } } } else { return None;