Просмотр исходного кода

Merge branch 'release-1.2.0' into release-1.3.0

john 1 год назад
Родитель
Commit
c4a6de26de

+ 247 - 33
src-tauri/Cargo.lock

@@ -112,6 +112,17 @@ dependencies = [
  "pin-project-lite",
 ]
 
+[[package]]
+name = "async-channel"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
+dependencies = [
+ "concurrent-queue",
+ "event-listener 2.5.3",
+ "futures-core",
+]
+
 [[package]]
 name = "async-channel"
 version = "2.3.1"
@@ -132,8 +143,8 @@ checksum = "c8828ec6e544c02b0d6691d21ed9f9218d0384a82542855073c2a3f58304aaf0"
 dependencies = [
  "async-task",
  "concurrent-queue",
- "fastrand",
- "futures-lite",
+ "fastrand 2.1.0",
+ "futures-lite 2.3.0",
  "slab",
 ]
 
@@ -143,9 +154,44 @@ version = "2.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ebcd09b382f40fcd159c2d695175b2ae620ffa5f3bd6f664131efff4e8b9e04a"
 dependencies = [
- "async-lock",
+ "async-lock 3.4.0",
+ "blocking",
+ "futures-lite 2.3.0",
+]
+
+[[package]]
+name = "async-global-executor"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c"
+dependencies = [
+ "async-channel 2.3.1",
+ "async-executor",
+ "async-io 2.3.3",
+ "async-lock 3.4.0",
  "blocking",
- "futures-lite",
+ "futures-lite 2.3.0",
+ "once_cell",
+]
+
+[[package]]
+name = "async-io"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af"
+dependencies = [
+ "async-lock 2.8.0",
+ "autocfg",
+ "cfg-if",
+ "concurrent-queue",
+ "futures-lite 1.13.0",
+ "log",
+ "parking",
+ "polling 2.8.0",
+ "rustix 0.37.27",
+ "slab",
+ "socket2 0.4.10",
+ "waker-fn",
 ]
 
 [[package]]
@@ -154,19 +200,28 @@ version = "2.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964"
 dependencies = [
- "async-lock",
+ "async-lock 3.4.0",
  "cfg-if",
  "concurrent-queue",
  "futures-io",
- "futures-lite",
+ "futures-lite 2.3.0",
  "parking",
- "polling",
- "rustix",
+ "polling 3.7.1",
+ "rustix 0.38.34",
  "slab",
  "tracing",
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "async-lock"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b"
+dependencies = [
+ "event-listener 2.5.3",
+]
+
 [[package]]
 name = "async-lock"
 version = "3.4.0"
@@ -184,16 +239,16 @@ version = "2.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f7eda79bbd84e29c2b308d1dc099d7de8dcc7035e48f4bf5dc4a531a44ff5e2a"
 dependencies = [
- "async-channel",
- "async-io",
- "async-lock",
+ "async-channel 2.3.1",
+ "async-io 2.3.3",
+ "async-lock 3.4.0",
  "async-signal",
  "async-task",
  "blocking",
  "cfg-if",
  "event-listener 5.3.1",
- "futures-lite",
- "rustix",
+ "futures-lite 2.3.0",
+ "rustix 0.38.34",
  "tracing",
  "windows-sys 0.52.0",
 ]
@@ -215,18 +270,44 @@ version = "0.2.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "329972aa325176e89114919f2a80fdae4f4c040f66a370b1a1159c6c0f94e7aa"
 dependencies = [
- "async-io",
- "async-lock",
+ "async-io 2.3.3",
+ "async-lock 3.4.0",
  "atomic-waker",
  "cfg-if",
  "futures-core",
  "futures-io",
- "rustix",
+ "rustix 0.38.34",
  "signal-hook-registry",
  "slab",
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "async-std"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d"
+dependencies = [
+ "async-channel 1.9.0",
+ "async-global-executor",
+ "async-io 1.13.0",
+ "async-lock 2.8.0",
+ "crossbeam-utils",
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-lite 1.13.0",
+ "gloo-timers",
+ "kv-log-macro",
+ "log",
+ "memchr",
+ "once_cell",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+ "wasm-bindgen-futures",
+]
+
 [[package]]
 name = "async-task"
 version = "4.7.1"
@@ -382,10 +463,10 @@ version = "1.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea"
 dependencies = [
- "async-channel",
+ "async-channel 2.3.1",
  "async-task",
  "futures-io",
- "futures-lite",
+ "futures-lite 2.3.0",
  "piper",
 ]
 
@@ -1098,6 +1179,15 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
 
+[[package]]
+name = "fastrand"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
+dependencies = [
+ "instant",
+]
+
 [[package]]
 name = "fastrand"
 version = "2.1.0"
@@ -1273,13 +1363,28 @@ version = "0.3.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
 
+[[package]]
+name = "futures-lite"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce"
+dependencies = [
+ "fastrand 1.9.0",
+ "futures-core",
+ "futures-io",
+ "memchr",
+ "parking",
+ "pin-project-lite",
+ "waker-fn",
+]
+
 [[package]]
 name = "futures-lite"
 version = "2.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5"
 dependencies = [
- "fastrand",
+ "fastrand 2.1.0",
  "futures-core",
  "futures-io",
  "parking",
@@ -1576,6 +1681,18 @@ dependencies = [
  "regex-syntax 0.8.3",
 ]
 
+[[package]]
+name = "gloo-timers"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "js-sys",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "gobject-sys"
 version = "0.15.10"
@@ -1820,7 +1937,7 @@ dependencies = [
  "httpdate",
  "itoa 1.0.11",
  "pin-project-lite",
- "socket2",
+ "socket2 0.5.7",
  "tokio",
  "tower-service",
  "tracing",
@@ -1970,6 +2087,17 @@ dependencies = [
  "cfg-if",
 ]
 
+[[package]]
+name = "io-lifetimes"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "windows-sys 0.48.0",
+]
+
 [[package]]
 name = "ipnet"
 version = "2.9.0"
@@ -2079,6 +2207,15 @@ dependencies = [
  "selectors",
 ]
 
+[[package]]
+name = "kv-log-macro"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
+dependencies = [
+ "log",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
@@ -2137,6 +2274,12 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dd1bc4d24ad230d21fb898d1116b1801d7adfc449d42026475862ab48b11e70e"
 
+[[package]]
+name = "linux-raw-sys"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
+
 [[package]]
 name = "linux-raw-sys"
 version = "0.4.14"
@@ -2158,6 +2301,9 @@ name = "log"
 version = "0.4.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
+dependencies = [
+ "value-bag",
+]
 
 [[package]]
 name = "loom"
@@ -2985,7 +3131,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ae1d5c74c9876f070d3e8fd503d748c7d974c3e48da8f41350fa5222ef9b4391"
 dependencies = [
  "atomic-waker",
- "fastrand",
+ "fastrand 2.1.0",
  "futures-io",
 ]
 
@@ -3043,6 +3189,22 @@ dependencies = [
  "miniz_oxide",
 ]
 
+[[package]]
+name = "polling"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce"
+dependencies = [
+ "autocfg",
+ "bitflags 1.3.2",
+ "cfg-if",
+ "concurrent-queue",
+ "libc",
+ "log",
+ "pin-project-lite",
+ "windows-sys 0.48.0",
+]
+
 [[package]]
 name = "polling"
 version = "3.7.1"
@@ -3053,7 +3215,7 @@ dependencies = [
  "concurrent-queue",
  "hermit-abi",
  "pin-project-lite",
- "rustix",
+ "rustix 0.38.34",
  "tracing",
  "windows-sys 0.52.0",
 ]
@@ -3427,6 +3589,20 @@ dependencies = [
  "semver",
 ]
 
+[[package]]
+name = "rustix"
+version = "0.37.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2"
+dependencies = [
+ "bitflags 1.3.2",
+ "errno",
+ "io-lifetimes",
+ "libc",
+ "linux-raw-sys 0.3.8",
+ "windows-sys 0.48.0",
+]
+
 [[package]]
 name = "rustix"
 version = "0.38.34"
@@ -3436,7 +3612,7 @@ dependencies = [
  "bitflags 2.5.0",
  "errno",
  "libc",
- "linux-raw-sys",
+ "linux-raw-sys 0.4.14",
  "windows-sys 0.52.0",
 ]
 
@@ -3756,6 +3932,16 @@ version = "1.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
 
+[[package]]
+name = "socket2"
+version = "0.4.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
 [[package]]
 name = "socket2"
 version = "0.5.7"
@@ -4326,6 +4512,7 @@ name = "tauri-app"
 version = "1.2.0"
 dependencies = [
  "anyhow",
+ "async-std",
  "async-trait",
  "hex",
  "home",
@@ -4340,6 +4527,7 @@ dependencies = [
  "tauri-plugin-persisted-scope",
  "tauri-plugin-sql",
  "thiserror",
+ "tokio",
  "tracing",
  "tracing-subscriber",
  "trash",
@@ -4535,8 +4723,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"
 dependencies = [
  "cfg-if",
- "fastrand",
- "rustix",
+ "fastrand 2.1.0",
+ "rustix 0.38.34",
  "windows-sys 0.52.0",
 ]
 
@@ -4655,11 +4843,25 @@ dependencies = [
  "libc",
  "mio",
  "num_cpus",
+ "parking_lot",
  "pin-project-lite",
- "socket2",
+ "signal-hook-registry",
+ "socket2 0.5.7",
+ "tokio-macros",
  "windows-sys 0.48.0",
 ]
 
+[[package]]
+name = "tokio-macros"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.66",
+]
+
 [[package]]
 name = "tokio-native-tls"
 version = "0.3.1"
@@ -4959,6 +5161,12 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
 
+[[package]]
+name = "value-bag"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101"
+
 [[package]]
 name = "vcpkg"
 version = "0.2.15"
@@ -5003,6 +5211,12 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "waker-fn"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7"
+
 [[package]]
 name = "walkdir"
 version = "2.5.0"
@@ -5127,7 +5341,7 @@ checksum = "34e9e6b6d4a2bb4e7e69433e0b35c7923b95d4dc8503a84d25ec917a4bbfdf07"
 dependencies = [
  "cc",
  "downcast-rs",
- "rustix",
+ "rustix 0.38.34",
  "scoped-tls",
  "smallvec",
  "wayland-sys",
@@ -5140,7 +5354,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1e63801c85358a431f986cffa74ba9599ff571fc5774ac113ed3b490c19a1133"
 dependencies = [
  "bitflags 2.5.0",
- "rustix",
+ "rustix 0.38.34",
  "wayland-backend",
  "wayland-scanner",
 ]
@@ -5877,7 +6091,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5d91ffca73ee7f68ce055750bf9f6eca0780b8c85eff9bc046a3b0da41755e12"
 dependencies = [
  "gethostname",
- "rustix",
+ "rustix 0.38.34",
  "x11rb-protocol",
 ]
 
@@ -5894,8 +6108,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f"
 dependencies = [
  "libc",
- "linux-raw-sys",
- "rustix",
+ "linux-raw-sys 0.4.14",
+ "rustix 0.38.34",
 ]
 
 [[package]]
@@ -5917,8 +6131,8 @@ dependencies = [
  "async-broadcast",
  "async-executor",
  "async-fs",
- "async-io",
- "async-lock",
+ "async-io 2.3.3",
+ "async-lock 3.4.0",
  "async-process",
  "async-recursion",
  "async-task",

+ 2 - 0
src-tauri/Cargo.toml

@@ -32,6 +32,8 @@ tracing-subscriber = { version = "0.3.18", features = ["fmt"] }
 lazy_static = "1.4.0"
 home = "0.5.9"
 trash = "1.3"
+tokio = { version = "1.0", features = ["full"] }
+async-std = "1.10.0"
 
 [dependencies.tauri-plugin-sql]
 git = "https://gitee.com/seamong/plugins-workspace"

BIN
src-tauri/icons/128x128.png


BIN
src-tauri/icons/128x128@2x.png


BIN
src-tauri/icons/32x32.png


BIN
src-tauri/icons/Square107x107Logo.png


BIN
src-tauri/icons/Square142x142Logo.png


BIN
src-tauri/icons/Square150x150Logo.png


BIN
src-tauri/icons/Square284x284Logo.png


BIN
src-tauri/icons/Square30x30Logo.png


BIN
src-tauri/icons/Square310x310Logo.png


BIN
src-tauri/icons/Square44x44Logo.png


BIN
src-tauri/icons/Square71x71Logo.png


BIN
src-tauri/icons/Square89x89Logo.png


BIN
src-tauri/icons/StoreLogo.png


BIN
src-tauri/icons/icon.icns


BIN
src-tauri/icons/icon.ico


BIN
src-tauri/icons/icon.png


+ 84 - 39
src-tauri/src/self_plugin/tauri_plugin_file/files.rs

@@ -1,9 +1,13 @@
 use hex;
 use serde::{Deserialize, Serialize};
-use sha2::{Digest as OtherDigest, Sha256}; use std::ffi::OsStr;
+use sha2::{Digest as OtherDigest, Sha256};
+use std::ffi::OsStr;
 // 确保导入 `Digest`
+use async_std::fs as async_std_fs;
 use std::fs;
 use std::path::{Path, PathBuf};
+use std::thread;
+use std::time;
 use std::time::UNIX_EPOCH;
 use tauri::command;
 extern crate trash;
@@ -33,10 +37,11 @@ pub struct FileInfo {
     pub id: Option<u32>,
     pub progress: Option<f32>,
     pub types: Option<Vec<String>>,
+    pub excluded_file_names: Option<Vec<String>>,
 }
 
 #[command]
-pub fn get_all_directory(file_info: FileInfo) -> Vec<PathBuf> {
+pub fn get_all_directory(file_info: FileInfo) -> Vec<FileInfos> {
     let mut files = Vec::new();
     if let Some(ref path) = file_info.path {
         println!("Processing directory: {}", path);
@@ -46,6 +51,7 @@ pub fn get_all_directory(file_info: FileInfo) -> Vec<PathBuf> {
             &mut files,
             &file_info.checked_size_values,
             &file_info.types,
+            &file_info.excluded_file_names,
         );
         files
     } else {
@@ -70,34 +76,44 @@ pub fn get_file_type_by_path(file_path: String) -> String {
 
 fn read_files_in_directory(
     dir: &Path,
-    files: &mut Vec<PathBuf>,
+    files: &mut Vec<FileInfos>,
     filters: &Option<Vec<FileSizeCategory>>,
     types: &Option<Vec<String>>,
+    excluded_file_names: &Option<Vec<String>>,
 ) {
     if dir.is_dir() {
-        // 尝试读取目录,忽略错误
         if let Ok(entries) = fs::read_dir(dir) {
-            for entry in entries {
-                if let Ok(entry) = entry {
-                    let path = entry.path();
-                    if path.is_dir() {
-                        // 递归调用,忽略错误
-                        read_files_in_directory(&path, files, filters, types);
-                    } else {
-                        // 尝试获取文件元数据,忽略错误
-                        if let Ok(metadata) = fs::metadata(&path) {
-                            let size = metadata.len();
-                            let size_matches = filters.is_none()
-                                || file_size_matches(size, filters.as_ref().unwrap());
-                            let type_matches = types.is_none()
-                                || file_type_matches(&path, types.as_ref().unwrap());
-
-                            if size_matches && type_matches {
-                                files.push(path);
-                            }
+            for entry in entries.flatten() {
+                let path = entry.path();
+                if path.is_dir() {
+                    read_files_in_directory(&path, files, filters, types, excluded_file_names);
+                    continue;
+                }
+
+                if let Some(file_name) = path.file_name().and_then(|name| name.to_str()) {
+                    if let Some(excluded_names) = excluded_file_names {
+                        if excluded_names.contains(&String::from(file_name)) {
+                            continue;
                         }
                     }
                 }
+
+                let metadata = if let Ok(meta) = path.metadata() { meta } else { continue };
+                let size_matches = filters.as_ref().map_or(true, |f| file_size_matches(metadata.len(), f));
+                let type_matches = types.as_ref().map_or(true, |t| file_type_matches(&path, t));
+                if size_matches && type_matches {
+                    if let Some(path_str) = path.to_str() {
+                        // 确保 path_str 是有效的 UTF-8 字符串
+                        let path_info = get_file_info(path_str.to_string());
+                        // 使用 path_info 做其他事情
+                        files.push(path_info);
+                    } else {
+                        // 处理 path 不是有效 UTF-8 的情况
+                        // eprintln!("Path is not valid UTF-8");
+                        continue;
+                    }
+
+                }
             }
         }
     }
@@ -125,9 +141,23 @@ fn file_type_matches(path: &Path, types: &Vec<String>) -> bool {
     false
 }
 
+fn excluded_file_names_matches(path_name: &str, excluded_file_names: &Vec<String>) -> bool {
+    for excluded_name in excluded_file_names {
+        if path_name == excluded_name {
+            return true;
+        }
+    }
+    false
+}
+
 #[command]
-pub fn calculate_file_hash(file_path: String) -> String {
-    let file_bytes = fs::read(file_path).expect("Failed to read file");
+// 定义异步函数来计算文件的 SHA256 哈希
+pub async fn calculate_file_hash(file_path: String) -> String {
+    // 异步读取文件
+    let file_bytes = match async_std_fs::read(file_path).await {
+        Ok(bytes) => bytes,
+        Err(_) => return "Failed to read file".to_string(), // 如果读取失败,返回错误信息
+    };
 
     // 初始化 SHA256 哈希上下文
     let mut hasher = Sha256::new();
@@ -227,7 +257,7 @@ pub fn mv_file_to_trash(file_path: String) -> RequestMvFile {
 #[command]
 pub fn get_app_data_dir() -> String {
     std::env::var("MY_APP_DATA_DIR")
-    .unwrap_or_else(|_| "Environment variable for app data directory not set".to_string())
+        .unwrap_or_else(|_| "Environment variable for app data directory not set".to_string())
 }
 
 /* #[command]
@@ -254,17 +284,19 @@ fn open_finder(path: String) -> RequestMvFile {
 
 #[command]
 pub fn show_file_in_explorer(file_path: String) -> RequestMvFile {
-    println!("256 {}",file_path);
+    println!("256 {}", file_path);
     // 获取文件所在的目录
     #[cfg(target_os = "linux")]
     let path = std::path::Path::new(&file_path);
     #[cfg(target_os = "linux")]
     let parent_dir = match path.parent() {
         Some(dir) => dir.to_str().unwrap_or(""),
-        None => return RequestMvFile {
-            code: Some(500),
-            msg: Some("No parent directory found.".to_string()),
-            data: Some("No parent directory found.".to_string()),
+        None => {
+            return RequestMvFile {
+                code: Some(500),
+                msg: Some("No parent directory found.".to_string()),
+                data: Some("No parent directory found.".to_string()),
+            }
         }
     };
 
@@ -301,7 +333,6 @@ pub fn show_file_in_explorer(file_path: String) -> RequestMvFile {
     }
 }
 
-
 // 批量移动指定的多个文件到一个目标目录
 #[command]
 pub fn move_specific_files(file_paths: Vec<PathBuf>, dest_dir: &str) -> RequestMvFile {
@@ -317,22 +348,36 @@ pub fn move_specific_files(file_paths: Vec<PathBuf>, dest_dir: &str) -> RequestM
 
     // 遍历提供的文件路径列表
     for file_path in file_paths {
-        if file_path.is_file() {  // 确保路径是文件
-            let dest_file_path = destination.join(
-                file_path.file_name().unwrap_or_else(|| OsStr::new(""))
-            );
+        if file_path.is_file() {
+            // 确保路径是文件
+            let dest_file_path =
+                destination.join(file_path.file_name().unwrap_or_else(|| OsStr::new("")));
             if let Err(e) = fs::rename(&file_path, &dest_file_path) {
                 return RequestMvFile {
                     code: Some(500),
-                    msg: Some(format!("Failed to move file '{}': {}", file_path.display(), e)),
-                    data: Some(format!("Failed to move file '{}': {}", file_path.display(), e)),
+                    msg: Some(format!(
+                        "Failed to move file '{}': {}",
+                        file_path.display(),
+                        e
+                    )),
+                    data: Some(format!(
+                        "Failed to move file '{}': {}",
+                        file_path.display(),
+                        e
+                    )),
                 };
             }
         } else {
             return RequestMvFile {
                 code: Some(400),
-                msg: Some(format!("Provided path '{}' is not a file.", file_path.display())),
-                data: Some(format!("Provided path '{}' is not a file.", file_path.display())),
+                msg: Some(format!(
+                    "Provided path '{}' is not a file.",
+                    file_path.display()
+                )),
+                data: Some(format!(
+                    "Provided path '{}' is not a file.",
+                    file_path.display()
+                )),
             };
         }
     }

+ 101 - 49
src/pages/DuplicateFile/CalculateDuplicateFiles.tsx

@@ -1,11 +1,12 @@
 import {
-  get_info_by_id,
+  get_info_by_id, getFirstEmptyHashBySourceId,
   insertSearchFiles,
   updateSelectedFileHistoryFiles,
 } from "@/services";
 import { useEffect, useState } from "react";
-import { useNavigate, useParams } from "react-router-dom";
+import { useNavigate, useParams, useLocation  } from "react-router-dom";
 import {
+  backFileInfoType,
   FileInfoType,
   insertSearchFilesPasamsType,
   stepsStatusType,
@@ -13,7 +14,7 @@ import {
 import { message } from "@tauri-apps/api/dialog";
 import styles from "./CalculateDuplicateFiles.module.less";
 import File from "@/plugins/tauri-plugin-file/file";
-import { Button, Col, Row, Steps } from "antd";
+import { Button, Col, Row, Steps, Space } from "antd";
 import { fileTypeList } from "./config";
 import get_progress_by_sourceId, {
   get_fileInfo_by_path,
@@ -24,9 +25,12 @@ import get_progress_by_sourceId, {
 export default function CalculateDuplicateFiles() {
   let { fileId } = useParams();
   let navigate = useNavigate();
+  const location = useLocation();
+
   const [fileInfo, setFileInfo] = useState<FileInfoType>({});
   const [current, setCurrent] = useState(1);
   const [percent, setPercent] = useState(85);
+  const [duplicateFilesStep, setDuplicateFilesStep] = useState('');
   const [stepsStatus, setStepsStatus] = useState<stepsStatusType>({
     // 'wait' | 'process' | 'finish' | 'error';
     scanDir: "wait",
@@ -34,10 +38,34 @@ export default function CalculateDuplicateFiles() {
     duplicateFiles: "wait",
     done: "wait",
   });
+  const [isCancelled, setIsCancelled] = useState(false); // 离开页面时终止正在执行的逻辑
+  const [hasMounted, setHasMounted] = useState(false);
   useEffect(() => {
-    pageInit();
+    pageInit().then(r => console.log(r));
   }, []);
 
+  useEffect(() => {
+    // 这段代码只会在组件首次挂载时执行一次
+    console.log("组件已挂载");
+
+    console.log(location); // 当前路由路径
+    console.log(location.pathname); // 当前路由路径
+
+    setTimeout(() => {
+      // 设置一个状态标志,表示组件已经挂载
+      setHasMounted(true);
+    }, 300)
+    // 如果你需要在组件卸载时进行清理,可以在这里返回一个函数
+    // 当组件加载时,不做特殊操作
+    // 只在组件卸载时设置isCancelled为true
+    return () => {
+      if(hasMounted) {
+        console.log(47, ' 当组件卸载时,设置isCancelled为true');
+        setIsCancelled(true);
+      }
+    };
+  }, [hasMounted]);
+
   const waittime = (time = 100) => {
     return new Promise((resolve) => {
       setTimeout(() => {
@@ -68,18 +96,24 @@ export default function CalculateDuplicateFiles() {
   async function scanDirAll() {
     if (fileInfo.path) {
       // 扫描目录文件
-
       console.log("扫描目录文件 结束");
       const files = await scanAllFilesInDir();
 
       // 计算文件属性
       console.log("计算文件属性 开始");
-      await computeFileMetadata(files);
+      await computeFileMetadata_v2(files);
       console.log("计算文件属性 结束");
 
       // 计算文件具体内容
       console.log("计算每一个文件的hash 开始");
-      await computeFileChecksums();
+      try {
+        await computeFileChecksums_2();
+      } catch (error) {
+        console.log(107, error);
+        if(error == '提前终止') {
+          return
+        }
+      }
       console.log("计算每一个文件的hash 结束");
 
       setStepsStatus({
@@ -107,7 +141,7 @@ export default function CalculateDuplicateFiles() {
   }
 
   // 扫描目录文件
-  async function scanAllFilesInDir(): Promise<string[]> {
+  async function scanAllFilesInDir(): Promise<backFileInfoType[]> {
     const [progressRes] = await get_progress_by_sourceId(`${fileId}`);
     if (progressRes.total_entries !== fileInfo.files || !fileInfo.files) {
       console.log("扫描目录文件 开始");
@@ -132,9 +166,12 @@ export default function CalculateDuplicateFiles() {
     return Promise.resolve([]);
   }
 
-  // 计算文件属性
-  async function computeFileMetadata(files: string[]) {
-    if(!files.length) {
+  /*
+  * 处理获取到的文件属性
+  * */
+  async function computeFileMetadata_v2(files: backFileInfoType[]) {
+    const [progressRes] = await get_progress_by_sourceId(`${fileId}`);
+    if(!files.length || !progressRes.total_entries) {
       setStepsStatus({
         ...stepsStatus,
         scanDir: "finish",
@@ -143,7 +180,6 @@ export default function CalculateDuplicateFiles() {
       setPercent(100);
       return Promise.resolve(0)
     }
-    /* 如果文件数目为0 ,查询数据库进行 */
     // 更新当前查询目录的总文件数目
     await updateSelectedFileHistoryFiles(`${fileInfo.path}`, files.length);
     setStepsStatus({
@@ -155,46 +191,40 @@ export default function CalculateDuplicateFiles() {
     let fileIndex = -1;
     let allFilesLength = files.length;
     await files.reduce(
-      async (prevPromise: any, currentFile: any) => {
-        // 等待上一个 Promise 完成
-        await prevPromise;
-        // ishaveFile: true 表示文件数据已经存在; false 表示文件数据不存在xuy;
-        const [ishaveFile, fileinfo] = await get_fileInfo_by_path(
-          currentFile,
-          `${fileId}`
-        );
-        if (!ishaveFile) {
-          // 获取文件类型和哈希
-          const fileInfo = await File.getInfo(currentFile);
+        async (prevPromise: any, currentFile: any) => {
+          // 等待上一个 Promise 完成
+          await prevPromise;
           fileIndex++;
+          const file_info = files[fileIndex]
           setPercent(Math.floor((fileIndex / allFilesLength) * 100));
           return insertSearchFiles({
             // 组装数据
             sourceId: `${fileId}`,
-            path: currentFile,
-            name: fileInfo.file_name,
-            creation_time: fileInfo.creation_time,
-            modified_time: fileInfo.modified_time,
-            file_size: fileInfo.file_size,
-            type: fileInfo.file_type,
-            // 由于 计算单个文件的hash 时间较长,所以单独起一个事件,专门做这个事情
+            path: `${file_info.file_path}`,
+            name: file_info.file_name,
+            creation_time: file_info.creation_time,
+            modified_time: file_info.modified_time,
+            file_size: file_info.file_size,
+            type: file_info.file_type,
             hash: "",
           });
-        }
-        return Promise.resolve(0);
-      },
-      Promise.resolve(0)
+        },
+        Promise.resolve(0)
     );
     setPercent(100);
     return waittime(300);
   }
 
   // 计算每一个文件的hash
-  async function computeFileChecksums() {
-    const [allList, allListMsg] = await get_list_by_sourceid(`${fileId}`);
-    if (allList && Array.isArray(allList)) {
+  async function computeFileChecksums_2() {
+    const [progressRes] = await get_progress_by_sourceId(`${fileId}`);
+    // console.log(178, progressRes)
+
+    // 已经存在的数据中,计算过的 hash 总量跟 文件总数不是一样的,并且存在有记录的文件
+    if (progressRes.hash_null_count && progressRes.total_entries) {
       let fileIndex = -1;
-      let allFilesLength = allList.length;
+      let allFilesLength = progressRes.hash_null_count;
+      const allList = [...Array(allFilesLength).keys()];
       setStepsStatus({
         ...stepsStatus,
         scanDir: "finish",
@@ -203,20 +233,33 @@ export default function CalculateDuplicateFiles() {
       });
       setPercent(0);
       await allList
-        .filter((currentFile: insertSearchFilesPasamsType) => !currentFile.hash)
         .reduce(
           async (
             prevPromise: any,
-            currentFile: insertSearchFilesPasamsType
+            index: number
           ) => {
             // 等待上一个 Promise 完成
             await prevPromise;
-            // 获取文件类型和哈希
-            const hash = await File.getHash(currentFile.path);
+            if (isCancelled || window.location.href.indexOf(location.pathname) < 0) {
+              // @ts-ignore
+              throw '提前终止'
+              return Promise.resolve(0);
+            } // 如果设置了取消标志,则提前终止
+            const [fileinfo, error] = await getFirstEmptyHashBySourceId(`${fileId}`);
+            if(fileinfo) {
+              // 获取文件类型和哈希
+              const hash = await File.getHash(fileinfo.path);
+              await updateFileHsah(fileinfo.path, hash, `${fileId}`);
+            }
+            // console.clear();  // 清除控制台
+            // console.log(223, window.location.href, location.pathname, fileinfo);
+            // console.log(223, window.location.href.indexOf(location.pathname), location.pathname);
             fileIndex++;
-            await waittime();
-            setPercent(Math.floor((fileIndex / allFilesLength) * 100));
-            return updateFileHsah(currentFile.path, hash, `${fileId}`);
+            // await waittime();
+            const [newProgressRes] = await get_progress_by_sourceId(`${fileId}`);
+            setPercent(Math.floor((fileIndex / newProgressRes.hash_null_count) * 100));
+            setDuplicateFilesStep(`: ${fileIndex} / ${newProgressRes.hash_null_count}`);
+            return Promise.resolve(0)
           },
           Promise.resolve(0)
         );
@@ -252,6 +295,10 @@ export default function CalculateDuplicateFiles() {
     return types;
   }
 
+
+  function toNextPage() {
+    navigate("/calculate-list/" + fileId);
+  }
   return (
     <div className={styles.CalculateDuplicateFiles}>
       <Row justify="start" align="middle">
@@ -261,9 +308,14 @@ export default function CalculateDuplicateFiles() {
           </div>
         </Col>
         <Col>
-          <Button type="primary" onClick={() => scanDirAll()}>
-            开始
-          </Button>
+          <Space>
+            <Button type="primary" onClick={() => scanDirAll()}>
+              开始
+            </Button>
+            <Button type="primary" onClick={() => toNextPage()}>
+              预览重复数据
+            </Button>
+          </Space>
         </Col>
       </Row>
 
@@ -283,7 +335,7 @@ export default function CalculateDuplicateFiles() {
               status: stepsStatus.fileOptions,
             },
             {
-              title: "分析重复文件",
+              title: "分析重复文件" + duplicateFilesStep,
               status: stepsStatus.duplicateFiles,
             },
             {

+ 42 - 7
src/pages/DuplicateFile/CalculateListPage.tsx

@@ -35,6 +35,7 @@ export default function CalculateListPage() {
   let { fileId } = useParams();
   const [data, setData] = useState<FileItem[]>([]);
   const [loading, setLoading] = useState<boolean>(false);
+  const [tip, setTip] = useState<string>('');
   const [removeList, setRemoveList] = useState<string[]>([]);
   interface FileItem {
     sourceId: number;
@@ -45,9 +46,12 @@ export default function CalculateListPage() {
     otherItems: insertSearchFilesPasamsType[];
   }
   const appendData = async () => {
+    setLoading(true)
+    setTip('正在统计中');
     const [isError, searchDuplicateFileRes] = await searchDuplicateFile({
       sourceId: `${fileId}`,
     });
+    console.log(5151, isError)
     if (!isError) {
       typeof searchDuplicateFileRes === "string" &&
         (await tauriMessage(searchDuplicateFileRes, {
@@ -55,13 +59,31 @@ export default function CalculateListPage() {
           type: "error",
         }));
     }
-
+    /*count
+        :
+        2
+    hash
+        :
+        "fdd8051fcf884d8cc9a095cd77a58694e13b066aea68dc1fc353767ab0ebfe01"
+    ids
+        :
+        "25494,26393"
+    sourceId
+        :
+        6*/
+    setTip('');
+    setLoading(false);
+    setData(searchDuplicateFileRes as any);
+    console.log(63, searchDuplicateFileRes);
+    return
     if (Array.isArray(searchDuplicateFileRes)) {
+      let index = -1
       const newData: any[] = [];
       await searchDuplicateFileRes.reduce(
         async (prevPromise: any, currentFile: any) => {
           // 等待上一个 Promise 完成
           await prevPromise;
+          index++
           const ids = currentFile.ids.split(",");
           const firstItem = await get_fileInfo_by_id(ids[0], `${fileId}`);
           const otherItems = await Promise.allSettled(
@@ -88,12 +110,15 @@ export default function CalculateListPage() {
               })
               .filter((elm: any) => elm),
           });
+          setTip(`正在统计中: ${Math.floor((index / searchDuplicateFileRes.length) * 100)}% : ${searchDuplicateFileRes.length - index}`);
           return Promise.resolve(0);
         },
         Promise.resolve(0)
       );
       setData(newData);
     }
+    setLoading(false)
+    setTip('')
   };
 
   useEffect(() => {
@@ -226,10 +251,11 @@ export default function CalculateListPage() {
   }
   return (
     <div className={styles.CalculateListPage}>
-      <Spin spinning={loading}>
+      <Spin spinning={loading} tip={tip}>
         <div
           style={{
             padding: "24px",
+            minHeight: '50vh'
           }}
         >
           <Space>
@@ -248,7 +274,7 @@ export default function CalculateListPage() {
             value={removeList}
           >
             <div style={{ width: "100%" }}>
-              {data.map((item: FileItem) => (
+              {data.map((item: any) => (
                 <div
                   key={item.hash}
                   style={{
@@ -257,8 +283,9 @@ export default function CalculateListPage() {
                   }}
                 >
                   <div className={styles.CheckboxGroup}>
-                    <Checkbox value={item.firstItem.path}>
-                      {CheckboxContent(item.firstItem)}
+                    <Checkbox value={item.path}>
+                      {/*{CheckboxContent(item as any)}*/}
+                      {item.path}
                     </Checkbox>
                   </div>
                   <div
@@ -268,19 +295,27 @@ export default function CalculateListPage() {
                     }}
                     className={styles.CheckboxGroup}
                   >
-                    {item.otherItems.map((otherItem) => (
+                    {/*{item.otherItems.map((otherItem) => (
                       <div key={otherItem.path}>
                         <Checkbox value={otherItem.path}>
                           {CheckboxContent(otherItem)}
                         </Checkbox>
                       </div>
+                    ))}*/}
+                    {item.ids.split(',').map((id_name: string) => (
+                      <div key={id_name}>
+                        <Checkbox value={id_name}>
+                          {/*{CheckboxContent(id_name as any)}*/}
+                          {id_name}
+                        </Checkbox>
+                      </div>
                     ))}
                   </div>
                 </div>
               ))}
             </div>
           </Checkbox.Group>
-          {!data.length && (
+          {!data.length && !loading && (
             <div
               style={{
                 padding: "48px 0",

+ 6 - 3
src/plugins/tauri-plugin-file/file.ts

@@ -1,7 +1,10 @@
 import { invoke } from "@tauri-apps/api/tauri";
 
 import Database from "tauri-plugin-sql-api";
-import {FileInfoType} from "@/types/files";
+import {
+  backFileInfoType,
+  fileInfoParamsType
+} from "@/types/files";
 
 export class File {
   path: string;
@@ -11,8 +14,8 @@ export class File {
   }
 
   // static async getAllList(fileInfo: FileInfoType): Promise<string[]> {
-  static async getAllList(fileInfo: any): Promise<string[]> {
-    return await invoke<string[]>("plugin:st-files|get_all_directory", {
+  static async getAllList(fileInfo: fileInfoParamsType): Promise<backFileInfoType[]> {
+    return await invoke<backFileInfoType[]>("plugin:st-files|get_all_directory", {
       fileInfo,
     });
   }

+ 37 - 9
src/services/file-service.ts

@@ -269,21 +269,14 @@ export async function get_list_by_sourceid(
   sourceId: string
 ): Promise<[insertSearchFilesPasamsType[] | false, string]> {
   try {
-    // await table_init(FILE_DB_PATH, "select_history");
-    // const DB = await SQLite.open(FILE_DB_PATH);
     const DB = await Database.load(`sqlite:files_${sourceId}.db`);
     // 创建表
     await DB.execute(createSql.search_files);
     const res = await DB.select(
-      "SELECT * FROM search_files WHERE sourceId = $1",
+      "SELECT * FROM search_files WHERE sourceId = $1 AND (hash = '' OR hash IS NULL)",
       [sourceId]
     );
     console.log(969696, sourceId);
-
-    /* const res = await DB.queryWithArgs<Array<insertSearchFilesPasamsType>>(
-          "SELECT * FROM search_files WHERE sourceId = :sourceId GROUP BY hash HAVING COUNT(*) > 1",
-          { ":sourceId": sourceid }
-        ); */
     console.log(3434, res);
 
     if (Array.isArray(res)) {
@@ -347,13 +340,20 @@ export async function searchDuplicateFile({
     const DB = await Database.load(`sqlite:files_${sourceId}.db`);
     // 创建表
     await DB.execute(createSql.search_files);
-    /* 
+    /*
     select * from search_files where sourceId = $1 in (select sourceId from search_files group by hash having count(hash) > 1)
  */
     // const res = await DB.select("SELECT * from search_files WHERE sourceId = $1", [sourceId]);
     const res: DuplicateFileInfo[] = await DB.select(
       `SELECT hash,
        sourceId,
+       id,
+       creation_time,
+       modified_time,
+       file_size,
+       type,
+       name,
+       path,
        GROUP_CONCAT(id)    AS ids,
        COUNT(*)           AS count
 FROM search_files
@@ -489,3 +489,31 @@ export async function del_file_by_id(path: string, sourceId: string) {
     return Promise.resolve(error);
   }
 }
+
+
+/**
+ * Fetch the first row in `search_files` whose hash has not been computed yet
+ * (hash = '' or NULL) for the given source, so the caller can hash it next.
+ *
+ * @param sourceId - id of the scan source; selects the per-source sqlite db.
+ * @returns [row, ""] when a pending row exists, [false, message] otherwise
+ *          (message is the error, or "暂无数据" when nothing is left to hash).
+ */
+export async function getFirstEmptyHashBySourceId(sourceId: string) {
+  try {
+    const DB = await Database.load(`sqlite:files_${sourceId}.db`);
+    // Ensure the table exists before querying (same pattern as the other helpers).
+    await DB.execute(createSql.search_files);
+    // Bug fix: the query bound [sourceId] but had no $1 placeholder, so the
+    // parameter was dangling and rows were not filtered by source. Filter form
+    // mirrors get_list_by_sourceid.
+    const res = await DB.select(
+        `SELECT * FROM search_files
+WHERE sourceId = $1 AND (hash = '' OR hash IS NULL)
+LIMIT 1;`,
+        [
+          sourceId
+        ]
+    );
+    if (Array.isArray(res) && res.length) {
+      return [res[0], ""];
+    }
+    return [false, "暂无数据"];
+  } catch (error) {
+    if (error && `${error}`.indexOf("UNIQUE constraint failed") > -1) {
+      // Bug fix: previously returned a bare string here, which broke callers
+      // that destructure the result as [fileinfo, error] (first char leaked).
+      return [false, "当前数据格式异常"];
+    }
+    return [false, error];
+  }
+}

+ 19 - 1
src/types/files.d.ts

@@ -26,6 +26,7 @@ export type insertSearchFilesPasamsType = {
   id?: number;
   sourceId?: number | string | any;
   path: string;
+  file_path?: string;
   time?: string;
   // progress: number;
   type: string,
@@ -51,4 +52,21 @@ export type stepsStatusType = {
   fileOptions: StepProps.status;
   duplicateFiles: StepProps.status;
   done: StepProps.status;
-}
+}
+
+// File metadata record returned by the "plugin:st-files|get_all_directory"
+// Tauri command (see File.getAllList in src/plugins/tauri-plugin-file/file.ts).
+export type backFileInfoType = {
+    file_path: string,
+    file_name: string,
+    file_type: string,
+    file_size: string, // NOTE(review): size arrives as a string — confirm units/format with the Rust side
+    modified_time: string, // in timestamp form
+    creation_time: string, // in timestamp form
+}
+
+
+// Filter parameters passed to the "plugin:st-files|get_all_directory"
+// Tauri command (see File.getAllList). All fields optional.
+export type fileInfoParamsType = {
+  path?: string, // root directory to scan
+  checked_size_values?: string[], // presumably selected size-filter keys — verify against caller
+  types?: any[], // file-type filter; element shape not visible here
+  excluded_file_names?: number // NOTE(review): a number for "excluded file names" looks wrong — confirm intended type
+}