Jelajahi Sumber

sql 重复数据分组返回

john 1 tahun lalu
induk
melakukan
2e692ddb78

+ 18 - 0
mermaid.html

@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <title>Sequence Diagram</title>
+    <script src="https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js"></script>
+    <script>mermaid.initialize({startOnLoad: true});</script>
+</head>
+<body>
+    <div class="mermaid">
+        sequenceDiagram
+        登录用户->>杭律云后台: 用户端登录App后,通过接口发送获取到的CID
+        登录用户->>杭律云后台: 完成相关事件, 触发后台推送服务
+        杭律云后台->>uniapp提供的推送服务器: 调用推送接口,携带用户ID、CID、事件类型、编号和具体推送内容描述
+        uniapp提供的推送服务器-->>登录用户: 在用户设备展示推送内容
+    </div>
+</body>
+</html>

+ 5 - 2
src-tauri/src/self_plugin/tauri_plugin_file/files.rs

@@ -7,6 +7,7 @@ use std::path::{Path, PathBuf};
 use std::result::Result as new_Result;
 use std::{fs, option};
 use tauri::command;
+use std::time::{SystemTime, UNIX_EPOCH};
 // use std::result::Result;
 // use tauri::api::file::IntoInvokeHandler;
 
@@ -165,8 +166,10 @@ fn read_files_in_directory(
             } else {
                 if let Ok(metadata) = fs::metadata(&path) {
                     let size = metadata.len();
-                    let size_matches = filters.is_none() || file_size_matches(size, filters.as_ref().unwrap());
-                    let type_matches = types.is_none() || file_type_matches(&path, types.as_ref().unwrap());
+                    let size_matches =
+                        filters.is_none() || file_size_matches(size, filters.as_ref().unwrap());
+                    let type_matches =
+                        types.is_none() || file_type_matches(&path, types.as_ref().unwrap());
 
                     if size_matches && type_matches {
                         files.push(path);

+ 6 - 2
src-tauri/src/self_plugin/tauri_plugin_file/mod.rs

@@ -1,7 +1,8 @@
 pub(crate) mod files;
 
 use tauri::{
-    plugin::{Builder, TauriPlugin}, Runtime,
+    plugin::{Builder, TauriPlugin},
+    Runtime,
 };
 
 use self::files::*;
@@ -10,7 +11,10 @@ use self::files::*;
 pub fn init<R: Runtime>() -> TauriPlugin<R> {
     Builder::new("st-files")
         .invoke_handler(tauri::generate_handler![
-            get_all_directory,get_file_type_by_path,calculate_file_hash
+            get_all_directory,
+            get_file_type_by_path,
+            calculate_file_hash,
+            get_file_info_by_path,
         ])
         .setup(|_app| {
             // app.manage(SqliteMap::default());

+ 2 - 1
src/databases/createTableSql.ts

@@ -20,6 +20,7 @@ export const createSql = {
         name TEXT,
         path TEXT,
         hash TEXT,
-        db_version TEXT
+        db_version TEXT,
+        UNIQUE (path)
     );`
 }

+ 49 - 39
src/pages/DuplicateFile/CalculateDuplicateFiles.tsx

@@ -22,6 +22,7 @@ import {
 } from "@ant-design/icons";
 import { readDir, BaseDirectory } from "@tauri-apps/api/fs";
 import { fileTypeList } from "./config";
+import get_progress_by_sourceId from "@/services/file-service";
 
 export default function CalculateDuplicateFiles() {
   let { fileId } = useParams();
@@ -68,45 +69,14 @@ export default function CalculateDuplicateFiles() {
     }
   }
   async function scanDirAll() {
-    const searchDuplicateFileRes =  await searchDuplicateFile({
-        sourceId: fileId || ''
-    })
-    /* 
-        [
-            {count: 6, hash: "3ba7bbfc03e3bed23bf066e2e9a6a5389dd33fd8637bc0220d9e6d642ccf5946", ids: "17,21,22,26,27,31", },
-            {count: 6, hash: "75b7c31709e1529be7bec1c8a40ec98edbda146a09904a5ffad8685da966f90b", ids: "19,23,24,25,29,30", },
-            {count: 3, hash: "7707b032ff2fea855a1bc22b7be536de13d3ad6d418cc7021893a97cf488e1a3", ids: "20,28,32", }
-        ]
-
-
-
-        [
-            {
-                count: 6, 
-                hash: "3ba7bbfc03e3bed23bf066e2e9a6a5389dd33fd8637bc0220d9e6d642ccf5946", 
-                paths: "/Users/sysadmin/Pictures/test/欧洲4_副本.jpeg,/Users/s…4.jpeg,/Users/sysadmin/Pictures/test/欧洲4_副本5.jpeg", 
-                ids: "17,21,22,26,27,31", 
-                times: "1718613803964,1718613804035,1718613804041,1718613804070,1718613804080,1718613804112"
-            },
-            {
-                hash: "75b7c31709e1529be7bec1c8a40ec98edbda146a09904a5ffad8685da966f90b", 
-                times: "1718613804012,1718613804051,1718613804057,1718613804063,1718613804094,1718613804104", 
-                paths: "/Users/sysadmin/Pictures/test/欧洲2.jpeg,/Users/sysa…3.jpeg,/Users/sysadmin/Pictures/test/欧洲2_副本2.jpeg", 
-                ids: "19,23,24,25,29,30", 
-                count: 6
-            }
-            {
-                times: "1718613804018,1718613804086,1718613804118", 
-                ids: "20,28,32", 
-                paths: "/Users/sysadmin/Pictures/test/欧洲1_副本2.jpeg,/Users/…洲1.jpeg,/Users/sysadmin/Pictures/test/欧洲1_副本.jpeg", 
-                count: 3, 
-                hash: "7707b032ff2fea855a1bc22b7be536de13d3ad6d418cc7021893a97cf488e1a3"
-            }
-        ] 
-
-    */
-    console.log(747474, searchDuplicateFileRes);
+    const resInfo = await File.getInfo("/Users/sysadmin/Downloads/google-cloud-cli-455.0.0-darwin-arm.tar.gz")
+    console.log(7373, resInfo);
     return
+    // const aabb = await get_progress_by_sourceId(`${fileId}`);
+    // console.log(737373, aabb);
+    
+    // return
+    
     // navigate('/calculate-list/' + fileId)
     if (fileInfo.path) {
       // 扫描目录文件
@@ -135,7 +105,8 @@ export default function CalculateDuplicateFiles() {
             console.log(95, currentFile);
             // 获取文件类型和哈希
             const type = await File.getType(currentFile);
-            const hash = await File.getHash(currentFile);
+            // const hash = await File.getHash(currentFile);
+            const hash = '';
             return insertSearchFiles({
               // 组装数据
               sourceId: `${fileId}`,
@@ -153,6 +124,45 @@ export default function CalculateDuplicateFiles() {
 
 
         // 分析重复文件
+        const searchDuplicateFileRes =  await searchDuplicateFile({
+            sourceId: fileId || ''
+        })
+        /* 
+            [
+                {count: 6, hash: "3ba7bbfc03e3bed23bf066e2e9a6a5389dd33fd8637bc0220d9e6d642ccf5946", ids: "17,21,22,26,27,31", },
+                {count: 6, hash: "75b7c31709e1529be7bec1c8a40ec98edbda146a09904a5ffad8685da966f90b", ids: "19,23,24,25,29,30", },
+                {count: 3, hash: "7707b032ff2fea855a1bc22b7be536de13d3ad6d418cc7021893a97cf488e1a3", ids: "20,28,32", }
+            ]
+    
+    
+    
+            [
+                {
+                    count: 6, 
+                    hash: "3ba7bbfc03e3bed23bf066e2e9a6a5389dd33fd8637bc0220d9e6d642ccf5946", 
+                    paths: "/Users/sysadmin/Pictures/test/欧洲4_副本.jpeg,/Users/s…4.jpeg,/Users/sysadmin/Pictures/test/欧洲4_副本5.jpeg", 
+                    ids: "17,21,22,26,27,31", 
+                    times: "1718613803964,1718613804035,1718613804041,1718613804070,1718613804080,1718613804112"
+                },
+                {
+                    hash: "75b7c31709e1529be7bec1c8a40ec98edbda146a09904a5ffad8685da966f90b", 
+                    times: "1718613804012,1718613804051,1718613804057,1718613804063,1718613804094,1718613804104", 
+                    paths: "/Users/sysadmin/Pictures/test/欧洲2.jpeg,/Users/sysa…3.jpeg,/Users/sysadmin/Pictures/test/欧洲2_副本2.jpeg", 
+                    ids: "19,23,24,25,29,30", 
+                    count: 6
+                }
+                {
+                    times: "1718613804018,1718613804086,1718613804118", 
+                    ids: "20,28,32", 
+                    paths: "/Users/sysadmin/Pictures/test/欧洲1_副本2.jpeg,/Users/…洲1.jpeg,/Users/sysadmin/Pictures/test/欧洲1_副本.jpeg", 
+                    count: 3, 
+                    hash: "7707b032ff2fea855a1bc22b7be536de13d3ad6d418cc7021893a97cf488e1a3"
+                }
+            ] 
+    
+        */
+        console.log(747474, searchDuplicateFileRes);
+        if(searchDuplicateFileRes[0]) {}
       }
     }
   }

+ 6 - 0
src/plugins/tauri-plugin-file/file.ts

@@ -27,6 +27,12 @@ export class File {
       filePath: path,
     });
   }
+  
+  static async getInfo(path: string): Promise<any> {
+    return await invoke<string>("plugin:st-files|get_file_info_by_path", {
+      filePath: path,
+    });
+  }
 
   // async close(): Promise<boolean> {
   //     return await invoke('plugin:st-sqlite|close', { path: this.path })

+ 53 - 23
src/services/file-service.ts

@@ -270,7 +270,27 @@ export async function delSelectedFileHistory(path?: string) {
   }
 }
 
-export async function searchDuplicateFile({ sourceId }: { sourceId: string }) {
+/* 
+count: 6, 
+                    hash: "3ba7bbfc03e3bed23bf066e2e9a6a5389dd33fd8637bc0220d9e6d642ccf5946", 
+                    paths: "/Users/sysadmin/Pictures/test/欧洲4_副本.jpeg,/Users/s…4.jpeg,/Users/sysadmin/Pictures/test/欧洲4_副本5.jpeg", 
+                    ids: "17,21,22,26,27,31", 
+                    times: "1718613803964,1718613804035,1718613804041,1718613804070,1718613804080,1718613804112"
+*/
+type DuplicateFileInfo = {
+  count: number;
+  hash: string;
+  paths: string[];
+  ids: string[];
+};
+
+type SearchResult = [boolean, DuplicateFileInfo[] | string | unknown];
+
+export async function searchDuplicateFile({
+  sourceId,
+}: {
+  sourceId: string;
+}): Promise<SearchResult> {
   try {
     const DB = await Database.load(`sqlite:files_${sourceId}.db`);
     // 创建表
@@ -279,33 +299,24 @@ export async function searchDuplicateFile({ sourceId }: { sourceId: string }) {
     select * from search_files where sourceId = $1 in (select sourceId from search_files group by hash having count(hash) > 1)
  */
     // const res = await DB.select("SELECT * from search_files WHERE sourceId = $1", [sourceId]);
-    const res = await DB.select(
-      //       `SELECT sf.*
-      // FROM search_files sf
-      // JOIN (
-      //     SELECT hash
-      //     FROM search_files
-      //     WHERE sourceId = $1
-      //     GROUP BY hash
-      //     HAVING COUNT(*) > 1
-      // ) dup ON sf.hash = dup.hash
-      // WHERE sf.sourceId = $1;
-      // `,
+    const res: DuplicateFileInfo[] = await DB.select(
       `SELECT hash,
-        sourceId,
-        GROUP_CONCAT(id) AS ids, 
-        GROUP_CONCAT(path) AS paths,
-        GROUP_CONCAT(time) AS times,
-        COUNT(*) AS count
-    FROM search_files
-    WHERE sourceId = $1
-    GROUP BY hash
-    HAVING COUNT(*) > 1;
+       sourceId,
+       GROUP_CONCAT(id)    AS ids,
+       GROUP_CONCAT(path)  AS paths,
+       GROUP_CONCAT(time)  AS times,
+       COUNT(*)           AS count
+FROM search_files
+WHERE sourceId = $1
+  AND hash IS NOT NULL  
+  AND hash != "''"
+  AND hash != ""
+GROUP BY hash, sourceId
+HAVING COUNT(*) > 1;
 `,
       [sourceId]
     );
     console.log(285, res);
-
     return Promise.resolve([true, res]);
   } catch (err) {
     console.log(145, err);
@@ -315,3 +326,22 @@ export async function searchDuplicateFile({ sourceId }: { sourceId: string }) {
     return Promise.resolve([false, err]);
   }
 }
+
+export default async function get_progress_by_sourceId(
+  sourceId: string
+): Promise<any> {
+  const DB = await Database.load(`sqlite:files_${sourceId}.db`);
+  // 创建表
+  await DB.execute(createSql.search_files);
+
+  const res: DuplicateFileInfo[] = await DB.select(
+    `SELECT 
+    COUNT(*) AS total_entries,
+    COUNT(CASE WHEN sourceId = $1 THEN 1 ELSE NULL END) AS sourceId_1_count,
+    COUNT(CASE WHEN hash IS NULL OR hash = '' THEN 1 ELSE NULL END) AS hash_null_count
+FROM search_files;`,
+    [sourceId]
+  );
+
+  return res;
+}