summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author魏曹先生 <1992414357@qq.com>2025-12-15 10:53:25 +0800
committer魏曹先生 <1992414357@qq.com>2025-12-15 10:53:25 +0800
commit240da2de7a9951c67bec0be9a3c4708f545ae210 (patch)
tree6229e064287e5b5f757b2ef40065b8147c982d68
parentb9976ec9f849f1830ef34823fe609d6fe4058f7f (diff)
Prevent lost file classification for erased directories
The lost files calculation now filters out paths that are already marked as erased, avoiding duplicate classification.
-rw-r--r--crates/vcs_data/src/data/local/workspace_analyzer.rs25
1 file changed, 13 insertions, 12 deletions
diff --git a/crates/vcs_data/src/data/local/workspace_analyzer.rs b/crates/vcs_data/src/data/local/workspace_analyzer.rs
index 599e4a3..a17063c 100644
--- a/crates/vcs_data/src/data/local/workspace_analyzer.rs
+++ b/crates/vcs_data/src/data/local/workspace_analyzer.rs
@@ -145,18 +145,6 @@ impl<'a> AnalyzeResult<'a> {
};
let file_relative_paths_ref: HashSet<&PathBuf> = file_relative_paths.iter().collect();
- // Files that exist in the local sheet but not in reality are considered lost
- let mut lost_files: HashSet<&PathBuf> = local_sheet_paths
- .difference(&file_relative_paths_ref)
- .cloned()
- .collect();
-
- // Files that exist in reality but not in the local sheet are recorded as newly created
- let mut new_files: HashSet<&PathBuf> = file_relative_paths_ref
- .difference(&local_sheet_paths)
- .cloned()
- .collect();
-
// Files that exist locally but not in remote
let mut erased_files: HashSet<PathBuf> = HashSet::new();
@@ -174,6 +162,19 @@ impl<'a> AnalyzeResult<'a> {
}
}
+ // Files that exist in the local sheet but not in reality are considered lost
+ let mut lost_files: HashSet<&PathBuf> = local_sheet_paths
+ .difference(&file_relative_paths_ref)
+ .filter(|&&path| !erased_files.contains(path))
+ .cloned()
+ .collect();
+
+ // Files that exist in reality but not in the local sheet are recorded as newly created
+ let mut new_files: HashSet<&PathBuf> = file_relative_paths_ref
+ .difference(&local_sheet_paths)
+ .cloned()
+ .collect();
+
// Calculate hashes for new files
let new_files_for_hash: Vec<PathBuf> = new_files
.iter()