# Find duplicate files in a given directory.
# Move duplicates into the [dupfiles] subdirectory and generate a report.
# Run: powershell -ExecutionPolicy Bypass -File mv_dupf.ps1


# Working directory to scan. Edit $currentDir to target another folder;
# set it to an empty string ("") to fall back to the shell's current location.
$currentDir = "E:\backup"
if (-not $currentDir) {
    # Get-Location returns a PathInfo object; take .Path for a plain string
    # so later string operations (.Replace / .TrimStart) behave as expected.
    $currentDir = (Get-Location).Path
}

# Create the "dupfiles" subdirectory that duplicates will be moved into.
$dupDir = Join-Path -Path $currentDir -ChildPath "dupfiles"
if (-not (Test-Path -Path $dupDir)) {
    New-Item -ItemType Directory -Path $dupDir | Out-Null
    Write-Host "Directory created: $dupDir"
}

# Scan the target directory (non-recursive by design: only top-level files)
# and group files by size. Only same-size files can be duplicates, so this
# cheap pre-filter limits expensive hashing to real candidates.
Write-Host "Scanning files..."
# Anchor the exclusion with a path separator so a sibling directory such as
# "dupfiles2" is not accidentally excluded by a bare "$dupDir*" prefix match.
$allFiles = Get-ChildItem -Path $currentDir -File |
    Where-Object { $_.FullName -notlike (Join-Path -Path $dupDir -ChildPath '*') }
$sizeGroups = $allFiles | Group-Object -Property Length | Where-Object { $_.Count -gt 1 }

Write-Host "Found $($sizeGroups.Count) groups of same-size files"

# Collected duplicate records; exported as the CSV report at the end.
# Each record carries the hash, the original path, whether the file was
# kept in place, and (for moved files) its new location under $dupDir.
# NOTE(fix): previously this array was never populated, so the report
# branch below could never run.
$duplicateFiles = @()

# Hash only the size-collision groups: files with a unique size cannot
# have duplicates, so MD5 work is limited to candidates.
foreach ($sizeGroup in $sizeGroups) {
    $files = $sizeGroup.Group
    $hashGroups = @{}

    foreach ($file in $files) {
        $md5 = $null
        $fileStream = $null
        try {
            $md5 = [System.Security.Cryptography.MD5]::Create()
            $fileStream = [System.IO.File]::OpenRead($file.FullName)
            $hash = [System.BitConverter]::ToString($md5.ComputeHash($fileStream)).Replace("-", "")

            if ($hashGroups.ContainsKey($hash)) {
                $hashGroups[$hash] += $file
            } else {
                $hashGroups[$hash] = @($file)
            }
        } catch {
            Write-Warning "Failed to compute hash for $($file.FullName): $_"
        } finally {
            # Release the stream and crypto handles even when hashing fails;
            # the original leaked both on exception.
            if ($fileStream) { $fileStream.Close() }
            if ($md5) { $md5.Dispose() }
        }
    }

    # Within each hash group, keep the first file and move the rest.
    foreach ($hash in $hashGroups.Keys) {
        $duplicates = $hashGroups[$hash]
        if ($duplicates.Count -gt 1) {
            Write-Host "Found duplicate group (Hash: $hash), File count: $($duplicates.Count)"

            # Record the file that stays in place.
            $keepFile = $duplicates[0]
            $duplicateFiles += [PSCustomObject]@{
                Hash         = $hash
                OriginalPath = $keepFile.FullName
                NewPath      = $keepFile.FullName
                IsKept       = $true
            }

            # Move every other copy into $dupDir, preserving relative layout.
            for ($i = 1; $i -lt $duplicates.Count; $i++) {
                $dupFile = $duplicates[$i]
                $relativePath = $dupFile.FullName.Replace($currentDir, "").TrimStart("\")
                $newPath = Join-Path -Path $dupDir -ChildPath $relativePath

                # Create the destination directory structure if needed.
                $newDir = Split-Path -Path $newPath -Parent
                if (-not (Test-Path -Path $newDir)) {
                    New-Item -ItemType Directory -Path $newDir -Force | Out-Null
                }

                try {
                    Move-Item -Path $dupFile.FullName -Destination $newPath -Force
                    Write-Host "Moved: $($dupFile.FullName)"
                    # Only record the move after it succeeds, so the report
                    # reflects what actually happened on disk.
                    $duplicateFiles += [PSCustomObject]@{
                        Hash         = $hash
                        OriginalPath = $dupFile.FullName
                        NewPath      = $newPath
                        IsKept       = $false
                    }
                } catch {
                    Write-Warning "Failed to move $($dupFile.FullName): $_"
                }
            }
        }
    }
}

# Write the CSV report and a summary line. $duplicateFiles holds one record
# per file in each duplicate group (kept + moved).
if ($duplicateFiles.Count -gt 0) {
    $reportPath = Join-Path -Path $dupDir -ChildPath "dup.csv"
    $duplicateFiles | Export-Csv -Path $reportPath -NoTypeInformation -Encoding UTF8
    Write-Host "Report generated: $reportPath"
    # FIX: previously ".Count" sat outside the $() subexpression, so the
    # literal text ".Count" was printed instead of the number of moved files.
    $movedCount = ($duplicateFiles | Where-Object { -not $_.IsKept } | Measure-Object).Count
    Write-Host "Found $movedCount duplicate files"
} else {
    Write-Host "No duplicate files found"
}