Fast checksum hashing?

Asked: 2011-10-29 02:26:21

Tags: c# optimization

I wrote a simple program that finds exact duplicate files on my computer, but it's a bit slow. Is there any way to speed it up?

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Security.Cryptography;

namespace DupeFinder
{
    class Program
    {
        static void Main(string[] args)
        {
            Console.WriteLine("Getting files...");
            var files = Directory.GetFiles(@"D:\Photos", "*", SearchOption.AllDirectories);
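            // note: the parameterless HMACMD5 constructor generates a random key,
            // so the hashes are only comparable within a single run of the program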
            var alg = new HMACMD5();
            var dict = new Dictionary<string,List<string>>();
            Console.WriteLine("Computing hashes...");
            int i=0;
            int cursorLeft = Console.CursorLeft;
            int cursorTop = Console.CursorTop;
            foreach(var fileName in files)
            {
                Console.SetCursorPosition(cursorLeft,cursorTop);
                Console.Write("Hashing file {0}/{1}", ++i, files.Length);
                using(var stream = new BufferedStream(File.OpenRead(fileName),1024*1024*5))
                {
                    var hash = alg.ComputeHash(stream);
                    var str = BitConverter.ToString(hash);
                    if (!dict.ContainsKey(str)) dict[str] = new List<string>();
                    dict[str].Add(fileName);
                }
            }
            Console.WriteLine();

            foreach(var dupe in dict.Where(p => p.Value.Count >= 2))
            {
                Console.WriteLine(string.Join(", ", dupe.Value));
            }

            Console.WriteLine("Done!");
            Console.ReadLine();
        }
    }
}

Possible optimizations:

  1. Avoid converting the byte array to a string in the first place. I tried this, but it didn't work, I guess because it uses reference equality instead of comparing the bytes (see the sketch after this list).
  2. A faster hashing algorithm?
  3. A different stream, or buffer size? I tried 1024^3, which should be a megabyte, but if anything that seemed to slow it down.
  4. Or is this just an inherently slow thing to do?
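
To illustrate item 1: the default comparer for a byte[] key compares references, not contents, so two hashes with identical bytes end up as different keys. A minimal sketch (the values and names here are my own, not from the program above):

var dict = new Dictionary<byte[], List<string>>();
var a = new byte[] { 1, 2, 3 };
var b = new byte[] { 1, 2, 3 };
dict[a] = new List<string> { "first.jpg" };
// prints False: a and b hold the same bytes but are different array instances,
// which is why keying on the raw hash bytes "didn't work" without a custom comparer
Console.WriteLine(dict.ContainsKey(b));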


    I remembered that Dictionary can take an IEqualityComparer, so I can write my own byte[] comparer.

    A lot of the algorithms I found on the internet tend to compare the byte lengths first, which I don't need to do because I know the arrays will always be 16 bytes. They also tend to compare 1 byte at a time... but I'm on a 64-bit machine, so why not compare 8?

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Text;
    using System.Security.Cryptography;
    
    namespace DupeFinder
    {
        class Program
        {
            static void Main(string[] args)
            {
                Console.WriteLine("Getting files...");
                string dir = @"D:\Photos";
                var files = Directory.GetFiles(dir, "*", SearchOption.AllDirectories);
                var alg = new HMACMD5();
                var dict = new Dictionary<byte[], List<string>>(new Md5Comparer());
                Console.WriteLine("Computing hashes...");
                int i = 0;
                int cursorLeft = Console.CursorLeft;
                int cursorTop = Console.CursorTop;
                foreach (var fileName in files)
                {
                    Console.SetCursorPosition(cursorLeft, cursorTop);
                    Console.Write("Hashing file {0}/{1}", ++i, files.Length);
                    using (var stream = new BufferedStream(File.OpenRead(fileName), 1024 * 1024 * 5))
                    {
                        var hash = alg.ComputeHash(stream);
                        if (!dict.ContainsKey(hash)) dict[hash] = new List<string>();
                        dict[hash].Add(fileName);
                    }
                }
                Console.WriteLine();
    
                using (var sw = new StreamWriter(Path.Combine(dir, "duplicates.txt")))
                {
                    i = 0;
                    foreach (var dupe in dict.Where(p => p.Value.Count >= 2))
                    {
                        sw.WriteLine("Duplicate {0}", ++i);
                        foreach(var fn in dupe.Value)
                        {
                            sw.WriteLine("- {0}", fn);
                        }
                    }
                }
    
                Console.WriteLine("Done!");
                //Console.ReadLine();
            }
        }
    
        class Md5Comparer : IEqualityComparer<byte[]>
        {
            public bool Equals(byte[] x, byte[] y)
            {
                var xi = BitConverter.ToInt64(x, 0);
                var yi = BitConverter.ToInt64(y, 0);
                if (xi != yi) return false;
                xi = BitConverter.ToInt64(x, 8);
                yi = BitConverter.ToInt64(y, 8);
                return xi == yi;
            }
    
            public int GetHashCode(byte[] obj)
            {
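                // an MD5 hash is uniformly distributed, so the first byte alone gives
                // a reasonable bucket spread; Equals() resolves any collisions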
                return obj[0];
            }
        }
    }
    

    Not sure how much faster this is.... I haven't done any benchmarking, but it certainly doesn't seem any slower.


    New code, thanks to @spender:

    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.IO;
    using System.Linq;
    using System.Text;
    using System.Security.Cryptography;
    
    namespace DupeFinder
    {
        class Program
        {
            static void Main(string[] args)
            {
                var watch = Stopwatch.StartNew();
                const string dir = @"D:\Photos";
                var md5Comparer = new Md5Comparer();
    
                var dupeGroups = Directory.EnumerateFiles(dir, "*", SearchOption.AllDirectories)
                    .Select(fn => new FileInfo(fn))
                    .GroupBy(fi => fi.Length)
                    .Where(g => g.Count() > 1)
                    .SelectMany(g => g
                       .GroupBy(fi => GetHash(fi.FullName), md5Comparer)
                       .Where(g2 => g2.Count() > 1));
    
                using (var sw = new StreamWriter(Path.Combine(dir, "duplicates.txt")))
                {
                    int i = 0;
                    foreach (var dupeGroup in dupeGroups)
                    {
                        sw.WriteLine("Duplicate {0}", ++i);
                        foreach(FileInfo fi in dupeGroup)
                        {
                            sw.WriteLine("- {0}", fi.FullName);
                        }
                    }
                }
    
                Console.WriteLine("{0:0.000} seconds", watch.ElapsedMilliseconds / 1000d); // 22.068 seconds to process 10K files, 37 GB, 463 dupes
                Console.ReadLine();
            }
    
            static readonly HMACMD5 md5Hasher = new HMACMD5();
            public static byte[] GetHash(string fileName)
            {
                using(var stream = File.OpenRead(fileName))
                    return md5Hasher.ComputeHash(stream);
            }
        }
    
        class Md5Comparer : IEqualityComparer<byte[]>
        {
            public bool Equals(byte[] x, byte[] y)
            {
                var xi = BitConverter.ToInt64(x, 0);
                var yi = BitConverter.ToInt64(y, 0);
                if (xi != yi) return false;
                xi = BitConverter.ToInt64(x, 8);
                yi = BitConverter.ToInt64(y, 8);
                return xi == yi;
            }
    
            public int GetHashCode(byte[] obj)
            {
                return obj[0];
            }
        }
    }
    

    Down from 360 seconds to 22-70 seconds. Quite a good improvement!

1 Answer:

Answer 0 (score: 5):

You're missing a huge optimization: if the sizes don't match...

Also, a matching hash doesn't guarantee matching content.

EDIT

OK. Finding size-based dupes isn't hard, and from those you can check whether they're actually dupes:

For example:

var files = 
    Directory
        .GetFiles(
            @"C:\Users\Administrator\Downloads",
            "*", 
            SearchOption.AllDirectories);
var fileGroups=
    files
        .Select(fn => new FileInfo(fn))
        .GroupBy(fi => fi.Length)
        .Where(g => g.Count()>1);

You could take this further by creating a hash function for a given file name:
int GetHash(string fileName)  // fill in your definition

then...

var fg2 =
        fileGroups
            .SelectMany(
                g => g
                    .GroupBy(fi => GetHash(fi.FullName))
                    .Where(gg=>gg.Count()>1));

foreach(var dupeGroup in fg2)
{
    //everything in this group is probably duplicate
    //although you'd need to do byte by byte to be sure
    foreach(FileInfo dupe in dupeGroup)
    {

    }
}

By doing this, you'll drastically reduce the amount of hashing needed, because you've pre-filtered the candidates by size.
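
As the comment in the loop above says, only a byte-by-byte comparison makes you certain the files really are duplicates. A minimal sketch of such a check (the FilesAreEqual name and buffer size are my own, not part of the answer):

static bool FilesAreEqual(string path1, string path2)
{
    using (var s1 = File.OpenRead(path1))
    using (var s2 = File.OpenRead(path2))
    {
        if (s1.Length != s2.Length) return false;

        var b1 = new byte[64 * 1024];
        var b2 = new byte[64 * 1024];
        int read;
        while ((read = s1.Read(b1, 0, b1.Length)) > 0)
        {
            // read the same number of bytes from the second stream
            int filled = 0;
            while (filled < read)
            {
                int n = s2.Read(b2, filled, read - filled);
                if (n == 0) return false;
                filled += n;
            }
            for (int i = 0; i < read; i++)
                if (b1[i] != b2[i]) return false;
        }
        return true;
    }
}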