我想我知道这是挂起因为我没有等待来自Main的异步调用导致死锁但是我不能让Main async所以如何解决这个问题?我将首先展示我试图并行化的程序然后我将展示我的并行化尝试。我认为很明显我正在尝试获得最快的程序来检查文件夹列表的大小(来自多个共享)。如果可以在更高级别进行并行化并将输出写入CSV中,则无序也可以,但我对一次处理一个共享感到满意。我尝试了几个并行代码的派生。这只是我最新的,所以它可能比我之前的尝试更错误。只知道这不是我唯一的尝试。我目前正在研究C#中的并行化,并且可能最终会自己解决这个问题,但是如果你能提供一些见解,我将非常感激。
C:\cygwin64\bin\bash.exe
这是我尝试并行化。
namespace ShareSize
{
    class Program
    {
        // Running byte total for the share currently being measured.
        // NOTE: static mutable state — reset to 0 after each share is written out.
        static long Size { get; set; }

        // Bytes per GiB; the CSV reports whole GiB via integer division.
        private const long BytesPerGiB = 1073741824;

        /// <summary>
        /// Reads share paths from the file at args[0] (one per line, optional
        /// surrounding commas), recursively totals the size of each share, and
        /// appends "share,sizeInGiB" lines to the CSV file at args[1].
        /// </summary>
        static void Main(string[] args)
        {
            using (StreamReader sr = new StreamReader(args[0]))
            {
                while (!sr.EndOfStream)
                {
                    // FIX: 'share' was used without ever being declared.
                    string share = sr.ReadLine().Trim(',');
                    if (string.IsNullOrWhiteSpace(share))
                        continue; // skip blank lines instead of throwing in GetDirectories
                    Console.WriteLine(share);
                    string[] root = Directory.GetDirectories(share);
                    MeasureFolders(root);
                    MeasureFiles(Directory.GetFiles(share));
                    Console.WriteLine("SIZE = " + Size);
                    using (StreamWriter sw = new StreamWriter(args[1], true))
                    {
                        // Integer division: size is reported in whole GiB.
                        sw.WriteLine(share + "," + Size / BytesPerGiB);
                    }
                    Size = 0; // reset the accumulator for the next share
                }
            }
            // Pause only when run interactively; an unconditional ReadLine()
            // hangs forever when the program is run from a script or test.
            if (!Console.IsInputRedirected)
                Console.ReadLine();
        }

        // Entry point for measuring a set of top-level directories.
        // FIX: the return type 'void' was missing (did not compile).
        private static void MeasureFolders(string[] root)
        {
            MeasureFolder(root);
        }

        // Recursively adds the size of every file under each directory to Size.
        // FIX: the return type 'void' was missing (did not compile).
        private static void MeasureFolder(string[] directories)
        {
            foreach (string d in directories)
            {
                try
                {
                    Console.WriteLine($"Measure Folder {d}");
                    string[] files = Directory.GetFiles(d);
                    string[] subDirectories = Directory.GetDirectories(d);
                    if (files.Length != 0)
                        MeasureFiles(files);
                    if (subDirectories.Length != 0)
                        MeasureFolder(subDirectories);
                }
                catch (Exception ex)
                {
                    // Best-effort traversal: inaccessible directories are
                    // reported and skipped (previously swallowed silently).
                    Console.Error.WriteLine($"Skipping {d}: {ex.Message}");
                }
            }
        }

        // Adds the length (bytes) of each file to the running total.
        private static void MeasureFiles(string[] files)
        {
            foreach (var f in files)
            {
                Size += new FileInfo(f).Length;
            }
        }
    }
}
非常感谢。
答案 0 :(得分:0)
也许我错过了这一点,但上面看起来有点过于复杂。以下代码片段获取任何给定路径的目录大小。我以这样的方式编写代码,即将序列化代码与并行化代码进行比较更容易。但是要考虑的最重要的事情之一是:如果要并行收集数据,则可能需要提前分配内存(数组),或者锁定对象以确保没有并发访问(lock(){})。两者都在下面演示。
注意:以下示例演示了 Parallel.ForEach 与 lock() {} 的用法。
// Serial baseline: sum the sizes of the files directly in this directory,
// then recurse into each subdirectory and add its total.
public long GetDirectorySizesBytes(string root) {
    long total = GetFileSizesBytes(Directory.GetFiles(root));
    foreach (var child in Directory.GetDirectories(root)) {
        total += GetDirectorySizesBytes(child);
    }
    return total;
}
// Returns the combined length, in bytes, of the given files.
public long GetFileSizesBytes(string[] files) {
    return files.Sum(path => new FileInfo(path).Length);
}
// Parallel counterpart of GetDirectorySizesBytes: files in this directory are
// summed first, then each subdirectory is sized on a separate task.
语法 public long ParallelGetDirectorySizesBytes(string root) {
long dirsize = 0;
string[] directories = Directory.GetDirectories(root);
string[] files = Directory.GetFiles(root);
// Directory.GetFiles never returns null, so this guard is defensive only.
if (files != null) {
dirsize += ParallelGetFileSizesBytes(files);
}
// Subdirectories are processed concurrently; the shared accumulator must be
// locked because 'dirsize += size' is not an atomic operation on long.
Parallel.ForEach(directories, dir => {
var size = ParallelGetDirectorySizesBytes(dir);
lock (lockObject) { // static lockObject defined at the top of the class
dirsize += size;
}
});
return dirsize;
}
// Measures all file lengths in parallel. Each iteration writes only its own
// preallocated slot (index-disjoint), so no locking is needed; the Sum runs
// after Parallel.For has fully completed.
public long ParallelGetFileSizesBytes(string[] files) {
    var lengths = new long[files.Length];
    Parallel.For(0, files.Length, index => lengths[index] = new FileInfo(files[index]).Length);
    return lengths.Sum();
}
// Verifies that the serial and parallel implementations agree.
// NOTE(review): hard-coded machine-specific fixture — both the path and the
// expected byte count only hold on the original author's machine; this test
// cannot pass anywhere else as written.
[TestMethod]
public void GetDirectoriesSizesTest() {
var actual = GetDirectorySizesBytes(@"C:\Exchanges");
var parallelActual = ParallelGetDirectorySizesBytes(@"C:\Exchanges");
long expected = 25769767281;
// Both strategies must report the same total for the same tree.
Assert.AreEqual(expected, actual);
Assert.AreEqual(expected, parallelActual);
}
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace StackOverflowProjects.Tests {
    /// <summary>
    /// Sizes a directory tree two ways — serially and with Parallel.For/ForEach —
    /// and checks that both strategies report the same total.
    /// </summary>
    [TestClass]
    public class DirectorySizeTests {
        // Guards the shared accumulator while parallel branches report back.
        public static object lockObject = new object();

        [TestMethod]
        public void GetDirectoriesSizesTest() {
            // NOTE(review): machine-specific fixture — the path and expected
            // byte count only hold on the original author's machine.
            long expected = 25769767281;
            var serialTotal = GetDirectorySizesBytes(@"C:\Exchanges");
            var parallelTotal = ParallelGetDirectorySizesBytes(@"C:\Exchanges");
            Assert.AreEqual(expected, serialTotal);
            Assert.AreEqual(expected, parallelTotal);
        }

        // Serial baseline: sum this directory's files, then recurse into
        // each subdirectory and add its total.
        public long GetDirectorySizesBytes(string root) {
            long total = GetFileSizesBytes(Directory.GetFiles(root));
            foreach (var child in Directory.GetDirectories(root)) {
                total += GetDirectorySizesBytes(child);
            }
            return total;
        }

        // Combined length, in bytes, of the given files.
        public long GetFileSizesBytes(string[] files) {
            return files.Sum(path => new FileInfo(path).Length);
        }

        // Parallel variant: subdirectories are sized concurrently; updates to
        // the shared accumulator are serialized with lockObject because
        // 'total += subtotal' is not atomic on long.
        public long ParallelGetDirectorySizesBytes(string root) {
            long total = ParallelGetFileSizesBytes(Directory.GetFiles(root));
            Parallel.ForEach(Directory.GetDirectories(root), child => {
                long subtotal = ParallelGetDirectorySizesBytes(child);
                lock (lockObject) {
                    total += subtotal;
                }
            });
            return total;
        }

        // File lengths are gathered in parallel into index-disjoint slots
        // (no lock needed); the Sum runs after Parallel.For completes.
        public long ParallelGetFileSizesBytes(string[] files) {
            var lengths = new long[files.Length];
            Parallel.For(0, files.Length, i => lengths[i] = new FileInfo(files[i]).Length);
            return lengths.Sum();
        }
    }
}
head