这个问题我已经研究了好几个小时，但始终无法理解为什么它不起作用。我有一个 TPL Dataflow 批处理块，它接收我要上传的一批文件。我需要能够使用取消令牌，所以我使用的是 UploadAsync 方法。我希望同一批处理中的各个作业彼此并行执行，但一次只处理一个批处理。
// Feed each incoming message straight into DoWork via a method-group delegate.
// NOTE(review): CustomScheduler's first argument presumably limits concurrency
// to a single worker (one batch at a time) — confirm against its implementation.
var actionBlock = new ActionBlock<TInput>(
    DoWork,
    new ExecutionDataflowBlockOptions
    {
        TaskScheduler = new CustomScheduler(1, ApartmentState.MTA, ThreadPriority.Normal)
    });
// Unwraps the dataflow payload and forwards it to ProcessBatch.
// Messages that are not a Tuple<string, string>[] are silently dropped.
// FIX: the original declared `Tuple<string, >[]`, which does not compile —
// ProcessBatch takes Tuple<string, string>[], so that is the intended type.
void DoWork(TInput input)
{
    var castedMessage = input as Tuple<string, string>[];
    if (castedMessage == null) return;
    ProcessBatch(castedMessage);
}
// Uploads every job in the batch in parallel (two files per job: chunk + index)
// and blocks until all started uploads finish, so batches run one at a time.
// NOTE(review): blocking with Task.WaitAll on tasks produced by an async method
// can deadlock if that method resumes on a captured synchronization context.
public void ProcessBatch(Tuple<string, string>[] batch)
{
    if (batch == null) return;

    var batchUploadTaskArray = new Task<Tuple<bool, long>>[batch.Length * 2];
    int taskArrayIndex = 0;
    foreach (var job in batch)
    {
        // FIX: the original `return` here abandoned uploads that had already
        // been started; skip the bad entry instead and keep going.
        if (job == null) continue;
        var indexSegment = job.Item2;

        // Chunk-file upload; record its duration once it completes successfully.
        var uploadTask1 = uploader.putFileAsync(job.Item1.LocalFilePath, job.Item1.RemoteChunkFilePath);
        uploadTask1.ContinueWith(task =>
        {
            if (task.Result.Item1)
                UploaderStatsManager.Instance.UpdateUploadDuration(task.Result.Item2);
        }, TaskScheduler.Default
        );
        batchUploadTaskArray[taskArrayIndex++] = uploadTask1;

        // Index-file upload. NOTE(review): idxLocalFilepath is not defined in
        // this snippet — presumably derived from the job; confirm in the source.
        batchUploadTaskArray[taskArrayIndex++] = uploader.putFileAsync(idxLocalFilepath, indexSegment.RemoteIndexFilePath);
    }

    // FIX: wait only on the slots actually filled — Task.WaitAll throws
    // ArgumentException if the array contains null elements (skipped jobs).
    var startedTasks = new Task<Tuple<bool, long>>[taskArrayIndex];
    Array.Copy(batchUploadTaskArray, startedTasks, taskArrayIndex);
    Task.WaitAll(startedTasks);
    Console.WriteLine("Done uploading");
}
//Method A - AWS uploader class
// Cancellation source shared by every upload started by this instance.
CancellationTokenSource cts = new CancellationTokenSource();

/// <summary>
/// Uploads <paramref name="filePath"/> to S3 at <paramref name="destPath"/>.
/// Returns a tuple of (success, elapsed milliseconds).
/// </summary>
public async override Task<Tuple<bool, long>> putFileAsync(string filePath, string destPath)
{
    var sw = new Stopwatch();
    using (var s3Client = new AmazonS3Client(mAccessKeyId, mSecretAccessKeyId, new AmazonS3Config
    {
        // Fall back to us-east-1 when the configured region is not recognized.
        RegionEndpoint = !String.Equals(mRegion.DisplayName, "Unknown") ? mRegion : RegionEndpoint.USEast1,
    }))
    {
        using (var transferUtility = new TransferUtility(s3Client))
        {
            using (var file = File.Open(filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                var transferRequest = new TransferUtilityUploadRequest
                {
                    Key = destPath.TrimStart('/'),
                    InputStream = file,
                    BucketName = mBucketName,
                    CannedACL = S3CannedACL.PublicRead
                };
                transferRequest.UploadProgressEvent += DisplayFileProgress;
                sw.Start();
                // FIX: await the upload directly and stop the stopwatch afterwards.
                // The original awaited a ContinueWith continuation, which completes
                // successfully even when the upload task faults — exceptions were
                // silently lost. ConfigureAwait(false) avoids resuming on a captured
                // synchronization context, which is what deadlocks callers that
                // block on this task via Task.WaitAll.
                await transferUtility.UploadAsync(transferRequest, cts.Token).ConfigureAwait(false);
                sw.Stop();
            }
        }
    }
    return new Tuple<bool, long>(true, sw.ElapsedMilliseconds);
}
//Method B - Alternate FTP class
/// <summary>
/// FTP fallback: the ftp library is synchronous, so the blocking call is
/// offloaded to a thread-pool task. Returns (success, elapsed milliseconds).
/// </summary>
public Task<Tuple<bool, long>> putFileAsync(string filePath, string destPath)
{
    // FIX (idiom): Task.Run, not Task<Tuple<bool, long>>.Run — accessing a
    // static member through a constructed generic type is legal but misleading.
    return Task.Run(() =>
    {
        var sw = Stopwatch.StartNew();
        var putRC = ftp.putFile(filePath, destPath);
        sw.Stop();
        return new Tuple<bool, long>(putRC, sw.ElapsedMilliseconds);
    });
}
//uploader stats class
/// <summary>
/// Thread-safe singleton that keeps a rolling window of the most recent
/// upload durations (in milliseconds).
/// </summary>
public class UploaderStatsManager
{
    // Rolling window of recent upload durations; consumers read this directly.
    public ConcurrentQueue<double> uploadDurationQueue;

    // Maximum number of samples retained in the rolling window.
    private const int MaxSamples = 12;

    // FIX: the original locked on `statsLock`, a field that was never declared.
    // CLR static field initialization is guaranteed thread-safe, so an eager
    // static instance needs no explicit locking at all.
    private static readonly UploaderStatsManager instance = new UploaderStatsManager();

    private UploaderStatsManager()
    {
        uploadDurationQueue = new ConcurrentQueue<double>();
    }

    /// <summary>The single shared instance.</summary>
    public static UploaderStatsManager Instance
    {
        get { return instance; }
    }

    /// <summary>
    /// Records an upload duration, evicting the oldest sample once the window
    /// already holds MaxSamples entries. If the eviction fails (contended
    /// dequeue), the new sample is dropped rather than growing the window.
    /// </summary>
    internal void UpdateUploadDuration(double uploadDuration)
    {
        double durationOut;
        bool rc = true;
        if (uploadDurationQueue.Count >= MaxSamples)
            rc = uploadDurationQueue.TryDequeue(out durationOut);
        if (rc)
            uploadDurationQueue.Enqueue(uploadDuration);
    }
}
我还使用另一个框架来上传 FTP 文件，该框架并不提供异步方法。如果我使用方法A，则第一个批处理会挂起在 Task.WaitAll 上。如果我使用方法B，一次只处理一个批处理，批处理也不会挂起在 Task.WaitAll 上。我尝试把 ProcessBatch 改成异步方法，并用 "await Task.WhenAll" 代替 Task.WaitAll，这对方法A和方法B都有效，但它无法阻止多个批处理被并行处理。
感谢您的帮助!
答案 0（得分：0）
我想出来了。在putFileAsync方法中,uploadAsync返回一个Task。我只是在该任务上调用Task.WaitAll并从方法头中删除async关键字。
// Synchronous-wrapper variant: the entire S3 upload runs on a thread-pool
// thread, which is blocked until the transfer completes, so the returned
// Task only finishes once the upload has fully run.
public override Task<Tuple<bool, long>> putFileAsync(string filePath, string destPath)
{
    return Task.Run(() =>
    {
        var stopwatch = new Stopwatch();
        using (var client = new AmazonS3Client(mAccessKeyId, mSecretAccessKeyId, new AmazonS3Config
        {
            // Unknown region name falls back to us-east-1.
            RegionEndpoint = !String.Equals(mRegion.DisplayName, "Unknown") ? mRegion : RegionEndpoint.USEast1,
        }))
        using (var utility = new TransferUtility(client))
        using (var fileStream = File.Open(filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
        {
            var request = new TransferUtilityUploadRequest
            {
                Key = destPath.TrimStart('/'),
                InputStream = fileStream,
                BucketName = mBucketName,
                CannedACL = S3CannedACL.PublicRead
            };
            request.UploadProgressEvent += DisplayFileProgress;
            var fileInfo = new FileInfo(filePath);
            stopwatch.Start();
            // Block this worker thread until the transfer (and the continuation
            // that stops the stopwatch) has completed.
            Task pending = utility.UploadAsync(request, cts.Token).ContinueWith(t => stopwatch.Stop());
            Task.WaitAll(pending);
        }
        return new Tuple<bool, long>(true, stopwatch.ElapsedMilliseconds);
    });
}