I'm working with a RESTful API that returns at most 50 records per call; if you need more, you have to make multiple calls, passing an offset each time.
Sometimes we need to load all of the available results, and for that we use something like the code below. It makes requests one after another and adds the results to a list, stopping when the maximum is reached or when any one call returns fewer items than were requested.
How can I refactor this (using tasks/parallelism/threads) so that several requests are in flight at any one time, while still producing exactly the same result? I have considered creating multiple Tasks
and awaiting them, but the problem is that the number of records to load is unknown until a request reports "no more available" or the maximum is reached.
public IEnumerable<T> GetItems(int maxAmount = -1)
{
    var moreData = true;
    var result = new List<T>();
    var counter = 0;
    var batchAmount = 50;
    while (moreData)
    {
        // Request a full batch unless a maximum was given and fewer items than a batch remain.
        var requestAmount = maxAmount == -1
            ? batchAmount
            : Math.Min(batchAmount, maxAmount - result.Count);
        var items = GetItemsFromService(requestAmount, counter).ToList();
        counter += items.Count;
        result.AddRange(items);
        // Keep going only while the service returns full batches and the maximum has not been reached.
        moreData = items.Count == requestAmount && (maxAmount == -1 || maxAmount > result.Count);
    }
    return result;
}
private IEnumerable<T> GetItemsFromService(int batchAmount, int offset)
{
    // Assume this calls a REST service that returns at most batchAmount records,
    // starting at the given offset.
}
Answer 0 (score: 0)
Unfortunately, you can't use async here, because each request depends on the number of items returned by the previous one. The requests have to be made sequentially, unless you want to do some asynchronous work on the data you receive.
It must be a poorly designed API if it returns paged results without exposing a total page count or total item count.
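To illustrate the distinction this answer is drawing: the paging itself stays sequential (each request needs the previous response before it can decide whether to continue), but the waiting can still be non-blocking. A minimal sketch, assuming a hypothetical GetItemsFromServiceAsync wrapper around the same REST call, living in the same class as the question's code:

public async Task<IReadOnlyList<T>> GetItemsAsync(int maxAmount = -1)
{
    // Sketch only: GetItemsFromServiceAsync is a hypothetical async version of GetItemsFromService.
    var result = new List<T>();
    var moreData = true;
    const int batchAmount = 50;
    while (moreData)
    {
        var requestAmount = maxAmount == -1
            ? batchAmount
            : Math.Min(batchAmount, maxAmount - result.Count);
        // Each call still waits for the previous one to finish; only the waiting is non-blocking.
        var items = (await GetItemsFromServiceAsync(requestAmount, result.Count)).ToList();
        result.AddRange(items);
        moreData = items.Count == requestAmount && (maxAmount == -1 || result.Count < maxAmount);
    }
    return result;
}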
Answer 1 (score: 0)
I managed to get this working. Essentially, I keep issuing paged requests until one of them comes back with no results; because they are started in order, once a response comes back empty no further requests are needed, and the requests already in flight are simply allowed to complete.
My working code looks like this.
private IEnumerable<object> GetEntitiesInParallel(Type type, string apiPath, Dictionary<string, string> parameters, int startPosition, int maxAmount)
{
    var context = new TaskThreadingContext(maxAmount, startPosition);
    var threads = Enumerable.Range(0, NumberOfThreads).Select(i =>
    {
        var task = Task.Factory.StartNew(() =>
        {
            // Each task keeps claiming the next offset until the shared context says to stop.
            while (context.Continue)
            {
                var rawData = String.Empty;
                var offset = context.NextAmount();
                var result = GetEntitiesSingleRequest(type, parameters, offset, apiPath, out rawData);
                if (result.Any())
                {
                    context.AddResult(result.Cast<object>(), rawData);
                }
                else
                {
                    // An empty page means there is no more data; signal the other tasks to stop.
                    context.NoResult();
                }
            }
        });
        return task;
    }).ToArray();
    Task.WaitAll(threads);
    var results = context.GetResults<object>();
    return results;
}
private IEnumerable<object> GetEntitiesSingleRequest(Type type, Dictionary<string, string> parameters,
    int offset, string apiPath, out string rawData)
{
    var request = Utility.CreateRestRequest(apiPath, Method.GET, ApiKey, 50, offset, parameters);
    // Build List<T> for the entity type at runtime and invoke the generic Execute<T> via reflection.
    type = typeof(List<>).MakeGenericType(type);
    var method = Client.GetType().GetMethods().Single(m => m.IsGenericMethod && m.Name == "Execute").MakeGenericMethod(type);
    try
    {
        dynamic response = (IRestResponse)method.Invoke(Client, new object[] { request });
        var data = response.Data as IEnumerable;
        var dataList = data.Cast<object>().ToList();
        rawData = response.Content.Replace("\n", Environment.NewLine);
        return dataList.OfType<object>().ToList();
    }
    catch (Exception ex)
    {
        // Treat a 404 from the service as an empty result (no more data at this offset).
        if (ex.Message.IndexOf("404") != -1)
        {
            rawData = null;
            return Enumerable.Empty<object>();
        }
        throw;
    }
}
private class TaskThreadingContext
{
    private int batchAmount = 50;
    private object locker1 = new object();
    private object locker2 = new object();
    private CancellationTokenSource tokenSource;
    private CancellationToken token;
    private volatile bool cont = true;
    private volatile int offset = 0;
    private volatile int max = 0;
    private volatile int start = 0;
    private List<object> result = new List<object>();
    private List<string> raw = new List<string>();

    public bool Continue { get { return cont; } }

    public TaskThreadingContext(int maxRows = 0, int startPosition = 0)
    {
        max = maxRows;
        offset = start = startPosition;
    }

    // Atomically claims the next offset to request; stops handing out new work
    // once the configured maximum has been reached (when a maximum is set).
    public int NextAmount()
    {
        lock (locker1)
        {
            var ret = offset;
            var temp = offset + batchAmount;
            if (temp - start > max && max > 0)
            {
                temp = max - offset;
            }
            offset = temp;
            if (offset - start >= max && max > 0)
            {
                cont = false;
            }
            return ret;
        }
    }

    public TaskThreadingContext()
    {
        tokenSource = new CancellationTokenSource();
        token = tokenSource.Token;
    }

    public void AddResult(IEnumerable<object> items, string rawData)
    {
        lock (locker2)
        {
            result.AddRange(items);
            raw.Add(rawData);
        }
    }

    public IEnumerable<T> GetResults<T>()
    {
        return this.result.Cast<T>().ToList();
    }

    // Called when a request comes back empty: there is no more data, so stop all tasks.
    public void NoResult()
    {
        cont = false;
    }
}
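For completeness, a hypothetical usage sketch of the method above. The Customer type, the "customers" path, and the query parameter are made-up placeholders, and NumberOfThreads, ApiKey and Client are assumed to be fields on the surrounding class, as in the code above:

// Hypothetical call; types and paths are placeholders, not part of the original API.
var parameters = new Dictionary<string, string> { { "status", "active" } };
var entities = GetEntitiesInParallel(typeof(Customer), "customers", parameters, startPosition: 0, maxAmount: 500);
var customers = entities.Cast<Customer>().ToList();
Console.WriteLine("Loaded {0} customers", customers.Count);

Because TaskThreadingContext hands out each offset under a lock, no page is requested twice, and NoResult() makes the remaining tasks stop after their current request completes.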