I'm trying to use LINQ to perform a large operation by merging several collections into one (callQuery in the snippets below) and then updating another list (dataCopy) from that merged collection.
The code is structured this way so that these calculations happen in memory (even though some of them might be better suited to database operations), because the data comes from both SQL and NoSQL sources.
It works, but it is extremely slow: it takes more than 10 seconds to complete for lists of the sizes I'm working with.
Am I going about this completely wrong, or is there something in my code that I can fix?
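Roughly, the shapes involved look like this (a simplified sketch: only the property names that appear in the snippets are real, the class names and types are my approximations):

public class CallQueryItem              // callQuery : List<CallQueryItem>
{
    public DateTime Date { get; set; }
    public string PDay { get; set; }        // day label, e.g. "Monday" or "Holiday" (called TOUProfileDay in the edit below)
    public DateTime StartDate { get; set; } // (called EffectiveDate in the edit below)
    public DateTime EndDate { get; set; }
    public int ItemStartHour { get; set; }
    public int ItemEndHour { get; set; }
    public bool IsHoliday { get; set; }
    public int TypeIndexes { get; set; }
    public decimal Rate { get; set; }
    public string Label { get; set; }
}

public class HourValuePair
{
    public int h { get; set; }              // hour of the day
    public decimal? value { get; set; }
    public int Type { get; set; }
    public decimal Cost { get; set; }
    public string Label { get; set; }
}

public class DataCopyItem               // dataCopy : List<DataCopyItem>
{
    public DateTime Date { get; set; }
    public List<HourValuePair> HourValuePairs { get; set; }
    public decimal ValueType1 { get; set; }
    public decimal ValueType2 { get; set; }
    public decimal ValueType3 { get; set; }
}

// holidayStructureList is a list whose items expose a DateTime Date property.

And here is the operation itself: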
dataCopy.ForEach(dc =>
{
    //Apply Cost and Rate to each Hour:Value pair
    dc.HourValuePairs.ForEach(dchv =>
    {
        var matchQuery =
        (
            //If there were holidays...
            holidayStructureList != null
                ?
                    //If the day is a holiday
                    (holidayStructureList.Select(h => h.Date.Date).Contains(dc.Date.Date))
                        ?
                            //Then ensure that the IsHoliday flag is set
                            callQuery.Where(
                                cq => cq.Date.Date == dc.Date.Date && cq.PDay.ToLower() == "holiday"
                                    && dc.Date >= cq.StartDate && dc.Date <= cq.EndDate
                                    && dchv.h >= cq.ItemStartHour && dchv.h <= cq.ItemEndHour
                                    && cq.IsHoliday == true
                            )
                        :
                            //Else, it's a regular day
                            callQuery.Where(
                                cq => cq.Date.Date == dc.Date.Date && cq.PDay.ToLower() == dc.Date.DayOfWeek.ToString().ToLower()
                                    && dc.Date >= cq.StartDate && dc.Date <= cq.EndDate
                                    && dchv.h >= cq.ItemStartHour && dchv.h <= cq.ItemEndHour
                                    && cq.IsHoliday == false
                            )
                :
                    //No holidays found in the collection, all days are non-holidays
                    callQuery.Where(
                        cq => cq.Date.Date == dc.Date.Date && cq.PDay.ToLower() == dc.Date.DayOfWeek.ToString().ToLower()
                            && dc.Date >= cq.StartDate && dc.Date <= cq.EndDate
                            && dchv.h >= cq.ItemStartHour && dchv.h <= cq.ItemEndHour
                            && cq.IsHoliday == false
                    )
        );
        dchv.Type = matchQuery.Select(x => x.TypeIndexes).FirstOrDefault();
        dchv.Cost = decimal.Round(matchQuery.Select(x => x.Rate).FirstOrDefault() * (dchv.value == null ? 0 : Convert.ToDecimal(dchv.value)), 2);
        dchv.Label = matchQuery.Select(x => x.Label).FirstOrDefault();
    });

    //Apply Type Grouping
    var types = dc.HourValuePairs.GroupBy(x => x.Type).Select(y => new
    {
        Index = y.Key,
        Value = y.Sum(z => z.value)
    });
    dc.ValueType1 = types.Where(x => x.Index == 1).Select(x => x.Value).FirstOrDefault() ?? 0m;
    dc.ValueType2 = types.Where(x => x.Index == 2).Select(x => x.Value).FirstOrDefault() ?? 0m;
    dc.ValueType3 = types.Where(x => x.Index == 3).Select(x => x.Value).FirstOrDefault() ?? 0m;
});
Thanks for your help!

EDIT

Based on Omada's comment, I tried the following, which brought the query execution time down to 1.25 seconds:
dataCopy.ForEach(dc =>
{
    var callQueryWithHolidays = callQuery.Where(
        cq => cq.Date.Date == dc.Date.Date && cq.TOUProfileDay.ToLower() == "holiday"
            && dc.Date >= cq.EffectiveDate && dc.Date <= cq.EndDate
            && cq.IsHoliday == true
    ).ToList();

    var callQueryWithoutHolidays = callQuery.Where(
        cq => cq.Date.Date == dc.Date.Date && cq.TOUProfileDay.ToLower() == dc.Date.DayOfWeek.ToString().ToLower()
            && dc.Date >= cq.EffectiveDate && dc.Date <= cq.EndDate
            && cq.IsHoliday == false
    ).ToList();

    //Apply Cost and Rate to each Hour:Value pair
    dc.HourValuePairs.ForEach(dchv =>
    {
        var matchQuery =
        (
            //If there were holidays...
            holidayStructureList != null
                ?
                    //If the day is a holiday
                    (holidayStructureList.Select(h => h.Date.Date).Contains(dc.Date.Date))
                        ?
                            //Then ensure that the IsHoliday flag is set
                            callQueryWithHolidays.Where(
                                cq => dchv.h >= cq.ItemStartHour && dchv.h <= cq.ItemEndHour
                            )
                        :
                            //Else, it's a regular day
                            callQueryWithoutHolidays.Where(
                                cq => dchv.h >= cq.ItemStartHour && dchv.h <= cq.ItemEndHour
                            )
                :
                    //No holidays found in the collection, all days are non-holidays
                    callQueryWithoutHolidays.Where(
                        cq => dchv.h >= cq.ItemStartHour && dchv.h <= cq.ItemEndHour
                    )
        );
        dchv.Type = matchQuery.Select(x => x.TypeIndexes).FirstOrDefault();
        dchv.Cost = decimal.Round(matchQuery.Select(x => x.Rate).FirstOrDefault() * (dchv.value == null ? 0 : Convert.ToDecimal(dchv.value)), 2);
        dchv.Label = matchQuery.Select(x => x.Label).FirstOrDefault();
    });

    //Apply Type Grouping
    var types = dc.HourValuePairs.GroupBy(x => x.Type).Select(y => new
    {
        Index = y.Key,
        Value = y.Sum(z => z.value)
    });
    dc.ValueType1 = types.Where(x => x.Index == 1).Select(x => x.Value).FirstOrDefault() ?? 0m;
    dc.ValueType2 = types.Where(x => x.Index == 2).Select(x => x.Value).FirstOrDefault() ?? 0m;
    dc.ValueType3 = types.Where(x => x.Index == 3).Select(x => x.Value).FirstOrDefault() ?? 0m;
});
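One further tweak along the same lines (just a sketch, not something I've measured; holidayDates and dayIsHoliday are made-up names): the holidayStructureList.Select(h => h.Date.Date).Contains(...) check above still rescans the holiday list for every hour of every day, so those dates could be hoisted into a HashSet<DateTime> built once before the loop:

// Build the set of holiday dates once; HashSet.Contains is O(1), so the
// per-day check no longer rescans holidayStructureList for every hour.
var holidayDates = new HashSet<DateTime>(
    holidayStructureList?.Select(h => h.Date.Date) ?? Enumerable.Empty<DateTime>());

dataCopy.ForEach(dc =>
{
    bool dayIsHoliday = holidayDates.Contains(dc.Date.Date);

    // ...build callQueryWithHolidays / callQueryWithoutHolidays and process
    // dc.HourValuePairs exactly as in the edit above, but branch on
    // dayIsHoliday instead of re-evaluating the Contains check per hour.
});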
Answer (score: 0)
As per my comment above:

In your callQuery.Where calls you are filtering on whether or not the day is a holiday. You can do that beforehand, outside the dataCopy.ForEach loop, and make two pre-filtered lists. Then you don't have to go through all 792 items in the inner loop.

This seems to have worked for you! :) If you need to get below 1.25 seconds, you will probably have to do what @Alexei Levenkov suggested and start building dictionaries out of your data.
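For what it's worth, here is a rough sketch of that dictionary idea (all the variable names here are made up, and I'm assuming callQuery is already an in-memory list): group callQuery by calendar date once, so each day only ever looks at its own rows instead of scanning the whole collection.

// Index callQuery by calendar date once, before the loop.
var callQueryByDate = callQuery
    .GroupBy(cq => cq.Date.Date)
    .ToDictionary(g => g.Key, g => g.ToList());

dataCopy.ForEach(dc =>
{
    // O(1) lookup of just this day's rows; skip days with no matching rows.
    if (!callQueryByDate.TryGetValue(dc.Date.Date, out var rowsForDay))
        return;

    var withHolidays    = rowsForDay.Where(cq => cq.IsHoliday).ToList();
    var withoutHolidays = rowsForDay.Where(cq => !cq.IsHoliday).ToList();

    // ...then apply the remaining filters (day name, date range, hour range)
    // inside the HourValuePairs loop, as in the edit in the question.
});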