I'm trying to insert about 50,000 rows into an MS SQL Server database via Entity Framework 6.1.3, but it takes far too long. I followed this answer: I disable AutoDetectChangesEnabled and call SaveChanges after every 1,000 added entities. It still takes about 7-8 minutes. I tried both a remote server and a local one, with no significant difference. I don't think this is normal. Am I forgetting something?
Here is my code:
static void Main(string[] args)
{
    var personCount = 50000;
    var personList = new List<Person>();
    var random = new Random();
    for (int i = 0; i < personCount; i++)
    {
        personList.Add(new Person
        {
            CreateDate = DateTime.Now,
            DateOfBirth = DateTime.Now,
            FirstName = "Name",
            IsActive = true,
            IsDeleted = false,
            LastName = "Surname",
            PhoneNumber = "01234567890",
            PlaceOfBirth = "Trabzon",
            Value0 = random.NextDouble(),
            Value1 = random.Next(),
            Value10 = random.NextDouble(),
            Value2 = random.Next(),
            Value3 = random.Next(),
            Value4 = random.Next(),
            Value5 = random.Next(),
            Value6 = "Value6",
            Value7 = "Value7",
            Value8 = "Value8",
            Value9 = random.NextDouble()
        });
    }

    MyDbContext context = null;
    try
    {
        context = new MyDbContext();
        context.Configuration.AutoDetectChangesEnabled = false;

        int count = 0;
        foreach (var entityToInsert in personList)
        {
            ++count;
            context = AddToContext(context, entityToInsert, count, 1000, true);
        }

        context.SaveChanges();
    }
    finally
    {
        if (context != null)
            context.Dispose();
    }
}

private static MyDbContext AddToContext(MyDbContext context, Person entity, int count, int commitCount, bool recreateContext)
{
    context.Set<Person>().Add(entity);

    if (count % commitCount == 0)
    {
        context.SaveChanges();
        if (recreateContext)
        {
            context.Dispose();
            context = new MyDbContext();
            context.Configuration.AutoDetectChangesEnabled = false;
        }
    }

    return context;
}
The Person class:
public class Person
{
    public int Id { get; set; }

    [MaxLength(50)]
    public string FirstName { get; set; }

    [MaxLength(50)]
    public string LastName { get; set; }

    public DateTime DateOfBirth { get; set; }

    [MaxLength(50)]
    public string PlaceOfBirth { get; set; }

    [MaxLength(15)]
    public string PhoneNumber { get; set; }

    public bool IsActive { get; set; }
    public DateTime CreateDate { get; set; }

    public int Value1 { get; set; }
    public int Value2 { get; set; }
    public int Value3 { get; set; }
    public int Value4 { get; set; }
    public int Value5 { get; set; }

    [MaxLength(50)]
    public string Value6 { get; set; }

    [MaxLength(50)]
    public string Value7 { get; set; }

    [MaxLength(50)]
    public string Value8 { get; set; }

    public double Value9 { get; set; }
    public double Value10 { get; set; }
    public double Value0 { get; set; }

    public bool IsDeleted { get; set; }
}
The query captured by SQL Server Profiler:
exec sp_executesql N'INSERT [dbo].[Person]([FirstName], [LastName], [DateOfBirth], [PlaceOfBirth], [PhoneNumber], [IsActive], [CreateDate], [Value1], [Value2], [Value3], [Value4], [Value5], [Value6], [Value7], [Value8], [Value9], [Value10], [Value0], [IsDeleted])
VALUES (@0, @1, @2, @3, @4, @5, @6, @7, @8, @9, @10, @11, @12, @13, @14, @15, @16, @17, @18)
SELECT [Id]
FROM [dbo].[Person]
WHERE @@ROWCOUNT > 0 AND [Id] = scope_identity()',N'@0 nvarchar(50),@1 nvarchar(50),@2 datetime2(7),@3 nvarchar(50),@4 nvarchar(15),@5 bit,@6 datetime2(7),@7 int,@8 int,@9 int,@10 int,@11 int,@12 nvarchar(50),@13 nvarchar(50),@14 nvarchar(50),@15 float,@16 float,@17 float,@18 bit',@0=N'Name',@1=N'Surname',@2='2017-01-19 10:59:09.9882591',@3=N'Trabzon',@4=N'01234567890',@5=1,@6='2017-01-19 10:59:09.9882591',@7=731825903,@8=1869842619,@9=1701414555,@10=1468342767,@11=1962019787,@12=N'Value6',@13=N'Value7',@14=N'Value8',@15=0,65330243467041405,@16=0,85324223938083377,@17=0,7146566792925152,@18=0
I want to solve this with EF. I know there are plenty of alternatives, but let's assume there is no other option.
The main point here is that I used the same approach as the answer I referenced. That answer inserts 560,000 entities in 191 seconds, yet I can only insert 50,000 in about 7 minutes.
Answer (score: 8)
You have already removed the ChangeTracker issue by disabling AutoDetectChanges.
I normally recommend one of the solutions listed here: http://entityframework.net/improve-ef-add-performance
Since you have already set AutoDetectChanges to false, splitting the work into multiple batches won't really improve or hurt performance.
The main problem is that Entity Framework makes one database round-trip for every entity it needs to insert. So if you insert 50,000 entities, 50,000 database round-trips are performed, which is INSANE.
What you need to do to fix the problem is reduce the number of database round-trips.
One free way to do that is to use SqlBulkCopy: https://msdn.microsoft.com/en-us/library/system.data.sqlclient.sqlbulkcopy(v=vs.110).aspx
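To give a rough idea, here is a minimal SqlBulkCopy sketch for the Person table above. It is an assumption-laden illustration, not part of the original answer: the column names are taken from the profiler trace, while the helper class name, connection string, and batch size are placeholders you would replace with your own values.

using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;

public static class PersonBulkCopy
{
    // Sends all rows in one bulk operation instead of one round-trip per row.
    // "connectionString" must point at the same database the DbContext uses.
    public static void BulkInsert(IEnumerable<Person> people, string connectionString)
    {
        // Build an in-memory table whose columns match dbo.Person (Id is IDENTITY, so it is omitted).
        var table = new DataTable();
        table.Columns.Add("FirstName", typeof(string));
        table.Columns.Add("LastName", typeof(string));
        table.Columns.Add("DateOfBirth", typeof(DateTime));
        table.Columns.Add("PlaceOfBirth", typeof(string));
        table.Columns.Add("PhoneNumber", typeof(string));
        table.Columns.Add("IsActive", typeof(bool));
        table.Columns.Add("CreateDate", typeof(DateTime));
        table.Columns.Add("Value1", typeof(int));
        table.Columns.Add("Value2", typeof(int));
        table.Columns.Add("Value3", typeof(int));
        table.Columns.Add("Value4", typeof(int));
        table.Columns.Add("Value5", typeof(int));
        table.Columns.Add("Value6", typeof(string));
        table.Columns.Add("Value7", typeof(string));
        table.Columns.Add("Value8", typeof(string));
        table.Columns.Add("Value9", typeof(double));
        table.Columns.Add("Value10", typeof(double));
        table.Columns.Add("Value0", typeof(double));
        table.Columns.Add("IsDeleted", typeof(bool));

        foreach (var p in people)
        {
            table.Rows.Add(p.FirstName, p.LastName, p.DateOfBirth, p.PlaceOfBirth,
                p.PhoneNumber, p.IsActive, p.CreateDate, p.Value1, p.Value2, p.Value3,
                p.Value4, p.Value5, p.Value6, p.Value7, p.Value8, p.Value9, p.Value10,
                p.Value0, p.IsDeleted);
        }

        using (var bulkCopy = new SqlBulkCopy(connectionString))
        {
            bulkCopy.DestinationTableName = "dbo.Person";
            bulkCopy.BatchSize = 5000; // illustrative batch size

            // Map columns by name so column order in the destination table doesn't matter.
            foreach (DataColumn column in table.Columns)
                bulkCopy.ColumnMappings.Add(column.ColumnName, column.ColumnName);

            bulkCopy.WriteToServer(table);
        }
    }
}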
Disclaimer: I'm the owner of Entity Framework Extensions.
This library lets you perform all the bulk operations your scenario needs:
You will be able to insert 50,000 entities in a matter of seconds.
Example:
// Easy to use
context.BulkSaveChanges();

// Easy to customize
context.BulkSaveChanges(bulk => bulk.BatchSize = 100);

// Perform Bulk Operations
context.BulkDelete(customers);
context.BulkInsert(customers);
context.BulkUpdate(customers);

// Customize Primary Key
context.BulkMerge(customers, operation => {
    operation.ColumnPrimaryKeyExpression = customer => customer.Code;
});
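For the scenario in the question, the whole AddToContext loop could then be replaced by something along these lines (a sketch that assumes the library is installed and uses its BatchSize option; the value 5000 is just an example):

// Replaces the per-entity Add/SaveChanges loop with a single bulk operation.
using (var context = new MyDbContext())
{
    context.BulkInsert(personList, options => options.BatchSize = 5000);
}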