I am using SQL Server 2012 and have a file that is roughly 20 GB in size. I want to insert every record in the file into the database, and I am using the SqlBulkCopy
class for this. However, because the data is so large, I have to insert it in batches rather than all at once. Here is the code:
String line;
SqlConnection conn = new SqlConnection(ConfigurationManager.ConnectionStrings["conStrtingName"].ConnectionString);
conn.Open();
StreamReader readFile = new StreamReader(filePath);
SqlTransaction transaction = conn.BeginTransaction();
try
{
    SqlBulkCopy copy = new SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity, transaction);
    copy.BulkCopyTimeout = 600;
    copy.DestinationTableName = "Txn";
    int counter = 0;
    while ((line = readFile.ReadLine()) != null)
    {
        string[] fields = line.Split('\t');
        if (fields.Length == 3)
        {
            DateTime date = Convert.ToDateTime(fields[0]);
            decimal txnCount = Convert.ToDecimal(fields[1]);
            string merchantName = fields[2];
            if (!string.IsNullOrEmpty(merchantName))
            {
                long MerchantId = Array.IndexOf(Program.merchantArray, merchantName) + 1;
                tables[workerId].Rows.Add(MerchantId, date, txnCount);
                counter++;
                if (counter % 100000 == 0)
                    Console.WriteLine("Worker: " + workerId + " - Transaction Records Read: " + counter);
                if (counter % 1000000 == 0)
                {
                    copy.WriteToServer(tables[workerId]);
                    transaction.Commit();
                    tables[workerId].Rows.Clear();
                    //transaction = conn.BeginTransaction();
                    Console.WriteLine("Worker: " + workerId + " - Transaction Records Inserted: " + counter);
                }
            }
        }
    }
    Console.WriteLine("Total Transaction Records Read: " + counter);
    if (tables[workerId].Rows.Count > 0)
    {
        copy.WriteToServer(tables[workerId]);
        transaction.Commit();
        tables[workerId].Rows.Clear();
        Console.WriteLine("Worker: " + workerId + " - Transaction Records Inserted: " + counter);
    }
}
catch (Exception ex)
{
    Console.WriteLine(ex.Message);
    transaction.Rollback();
}
finally
{
    conn.Close();
}
This works for the first 100000 records, but for the next batch of records I get the exception: The transaction is either not associated with the current connection or has been completed.
It happens when control reaches transaction.Commit(); for the next batch of records.
Is there a way around this?
Answer 0 (Score: 2)
The problem is the commented-out line after the transaction is committed. You need to uncomment it, and also reinitialize your SqlBulkCopy copy
variable. You would be better off restructuring your code so that the only place you need the transaction and the copy object is when you flush the DataTable you are filling, like this (you could further extract the repeated part into a separate method):
String line;
SqlConnection conn = new SqlConnection(ConfigurationManager.ConnectionStrings["conStrtingName"].ConnectionString);
conn.Open();
StreamReader readFile = new StreamReader(filePath);
SqlTransaction transaction = null;
try
{
    int counter = 0;
    while ((line = readFile.ReadLine()) != null)
    {
        string[] fields = line.Split('\t');
        if (fields.Length == 3)
        {
            DateTime date = Convert.ToDateTime(fields[0]);
            decimal txnCount = Convert.ToDecimal(fields[1]);
            string merchantName = fields[2];
            if (!string.IsNullOrEmpty(merchantName))
            {
                long MerchantId = Array.IndexOf(Program.merchantArray, merchantName) + 1;
                tables[workerId].Rows.Add(MerchantId, date, txnCount);
                counter++;
                if (counter % 100000 == 0)
                    Console.WriteLine("Worker: " + workerId + " - Transaction Records Read: " + counter);
                if (counter % 1000000 == 0)
                {
                    transaction = conn.BeginTransaction();
                    SqlBulkCopy copy = new SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity, transaction);
                    copy.BulkCopyTimeout = 600;
                    copy.DestinationTableName = "Txn";
                    copy.WriteToServer(tables[workerId]);
                    transaction.Commit();
                    transaction = null;
                    tables[workerId].Rows.Clear();
                    Console.WriteLine("Worker: " + workerId + " - Transaction Records Inserted: " + counter);
                }
            }
        }
    }
    Console.WriteLine("Total Transaction Records Read: " + counter);
    if (tables[workerId].Rows.Count > 0)
    {
        transaction = conn.BeginTransaction();
        SqlBulkCopy copy = new SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity, transaction);
        copy.BulkCopyTimeout = 600;
        copy.DestinationTableName = "Txn";
        copy.WriteToServer(tables[workerId]);
        transaction.Commit();
        transaction = null;
        tables[workerId].Rows.Clear();
        Console.WriteLine("Worker: " + workerId + " - Transaction Records Inserted: " + counter);
    }
}
catch (Exception ex)
{
    Console.WriteLine(ex.Message);
    if (transaction != null) transaction.Rollback();
}
finally
{
    conn.Close();
}
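As a follow-up to the "separate method" suggestion above, the repeated flush logic could be pulled out roughly like this (a sketch; the FlushBatch name and its parameters are made up here and are not part of the original answer):

// Flushes the buffered rows in their own short-lived transaction, then clears the buffer.
// Would be called both at the 1000000-row checkpoint and once more after the read loop ends.
private static void FlushBatch(SqlConnection conn, DataTable batch)
{
    if (batch.Rows.Count == 0) return;

    SqlTransaction transaction = conn.BeginTransaction();
    try
    {
        using (SqlBulkCopy copy = new SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity, transaction))
        {
            copy.BulkCopyTimeout = 600;
            copy.DestinationTableName = "Txn";
            copy.WriteToServer(batch);
        }
        transaction.Commit();
        batch.Rows.Clear();
    }
    catch
    {
        transaction.Rollback();
        throw;
    }
}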
The problem is that now you cannot roll back all the changes if something goes wrong. A probably better solution would be not to split the bulk insert manually, but to use some IDataReader
implementation to avoid filling a huge DataTable
in memory (for example, Marc Gravell's ObjectReader).
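A sketch of such a streaming approach follows, assuming Marc Gravell's FastMember NuGet package (its ObjectReader wraps an IEnumerable<T> as a data reader). The TxnRecord and StreamingBulkLoader names are made up for illustration, and the column names MerchantId, Date and TxnCount are assumed to match the Txn table:

// Sketch only: stream the file through an IDataReader instead of buffering a huge DataTable.
using System;
using System.Collections.Generic;
using System.Data.SqlClient;
using System.IO;
using FastMember;   // assumed: Install-Package FastMember

public class TxnRecord
{
    public long MerchantId { get; set; }
    public DateTime Date { get; set; }
    public decimal TxnCount { get; set; }
}

public static class StreamingBulkLoader
{
    // Lazily yields one parsed record per line; nothing large accumulates in memory.
    static IEnumerable<TxnRecord> ReadRecords(string filePath)
    {
        foreach (string line in File.ReadLines(filePath))
        {
            string[] fields = line.Split('\t');
            if (fields.Length != 3 || string.IsNullOrEmpty(fields[2])) continue;
            yield return new TxnRecord
            {
                MerchantId = Array.IndexOf(Program.merchantArray, fields[2]) + 1,
                Date = Convert.ToDateTime(fields[0]),
                TxnCount = Convert.ToDecimal(fields[1])
            };
        }
    }

    public static void BulkInsert(string connectionString, string filePath)
    {
        using (SqlConnection conn = new SqlConnection(connectionString))
        {
            conn.Open();
            using (SqlBulkCopy copy = new SqlBulkCopy(conn))
            using (ObjectReader reader = ObjectReader.Create(
                       ReadRecords(filePath), "MerchantId", "Date", "TxnCount"))
            {
                copy.DestinationTableName = "Txn";
                copy.BulkCopyTimeout = 0;      // no timeout for a very long copy
                copy.BatchSize = 100000;       // send rows to the server in chunks
                copy.WriteToServer(reader);    // rows are streamed straight from the file
            }
        }
    }
}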
Answer 1 (Score: 1)
Your transaction is committed every 100000 records, so it is already "done"; you have to start another one first with transaction = conn.BeginTransaction.
It would probably be good to rework the code to better reflect the lifetime of the transaction. You should also make sure that "copy" is recreated with the new transaction.
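Applied directly to the question's code, the change both answers describe amounts to something like this inside the counter % 1000000 == 0 block (a sketch, not compiled against the original program):

copy.WriteToServer(tables[workerId]);
transaction.Commit();
tables[workerId].Rows.Clear();
// Start a fresh transaction and bind a new SqlBulkCopy to it before the next batch;
// otherwise the next Commit() runs against an already-completed transaction.
transaction = conn.BeginTransaction();
copy = new SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity, transaction);
copy.BulkCopyTimeout = 600;
copy.DestinationTableName = "Txn";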
Answer 2 (Score: 0)
You can increase the timeout of the transaction like this (use a value suitable for the expected length of the transaction). The code below uses 15 minutes: Source
using (TransactionScope scope =
new TransactionScope(TransactionScopeOption.Required,
new System.TimeSpan(0, 15, 0)))
{
// working code here
}
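If this approach is combined with SqlBulkCopy as in the question, the connection should be opened inside the scope so that it enlists in the ambient transaction. A rough sketch, reusing the connection string name and tables[workerId] from the question (requires a reference to System.Transactions):

using (TransactionScope scope = new TransactionScope(TransactionScopeOption.Required,
                                                     new System.TimeSpan(0, 15, 0)))
using (SqlConnection conn = new SqlConnection(
           ConfigurationManager.ConnectionStrings["conStrtingName"].ConnectionString))
{
    conn.Open();   // opened inside the scope, so it enlists in the ambient transaction
    using (SqlBulkCopy copy = new SqlBulkCopy(conn))
    {
        copy.DestinationTableName = "Txn";
        copy.WriteToServer(tables[workerId]);
    }
    scope.Complete();   // commits the ambient transaction
}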