I have a file containing 9 to 13 million records. Since a List has a limited size, I split the work into chunks of 1 million records and try to insert each chunk.
Here is the code that reads the file and breaks it into chunks:
public static void InsertBulkData(string file)
{
    List<FileData> lstFileData = new List<FileData>();
    using (FileStream fs = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        using (BufferedStream bs = new BufferedStream(fs))
        {
            using (StreamReader sr = new StreamReader(bs))
            {
                string line = string.Empty;
                while ((line = sr.ReadLine()) != null)
                {
                    // Parse the fixed-width fields of the current line.
                    FileData obj = new FileData();
                    obj.Property1 = line.Substring(5, 9).Trim();
                    obj.Property2 = line.Substring(19, 40).Trim();
                    lstFileData.Add(obj);
                    // Flush to the database every 1 million records.
                    if (lstFileData.Count == 1000000)
                    {
                        InsertDataIntoDatabase(lstFileData);
                        lstFileData = null;
                        GC.Collect();
                        lstFileData = new List<FileData>();
                    }
                }
            }
        }
    }
}
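FileData is not shown in the post; presumably it is just a plain container for the two parsed fields, along these lines:
// Assumed shape of FileData (the original post does not show it).
public class FileData
{
    public string Property1 { get; set; }
    public string Property2 { get; set; }
}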
The loop runs 9 times in total, but the data is only saved on the first pass. The remaining 8 passes throw an exception.
private static void InsertDataIntoDatabase(List<FileData> lstFileData)
{
    String query = @"INSERT INTO table
                     (
                         PrimaryCol,
                         Column1,
                         Column2
                     )
                     VALUES
                     (
                         primaryCol.NEXTVAL,
                         :Property1,
                         :Property2
                     )";
    using (OracleConnection Conn = new OracleConnection())
    {
        try
        {
            Conn.ConnectionString = ConfigurationManager.ConnectionStrings["connection"].ToString();
            Conn.Open();
            using (var command = Conn.CreateCommand())
            {
                command.CommandText = query;
                command.CommandType = CommandType.Text;
                command.BindByName = true;
                // In order to use array binding, the ArrayBindCount property
                // of the OracleCommand object must be set to the number of records to be inserted.
                command.ArrayBindCount = lstFileData.Count;
                command.Parameters.Add(":Property1", OracleDbType.Varchar2, lstFileData.Select(c => c.Property1).ToArray(), ParameterDirection.Input);
                command.Parameters.Add(":Property2", OracleDbType.Varchar2, lstFileData.Select(c => c.Property2).ToArray(), ParameterDirection.Input);
                command.ExecuteNonQuery();
                Conn.Close();
            }
        }
        catch (Exception ex)
        {
            Conn.Close();
            Console.WriteLine("Exception is: " + ex.InnerException);
        }
    }
}
So after the first iteration we get an error on command.ExecuteNonQuery(). Can anyone explain why this happens and what a possible solution might be?
Answer 0 (score: 0)
Your problem comes from using a buffer that has reached its maximum size. In addition, consecutive single inserts will give you performance problems. You should use a different approach (bulk copy). Here is some sample code:
public static void BulkCopy(string connectionString, DataTable dataTableToSend, string SQLTableName)
{
    SqlConnection connexion = null;
    try
    {
        connexion = new SqlConnection(connectionString);
        BulkCopy(connexion, dataTableToSend, SQLTableName);
    }
    catch (Exception e)
    {
        throw;
    }
    finally
    {
        connexion?.Close();
        connexion?.Dispose();
    }
}
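The overload of BulkCopy that takes an open SqlConnection is not shown in the answer. A minimal sketch of what it could look like, assuming it simply wraps SqlBulkCopy and that the DataTable column names match the destination table's columns:
// Hypothetical overload, not part of the original answer.
// Requires System.Data and System.Data.SqlClient.
public static void BulkCopy(SqlConnection connexion, DataTable dataTableToSend, string SQLTableName)
{
    if (connexion.State != ConnectionState.Open)
        connexion.Open();

    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(connexion))
    {
        bulkCopy.DestinationTableName = SQLTableName;
        // Map columns by name so the column order in the DataTable does not matter
        // (assumes the DataTable columns are named like the destination columns).
        foreach (DataColumn column in dataTableToSend.Columns)
            bulkCopy.ColumnMappings.Add(column.ColumnName, column.ColumnName);
        bulkCopy.WriteToServer(dataTableToSend);
    }
}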
int Modulo = 0;
int counter = 0;
string Line;
// FileObj is assumed to be a FileInfo for the file being read, e.g. new FileInfo(fileName).
// Create a DataTable MyDataTable with the same structure as your table in the database.
// You can do: SELECT * FROM MyTable WHERE 1 <> 1 with a command to get a structured DataSet (and DataTable).
using (System.IO.StreamReader file = new System.IO.StreamReader(fileName, Encoding.UTF8))
{
    while ((Line = file.ReadLine()) != null)
    {
        counter++;
        Modulo++;
        DataRow newrow = MyDataTable.NewRow();
        // Set the values of the new row.
        newrow["I_ID"] = counter;
        newrow["L_NomFichier"] = FileObj.Name;
        newrow["D_CreationFichier"] = FileObj.LastWriteTime;
        newrow["L_Ligne"] = Line.DBNullIfNullOrEmpty(); // custom extension method
        newrow["D_DATCRE"] = newrow["D_DATMAJ"] = DateTime.Now;
        newrow["C_UTICRE"] = newrow["C_UTIMAJ"] = GlobalParam.User;
        MyDataTable.Rows.Add(newrow);
        // Flush to the database every 1000 rows.
        if (Modulo % 1000 == 0)
        {
            SqlHelper.BulkCopy(DBAccess.CONN_STRING, MyDataTable, "YOURTABLENAME");
            MyDataTable.Rows.Clear();
        }
    }
    // Send any remaining rows.
    SqlHelper.BulkCopy(DBAccess.CONN_STRING, MyDataTable, "YOURTABLENAME");
    MyDataTable.Rows.Clear();
    file.Close();
}
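Note that this sample targets SQL Server (SqlConnection / SqlBulkCopy), while the question uses Oracle. The unmanaged ODP.NET provider (Oracle.DataAccess.Client) exposes an OracleBulkCopy class with a very similar shape; a minimal sketch of the same idea against Oracle, assuming that provider is available and that the DataTable columns line up with the destination table:
// Sketch only: requires the Oracle.DataAccess.Client provider, which ships OracleBulkCopy.
// The method and parameter names here are placeholders, not part of the original answer.
public static void OracleBulkInsert(string connectionString, DataTable dataTableToSend, string oracleTableName)
{
    using (OracleConnection connection = new OracleConnection(connectionString))
    {
        connection.Open();
        using (OracleBulkCopy bulkCopy = new OracleBulkCopy(connection))
        {
            bulkCopy.DestinationTableName = oracleTableName;
            bulkCopy.BatchSize = 10000;              // rows sent per round trip
            bulkCopy.WriteToServer(dataTableToSend); // bulk-loads the DataTable
        }
    }
}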