My CSV file is 4 GB and I need to import it into SQL Server. I believe it has more than 20 million rows. I would be very grateful if someone could show me a way to do this in under an hour.
What I am doing so far:
using (FileStream filestream = new FileStream(path, FileMode.Open, FileAccess.Read)) {
    using (StreamReader reader = new StreamReader(filestream, Encoding.UTF8)) {
        string line;
        bool isHeader = true;
        int counter = 0;

        while ((line = reader.ReadLine()) != null) {
            if (isHeader) {
                // The first line holds the tab-separated column names.
                model.Columns = line.Split(new string[] { "\t" }, StringSplitOptions.RemoveEmptyEntries);
                isHeader = false;
                continue;
            }

            // One LoadInDB call per data row, i.e. one connection and one
            // stored-procedure execution for every row of the file.
            if (Settings.Default.RonudSet != 0) {
                LoadInDB(indicator, RoundDecimals(line));
            } else {
                LoadInDB(indicator, line);
            }
            counter++;
        }

        model.RowCount = counter;
        model.ColumnsCount = model.Columns.Length;
    }
}

return model;
My DB upload method:
// Opens a new connection and executes the stored procedure for a single row.
public void LoadInDB(char indicator, string key) {
    using (SqlConnection conn = new SqlConnection(Settings.Default.DBconnection)) {
        conn.Open();
        try {
            SqlCommand cmd = new SqlCommand("dbo.LipperFilesTestingInsertFileRowKey", conn);
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.Add(new SqlParameter("@FileRowKey", SqlDbType.NVarChar, 100));
            cmd.Parameters["@FileRowKey"].Value = key;
            cmd.Parameters.Add(new SqlParameter("@targetTableIndicator", SqlDbType.NVarChar, 100));
            cmd.Parameters["@targetTableIndicator"].Value = indicator;
            cmd.ExecuteNonQuery();
        } catch (SqlException sqlExc) {
            MyLog.WriteToLog(sqlExc.Message, MyLog.Messages.Error);
        }
    }
}
Answer 0 (score: 0)
Use SqlConnection.BeginTransaction and commit the inserts in batches. This will improve performance on the DB side: without a transaction, SQL Server commits every single insert, which is wasteful. Also look at SQL Server's BULK INSERT capability.
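A minimal sketch of the batching idea, reusing the stored procedure and connection string from the question. The method name LoadRowsInDB, the keys parameter, and the batch size of 10,000 are assumptions for illustration, not part of the original code; the rows would be streamed in from the existing file-reading loop instead of calling LoadInDB once per line:

    using System;
    using System.Collections.Generic;
    using System.Data;
    using System.Data.SqlClient;

    // Sketch: one connection for the whole file, committing in batches instead of
    // opening a connection and committing a transaction for every single row.
    public void LoadRowsInDB(char indicator, IEnumerable<string> keys, int batchSize = 10000) {
        using (SqlConnection conn = new SqlConnection(Settings.Default.DBconnection)) {
            conn.Open();

            SqlTransaction tran = conn.BeginTransaction();
            SqlCommand cmd = new SqlCommand("dbo.LipperFilesTestingInsertFileRowKey", conn, tran);
            cmd.CommandType = CommandType.StoredProcedure;
            SqlParameter keyParam = cmd.Parameters.Add("@FileRowKey", SqlDbType.NVarChar, 100);
            SqlParameter indicatorParam = cmd.Parameters.Add("@targetTableIndicator", SqlDbType.NVarChar, 100);
            indicatorParam.Value = indicator.ToString();

            int inBatch = 0;
            foreach (string key in keys) {
                keyParam.Value = key;
                cmd.ExecuteNonQuery();

                if (++inBatch == batchSize) {
                    // Commit the current batch and start a new transaction.
                    tran.Commit();
                    tran = conn.BeginTransaction();
                    cmd.Transaction = tran;
                    inBatch = 0;
                }
            }

            tran.Commit(); // commit the final, possibly partial, batch
        }
    }

If the target table can be loaded directly, the .NET counterpart of BULK INSERT is SqlBulkCopy, which streams rows straight into the table and will generally be far faster than 20 million stored-procedure calls, even batched.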