Exporting a large DataTable to a .csv file in a C# Windows application

Asked: 2012-09-07 07:18:04

Tags: c# mysql winforms

I need to export a large DataTable (> 50 lakh (5M) DataRows) to a .csv file.

I am using the following code, but it takes a very long time.

public void CreateCSVFile(DataTable dtDataTablesList, string strFilePath)
{
    // Create the CSV file to which grid data will be exported.
    StreamWriter sw = new StreamWriter(strFilePath, false);
    //First we will write the headers.
    int iColCount = dtDataTablesList.Columns.Count;
    for (int i = 0; i < iColCount; i++)
    {
        sw.Write(dtDataTablesList.Columns[i]);
        if (i < iColCount - 1)
        {
            sw.Write("", "");
        }
    }
    sw.Write(sw.NewLine);

    // Now write all the rows.
    foreach (DataRow dr in dtDataTablesList.Rows)
    {
        for (int i = 0; i < iColCount; i++)
        {
            if (!Convert.IsDBNull(dr[i]))
            {
                sw.Write(dr[i].ToString());
            }
            if (i < iColCount - 1)
            {
                sw.Write("", "");
            }
        }
        sw.Write(sw.NewLine);
    }
    sw.Close();
}

Please let me know another, faster way of doing this.

5 Answers:

Answer 0 (score: 3)

Instead of calling StreamWriter.Write(..) all the time, you may consider using a StringBuilder. Append all strings to the builder and write to disk only once.

string filePath = @"e:\temp\test.csv";
string delimiter = ",";

#region init DataTable
DataTable dt = new DataTable();
dt.Columns.Add(new DataColumn("a", typeof(string)));
dt.Columns.Add(new DataColumn("b", typeof(string)));
dt.Columns.Add(new DataColumn("c", typeof(string)));
dt.Columns.Add(new DataColumn("d", typeof(string)));
dt.Columns.Add(new DataColumn("e", typeof(string)));
dt.Columns.Add(new DataColumn("f", typeof(string)));
dt.Columns.Add(new DataColumn("g", typeof(string)));
dt.Columns.Add(new DataColumn("h", typeof(string)));
dt.Columns.Add(new DataColumn("i", typeof(string)));
dt.Columns.Add(new DataColumn("j", typeof(string)));
dt.Columns.Add(new DataColumn("k", typeof(string)));
dt.Columns.Add(new DataColumn("l", typeof(string)));
dt.Columns.Add(new DataColumn("m", typeof(string)));
dt.Columns.Add(new DataColumn("n", typeof(string)));
dt.Columns.Add(new DataColumn("o", typeof(string)));
dt.Columns.Add(new DataColumn("p", typeof(string)));

for (int i = 0; i < 100000; i++)
{
    DataRow dr = dt.NewRow();
    for (int j = 0; j < dt.Columns.Count; j++)
    {
        dr[j] = "test" + i + " " + j;
    }
    dt.Rows.Add(dr);
}
#endregion

Stopwatch sw = new Stopwatch();
sw.Start();
StringBuilder sb = new StringBuilder();
foreach (DataRow dr in dt.Rows)
{
    sb.AppendLine(string.Join(delimiter, dr.ItemArray));
}
File.WriteAllText(filePath, sb.ToString());
sw.Stop();
Console.WriteLine(sw.ElapsedMilliseconds);
Console.ReadLine();

Edit

100,000 rows took me 271 ms and created a file of roughly 18 MB.

As @aiodintsov pointed out, there may/will be problems with using a StringBuilder when there are several MB of data, so I created an example based on his comment. It worked fine for me, exporting 1,000,000 rows in 2685 ms.

Stopwatch sw = new Stopwatch();
sw.Start();
using (StreamWriter swr = 
         new StreamWriter(File.Open(filePath, FileMode.CreateNew), Encoding.Default, 1000000))
         // change buffer size and Encoding to your needs
{
    foreach (DataRow dr in dt.Rows)
    {
        swr.WriteLine(string.Join(delimiter, dr.ItemArray));
    }
}
sw.Stop();
Console.WriteLine(sw.ElapsedMilliseconds);

Answer 1 (score: 1)

Consider using an OleDbConnection with a connection string like:

  Provider=Microsoft.Jet.OLEDB.4.0;Data Source=c:\txtFilesFolder\;Extended Properties="text;HDR=Yes;FMT=Delimited";

more sample connection strings
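
A minimal sketch of that approach, assuming the Jet 4.0 provider is installed (32-bit process); the file, table, and column names are hypothetical. With the Jet text ISAM, CREATE TABLE creates the .csv file and INSERT appends rows (UPDATE/DELETE are not supported):

// Illustrative sketch only. Requires: using System.Data.OleDb;
// The folder c:\txtFilesFolder must already exist; export.csv is a hypothetical file name.
string connStr = @"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=c:\txtFilesFolder\;" +
                 "Extended Properties=\"text;HDR=Yes;FMT=Delimited\";";
using (var conn = new OleDbConnection(connStr))
{
    conn.Open();
    using (var cmd = conn.CreateCommand())
    {
        // Creates c:\txtFilesFolder\export.csv with a header row
        cmd.CommandText = "CREATE TABLE [export.csv] (FeeID INT, Amount INT, FeeItem TEXT(100))";
        cmd.ExecuteNonQuery();
        // The text driver supports INSERT, so rows can be appended one by one
        cmd.CommandText = "INSERT INTO [export.csv] (FeeID, Amount, FeeItem) VALUES (9, 8500, 'Admission Free')";
        cmd.ExecuteNonQuery();
    }
}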

There are some rules for CSV files and delimiters; you should take special care with characters such as quotes, tabs, commas, and newlines. See RFC 4180 for details of these rules.
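
For illustration, a minimal field escaper along the lines of RFC 4180 (a hypothetical helper, not part of this answer): it doubles embedded quotes and wraps a field in quotes when it contains a delimiter, quote, or line break.

// Minimal RFC 4180-style escaping (illustrative helper)
static string EscapeCsvField(string field)
{
    // Quote the field only if it contains a comma, quote, CR, or LF
    if (field.IndexOfAny(new[] { ',', '"', '\r', '\n' }) >= 0)
    {
        return "\"" + field.Replace("\"", "\"\"") + "\"";
    }
    return field;
}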

UPD: try increasing the file stream buffer:

using (var stream = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.None, 4 * 1024 * 1024))
{
  // your code referencing stream in StreamWriter()
}

You can also specify a larger buffer size in the StreamWriter constructor, as in the sketch below. There are not many other ways left to improve performance: StreamWriter is already fast enough, and ToString() for the standard types is quite efficient. I doubt you are outputting user-defined types there, but if you are, verify that their ToString() methods are efficient enough. Everything else is beyond your control.
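
For example, a minimal sketch reusing the `stream` from the snippet above (the 4 MB size and UTF-8 encoding are illustrative):

// Requires: using System.Text;
using (var writer = new StreamWriter(stream, Encoding.UTF8, 4 * 1024 * 1024))
{
    // write the CSV lines through 'writer' here
}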

Answer 2 (score: 1)

Here is my final solution.

With this code, we can export 50 lakh (5M) records to a CSV file in under two minutes. Instead of a DataTable, I use a DataReader.

private void button1_Click(object sender, EventArgs e)
{
    Stopwatch swra = new Stopwatch();
    swra.Start();
    string NewconnectionString = "myConnectionString";
    StreamWriter CsvfileWriter = new StreamWriter(@"D:\testfile.csv");
    string sqlselectQuery = "select * from Mytable";
    SqlCommand sqlcmd = new SqlCommand();

    SqlConnection spContentConn = new SqlConnection(NewconnectionString);
    sqlcmd.Connection = spContentConn;
    sqlcmd.CommandTimeout = 0;
    sqlcmd.CommandType = CommandType.Text;
    sqlcmd.CommandText = sqlselectQuery;
    spContentConn.Open();
    using (spContentConn)
    {
        using (SqlDataReader sdr = sqlcmd.ExecuteReader())
        using (CsvfileWriter)
        {
            // Write the table headers first.
            DataTable Tablecolumns = new DataTable();
            for (int i = 0; i < sdr.FieldCount; i++)
            {
                Tablecolumns.Columns.Add(sdr.GetName(i));
            }
            CsvfileWriter.WriteLine(string.Join(",", Tablecolumns.Columns.Cast<DataColumn>().Select(c => c.ColumnName)));

            // Then write the rows (adjust the column count to your table).
            while (sdr.Read())
            {
                CsvfileWriter.WriteLine(sdr[0].ToString() + "," + sdr[1].ToString() + "," + sdr[2].ToString() + "," + sdr[3].ToString() + "," + sdr[4].ToString() + "," + sdr[5].ToString() + "," + sdr[6].ToString() + "," + sdr[7].ToString() + "," + sdr[8].ToString() + "," + sdr[9].ToString() + "," + sdr[10].ToString() + "," + sdr[11].ToString());
            }
        }
    }
    swra.Stop();
    Console.WriteLine(swra.ElapsedMilliseconds);
}

Thanks, everyone.

Answer 3 (score: 0)

One possible way to speed this up is to use a StringBuilder: append 1K records' worth of data to the StringBuilder, then write it out with sw.Write().

So your logic should be to write 1000 records into the SB first, then write them out with SW.Write().

This will definitely improve performance.

If you increase that batch of 1000 records to 10K and test, the performance will be even better.

Hope this helps.
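
A minimal sketch of this chunked approach (method and parameter names are illustrative; it assumes a plain comma delimiter with no quoting):

// Requires: using System.Data; using System.IO; using System.Text;
public void WriteCsvInChunks(DataTable table, string strFilePath, int chunkSize = 1000)
{
    var sb = new StringBuilder();
    using (var sw = new StreamWriter(strFilePath, false))
    {
        int buffered = 0;
        foreach (DataRow dr in table.Rows)
        {
            sb.AppendLine(string.Join(",", dr.ItemArray));
            if (++buffered == chunkSize)
            {
                sw.Write(sb.ToString()); // flush the buffered chunk to disk
                sb.Clear();
                buffered = 0;
            }
        }
        sw.Write(sb.ToString()); // flush any remaining rows
    }
}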

Answer 4 (score: 0)

I have modified your code a bit to use a StringBuilder as a buffer. That requires more RAM but should be more efficient. Experiment with the initial capacity and max capacity to avoid OutOfMemoryExceptions and for maximum efficiency:

public void CreateFastCSVFile(DataTable table, string strFilePath)
{
    const int capacity = 5000000;
    const int maxCapacity = 20000000;

    //First we will write the headers.
    StringBuilder csvBuilder = new StringBuilder(capacity);
    csvBuilder.AppendLine(string.Join(",", table.Columns.Cast<DataColumn>().Select(c => c.ColumnName)));

    // Create the CSV file and write all from StringBuilder
    using (var sw = new StreamWriter(strFilePath, false))
    {
        foreach (DataRow dr in table.Rows)
        {
            if (csvBuilder.Capacity >= maxCapacity)
            {
                sw.Write(csvBuilder.ToString());
                csvBuilder = new StringBuilder(capacity);
            }
            csvBuilder.AppendLine(String.Join(",", dr.ItemArray));
        }
        sw.Write(csvBuilder.ToString());
    }
}

Here is a simple measurement with sample data (10,000,000 / 100 lakh DataRows).

Sample data:

var TblData = new DataTable();
TblData.Columns.Add("FeeID", typeof(int));
TblData.Columns.Add("Amount", typeof(int));
TblData.Columns.Add("FeeItem", typeof(string));
TblData.Columns.Add("Type", typeof(char));
for (int i = 0; i < 1000000; i++)
{
    TblData.Rows.Add(9, 8500, "Admission Free", 'T');
    TblData.Rows.Add(9, 950, "Annual Fee", 'T');
    TblData.Rows.Add(9, 150, "Application Free", 'T');
    TblData.Rows.Add(9, 850, "Boy's Uniform", DBNull.Value);
    TblData.Rows.Add(9, 50, DBNull.Value, 'R');
    TblData.Rows.Add(10, 7500, "Admission Free", 'T');
    TblData.Rows.Add(11, 900, "Annual Fee", 'T');
    TblData.Rows.Add(11, 150, "Application Free", 'T');
    TblData.Rows.Add(11, 850, DBNull.Value, 'T');
    TblData.Rows.Add(11, 50, "Computer Free", 'R');
}
int rowCount = TblData.Rows.Count; // 10000000

Measurement (less than 30 seconds for a 207 MB file; seems fine):

var watch = new System.Diagnostics.Stopwatch();
watch.Start();
CreateFastCSVFile(TblData, @"C:\Temp\TestCSV.csv");
watch.Stop();
Console.Write("Elapsed: {0}", watch.Elapsed); // 00:00:26 for 207 MB CSV-file