I need to export a large DataTable (over 50 lakhs, i.e. 5 million, DataRows) to a .csv file.

I am using the code below, but it takes a long time.

public void CreateCSVFile(DataTable dtDataTablesList, string strFilePath)
{
    // Create the CSV file to which grid data will be exported.
    StreamWriter sw = new StreamWriter(strFilePath, false);
    //First we will write the headers.
    int iColCount = dtDataTablesList.Columns.Count;
    for (int i = 0; i < iColCount; i++)
    {
        sw.Write(dtDataTablesList.Columns[i]);
        if (i < iColCount - 1)
        {
            sw.Write("", "");
        }
    }
    sw.Write(sw.NewLine);

    // Now write all the rows.
    foreach (DataRow dr in dtDataTablesList.Rows)
    {
        for (int i = 0; i < iColCount; i++)
        {
            if (!Convert.IsDBNull(dr[i]))
            {
                sw.Write(dr[i].ToString());
            }
            if (i < iColCount - 1)
            {
                sw.Write("", "");
            }
        }
        sw.Write(sw.NewLine);
    }
    sw.Close();
}

Please let me know of any other, faster way to get this done.


5 Answers


You could consider using a StringBuilder instead of calling StreamWriter.Write(..) over and over: append all the strings to the builder and write to disk only once.

string filePath = @"e:\temp\test.csv";
string delimiter = ",";

#region init DataTable
DataTable dt = new DataTable();
dt.Columns.Add(new DataColumn("a", typeof(string)));
dt.Columns.Add(new DataColumn("b", typeof(string)));
dt.Columns.Add(new DataColumn("c", typeof(string)));
dt.Columns.Add(new DataColumn("d", typeof(string)));
dt.Columns.Add(new DataColumn("e", typeof(string)));
dt.Columns.Add(new DataColumn("f", typeof(string)));
dt.Columns.Add(new DataColumn("g", typeof(string)));
dt.Columns.Add(new DataColumn("h", typeof(string)));
dt.Columns.Add(new DataColumn("i", typeof(string)));
dt.Columns.Add(new DataColumn("j", typeof(string)));
dt.Columns.Add(new DataColumn("k", typeof(string)));
dt.Columns.Add(new DataColumn("l", typeof(string)));
dt.Columns.Add(new DataColumn("m", typeof(string)));
dt.Columns.Add(new DataColumn("n", typeof(string)));
dt.Columns.Add(new DataColumn("o", typeof(string)));
dt.Columns.Add(new DataColumn("p", typeof(string)));

for (int i = 0; i < 100000; i++)
{
    DataRow dr = dt.NewRow();
    for (int j = 0; j < dt.Columns.Count; j++)
    {
        dr[j] = "test" + i + " " + j;
    }
    dt.Rows.Add(dr);
}
#endregion

Stopwatch sw = new Stopwatch();
sw.Start();
StringBuilder sb = new StringBuilder();
foreach (DataRow dr in dt.Rows)
{
    sb.AppendLine(string.Join(delimiter, dr.ItemArray));
}
File.WriteAllText(filePath, sb.ToString());
sw.Stop();
Console.WriteLine(sw.ElapsedMilliseconds);
Console.ReadLine();

EDIT

100,000 rows took me 271 ms and produced a file of roughly 18 MB.

As @aiodintsov pointed out, there may/will be problems with a StringBuilder holding several MB of data. So I created an example based on his comment. It worked fine for me and exported 1,000,000 rows in 2,685 ms:

Stopwatch sw = new Stopwatch();
sw.Start();
using (StreamWriter swr = 
         new StreamWriter(File.Open(filePath, FileMode.CreateNew), Encoding.Default, 1000000))
         // change buffer size and Encoding to your needs
{
    foreach (DataRow dr in dt.Rows)
    {
        swr.WriteLine(string.Join(delimiter, dr.ItemArray));
    }
}
sw.Stop();
Console.WriteLine(sw.ElapsedMilliseconds);
Answered 2012-09-07T07:45:09.867

I've modified your code slightly to use a StringBuilder as a buffer. That needs more RAM but should be more efficient. Play around with the initial capacity and MaxCapacity to avoid OutOfMemoryExceptions and for maximum efficiency:

public void CreateFastCSVFile(DataTable table, string strFilePath)
{
    const int capacity = 5000000;
    const int maxCapacity = 20000000;

    //First we will write the headers.
    StringBuilder csvBuilder = new StringBuilder(capacity);
    csvBuilder.AppendLine(string.Join(",", table.Columns.Cast<DataColumn>().Select(c => c.ColumnName)));

    // Create the CSV file and write all from StringBuilder
    using (var sw = new StreamWriter(strFilePath, false))
    {
        foreach (DataRow dr in table.Rows)
        {
            if (csvBuilder.Capacity >= maxCapacity)
            {
                sw.Write(csvBuilder.ToString());
                csvBuilder = new StringBuilder(capacity);
            }
            csvBuilder.AppendLine(string.Join(",", dr.ItemArray));
        }
        sw.Write(csvBuilder.ToString());
    }
}

Here is a simple measurement with sample data (10,000,000 / 100 lakh DataRows).

Sample data:

var TblData = new DataTable();
TblData.Columns.Add("FeeID", typeof(int));
TblData.Columns.Add("Amount", typeof(int));
TblData.Columns.Add("FeeItem", typeof(string));
TblData.Columns.Add("Type", typeof(char));
for (int i = 0; i < 1000000; i++)
{
    TblData.Rows.Add(9, 8500, "Admission Free", 'T');
    TblData.Rows.Add(9, 950, "Annual Fee", 'T');
    TblData.Rows.Add(9, 150, "Application Free", 'T');
    TblData.Rows.Add(9, 850, "Boy's Uniform", DBNull.Value);
    TblData.Rows.Add(9, 50, DBNull.Value, 'R');
    TblData.Rows.Add(10, 7500, "Admission Free", 'T');
    TblData.Rows.Add(11, 900, "Annual Fee", 'T');
    TblData.Rows.Add(11, 150, "Application Free", 'T');
    TblData.Rows.Add(11, 850, DBNull.Value, 'T');
    TblData.Rows.Add(11, 50, "Computer Free", 'R');
}
int rowCount = TblData.Rows.Count; // 10000000

Measurement (less than 30 seconds; that seems fine for a 207 MB file):

var watch = new System.Diagnostics.Stopwatch();
watch.Start();
CreateFastCSVFile(TblData, @"C:\Temp\TestCSV.csv");
watch.Stop();
Console.Write("Elapsed: {0}", watch.Elapsed); // 00:00:26 for 207 MB CSV-file
Answered 2012-09-07T08:35:52.637

Consider using an OleDbConnection with a connection string such as

"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=c:\txtFilesFolder;Extended Properties="text;HDR=Yes;FMT=Delimited";

More sample connection strings can be found online.
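
For illustration only, here is a minimal sketch of writing a CSV through the Jet text driver. It assumes the (32-bit) Jet engine is installed; the folder, file name, and column names are hypothetical, borrowed from the sample data elsewhere on this page:

using System.Data.OleDb;

string connStr = @"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=c:\txtFilesFolder;" +
                 @"Extended Properties=""text;HDR=Yes;FMT=Delimited""";
using (var conn = new OleDbConnection(connStr))
{
    conn.Open();
    // The text driver treats every *.csv file in the Data Source folder as a
    // table; a dot in the file name is written as '#' in Jet SQL.
    using (var create = new OleDbCommand(
        "CREATE TABLE [export#csv] (FeeID INT, Amount INT, FeeItem VARCHAR(50))", conn))
    {
        create.ExecuteNonQuery(); // creates export.csv with a header row
    }
    using (var insert = new OleDbCommand(
        "INSERT INTO [export#csv] VALUES (?, ?, ?)", conn))
    {
        // OleDb parameters are positional; the names are ignored.
        insert.Parameters.AddWithValue("?", 9);
        insert.Parameters.AddWithValue("?", 8500);
        insert.Parameters.AddWithValue("?", "Admission Fee");
        insert.ExecuteNonQuery();
    }
}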

There are some rules for CSV files and delimiters, and you should take extra care with characters such as quotes, tabs, commas, and line breaks. See RFC 4180 for more details on such rules.
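
As an illustration of those rules (not part of the original answer), a small RFC 4180-style quoting helper might look like this:

static string EscapeCsvField(string field, char delimiter = ',')
{
    // Quote the field if it contains the delimiter, a quote, or a line break,
    // and double any embedded quotes (RFC 4180, section 2).
    if (field == null) return string.Empty;
    bool needsQuotes = field.IndexOf(delimiter) >= 0
                       || field.IndexOf('"') >= 0
                       || field.IndexOf('\n') >= 0
                       || field.IndexOf('\r') >= 0;
    return needsQuotes ? "\"" + field.Replace("\"", "\"\"") + "\"" : field;
}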

UPD: try increasing the file stream buffer:

using(var stream = new FileStream(path,FileMode.Create,FileAccess.Write,FileShare.None,4*1024*1024))
{
  // your code referencing stream in StreamWriter()
}

You can also specify a larger buffer size in the StreamWriter constructor. Beyond that, there is not much you can do to improve performance: StreamWriter is already fast enough, and ToString() on the standard types is quite good. I suspect you are outputting user-defined types there; if you are, verify that their ToString() methods are efficient enough. Everything else is beyond your control here.
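
For example, a sketch of that constructor overload (the path and the 1 MB buffer size are placeholders to tune):

// StreamWriter(path, append, encoding, bufferSize) - here a 1 MB internal buffer
using (var writer = new StreamWriter(strFilePath, false, Encoding.UTF8, 1024 * 1024))
{
    // write the CSV rows here
}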

Answered 2012-09-07T07:27:52.880

Here is my final solution for this.

With this code we can export 5 million records to a csv file in under 2 minutes. Instead of a DataTable, I used a DataReader here.

private void button1_Click(object sender, EventArgs e)
{
    Stopwatch swra = new Stopwatch();
    swra.Start();
    string NewconnectionString = "myConnectionString";
    string sqlselectQuery = "select * from Mytable";

    SqlCommand sqlcmd = new SqlCommand();
    SqlConnection spContentConn = new SqlConnection(NewconnectionString);
    sqlcmd.Connection = spContentConn;
    sqlcmd.CommandTimeout = 0;
    sqlcmd.CommandType = CommandType.Text;
    sqlcmd.CommandText = sqlselectQuery;
    spContentConn.Open();
    using (spContentConn)
    using (SqlDataReader sdr = sqlcmd.ExecuteReader())
    using (StreamWriter CsvfileWriter = new StreamWriter(@"D:\testfile.csv"))
    {
        // Write the table headers first.
        DataTable Tablecolumns = new DataTable();
        for (int i = 0; i < sdr.FieldCount; i++)
        {
            Tablecolumns.Columns.Add(sdr.GetName(i));
        }
        CsvfileWriter.WriteLine(string.Join(",", Tablecolumns.Columns.Cast<DataColumn>().Select(csvfile => csvfile.ColumnName)));

        // Then write the rows; adjust the column indexes to your table.
        while (sdr.Read())
        {
            CsvfileWriter.WriteLine(sdr[0].ToString() + "," + sdr[1].ToString() + "," + sdr[2].ToString() + "," + sdr[3].ToString() + "," + sdr[4].ToString() + "," + sdr[5].ToString() + "," + sdr[6].ToString() + "," + sdr[7].ToString() + "," + sdr[8].ToString() + "," + sdr[9].ToString() + "," + sdr[10].ToString() + "," + sdr[11].ToString());
        }
    }
    swra.Stop();
    Console.WriteLine(swra.ElapsedMilliseconds);
}

Thanks, everyone.

Answered 2012-09-14T09:39:40.137

One possible way to speed this up is to use a StringBuilder: append 1K records' worth of data to the StringBuilder, then write it out with sw.Write().

So your logic should be: first write 1,000 records into the SB, then write the SB's contents with SW.Write.

This will definitely improve performance.

It will be even better if you grow that 1,000-record batch in steps of 10K and test the performance; see the sketch below.
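
A minimal sketch of that batching idea, assuming dt is the DataTable and sw is an already-open StreamWriter (the batch size is a placeholder to tune):

const int batchSize = 1000;        // tune this: try 1K, 10K, ...
var sb = new StringBuilder();
int rowCount = 0;
foreach (DataRow dr in dt.Rows)
{
    sb.AppendLine(string.Join(",", dr.ItemArray));
    if (++rowCount % batchSize == 0)
    {
        sw.Write(sb.ToString());   // flush the batch to the StreamWriter
        sb.Clear();
    }
}
sw.Write(sb.ToString());           // flush any remaining rows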

Hope this helps.

Answered 2012-09-07T07:41:34.080