We are using the SqlBulkCopy class in C# to insert bulk data into SQL Server. The source table contains 10 million records.
We insert the data in a loop, 10,000 rows per batch.
We are running into a physical-memory problem: memory usage keeps growing instead of being released.
Our code is below. How can we release memory when using SqlBulkCopy, or is there another way to do the bulk insert?
using (System.Data.SqlClient.SqlBulkCopy bulkCopy = new System.Data.SqlClient.SqlBulkCopy(SQlConn, SqlBulkCopyOptions.TableLock, null))
{
    //bulkCopy = new System.Data.SqlClient.SqlBulkCopy(SQlConn);
    bulkCopy.DestinationTableName = DestinationTable;
    bulkCopy.BulkCopyTimeout = 0;
    bulkCopy.BatchSize = dt1.Rows.Count;
    Logger.Log("DATATABLE FINAL :" + dt1.Rows.Count.ToString(), Logger.LogType.Info);

    if (SQlConn.State == ConnectionState.Closed || SQlConn.State == ConnectionState.Broken)
        SQlConn.Open();

    bulkCopy.WriteToServer(dt1); // DataTable

    SQlConn.Close();
    SQlConn.Dispose();

    bulkCopy.Close();
    if (bulkCopy != null)
    {
        ((IDisposable)bulkCopy).Dispose();
    }
}
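For context, the calling loop is shaped roughly like the sketch below. It is only a simplified illustration of what was described above; LoadNextBatch, InsertBatch, and totalRows are placeholder names for the code that pages 10,000 rows at a time out of the source and then runs the bulk-copy block shown here.

// Simplified sketch of the calling loop (placeholder names, not the exact production code).
int batchSize = 10000;
for (int offset = 0; offset < totalRows; offset += batchSize)
{
    // LoadNextBatch is a placeholder for the code that fills a DataTable
    // with the next 10,000 source rows.
    DataTable dtDestination = LoadNextBatch(offset, batchSize);

    // InsertBatch is a placeholder for the method containing the
    // SqlBulkCopy block shown above.
    InsertBatch(dtDestination);
}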
Update: here is the complete code.
try
{
    using (SqlConnection SQlConn = new SqlConnection(Common.SQLConnectionString))
    {
        DataTable dt1 = FillEmptyDateFields(dtDestination);
        //SqlTableCreator ObjTbl = new SqlTableCreator(SQlConn);
        //ObjTbl.DestinationTableName = DestinationTable;

        using (System.Data.SqlClient.SqlBulkCopy bulkCopy = new System.Data.SqlClient.SqlBulkCopy(SQlConn, SqlBulkCopyOptions.TableLock, null))
        {
            //bulkCopy = new System.Data.SqlClient.SqlBulkCopy(SQlConn);
            bulkCopy.DestinationTableName = DestinationTable;
            bulkCopy.BulkCopyTimeout = 0;
            bulkCopy.BatchSize = dt1.Rows.Count;
            Logger.Log("DATATABLE FINAL :" + dt1.Rows.Count.ToString(), Logger.LogType.Info);

            if (SQlConn.State == ConnectionState.Closed || SQlConn.State == ConnectionState.Broken)
                SQlConn.Open();

            bulkCopy.WriteToServer(dt1);

            SQlConn.Close();
            SQlConn.Dispose();

            bulkCopy.Close();
            if (bulkCopy != null)
            {
                ((IDisposable)bulkCopy).Dispose();
            }
        }
    }

    dtDestination.Dispose();
    System.GC.Collect();
    dtDestination = null;
}
catch (Exception ex)
{
    Logger.Log(ex, Logger.LogType.Error);
    throw ex;
}
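For reference, SqlBulkCopy.WriteToServer also has an overload that accepts an IDataReader, which streams rows from a source query instead of buffering a full DataTable in memory. Below is a minimal sketch of that approach, assuming the source rows can be read with a SELECT; sourceConnectionString, the query, and the table name are placeholders, not part of the code above.

// Sketch: stream rows into SqlBulkCopy via an IDataReader (placeholder source query).
using (SqlConnection srcConn = new SqlConnection(sourceConnectionString))        // placeholder
using (SqlConnection destConn = new SqlConnection(Common.SQLConnectionString))
{
    srcConn.Open();
    destConn.Open();

    using (SqlCommand cmd = new SqlCommand("SELECT * FROM SourceTable", srcConn)) // placeholder query
    using (SqlDataReader reader = cmd.ExecuteReader())
    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(destConn, SqlBulkCopyOptions.TableLock, null))
    {
        bulkCopy.DestinationTableName = DestinationTable;
        bulkCopy.BulkCopyTimeout = 0;
        bulkCopy.BatchSize = 10000;        // commit every 10,000 rows on the server side
        bulkCopy.WriteToServer(reader);    // rows are streamed; no DataTable is held in memory
    }
}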