I believe the following code can upload a CSV file to the database in the format below:
.csv format:
John, Anderson, Mi
- It records a row into the TableRef table with an insert statement (done).
- It needs to insert a CustID and RefID into BulkImportDetails for every record in the CSV. Currently it only inserts 1 record.
Table 1: TableRef (guid refid (PK, not null), taskname, taskdescription)
Table 2: BulkImportDetails (guid custid (PK, not null), guid refid (FK, not null), firstname, surname, age)
How can I insert the information from this CSV format into both tables? Please advise. Thank you.
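For clarity, this is roughly how I picture one CSV row mapping onto the two tables. It is only a sketch of my intent, not working code; the file path and class name are placeholders, and the field order (firstname, surname, age) is taken from the schema above:

using System;
using System.IO;

//Sketch of the intended mapping only; the CSV path and class name are placeholders
class MappingSketch
{
    static void Main()
    {
        Guid refId = Guid.NewGuid();                //one RefID per upload -> a single TableRef row
        foreach (string csvLine in File.ReadLines(@"C:\Temp\import.csv"))
        {
            string[] parts = csvLine.Split(',');    //firstname, surname, age
            Guid custId = Guid.NewGuid();           //a new CustID for every CSV record
            //custId, refId, parts[0], parts[1], parts[2] -> one BulkImportDetails row
            Console.WriteLine(custId + ", " + refId + ", " + string.Join(", ", parts));
        }
    }
}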
/// <summary>
/// Process the file supplied and process the CSV to a dynamic datatable
/// </summary>
/// <param name="fileName">String</param>
/// <returns>DataTable</returns>
private static DataTable ProcessCSV(string fileName)
{
string connString = ConfigurationManager.ConnectionStrings["DefaultConnection"].ConnectionString;
int AppID = Convert.ToInt32(ConfigurationManager.AppSettings["AppID"]);
Guid ReferralID = Guid.NewGuid();
DateTime date = DateTime.Now;
using (SqlConnection conn = new SqlConnection(connString))
{
conn.Open();
using (SqlCommand cmd =
new SqlCommand("INSERT INTO TestTable VALUES(" +
"refid, @ServerID, @AppID, @CreateDate, @CreatedBy, @CreateDescription, @Description)", conn))
{
cmd.Parameters.AddWithValue("@refid", ReferralID);
cmd.Parameters.AddWithValue("@ServerID", 2);
cmd.Parameters.AddWithValue("@AppID", AppID);
cmd.Parameters.AddWithValue("@CreateDate", date);
cmd.Parameters.AddWithValue("@CreatedBy", "Create by bulk insert");
cmd.Parameters.AddWithValue("@CreateDescription", "Create by bulk insert");
cmd.Parameters.AddWithValue("@Description", "Create by bulk insert");
int rows = cmd.ExecuteNonQuery();
//rows holds the number of records that got inserted
}
}
//Set up our variables
string line = string.Empty;
string[] strArray;
DataTable dt = new DataTable();
DataRow row;
// work out where we should split on a comma, but not on commas inside quoted values
Regex r = new Regex(",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");
//Open the file with a StreamReader
StreamReader sr = new StreamReader(fileName);
//Read the first line and split the string at commas with our regular expression into an array
line = sr.ReadLine();
strArray = r.Split(line);
//For each item in the new split array, dynamically build our data columns. Saves us having to worry about it.
Array.ForEach(strArray, s => dt.Columns.Add(new DataColumn()));
dt.Columns.Add("CustID", Type.GetType("System.Guid"));
dt.Columns.Add("Refid", Type.GetType("System.Guid"));
dt.Columns["CustID"].SetOrdinal(3);
dt.Columns["Refid"].SetOrdinal(4);
//Read each line in the CSV file until it's empty
while ((line = sr.ReadLine()) != null)
{
row = dt.NewRow();
//split the current line into the CSV columns first...
row.ItemArray = r.Split(line);
//...then add our key values, otherwise assigning ItemArray would overwrite them
row["CustID"] = Guid.NewGuid();
row["Refid"] = ReferralID;
dt.Rows.Add(row);
}
dt.Columns["CustID"].SetOrdinal(0);
dt.Columns["Refid"].SetOrdinal(1);
//Tidy the StreamReader up
sr.Dispose();
//return the new DataTable
return dt;
}
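As a side note on the Regex used above: it is meant to split on commas only when they are outside double quotes. A small standalone check I used (the sample line is made up):

using System;
using System.Text.RegularExpressions;

class RegexSplitDemo
{
    static void Main()
    {
        //Same pattern as in ProcessCSV: split on commas that are not inside double quotes
        Regex r = new Regex(",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");
        string line = "John,\"Anderson, Jr\",30";
        foreach (string field in r.Split(line))
        {
            Console.WriteLine(field);
        }
        //Prints: John / "Anderson, Jr" / 30 - the quoted comma is not split on
    }
}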
/// <summary>
/// Take the DataTable and, using WriteToServer(DataTable), send it all to the database table "BulkImportDetails" in one go
/// </summary>
/// <param name="dt">DataTable</param>
/// <returns>String</returns>
private static String ProcessBulkCopy(DataTable dt)
{
string Feedback = string.Empty;
string connString = ConfigurationManager.ConnectionStrings["DataBaseConnectionString"].ConnectionString;
//make our connection and dispose at the end
using( SqlConnection conn = new SqlConnection(connString))
{
//make our command and dispose at the end
using (var copy = new SqlBulkCopy(conn))
{
//Open our connection
conn.Open();
//Set the target table and the batch size to the number of rows
copy.DestinationTableName = "BulkImportDetails";
copy.BatchSize = dt.Rows.Count;
try
{
//Send it to the server
copy.WriteToServer(dt);
Feedback = "Upload complete";
}
catch (Exception ex)
{
Feedback = ex.Message;
}
}
}
return Feedback;
}
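For completeness, this is roughly how I call the two methods from the same class (the CSV path is just a placeholder):

//Assumed caller in the same class as ProcessCSV and ProcessBulkCopy; the path is a placeholder
static void Main()
{
    DataTable dt = ProcessCSV(@"C:\Temp\import.csv");
    string result = ProcessBulkCopy(dt);
    Console.WriteLine(result);
}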