I'm having trouble writing a large amount of data to a FILESTREAM column on SQL Server. Specifically, smaller files of around 1.5-2 GB are handled fine, but once the size reaches 6 GB and above I get an intermittent IOException, "The handle is invalid", in .CopyTo() at the end of the transfer.
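The copy itself is a plain Stream.CopyTo; spelled out as an explicit loop it is equivalent to the sketch below (the buffer size is an assumption), and the exception presumably surfaces on one of the final Write calls:

// Equivalent of source.CopyTo(target), spelled out. The 4096-byte buffer
// is an assumption; the "handle is invalid" IOException appears near the
// end of the transfer, presumably on one of the last Write calls.
var buffer = new byte[4096];
int read;
while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
{
    target.Write(buffer, 0, read);
}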
I have considered writing the data in chunks instead, but SQL Server copies the field's backing file before it will let you append to it, which destroys performance for large files entirely.
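For reference, a sketch of what that chunked variant would look like (AppendChunk and appendSql are just illustrative names): each chunk commits its own transaction, and it is the reopen-for-write in each fresh transaction that makes SQL Server copy the existing backing file first, so the total I/O effectively grows quadratically with file size.

const string appendSql =
    @"select [Data].PathName(), GET_FILESTREAM_TRANSACTION_CONTEXT() " +
    "from [Fragment] where [Id] = @id;";

void AppendChunk(long id, byte[] chunk, int count)
{
    using (var scope = new TransactionScope())
    using (var connection = new SqlConnection(m_ConnectionString))
    {
        connection.Open();
        string path;
        byte[] tx;
        using (var command = new SqlCommand(appendSql, connection))
        {
            command.Parameters.Add("@id", SqlDbType.BigInt).Value = id;
            using (var reader = command.ExecuteReader())
            {
                reader.Read();
                path = reader.GetSqlString(0).Value;
                tx = reader.GetSqlBinary(1).Value;
            }
        }
        // ReadWrite plus Seek(End) to append; reopening the FILESTREAM in a
        // fresh transaction is what triggers the server-side file copy.
        using (var target = new SqlFileStream(path, tx, FileAccess.ReadWrite))
        {
            target.Seek(0, SeekOrigin.End);
            target.Write(chunk, 0, count);
        }
        scope.Complete();
    }
}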
Here's the code:
// Namespaces used: System, System.Data, System.Data.SqlClient,
// System.Data.SqlTypes (SqlFileStream), System.IO, System.Transactions.
public long AddFragment(string location, string description = null)
{
    // Insert a zero-length FILESTREAM value, then read back the logical
    // path and transaction context needed to open it with SqlFileStream.
    const string sql =
        @"insert into [Fragment] ([Description],[Data]) " +
        "values (@description, 0x); " +
        "select [Id], [Data].PathName(), " +
        "GET_FILESTREAM_TRANSACTION_CONTEXT() " +
        "from [Fragment] " +
        "where [Id] = SCOPE_IDENTITY();";

    long id;
    using (var scope = new TransactionScope(
        TransactionScopeOption.Required,
        new TransactionOptions {
            Timeout = TimeSpan.FromDays(1)
        }))
    {
        using (var connection = new SqlConnection(m_ConnectionString))
        {
            connection.Open();

            byte[] serverTx;
            string serverLocation;
            using (var command = new SqlCommand(sql, connection))
            {
                // description is optional; send DBNull rather than null so
                // the parameter is not omitted from the batch.
                command.Parameters.Add("@description",
                    SqlDbType.NVarChar).Value = (object)description ?? DBNull.Value;
                using (var reader = command.ExecuteReader())
                {
                    reader.Read();
                    id = reader.GetSqlInt64(0).Value;
                    serverLocation = reader.GetSqlString(1).Value;
                    serverTx = reader.GetSqlBinary(2).Value;
                }
            }

            // Stream the local file into the FILESTREAM value within the
            // same transaction context.
            using (var source = new FileStream(location, FileMode.Open,
                FileAccess.Read, FileShare.Read, 4096,
                FileOptions.SequentialScan))
            using (var target = new SqlFileStream(serverLocation,
                serverTx, FileAccess.Write))
            {
                source.CopyTo(target);   // fails intermittently for files >= 6 GB
            }
        }
        scope.Complete();
    }
    return id;
}
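The call site is nothing special; the path and description here are just illustrative:

var fragmentId = AddFragment(@"D:\exports\archive.bin", "nightly archive");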