我已经用 SharpDX 替换了一些不可靠的 GDI+ 例程,以从 Stream 加载双色调 TIFF 图像,在其上渲染文本,然后将其作为 Stream 保存回 TIFF 格式。
但是 SharpDX 代码需要更长的时间来做同样的事情,我想知道我是否做错了什么。
正如您从此处的示例中看到的那样,我有 2 个不同的功能:
- RenderImageFromExistingImage
保存渲染图像
using System; using System.Diagnostics; using System.IO; using SharpDX; using SharpDX.Direct2D1; using SharpDX.DirectWrite; using SharpDX.DXGI; using SharpDX.WIC; using Factory = SharpDX.Direct2D1.Factory; using FactoryType = SharpDX.Direct2D1.FactoryType; using PixelFormat = SharpDX.WIC.PixelFormat; using WicBitmap = SharpDX.WIC.Bitmap; public class ImageCreator2 { private static ImagingFactory _wicFactory; private static Factory _d2DFactory; private static SharpDX.DirectWrite.Factory _dwFactory; private int _imageWidth = 1000, _imageHeight = 500; private readonly int _imageDpi = 96; public ImageCreator2() { _wicFactory = new ImagingFactory(); _d2DFactory = new Factory(FactoryType.SingleThreaded); _dwFactory = new SharpDX.DirectWrite.Factory(SharpDX.DirectWrite.FactoryType.Shared); } private void RenderImage(WicRenderTarget renderTarget) { using (var blackBrush = new SolidColorBrush(renderTarget, Color4.Black)) using (var tformat = new TextFormat(_dwFactory, "Arial", 30f)) using (var tformat2 = new TextFormat(_dwFactory, "Arial", 11f)) { renderTarget.BeginDraw(); renderTarget.Clear(Color.White); renderTarget.DrawText("TEST", tformat, new RectangleF(300f, 30f, 100f, 20f), blackBrush); renderTarget.DrawText("MORE TEST", tformat2, new RectangleF(30f, 150f, 100f, 20f), blackBrush); renderTarget.DrawLine(new Vector2(0f, 25f), new Vector2(500f, 25f), blackBrush); renderTarget.DrawLine(new Vector2(0f, 210f), new Vector2(500f, 210f), blackBrush); renderTarget.EndDraw(); } } public void BuildImageFromExistingImage(byte[] image, Stream systemStream) { using (var checkStream = new MemoryStream(image)) using ( var inDecoder = new BitmapDecoder(_wicFactory, checkStream, DecodeOptions.CacheOnDemand)) using (var converter = new FormatConverter(_wicFactory)) { if (inDecoder.FrameCount > 0) { using (var frame = inDecoder.GetFrame(0)) { converter.Initialize(frame, PixelFormat.Format32bppPRGBA, BitmapDitherType.None, null, 0.0f, BitmapPaletteType.MedianCut); _imageWidth = converter.Size.Width; _imageHeight = converter.Size.Height; } } else { throw new Exception(); } var renderProperties = new RenderTargetProperties( RenderTargetType.Software, new SharpDX.Direct2D1.PixelFormat(Format.Unknown, AlphaMode.Unknown), _imageDpi, _imageDpi, RenderTargetUsage.None, FeatureLevel.Level_DEFAULT); using (var wicBitmap = new WicBitmap( _wicFactory, converter, BitmapCreateCacheOption.CacheOnDemand)) using ( var renderTarget = new WicRenderTarget(_d2DFactory, wicBitmap, renderProperties)) { RenderImage(renderTarget); using ( var encoder = new BitmapEncoder(_wicFactory, ContainerFormatGuids.Tiff)) { encoder.Initialize(systemStream); using (var bitmapFrameEncode = new BitmapFrameEncode(encoder)) { var pixFormat = PixelFormat.Format32bppPRGBA; bitmapFrameEncode.Initialize(); bitmapFrameEncode.SetSize(_imageWidth, _imageHeight); bitmapFrameEncode.SetResolution(96, 96); bitmapFrameEncode.SetPixelFormat(ref pixFormat); //This takes 30-40ms per image. var watch = new Stopwatch(); try { watch.Start(); bitmapFrameEncode.WriteSource(wicBitmap); } finally { watch.Stop(); } Console.WriteLine("Saved real image in {0} ms.", watch.Elapsed.TotalMilliseconds); bitmapFrameEncode.Commit(); } encoder.Commit(); } } } } public void SaveRenderedImage(Stream systemStream) { var renderProperties = new RenderTargetProperties( RenderTargetType.Default, new SharpDX.Direct2D1.PixelFormat(Format.Unknown, AlphaMode.Unknown), _imageDpi, _imageDpi, RenderTargetUsage.None, FeatureLevel.Level_DEFAULT); using (var wicBitmap = new WicBitmap( _wicFactory, _imageWidth, _imageHeight, PixelFormat.Format32bppBGR, BitmapCreateCacheOption.CacheOnDemand )) using (var renderTarget = new WicRenderTarget(_d2DFactory, wicBitmap, renderProperties)) { RenderImage(renderTarget); using ( var encoder = new BitmapEncoder(_wicFactory, ContainerFormatGuids.Tiff)) { encoder.Initialize(systemStream); using (var bitmapFrameEncode = new BitmapFrameEncode(encoder)) { bitmapFrameEncode.Initialize(); bitmapFrameEncode.SetSize(_imageWidth, _imageHeight); bitmapFrameEncode.SetResolution(_imageDpi, _imageDpi); //This takes 8-10ms per image. var watch = new Stopwatch(); try { watch.Start(); bitmapFrameEncode.WriteSource(wicBitmap); } finally { watch.Stop(); } Console.WriteLine("Saved generated image in {0} ms.", watch.Elapsed.TotalMilliseconds); bitmapFrameEncode.Commit(); } encoder.Commit(); } } } }
它们大部分是相同的,并且做的事情大致相同,除了第一个 (RenderImageFromExistingImage) 采用现有的 1000x500 双色 TIFF 图像作为基础图像,第二个 (SaveRenderedImage) 从头开始创建一个类似大小的 WIC 位图.
获取现有图像的函数执行大约需要 30-40 毫秒,其中大部分时间(约 30 毫秒)由BitmapFrameEncode.WriteSource占用。此函数等效于被替换的 GDI+ 代码。
从头开始创建 WicBitmap 的函数需要8-10 毫秒来执行,而在BitmapFrameEncode.WriteSource中不需要花费大量时间,这与它所替换的 GDI+ 函数的时间大致相同。唯一的区别是这个功能不加载预先存在的图像,这是我需要的。
与 SaveRenderedImage 相比,为什么BitmapFrameEncode.WriteSource(它似乎是 IWICBitmapFrameEncode 的一个薄包装)在BuildImageFromExistingImage中如此缓慢?
我的猜测是 BuildImageFromExistingImage 速度较慢,因为它正在对传入图像进行额外的转换(使用FormatConverter ),将其转换为 D2D 将处理的像素格式,并且这样做的时间损失在BitmapFrameEncode之前不会发挥作用.WriteSource发生。
有什么我做错了吗?或者与基于 GDI+ 的调用相比,WIC(Windows 映像组件)是否慢?
理想情况下,我需要第一种情况 (BuildImageFromExistingImage) 与它所替换的 GDI+ 代码一样快,并且希望它应该可以使其与打算替换的 GDI+ 代码一样快,如果不是更快的话。