Following up on my earlier question: I'm trying to use Direct3D 11 to do some image warping very quickly, but I can't seem to get all the pieces working together. Everything falls apart when I try to read the DXGI Surface into a Direct2D bitmap:
var renderSurface = renderTexture.QueryInterface<SharpDX.DXGI.Surface>();
var props = new BitmapProperties
{
    PixelFormat = new SharpDX.Direct2D1.PixelFormat(
        Format.B8G8R8A8_UNorm,
        AlphaMode.Premultiplied)
};

// This throws "No such interface supported"
var direct2DBitmap =
    new SharpDX.Direct2D1.Bitmap(_bitmapRenderTarget, renderSurface, props);
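For what it's worth, the other interop route I've come across is creating the Direct2D render target directly on the DXGI surface (ID2D1Factory::CreateDxgiSurfaceRenderTarget) instead of wrapping the surface in a bitmap. This is only a sketch, and the factory, DPI, and FeatureLevel values are guesses on my part, but it shows the pattern I mean:

// Sketch only: a D2D render target created directly on the DXGI surface.
// The factory, DPI and FeatureLevel values are assumptions, not working code.
var d2dFactory = new SharpDX.Direct2D1.Factory();
var rtProps = new RenderTargetProperties(
    RenderTargetType.Default,
    new SharpDX.Direct2D1.PixelFormat(Format.B8G8R8A8_UNorm, AlphaMode.Premultiplied),
    96.0f, 96.0f,
    RenderTargetUsage.None,
    SharpDX.Direct2D1.FeatureLevel.Level_DEFAULT);

using (var surfaceTarget = new SharpDX.Direct2D1.RenderTarget(d2dFactory, renderSurface, rtProps))
{
    // Direct2D drawing onto the render texture would go here.
}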
I've also tried reading the surface directly with the Map method, but that fails before I can do anything with the data:
var renderSurface = renderTexture.QueryInterface<SharpDX.DXGI.Surface>();
// This throws "The parameter is incorrect."
renderSurface.Map(MapFlags.Read);
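From what I understand, a texture created with ResourceUsage.Default and no CPU access flags can't be mapped directly, and the usual advice is to copy it into a staging texture and map that instead. This is the readback I was planning to try next; it's only a sketch (reusing device, renderTexture and _size from the full code below) and I haven't verified it:

// Sketch of a staging-texture readback instead of Surface.Map (untested).
// The description mirrors renderTexture, but with staging usage and CPU read access.
var stagingDescription = new Texture2DDescription
{
    Width = _size.Width,
    Height = _size.Height,
    Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
    MipLevels = 1,
    ArraySize = 1,
    SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
    Usage = ResourceUsage.Staging,
    BindFlags = BindFlags.None,
    CpuAccessFlags = CpuAccessFlags.Read,
};

using (var stagingTexture = new Texture2D(device, stagingDescription))
{
    // Copy the GPU render target into the CPU-readable staging copy
    device.ImmediateContext.CopyResource(renderTexture, stagingTexture);

    // Map the staging copy; rows are dataBox.RowPitch bytes apart,
    // which may be larger than Width * 4
    var dataBox = device.ImmediateContext.MapSubresource(
        stagingTexture, 0, MapMode.Read, SharpDX.Direct3D11.MapFlags.None);
    try
    {
        // ... read pixels from dataBox.DataPointer here ...
    }
    finally
    {
        device.ImmediateContext.UnmapSubresource(stagingTexture, 0);
    }
}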
The more complete code is below. Also, if there's anything here that just doesn't belong... I'm all ears. I'm not really a 3D person; I'm just trying to solve a very specific problem that has no easy Direct2D solution.
public void AddImage(BitmapSource bitmapSource, SharpDX.Vector2[] abcd)
{
    Device device;
    Texture2D renderTexture = null;

    device = new Device(DriverType.Hardware, DeviceCreationFlags.BgraSupport);
    renderTexture = new Texture2D(device, new Texture2DDescription
    {
        Width = _size.Width,
        Height = _size.Height,
        Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
        Usage = ResourceUsage.Default,
    });
    var renderView = new RenderTargetView(device, renderTexture);

    //
    // Load the input bitmap as a texture
    //
    var texture = CreateTexture2DFromBitmap(device, bitmapSource);
    var textureView = new ShaderResourceView(device, texture);
    var sampler = new SamplerState(device, new SamplerStateDescription()
    {
        Filter = Filter.MinMagMipLinear,
        AddressU = TextureAddressMode.Wrap,
        AddressV = TextureAddressMode.Wrap,
        AddressW = TextureAddressMode.Wrap,
        BorderColor = Color.Black,
        ComparisonFunction = Comparison.Never,
        MaximumAnisotropy = 16,
        MipLodBias = 0,
        MinimumLod = 0,
        MaximumLod = 16,
    });
    //
    // Setup the scene
    // These shaders are from the MiniCubeTexture sample for SharpDX
    //
    var vertexShaderByteCode = ShaderBytecode.CompileFromFile("MiniCubeTexture.fx", "VS", "vs_4_0");
    var vertexShader = new VertexShader(device, vertexShaderByteCode);
    var pixelShaderByteCode = ShaderBytecode.CompileFromFile("MiniCubeTexture.fx", "PS", "ps_4_0");
    var pixelShader = new PixelShader(device, pixelShaderByteCode);

    var layout = new InputLayout(device, ShaderSignature.GetInputSignature(vertexShaderByteCode), new[] {
        new InputElement("POSITION", 0, SharpDX.DXGI.Format.R32G32B32A32_Float, 0, 0),
        new InputElement("TEXCOORD", 0, SharpDX.DXGI.Format.R32G32_Float, 16, 0),
    });

    var vertices = SharpDX.Direct3D11.Buffer.Create(device, BindFlags.VertexBuffer, new[] {
        // 3D coordinates             UV texture coordinates
        abcd[0].X, abcd[0].Y, 0.0f, 1.0f,    0.0f, 1.0f, // Front
        abcd[1].X, abcd[1].Y, 0.0f, 1.0f,    0.0f, 0.0f,
        abcd[2].X, abcd[2].Y, 0.0f, 1.0f,    1.0f, 0.0f,
        abcd[3].X, abcd[3].Y, 0.0f, 1.0f,    0.0f, 1.0f,
    });
    var constantBuffer = new SharpDX.Direct3D11.Buffer(device, Utilities.SizeOf<Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);

    var context = device.ImmediateContext;
    context.InputAssembler.InputLayout = layout;
    context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleStrip;
    context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertices, Utilities.SizeOf<Vector4>() + Utilities.SizeOf<Vector2>(), 0));
    context.VertexShader.SetConstantBuffer(0, constantBuffer);
    context.VertexShader.Set(vertexShader);
    context.Rasterizer.SetViewports(new Viewport(0, 0, _size.Width, _size.Height, 0.0f, 1.0f));
    context.PixelShader.Set(pixelShader);
    context.PixelShader.SetSampler(0, sampler);
    context.PixelShader.SetShaderResource(0, textureView);
    context.OutputMerger.SetTargets(renderView);

    //
    // Render 3D
    //
    context.ClearRenderTargetView(renderView, Colors.Transparent);
    var worldViewProj = Matrix.Identity; // Matrix.Translation(_size.Width, _size.Height / 2, 0) * Matrix.Scaling(_size.Width / 2, _size.Height / 2, 1);
    context.UpdateSubresource(ref worldViewProj, constantBuffer);
    context.Draw(4, 0);
    //
    // Composite
    //
    // Create a render target
    var renderSurface = renderTexture.QueryInterface<SharpDX.DXGI.Surface>();
    var props = new BitmapProperties
    {
        PixelFormat = new SharpDX.Direct2D1.PixelFormat(Format.B8G8R8A8_UNorm, AlphaMode.Premultiplied)
    };
    var direct2DBitmap = new SharpDX.Direct2D1.Bitmap(_bitmapRenderTarget, renderSurface, props);

    _wicRenderTarget.BeginDraw();
    _wicRenderTarget.DrawBitmap(direct2DBitmap, 1.0f, SharpDX.Direct2D1.BitmapInterpolationMode.Linear);
    _wicRenderTarget.EndDraw();

    direct2DBitmap.Dispose();
}
private static SharpDX.Direct3D11.Texture2D CreateTexture2DFromBitmap(SharpDX.Direct3D11.Device device, SharpDX.WIC.BitmapSource bitmapSource)
{
    // Allocate a DataStream to receive the WIC image pixels
    int stride = bitmapSource.Size.Width * 4;
    using (var buffer = new SharpDX.DataStream(bitmapSource.Size.Height * stride, true, true))
    {
        // Copy the content of the WIC bitmap into the buffer
        bitmapSource.CopyPixels(stride, buffer);
        return new SharpDX.Direct3D11.Texture2D(device, new SharpDX.Direct3D11.Texture2DDescription()
        {
            Width = bitmapSource.Size.Width,
            Height = bitmapSource.Size.Height,
            ArraySize = 1,
            BindFlags = SharpDX.Direct3D11.BindFlags.ShaderResource,
            Usage = SharpDX.Direct3D11.ResourceUsage.Immutable,
            CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.None,
            Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm,
            MipLevels = 1,
            OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None,
            SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        }, new SharpDX.DataRectangle(buffer.DataPointer, stride));
    }
}
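For context, the stride calculation above assumes bitmapSource is a 32-bit-per-pixel WIC source; a format-converted frame along the lines of the SharpDX texture-loading sample would satisfy that. The file name and corner points below are placeholders, not my real values:

// Sketch of producing a 32bpp WIC source for CreateTexture2DFromBitmap.
// "input.png" and the corner points are placeholders only.
var imagingFactory = new SharpDX.WIC.ImagingFactory();
var decoder = new SharpDX.WIC.BitmapDecoder(
    imagingFactory, "input.png", SharpDX.WIC.DecodeOptions.CacheOnDemand);
var formatConverter = new SharpDX.WIC.FormatConverter(imagingFactory);
formatConverter.Initialize(
    decoder.GetFrame(0),
    SharpDX.WIC.PixelFormat.Format32bppPRGBA, // matches the R8G8B8A8_UNorm texture
    SharpDX.WIC.BitmapDitherType.None,
    null,
    0.0,
    SharpDX.WIC.BitmapPaletteType.Custom);

// Placeholder corner points for the warp
var corners = new[]
{
    new SharpDX.Vector2(0, 0),
    new SharpDX.Vector2(100, 0),
    new SharpDX.Vector2(100, 100),
    new SharpDX.Vector2(0, 100),
};
AddImage(formatConverter, corners);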