2

我正在尝试使用 MediaFoundation 将 ID3D11Texture2D 编码为 mp4。以下是我当前的代码。

初始化接收器写入器

/// <summary>
/// Creates the sink writer for <paramref name="outputFile"/>, configures an
/// H.264 output stream and an uncompressed ARGB32 input media type of the
/// given size, and calls BeginWriting so the writer accepts samples.
/// Returns 0 (S_OK) on success or the first failing HRESULT.
/// </summary>
/// <param name="outputFile">Path/URL of the .mp4 file to create.</param>
/// <param name="videoWidth">Frame width in pixels.</param>
/// <param name="videoHeight">Frame height in pixels.</param>
private int InitializeSinkWriter(String outputFile, int videoWidth, int videoHeight)
    {
        IMFMediaType mediaTypeIn = null;
        IMFMediaType mediaTypeOut = null;
        IMFAttributes attributes = null;

        // hr starts at S_OK; each step below runs only if all previous
        // steps succeeded, and the first failure is returned to the caller.
        int hr = 0;

        // Request hardware transforms and low-latency mode.
        // NOTE(review): no MF_SINK_WRITER_D3D_MANAGER attribute is set here,
        // so a hardware encoder MFT has no D3D device manager with which to
        // read the GPU texture passed in later — TODO confirm this is the
        // cause of the 0x887A0005 WriteSample failures.
        if (Succeeded(hr)) hr = (int)MFExtern.MFCreateAttributes(out attributes, 1);
        if (Succeeded(hr)) hr = (int)attributes.SetUINT32(MFAttributesClsid.MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, 1);            
        if (Succeeded(hr)) hr = (int)attributes.SetUINT32(MFAttributesClsid.MF_LOW_LATENCY, 1);

        // Create the sink writer 
        if (Succeeded(hr)) hr = (int)MFExtern.MFCreateSinkWriterFromURL(outputFile, null, attributes, out sinkWriter);

        // Create the output type (H.264 at videoBitRate / VIDEO_FPS)
        if (Succeeded(hr)) hr = (int)MFExtern.MFCreateMediaType(out mediaTypeOut);
        if (Succeeded(hr)) hr = (int)mediaTypeOut.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
        // NOTE(review): MF_TRANSCODE_CONTAINERTYPE is a transcode-profile
        // attribute, not a media-type attribute; presumably ignored when set
        // on a media type — TODO confirm.
        if (Succeeded(hr)) hr = (int)mediaTypeOut.SetGUID(MFAttributesClsid.MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType.MPEG4);
        if (Succeeded(hr)) hr = (int)mediaTypeOut.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.H264);
        if (Succeeded(hr)) hr = (int)mediaTypeOut.SetUINT32(MFAttributesClsid.MF_MT_AVG_BITRATE, videoBitRate);
        if (Succeeded(hr)) hr = (int)mediaTypeOut.SetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, (int)MFVideoInterlaceMode.Progressive);            

        if (Succeeded(hr)) hr = (int)MFExtern.MFSetAttributeSize(mediaTypeOut, MFAttributesClsid.MF_MT_FRAME_SIZE, videoWidth, videoHeight);
        if (Succeeded(hr)) hr = (int)MFExtern.MFSetAttributeRatio(mediaTypeOut, MFAttributesClsid.MF_MT_FRAME_RATE, VIDEO_FPS, 1);
        if (Succeeded(hr)) hr = (int)MFExtern.MFSetAttributeRatio(mediaTypeOut, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
        if (Succeeded(hr)) hr = (int)sinkWriter.AddStream(mediaTypeOut, out streamIndex);



        // Create the input type (uncompressed ARGB32 frames, same size/rate)
        if (Succeeded(hr)) hr = (int)MFExtern.MFCreateMediaType(out mediaTypeIn);
        if (Succeeded(hr)) hr = (int)mediaTypeIn.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
        if (Succeeded(hr)) hr = (int)mediaTypeIn.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.ARGB32);
        // NOTE(review): MF_SA_D3D11_AWARE is an attribute that transforms
        // *report* to advertise D3D11 support; setting it on an input media
        // type should have no effect — TODO confirm and remove.
        if (Succeeded(hr)) hr = (int)mediaTypeIn.SetUINT32(MFAttributesClsid.MF_SA_D3D11_AWARE, 1);
        if (Succeeded(hr)) hr = (int)mediaTypeIn.SetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, (int)MFVideoInterlaceMode.Progressive);
        if (Succeeded(hr)) hr = (int)MFExtern.MFSetAttributeSize(mediaTypeIn, MFAttributesClsid.MF_MT_FRAME_SIZE, videoWidth, videoHeight);
        if (Succeeded(hr)) hr = (int)MFExtern.MFSetAttributeRatio(mediaTypeIn, MFAttributesClsid.MF_MT_FRAME_RATE, VIDEO_FPS, 1);
        if (Succeeded(hr)) hr = (int)MFExtern.MFSetAttributeRatio(mediaTypeIn, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
        if (Succeeded(hr)) hr = (int)sinkWriter.SetInputMediaType(streamIndex, mediaTypeIn, null);


        // Start accepting data
        if (Succeeded(hr)) hr = (int)sinkWriter.BeginWriting();


        // The writer holds its own references; release our media types.
        COMBase.SafeRelease(mediaTypeOut);
        COMBase.SafeRelease(mediaTypeIn);

        return hr;
    }

书写框架

 // Write one frame: wrap the captured D3D11 texture ("surface") in a DXGI
 // media buffer, attach it to a video sample, stamp time/duration
 // (100-nanosecond units), and hand it to the sink writer.
 int hr = 0;
        IMFSample sample = null;
        IMFMediaBuffer buffer = null;
        IMF2DBuffer p2Dbuffer = null;
        // IUnknown for the native ID3D11Texture2D wrapped by SharpDX.
        // NOTE(review): this RCW is never released with COMBase.SafeRelease,
        // which needlessly extends the COM object's lifetime until the GC
        // cleans up the reference count.
        object texNativeObject = Marshal.GetObjectForIUnknown(surface.NativePointer);

        // The GUID literal is IID_ID3D11Texture2D.
        if (Succeeded(hr)) hr = (int)MFExtern.MFCreateDXGISurfaceBuffer(new Guid("6f15aaf2-d208-4e89-9ab4-489535d34f9c"), texNativeObject, 0, false, out p2Dbuffer);

        // View the 2-D buffer as a plain media buffer and set its length
        // to the contiguous size of the surface.
        buffer = MFVideoEncoderST.ReinterpretCast<IMF2DBuffer,IMFMediaBuffer>(p2Dbuffer);
        int length=0;
        if (Succeeded(hr)) hr = (int)p2Dbuffer.GetContiguousLength(out length);
        if (Succeeded(hr)) hr = (int)buffer.SetCurrentLength(length);


        // Passing null allocates a sample with no initial buffer attached.
        if (Succeeded(hr)) hr = (int)MFExtern.MFCreateVideoSampleFromSurface(null, out sample);

        if (Succeeded(hr)) hr = (int)sample.AddBuffer(buffer);
        if (Succeeded(hr)) hr = (int)sample.SetSampleTime(prevRecordingDuration);
        if (Succeeded(hr)) hr = (int)sample.SetSampleDuration((recordDuration - prevRecordingDuration));

        if (Succeeded(hr)) hr = (int)sinkWriter.WriteSample(streamIndex, sample);


        COMBase.SafeRelease(sample);
        COMBase.SafeRelease(buffer);

使用 MFTRACE 我收到以下错误。

    02:48:04.99463 CMFSinkWriterDetours::WriteSample @024BEA18 Stream Index 0x0, Sample @17CEACE0, Time 571ms, Duration 16ms, Buffers 1, Size 4196352B,2088,2008 02:48:04.99465 CMFSinkWriterDetours::WriteSample @024BEA18 failed hr=0x887A0005 (null)2088,2008 
02:48:05.01090 CMFSinkWriterDetours::WriteSample @024BEA18 Stream Index 0x0, Sample @17CE9FC0, Time 587ms, Duration 17ms, Buffers 1, Size 4196352B,2088,2008 02:48:05.01091 CMFSinkWriterDetours::WriteSample @024BEA18 failed hr=0x887A0005 (null)2088,2008 
02:48:05.02712 CMFSinkWriterDetours::WriteSample @024BEA18 Stream Index 0x0, Sample @17CEACE0, Time 604ms, Duration 16ms, Buffers 1, Size 4196352B,2088,2008 02:48:05.02713 CMFSinkWriterDetours::WriteSample @024BEA18 failed hr=0x887A0005 (null)

谁能告诉我我的代码有什么问题?我只能产生 0 字节的 mp4 文件。

4

2 回答 2

7

我在这里遇到了一些潜在的问题。Roman提到了两个大的,所以我会详细说明。我还有一些其他的批评/建议给你。

不使用IMFDXGIDeviceManager

为了在 Media Foundation 中使用硬件加速，您需要创建一个 DirectX 设备管理器对象：DX9 使用 IDirect3DDeviceManager9，而在您的情况（DXGI）下使用 IMFDXGIDeviceManager。我强烈建议阅读该接口的全部 MSDN 文档。之所以必须这样做，是因为所有协同工作的硬件 MF 转换（MFT）之间必须共享同一个 DX 设备——它们都需要访问该设备所控制的共享 GPU 内存，并且每个组件在使用设备时都需要对其独占控制，因此需要一套锁定机制。设备管理器对象提供了这套锁定机制，同时也是向一个或多个转换提供 DX 设备的标准方式。对于 DXGI，您可以使用 MFCreateDXGIDeviceManager 来创建它。

接下来，您需要创建您的 D3D11 设备，并用它调用 IMFDXGIDeviceManager::ResetDevice。然后，您需要为 Sink Writer 本身设置该设备管理器，而这一步在您上面提供的代码中没有完成。做法如下：

// ... inside your InitializeSinkWriter function that you listed above

// I'm assuming you've already created and set up the DXGI device manager
// elsewhere (i.e. MFCreateDXGIDeviceManager was called and ResetDevice was
// given your D3D11 device).
IMFDXGIDeviceManager pDeviceManager;

// Passing 3 as the argument because we're adding 3 attributes immediately, saves re-allocations
if (Succeeded(hr)) hr = (int)MFExtern.MFCreateAttributes(out attributes, 3);
if (Succeeded(hr)) hr = (int)attributes.SetUINT32(MFAttributesClsid.MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, 1);            
if (Succeeded(hr)) hr = (int)attributes.SetUINT32(MFAttributesClsid.MF_LOW_LATENCY, 1);

// Here's the key piece!
// MF_SINK_WRITER_D3D_MANAGER shares the device manager (and thus the D3D
// device) with the hardware encoder MFT the sink writer loads.
if (Succeeded(hr)) hr = (int)attributes.SetUnknown(MFAttributesClsid.MF_SINK_WRITER_D3D_MANAGER, pDeviceManager);

// Create the sink writer 
if (Succeeded(hr)) hr = (int)MFExtern.MFCreateSinkWriterFromURL(outputFile, null, attributes, out sinkWriter);

这实际上会为硬件编码器启用 D3D11 支持，并允许它访问并读取您传入的 Texture2D 的内容。值得注意的是，MF_SINK_WRITER_D3D_MANAGER 对 DX9 和 DXGI 两种设备管理器都适用。


编码器缓冲IMFSample同一纹理的多个实例

这也是您的问题的潜在原因 - 至少它会导致许多意外行为,即使它不是明显问题的原因。根据 Roman 的评论,许多编码器将缓冲多个帧作为其编码过程的一部分。使用 Sink Writer 时您不会看到这种行为,因为它会为您处理所有细节工作。但是,您尝试完成的工作(即发送 D3D11 纹理作为输入帧)的级别非常低,以至于您开始不得不担心 Sink Writer 使用的编码器 MFT 的内部细节。

大多数视频编码器MFT将使用一定大小的内部缓冲区来存储通过IMFTransform::ProcessInput. 这具有在生成任何输出之前必须提供多个样本作为输入的副作用。视频编码器需要按顺序访问多个样本,因为它们使用后续帧来确定如何编码当前帧。换句话说,如果解码器正在处理第 0 帧,它可能还需要查看第 1、2 和 3 帧。从技术角度来看,这是因为帧间预测和运动估计之类的东西。一旦编码器完成对最旧样本的处理,它就会生成一个输出缓冲区(另一个IMFSample对象,但这次在输出端通过IMFTransform::ProcessOutput) 然后丢弃它正在处理的输入样本(通过调用IUnknown::Release),然后请求更多输入,并最终移动到下一帧。您可以在 MSDN 文章Processing Data in the Encoder中阅读有关此过程的更多信息

正如 Roman 所暗示的,这意味着您将一个 inside 封装ID3D11Texture2D在一个IMFMediaBufferinside an 中IMFSample,然后将其传递给 Sink Writer。作为编码过程的一部分,编码器可能会缓冲该样本。随着编码器的工作,其内容Texture2D可能会发生变化,这可能会导致各种问题。即使这不会导致程序错误,它肯定会导致非常奇怪的编码视频输出。想象一下,如果编码器试图预测一帧的视觉内容在下一帧中如何变化,然后两帧的实际视觉内容都从编码器下方更新出来!

之所以会出现这个特定问题,是因为编码器只有一个指向您的IMFSample实例的指针引用,它最终只是一个指向您的ID3D11Texture2D对象的指针,而该对象是一种对可变图形内存的指针引用。最终,由于程序的某些其他部分,该图形内存的内容正在发生变化,但由于它始终更新相同的 GPU 纹理,因此您发送编码器的每个样本都指向相同的单个纹理。这意味着每当您通过更改 GPU 内存更新纹理时,所有活动IMFSample对象都会反映这些更改,因为它们都有效地指向相同的 GPU 纹理。

要解决此问题，您需要分配多个 ID3D11Texture2D 对象，以便在把纹理交给 Sink Writer 时，可以将一个纹理与一个 IMFSample 配对。这样每个样本都指向一个唯一的纹理，从而解决所有样本指向同一个 GPU 纹理的问题。但是，您不一定事先知道需要创建多少个纹理，所以处理此问题最稳妥的方法是编写您自己的纹理分配器。这在 C# 中仍然可以做到，MediaFoundation.NET 也定义了您需要用到的接口。

分配器应该维护一个“空闲”SharpDX.Texture2D对象的列表——那些当前没有被接收器写入器/编码器使用的对象。你的程序应该能够从分配器请求新的纹理对象,在这种情况下,它要么从空闲列表中返回一个对象,要么创建一个新的纹理来适应请求。

下一个问题是如何得知 IMFSample 对象何时被编码器丢弃，以便将其附带的纹理放回空闲列表。碰巧的是，您当前使用的 MFCreateVideoSampleFromSurface 函数分配的样本实现了 IMFTrackedSample 接口。您需要利用该接口在样本被释放时收到通知，从而回收 Texture2D 对象。

诀窍在于必须告诉样本它的分配器是谁。首先，您的分配器类需要实现 IMFAsyncCallback。如果您通过 IMFTrackedSample::SetAllocator 在样本上设置了分配器，那么当编码器释放样本时，分配器的 IMFAsyncCallback::Invoke 方法就会被调用，并把一个 IMFAsyncResult 作为参数传入。下面是分配器类的一个大致示例。

/// <summary>
/// Pairs each IMFSample handed to the sink writer with its own
/// SharpDX.Direct3D11.Texture2D, recycling textures through a free list.
/// Implements IMFAsyncCallback so IMFTrackedSample can notify us (via
/// Invoke) when the encoder releases a sample, at which point the attached
/// texture is returned to the free list.
/// </summary>
sealed class TextureAllocator : IMFAsyncCallback, IDisposable
{
    // Textures not currently owned by an in-flight sample.
    private ConcurrentStack<SharpDX.Direct3D11.Texture2D> m_freeStack;
    // IID of ID3D11Texture2D, used with MFCreateDXGISurfaceBuffer and
    // IMFDXGIBuffer.GetResource.
    private static readonly Guid s_IID_ID3D11Texture2D = new Guid("6f15aaf2-d208-4e89-9ab4-489535d34f9c");

    // If all textures are the exact same size and color format,
    // consider making those parameters private class members and
    // requiring they be specified as arguments to the constructor.
    public TextureAllocator()
    {
        m_freeStack = new ConcurrentStack<SharpDX.Direct3D11.Texture2D>();
    }

    // Helper so this class compiles standalone; mirrors COMBase.Succeeded.
    private static bool Succeeded(HResult hr)
    {
        return (int)hr >= 0;
    }

    private bool disposedValue = false;

    // Disposes every pooled texture. Textures still owned by in-flight
    // samples are not tracked here; let them drain before disposing.
    private void Dispose(bool disposing)
    {
        if(!disposedValue)
        {
            if(disposing)
            {
                // Dispose managed resources here
            }

            if(m_freeStack != null)
            {
                SharpDX.Direct3D11.Texture2D texture;
                while(m_freeStack.TryPop(out texture))
                {
                    texture.Dispose();
                }
                m_freeStack = null;
            }

            disposedValue = true;
        }
    }

    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    ~TextureAllocator()
    {
        Dispose(false);
    }

    /// <summary>
    /// Creates a brand-new texture when the free list is empty.
    /// </summary>
    private SharpDX.Direct3D11.Texture2D InternalAllocateNewTexture()
    {
        // FIX: the original method had an empty body, which does not compile
        // for a non-void return type. Replace this throw with code that
        // allocates a new texture with your format, size, etc.
        throw new NotImplementedException("Allocate and return a new SharpDX.Direct3D11.Texture2D with your desired size and format here.");
    }

    /// <summary>
    /// Returns a recycled texture from the free list, or allocates a new one.
    /// </summary>
    public SharpDX.Direct3D11.Texture2D AllocateTexture()
    {
        SharpDX.Direct3D11.Texture2D existingTexture;
        if(m_freeStack.TryPop(out existingTexture))
        {
            return existingTexture;
        }
        else
        {
            return InternalAllocateNewTexture();
        }
    }

    /// <summary>
    /// Creates a tracked video sample whose media buffer wraps a texture from
    /// this allocator. When the encoder releases the sample, Invoke() runs
    /// and the texture goes back on the free list.
    /// </summary>
    public IMFSample CreateSampleAndAllocateTexture()
    {
        IMFSample pSample;
        IMFTrackedSample pTrackedSample;
        HResult hr;

        // Create the video sample. This function returns an IMFTrackedSample per MSDN
        hr = MFExtern.MFCreateVideoSampleFromSurface(null, out pSample);
        MFError.ThrowExceptionForHR(hr);

        // Query the IMFSample to see if it implements IMFTrackedSample
        pTrackedSample = pSample as IMFTrackedSample;
        if(pTrackedSample == null)
        {
            // FIX: release the sample we just created before throwing so it
            // doesn't leak. This shouldn't happen in practice.
            COMBase.SafeRelease(pSample);
            throw new InvalidCastException("MFCreateVideoSampleFromSurface returned a sample that did not implement IMFTrackedSample");
        }

        // Use our own class to allocate a texture
        SharpDX.Direct3D11.Texture2D availableTexture = AllocateTexture();
        // Convert the texture's native ID3D11Texture2D pointer into
        // an IUnknown (represented as a System.Object)
        object texNativeObject = Marshal.GetObjectForIUnknown(availableTexture.NativePointer);

        // Create the media buffer from the texture
        IMFMediaBuffer p2DBuffer;
        hr = MFExtern.MFCreateDXGISurfaceBuffer(s_IID_ID3D11Texture2D, texNativeObject, 0, false, out p2DBuffer);
        // Release the object-as-IUnknown we created above
        COMBase.SafeRelease(texNativeObject);
        // If media buffer creation failed, throw an exception
        MFError.ThrowExceptionForHR(hr);

        // Set the owning instance of this class as the allocator
        // for IMFTrackedSample to notify when the sample is released.
        // FIX: check the HRESULTs instead of silently ignoring them.
        hr = pTrackedSample.SetAllocator(this, null);
        MFError.ThrowExceptionForHR(hr);

        // Attach the created buffer to the sample
        hr = pTrackedSample.AddBuffer(p2DBuffer);
        MFError.ThrowExceptionForHR(hr);

        return pTrackedSample;
    }

    // This is public so any textures you allocate but don't make IMFSamples 
    // out of can be returned to the allocator manually.
    public void ReturnFreeTexture(SharpDX.Direct3D11.Texture2D freeTexture)
    {
        m_freeStack.Push(freeTexture);
    }

    // IMFAsyncCallback.GetParameters
    // This is allowed to return E_NOTIMPL as a way of specifying
    // there are no special parameters.
    public HResult GetParameters(out MFAsync pdwFlags, out MFAsyncCallbackQueue pdwQueue)
    {
        pdwFlags = MFAsync.None;
        pdwQueue = MFAsyncCallbackQueue.Standard;
        return HResult.E_NOTIMPL;
    }

    /// <summary>
    /// Called by Media Foundation when a tracked sample is released by the
    /// encoder; recovers the wrapped texture and returns it to the free list.
    /// </summary>
    public HResult Invoke(IMFAsyncResult pResult)
    {
        object pUnkObject;
        IMFSample pSample = null;
        IMFMediaBuffer pBuffer = null;
        IMFDXGIBuffer pDXGIBuffer = null;

        // Get the IUnknown out of the IMFAsyncResult if there is one
        HResult hr = pResult.GetObject(out pUnkObject);
        if(Succeeded(hr))
        {
            pSample = pUnkObject as IMFSample;
        }

        if(pSample != null)
        {
            // Based on your implementation, there should only be one 
            // buffer attached to one sample, so we can always grab the
            // first buffer. You could add some error checking here to make
            // sure the sample has a buffer count that is 1.
            hr = pSample.GetBufferByIndex(0, out pBuffer);
        }

        if(Succeeded(hr))
        {
            // Query the IMFMediaBuffer to see if it implements IMFDXGIBuffer
            pDXGIBuffer = pBuffer as IMFDXGIBuffer;
        }

        if(pDXGIBuffer != null)
        {
            // Got an IMFDXGIBuffer, so we can extract the internal 
            // ID3D11Texture2D and make a new SharpDX.Texture2D wrapper.
            hr = pDXGIBuffer.GetResource(s_IID_ID3D11Texture2D, out pUnkObject);

            // FIX: the recovery block below is now nested so it only runs
            // when a resource was actually obtained from an IMFDXGIBuffer.
            // In the original, a non-DXGI buffer combined with an earlier
            // successful HRESULT would have wrapped the wrong object.
            if(Succeeded(hr))
            {
                // pUnkObject is the native D3D11 Texture2D as a System.Object,
                // but it's unlikely you have an interface definition for
                // ID3D11Texture2D handy, so we can't just cast it.
                // Happily, SharpDX supports wrapping a COM object, which
                // makes things pretty easy.
                SharpDX.ComObject comWrapper = new SharpDX.ComObject(pUnkObject);

                // You might need to query comWrapper for a SharpDX.DXGI.Resource
                // first, then query that for the SharpDX.Direct3D11.Texture2D.
                SharpDX.Direct3D11.Texture2D texture = comWrapper.QueryInterface<SharpDX.Direct3D11.Texture2D>();
                if(texture != null)
                {
                    // Now you can add "texture" back to the allocator's free list
                    ReturnFreeTexture(texture);
                }
            }
        }

        // FIX: the method declares an HResult return type but the original
        // had no return statement (compile error). Report the last HRESULT.
        return hr;
    }
}


在 Sink Writer 的输入媒体类型上设置 MF_SA_D3D_AWARE

我不认为这会导致HRESULT你得到坏事,但无论如何这都不是正确的做法。MF_SA_D3D_AWARE(及其 DX11 对应物MF_SA_D3D11_AWARE)是由IMFTransform对象设置的属性,用于通知您该变换分别支持通过 DX9 或 DX11 进行的图形加速。无需在 Sink Writer 的输入媒体类型上设置此项。


没有对 texNativeObject 调用 SafeRelease

我建议您对 texNativeObject 调用 COMBase.SafeRelease()，否则可能会泄漏内存；即便不泄漏，您也会不必要地延长该 COM 对象的生命周期，直到 GC 替您清理引用计数。


不必要的类型转换

这是上面代码的一部分:

buffer = MFVideoEncoderST.ReinterpretCast<IMF2DBuffer,IMFMediaBuffer>(p2Dbuffer);
int length=0;
if (Succeeded(hr)) hr = (int)p2Dbuffer.GetContiguousLength(out length);
if (Succeeded(hr)) hr = (int)buffer.SetCurrentLength(length);

我不确定您的 ReinterpretCast 函数在做什么，但如果您确实需要在 C# 中执行 QueryInterface 式的转换，可以使用 as 运算符或常规的强制转换。

// Example: a QueryInterface-style cast in C# using the 'as' operator.
// pMediaBuffer is of type IMFMediaBuffer and has been created elsewhere
IMF2DBuffer p2DBuffer = pMediaBuffer as IMF2DBuffer;
if(p2DBuffer != null)
{
    // pMediaBuffer is an IMFMediaBuffer that also implements IMF2DBuffer
}
else
{
    // pMediaBuffer does not implement IMF2DBuffer
}
于 2017-06-07T14:41:10.457 回答
1

第一个问题:IMFDXGIDeviceManager::ResetDevice总是失败。

在我之前回答的评论中与@kripto 合作后,我们诊断出许多其他问题。最大的问题是IMFDXGIDeviceManager为了使硬件 H.264 编码器 MFT 能够接受Texture2D包含在IMFDXGIBuffer. 代码中有一个很难注意到的错误:

// NOTE: this snippet deliberately reproduces the bug dissected below.
// pDevice is a SharpDX.Direct3D11.Device instance
// pDevice.NativePointer is an IntPtr that refers to the native ID3D11Device
// being wrapped by SharpDX.
IMFDXGIDeviceManager pDeviceManager;
object d3dDevice = Marshal.GetIUnknownForObject(pDevice.NativePointer);
HResult hr = MFExtern.MFCreateDXGIDeviceManager(out resetToken, out pDeviceManager);
if(Succeeded(hr))
{
    // The signature of this is:
    // HResult ResetDevice(object d3d11device, int resetToken);
    hr = pDeviceManager.ResetDevice(d3dDevice, resetToken);
}

上面的代码中发生了这样的事情：设备管理器已经创建，但为了让编码器 MFT 能够访问 Texture2D 样本，它需要的是创建这些纹理的同一个 Direct3D 设备的引用。因此，您必须在设备管理器上调用 IMFDXGIDeviceManager::ResetDevice，把您的 Direct3D 设备交给它（更多细节请参阅 ResetDevice 的文档）。SharpDX 只提供指向原生 ID3D11Device 的 IntPtr，而 MediaFoundation.NET 的接口签名要求传入的是一个 object。

看到错误了吗?上面的代码类型检查和编译都很好,但包含一个严重错误。错误是使用Marshal.GetIUnknownForObject而不是Marshal.GetObjectForIUnknown. 有趣的是,因为object可以IntPtr很好地装箱,所以您可以使用完全相反的编组功能,它仍然可以很好地编译。问题是我们正在尝试将 anIntPtr转换为 .NET RCW 内部object,这是ResetDeviceMediaFoundation.NET 所期望的。此错误导致ResetDevice返回E_INVALIDARG而不是正常工作。


第二题:奇怪的编码器输出

第二个问题是英特尔快速同步视频 H.264 编码器 MFT 并不是特别满意,尽管它被正确创建,但在结果文件的开头有一两秒的黑色输出,以及阻塞和前几秒的动作错误,有时一半的视频是灰色的,并且没有显示实际复制的桌面图像。

我想确保实际的 Texture2D 对象被正确地发送到了编码器，因此我编写了一个简单的类，把 Direct3D 11 的 Texture2D 转储为 .png 文件。我把它放在这里供任何需要的人使用——它需要 SharpDX 和 MediaFoundation.NET 才能工作，不过您也可以在循环中使用 CopyMemory（以处理不同的行距/步幅）来去掉对 MF 的依赖。请注意，这个类只针对 DXGI.Format.B8G8R8A8_UNorm 格式的纹理；它也许能处理其他格式的纹理，但输出可能看起来很奇怪。

using System;
using System.Drawing;

namespace ScreenCapture
{
    /// <summary>
    /// Debug helper that copies a GPU texture into a CPU-readable staging
    /// texture and saves its contents to an image file. Only set up for
    /// DXGI.Format.B8G8R8A8_UNorm textures.
    /// </summary>
    class Texture2DDownload : IDisposable
    {
        // Device used to create the staging texture and perform copy/map.
        private SharpDX.Direct3D11.Device m_pDevice;
        // Lazily (re)created staging texture matching the last source texture.
        private SharpDX.Direct3D11.Texture2D m_pDebugTexture;

        public Texture2DDownload(SharpDX.Direct3D11.Device pDevice)
        {
            m_pDevice = pDevice;
        }

        /// <summary>
        /// Compare all the relevant properties of the texture descriptions for both input textures.
        /// </summary>
        /// <param name="texSource">The source texture</param>
        /// <param name="texDest">The destination texture that will have the source data copied into it</param>
        /// <returns>true if the source texture can be copied to the destination, false if their descriptions are incompatible</returns>
        public static bool TextureCanBeCopied(SharpDX.Direct3D11.Texture2D texSource, SharpDX.Direct3D11.Texture2D texDest)
        {
            if(texSource.Description.ArraySize != texDest.Description.ArraySize)
                return false;

            if(texSource.Description.Format != texDest.Description.Format)
                return false;

            if(texSource.Description.Height != texDest.Description.Height)
                return false;

            if(texSource.Description.MipLevels != texDest.Description.MipLevels)
                return false;

            if(texSource.Description.SampleDescription.Count != texDest.Description.SampleDescription.Count)
                return false;

            if(texSource.Description.SampleDescription.Quality != texDest.Description.SampleDescription.Quality)
                return false;

            if(texSource.Description.Width != texDest.Description.Width)
                return false;

            return true;
        }

        /// <summary>
        /// Saves the contents of a <see cref="SharpDX.Direct3D11.Texture2D"/> to a file with name contained in <paramref name="filename"/> using the specified <see cref="System.Drawing.Imaging.ImageFormat"/>.
        /// </summary>
        /// <param name="texture">The <see cref="SharpDX.Direct3D11.Texture2D"/> containing the data to save.</param>
        /// <param name="filename">The filename on disk where the output image should be saved.</param>
        /// <param name="imageFormat">The format to use when saving the output file.</param>
        public void SaveTextureToFile(SharpDX.Direct3D11.Texture2D texture, string filename, System.Drawing.Imaging.ImageFormat imageFormat)
        {
            // If the existing debug texture doesn't exist, or the incoming texture is different than the existing debug texture...
            if(m_pDebugTexture == null || !TextureCanBeCopied(m_pDebugTexture, texture))
            {
                // Dispose of any existing texture
                if(m_pDebugTexture != null)
                {
                    m_pDebugTexture.Dispose();
                }

                // Copy the original texture's description...
                SharpDX.Direct3D11.Texture2DDescription newDescription = texture.Description;

                // Then modify the parameters to create a CPU-readable staging texture
                newDescription.BindFlags = SharpDX.Direct3D11.BindFlags.None;
                newDescription.CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.Read;
                newDescription.OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None;
                newDescription.Usage = SharpDX.Direct3D11.ResourceUsage.Staging;

                // Re-generate the debug texture by copying the new texture's description
                m_pDebugTexture = new SharpDX.Direct3D11.Texture2D(m_pDevice, newDescription);
            }

            // Copy the texture to our debug texture
            m_pDevice.ImmediateContext.CopyResource(texture, m_pDebugTexture);

            // Map the debug texture's resource 0 for read mode
            SharpDX.DataStream data;
            SharpDX.DataBox dbox = m_pDevice.ImmediateContext.MapSubresource(m_pDebugTexture, 0, 0, SharpDX.Direct3D11.MapMode.Read, SharpDX.Direct3D11.MapFlags.None, out data);

            // NOTE(review): the SharpDX.DataStream returned by MapSubresource
            // is IDisposable and is never disposed here — assigning null below
            // does not release it. TODO confirm ownership and dispose it.

            // Create a bitmap that's the same size as the debug texture
            Bitmap b = new Bitmap(m_pDebugTexture.Description.Width, m_pDebugTexture.Description.Height, System.Drawing.Imaging.PixelFormat.Format32bppRgb);

            // Lock the bitmap data to get access to the native bitmap pointer
            System.Drawing.Imaging.BitmapData bd = b.LockBits(new Rectangle(0, 0, b.Width, b.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format32bppRgb);

            // Use the native pointers to do a native-to-native memory copy from the mapped subresource to the bitmap data
            // WARNING: This might totally blow up if you're using a different color format than B8G8R8A8_UNorm, I don't know how planar formats are structured as D3D textures!
            //
            // You can use Win32 CopyMemory to do the below copy if need be, but you have to do it in a loop to respect the Stride and RowPitch parameters in case the texture width
            // isn't on an aligned byte boundary.
            MediaFoundation.MFExtern.MFCopyImage(bd.Scan0, bd.Stride, dbox.DataPointer, dbox.RowPitch, bd.Width * 4, bd.Height);

            // Unlock the bitmap
            b.UnlockBits(bd);

            // Unmap the subresource mapping, ignore the SharpDX.DataStream because we don't need it.
            m_pDevice.ImmediateContext.UnmapSubresource(m_pDebugTexture, 0);
            data = null;

            // Save the bitmap to the desired filename
            b.Save(filename, imageFormat);
            b.Dispose();
            b = null;
        }

        #region IDisposable Support
        private bool disposedValue = false; // To detect redundant calls

        protected virtual void Dispose(bool disposing)
        {
            if(!disposedValue)
            {
                if(disposing)
                {
                }

                if(m_pDebugTexture != null)
                {
                    m_pDebugTexture.Dispose();
                }

                disposedValue = true;
            }
        }

        // TODO: override a finalizer only if Dispose(bool disposing) above has code to free unmanaged resources.
        ~Texture2DDownload() {
            // Do not change this code. Put cleanup code in Dispose(bool disposing) above.
            Dispose(false);
        }

        // This code added to correctly implement the disposable pattern.
        public void Dispose()
        {
            // Do not change this code. Put cleanup code in Dispose(bool disposing) above.
            Dispose(true);
            GC.SuppressFinalize(this);
        }
        #endregion
    }
}

在确认送入编码器的图像没有问题之后，我发现代码在调用 IMFSinkWriter::BeginWriting 之后、发送第一个 IMFSample 之前，没有调用 IMFSinkWriter::SendStreamTick。另外，第一个样本的时间戳也不是零，这共同导致了开头的黑屏输出。为了解决这个问题，我添加了以下代码：

// Existing code to set the sample time and duration
// recordDuration is the current frame time in 100-nanosecond units
// prevRecordingDuration is the frame time of the last frame in
// 100-nanosecond units
sample.SetSampleTime(recordDuration);
sample.SetSampleDuration(recordDuration - prevRecordingDuration);

// The fix is here:
// On the very first frame, send a stream tick so the sink writer treats
// this timestamp as the stream's t = 0, and flag the sample as a
// discontinuity so the encoder begins with a clean keyframe.
if(frames == 0)
{
    sinkWriter.SendStreamTick(streamIndex, recordDuration);
    sample.SetUINT32(MFAttributesClsid.MFSampleExtension_Discontinuity, 1);
}

sinkWriter.WriteSample(streamIndex, sample);
frames++;

通过向 Sink Writer 发送流时刻（stream tick），它便知道此刻的 recordDuration 值应被视为输出视频流中时间 = 0 的点。换句话说，一旦您调用 SendStreamTick 并传入一个帧时间戳，后续所有时间戳都会减去这个初始时间戳。这就是让第一帧样本立刻出现在输出文件中的方法。

此外，每当调用了 SendStreamTick 之后，紧接着提供给 Sink Writer 的样本都必须把 MFSampleExtension_Discontinuity 属性设置为 1。这表示正在发送的样本流中存在一个间断，而传入编码器的这一帧是间断之后的第一帧。这实际上是在告诉编码器用该样本生成一个关键帧，从而避免了我之前看到的拖影和块状失真现象。


结果

实施这些修复后,我测试了该应用程序并以 1920x1080 分辨率和每秒 60 帧的输出实现了全屏捕获。比特率设置为 4096 kbit。对于大多数工作负载,英特尔 i7-4510U 笔记本电脑 CPU 的 CPU 使用率介于 2.5% 和 7% 之间 - 极端运动量会使其高达 10% 左右。通过 SysInternals 的 Process Explorer 的 GPU 利用率在 1% 到 2% 之间。


[1] 我相信其中一些是 Direct3D 9 的遗留物,当时 DirectX API 中没有很好地内置多线程支持,并且每当任何组件(即解码器、渲染器、编码器)使用它时都必须专门锁定设备. 使用 D3D 9 您调用ResetDevice但以后再也不能使用您自己的指向设备的指针。相反,您必须调用LockDevice甚至UnlockDevice在您自己的代码中获取设备指针,因为 MFT 可能在同一时刻正在使用该设备。在 Direct3D 11 中,在 MFT 和控制应用程序中同时使用同一设备似乎没有问题 - 尽管如果发生任何随机崩溃,我建议阅读大量关于如何IMFDXGIDeviceManager::LockDeviceUnlockDevice工作并实施这些以确保设备始终被排他控制。

于 2017-06-14T09:35:10.537 回答