1

事实上,它是多种技术的组合,但我的问题的答案(我认为)最接近 Direct3D 9。我正在挂钩(hook)任意 D3D9 应用程序,在大多数情况下它是一个游戏,并注入我自己的代码来修改 EndScene 函数的行为。后缓冲区被复制到一个表面,该表面设置为指向推送源 DirectShow 过滤器中的位图。过滤器以 25 fps 的速度对位图进行采样,并将视频流式传输到 .avi 文件中。在游戏的屏幕上显示了一个文本叠加层,告诉用户应该停止游戏捕获的热键组合,但这个叠加层不应该出现在录制的视频中。除了一个令人讨厌的事实外,一切都运行得又快又漂亮。在随机的情况下,带有文本叠加层的帧会意外地进入录制的视频。这是一个不想要的伪影,最终用户只想在视频中看到他的游戏玩法,而不是别的。我很想听听是否有人可以分享为什么会发生这种情况的想法。这是 EndScene 挂钩的源代码:

using System;
using SlimDX;
using SlimDX.Direct3D9;
using System.Diagnostics;
using DirectShowLib;
using System.Runtime.InteropServices;

/// <summary>
/// COM interface exposed by the native DirectShow push-source filter.
/// Used by the hook to hand the filter the BMP headers and a raw pointer
/// to the pixel data of the captured render-target surface.
/// </summary>
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
[System.Security.SuppressUnmanagedCodeSecurity]
[Guid("EA2829B9-F644-4341-B3CF-82FF92FD7C20")]

public interface IScene
{
    // Passes the address of the raw image bytes that the filter will sample
    // on every frame. noheaders = true means ptr points at pixel data only
    // (no BITMAPFILEHEADER/BITMAPINFOHEADER prefix).
    unsafe int PassMemoryPtr(void* ptr, bool noheaders);

    // Supplies the filter with a one-time BMP byte buffer; with
    // noheaders = false the buffer includes the bitmap file/info headers,
    // which the filter uses to learn width/height/format of the stream.
    int SetBITMAPINFO([MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)]byte[] ptr, bool noheaders);
}

public class Class1
{
    // Guards _renderTarget and the graph-init state against concurrent EndScene calls.
    object _lockRenderTarget = new object();
    // Overlay text drawn on the game's screen (hotkey hint / capture status).
    public string StatusMess { get; set; }
    // System-memory copy of the back buffer; the DirectShow filter reads its pixels.
    Surface _renderTarget;
    //points to image bytes
    unsafe void* bytesptr;
    //used to store headers AND image bytes
    byte[] bytes;
    // DirectShow graph plumbing, built once on the first captured frame.
    IFilterGraph2 ifg2;
    ICaptureGraphBuilder2 icgb2;
    IBaseFilter push;
    IBaseFilter compressor;
    IScene scene;
    IBaseFilter mux;
    IFileSinkFilter sink;
    IMediaControl media;
    // One-shot flags: init the graph on first grab, clean it up after capture stops.
    bool NeedRunGraphInit = true;
    bool NeedRunGraphClean = true;
    // Holds the BMP stream (headers first, then the locked pixel data).
    DataStream s;
    DataRectangle dr;

    /// <summary>
    /// Replacement for the hooked IDirect3DDevice9::EndScene. Periodically copies
    /// the back buffer into a system-memory surface that a DirectShow push-source
    /// filter samples into an .avi, builds the capture graph on the first grabbed
    /// frame, draws the status/hotkey overlay, then calls the real EndScene.
    /// </summary>
    /// <param name="devicePtr">Raw IDirect3DDevice9 pointer supplied by the hook.</param>
    /// <returns>The HRESULT code of the real device.EndScene() call.</returns>
    unsafe int EndSceneHook(IntPtr devicePtr)
    {
        int hr;

        using (Device device = Device.FromPointer(devicePtr))
        {
            try
            {
                lock (_lockRenderTarget)
                {
                    bool TimeToGrabFrame = false;

                    //....
                    //logic based on elapsed milliseconds deciding if it is time to grab another frame

                    if (TimeToGrabFrame)
                    {
                        //First ensure we have a Surface to render target data into
                        //called only once
                        if (_renderTarget == null)
                        {
                            //Create offscreen surface to use as copy of render target data
                            using (SwapChain sc = device.GetSwapChain(0))
                            {
                                //Att: created in system memory (Pool.SystemMemory), not in video
                                //memory, so the CPU-side DirectShow filter can read its bits.
                                _renderTarget = Surface.CreateOffscreenPlain(device, sc.PresentParameters.BackBufferWidth, sc.PresentParameters.BackBufferHeight, sc.PresentParameters.BackBufferFormat, Pool.SystemMemory);
                            }
                        }

                        using (Surface backBuffer = device.GetBackBuffer(0, 0))
                        {
                            //The following line is where main action takes place:
                            //Direct3D 9 back buffer gets copied to Surface _renderTarget,
                            //which has been connected by references to DirectShow's
                            //bitmap capture filter.
                            //NOTE(review): some D3D9 apps do not refresh the back buffer on
                            //every EndScene, so a buffer still carrying the previous call's
                            //text overlay can be grabbed here and leak into the recording —
                            //tag frames and skip stale ones before handing them to the filter.
                            device.GetRenderTargetData(backBuffer, _renderTarget);

                            if (NeedRunGraphInit) //ran only once
                            {
                                ifg2 = (IFilterGraph2)new FilterGraph();
                                icgb2 = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
                                icgb2.SetFiltergraph(ifg2);
                                push = (IBaseFilter)new PushSourceFilter();
                                scene = (IScene)push;

                                //this way we get bitmapfile and bitmapinfo headers
                                //ToStream is slow, but run it only once to get the headers
                                s = Surface.ToStream(_renderTarget, ImageFileFormat.Bmp);
                                bytes = new byte[s.Length];

                                s.Read(bytes, 0, (int)s.Length);
                                hr = scene.SetBITMAPINFO(bytes, false);

                                //we just supplied the header to the PushSource
                                //filter. Let's pass reference to
                                //just image bytes from LockRectangle
                                dr = _renderTarget.LockRectangle(LockFlags.None);
                                s = dr.Data;
                                _renderTarget.UnlockRectangle();
                                //NOTE(review): the pointer is used after UnlockRectangle; this
                                //relies on the SystemMemory surface keeping its bits in place —
                                //confirm this holds for all drivers.
                                bytesptr = s.DataPointer.ToPointer();
                                hr = scene.PassMemoryPtr(bytesptr, true);

                                //continue building graph
                                ifg2.AddFilter(push, "MyPushSource");

                                //BUGFIX: the original literal "C:\foo.avi" contained the
                                //escape sequence \f (form feed), producing the path
                                //"C:<FF>oo.avi"; a verbatim string preserves the backslash.
                                icgb2.SetOutputFileName(MediaSubType.Avi, @"C:\foo.avi", out mux, out sink);

                                icgb2.RenderStream(null, null, push, null, mux);

                                media = (IMediaControl)ifg2;

                                media.Run();

                                NeedRunGraphInit = false;
                                NeedRunGraphClean = true;

                                StatusMess = "now capturing, press shift-F11 to stop";
                            } //end if NeedRunGraphInit
                        } // end using backbuffer
                    } //  end if Time to grab frame
                } //end lock
            } // end try

            //It is usually thrown when the user makes game window inactive
            //or it is thrown deliberately when time is up, or the user pressed F11 and
            //it resulted in stopping a capture.
            //If it is thrown for another reason, it is still a good
            //idea to stop recording and free the graph
            catch (Exception ex)
            {
                //..
                //stop the DirectShow graph and cleanup
                //NOTE(review): cleanup code omitted in this listing; at minimum the
                //exception should be logged here rather than silently discarded.
            } // end catch

            //draw overlay with the capture status / stop-hotkey hint
            using (SlimDX.Direct3D9.Font font = new SlimDX.Direct3D9.Font(device, new System.Drawing.Font("Times New Roman", 26.0f, FontStyle.Bold)))
            {
                font.DrawString(null, StatusMess, 20, 100, System.Drawing.Color.FromArgb(255, 255, 255, 255));
            }

            //forward to the real EndScene and return its HRESULT to the caller
            return device.EndScene().Code;
        } // end using device
    } //end EndSceneHook
4

1 回答 1

0

有时会发生这种情况,如果有人感兴趣,我终于自己找到了这个问题的答案。事实证明,某些 Direct3D9 应用程序中的后缓冲区不一定会在每次调用挂钩的 EndScene 时刷新。因此,有时带有来自上一次 EndScene 钩子调用的文本叠加层的后缓冲区被传递给负责收集输入帧的 DirectShow 源过滤器。我开始用已知 RGB 值的微小 3 像素叠加来标记每一帧,并在将帧传递给 DirectShow 过滤器之前检查这个虚拟叠加是否仍然存在。如果该叠加仍然存在,则传递先前缓存的帧而不是当前帧。这种方法有效地从 DirectShow 滤镜图(filter graph)所录制的视频中消除了文本叠加层。

于 2012-05-10T05:25:39.690 回答