5

我有一个非常基本的视频录制项目,可以在 Swift 中完美运行,但是移植到 Xamarin 中的空白项目中的相同代码正在生成每隔几秒钟不断跳帧的视频。代码开始ViewDidLoad并通过 a 停止UIButton这是下面的记录代码:

RPScreenRecorder rp = RPScreenRecorder.SharedRecorder;
AVAssetWriter assetWriter;
AVAssetWriterInput videoInput;

public override void ViewDidLoad()
{
    base.ViewDidLoad();
    StartScreenRecording();
}

public void StartScreenRecording()
{
    VideoSettings videoSettings = new VideoSettings();
    NSError wError;
    assetWriter = new AVAssetWriter(videoSettings.OutputUrl, AVFileType.AppleM4A, out wError);
    videoInput = new AVAssetWriterInput(AVMediaType.Video, videoSettings.OutputSettings);

    videoInput.ExpectsMediaDataInRealTime = true;

    assetWriter.AddInput(videoInput);

    if (rp.Available)
    {
        rp.StartCaptureAsync((buffer, sampleType, error) =>
        {
            if (buffer.DataIsReady)
            {

                if (assetWriter.Status == AVAssetWriterStatus.Unknown)
                {

                    assetWriter.StartWriting();

                    assetWriter.StartSessionAtSourceTime(buffer.PresentationTimeStamp);

                }

                if (assetWriter.Status == AVAssetWriterStatus.Failed)
                {
                    return;
                }

                if (sampleType == RPSampleBufferType.Video)
                {
                    if (videoInput.ReadyForMoreMediaData)
                    {
                        videoInput.AppendSampleBuffer(buffer);
                    }
                }

            }

        });
    }

}

public void StopRecording()
{
    rp.StopCapture((error) => {
        if (error == null)
        {
            assetWriter.FinishWriting(() => { });
        }
    });
}

这是 VideoSettings 文件的样子:

public class VideoSettings
{
    public string VideoFilename => "render";
    public string VideoFilenameExt = "mp4";
    public nfloat Width { get; set; }
    public nfloat Height { get; set; }
    public AVVideoCodec AvCodecKey => AVVideoCodec.H264;

    public NSUrl OutputUrl
    {
        get
        {
            return GetFilename(VideoFilename,VideoFilenameExt);
        }
    }

    private NSUrl GetFilename(string filename, string extension)
    {
        NSError error;
        var docs = new NSFileManager().GetUrl(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomain.User, null, true, out error).ToString() + filename + 1 + "." + extension;
        if (error == null)
        {
            return new NSUrl(docs);
        }
        return null;
    }


    public AVVideoSettingsCompressed OutputSettings
    {
        get
        {
            return new AVVideoSettingsCompressed
            {
                Codec = AvCodecKey,
                Width = Convert.ToInt32(UIScreen.MainScreen.Bounds.Size.Width),
                Height = Convert.ToInt32(UIScreen.MainScreen.Bounds.Size.Height)
            };
        }
    }
}
4

1 回答 1

1

TL;DR: You need a try {} finally {} block in order to help the GC to timely release your sample buffers:

try {
    // Do stuff with the buffer.
} finally {
    buffer.Dispose ();
}

Long story:

This is because the GC is not fast enough to realize that the CMSampleBuffer needs to be released and you run out of sample buffers, and it is the reason why you kind of see lag because until a new buffer was available it took the snapshot of that actual frame.

Also don't worry about calling Dispose () Xamarin does the right thing so there is no need for additional checks

public void Dispose ()
{
    this.Dispose (true);
    GC.SuppressFinalize (this);
}

protected virtual void Dispose (bool disposing)
{
    if (this.invalidate.IsAllocated) {
        this.invalidate.Free ();
    }
    if (this.handle != IntPtr.Zero) {
        CFObject.CFRelease (this.handle);
        this.handle = IntPtr.Zero;
    }
}

So your code should look something like this:

if (rp.Available)
{
    // TODO: Also note that we are not using the Async version here
    rp.StartCapture((buffer, sampleType, error) =>
    {
        try {
            if (buffer.DataIsReady) {

                if (assetWriter.Status == AVAssetWriterStatus.Unknown) {

                    assetWriter.StartWriting ();

                    assetWriter.StartSessionAtSourceTime (buffer.PresentationTimeStamp);

                }

                if (assetWriter.Status == AVAssetWriterStatus.Failed) {
                    return;
                }

                if (sampleType == RPSampleBufferType.Video) {
                    if (videoInput.ReadyForMoreMediaData) {
                        videoInput.AppendSampleBuffer (buffer);
                    }
                }

            }
        } finally {
            buffer.Dispose ();
        }


    }, null);
}
于 2018-03-27T23:37:19.517 回答