
I'm using RPScreenRecorder.shared().startCapture to record the screen and an AVAssetWriterInput to encode it into an h264 video file, but that gives me a finished .mp4 directly. I want to get the h264 video frame by frame while the screen recording is being streamed. Is there any way to access the sample buffer data coming from RPScreenRecorder.shared().startCapture? Here is the code. With this I get the whole mp4 file, but I only want the video frames.

import Foundation
import ReplayKit
import AVKit


class ScreenRecorder
{
    var assetWriter:AVAssetWriter!
    var videoInput:AVAssetWriterInput!

    let viewOverlay = WindowUtil()

    let fileNameTxt = "Test"
    let dir = try? FileManager.default.url(for: .documentDirectory,
                                           in: .userDomainMask, appropriateFor: nil, create: true)
    var sampleFileBuffer : String = ""

    //MARK: Screen Recording
    func startRecording(withFileName fileName: String, recordingHandler:@escaping (Error?)-> Void)
    {
        if #available(iOS 11.0, *)
        {

            let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
            assetWriter = try! AVAssetWriter(outputURL: fileURL, fileType:
                AVFileType.mp4)
            let videoOutputSettings: Dictionary<String, Any> = [
                AVVideoCodecKey : AVVideoCodecType.h264,
                AVVideoWidthKey : UIScreen.main.bounds.size.width,
                AVVideoHeightKey : UIScreen.main.bounds.size.height
            ];


            videoInput  = AVAssetWriterInput (mediaType: AVMediaType.video, outputSettings: videoOutputSettings)
            videoInput.expectsMediaDataInRealTime = true
            assetWriter.add(videoInput)

            // If the documents directory was found, we write a text file to it
             let fileURLTxt = dir?.appendingPathComponent(fileNameTxt).appendingPathExtension("txt") 


            RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
//print(sample, bufferType, error)

                recordingHandler(error)

                if CMSampleBufferDataIsReady(sample)
                {
                    if self.assetWriter.status == AVAssetWriterStatus.unknown
                    {
                        self.assetWriter.startWriting()
                        self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
                    }

                    if self.assetWriter.status == AVAssetWriterStatus.failed {
                        print("Error occured, status = \(self.assetWriter.status.rawValue), \(self.assetWriter.error!.localizedDescription) \(String(describing: self.assetWriter.error))")
                        return
                    }

                    if (bufferType == .video)
                    {

                        if self.videoInput.isReadyForMoreMediaData
                        {
                             self.videoInput.append(sample)
                           // self.sampleFileBuffer = self.videoInput as! String
                            // Attempt to dump the raw sample to a text file; this cast fails because a CMSampleBuffer is not a String
                            self.sampleFileBuffer = String(sample as! String)
                            do {

                                try self.sampleFileBuffer.write(to: fileURLTxt!, atomically: true, encoding: .utf8)
                            } catch {
                                print("Failed writing to URL: \(fileURLTxt), Error: " + error.localizedDescription)
                            }


                        }
                    }
                    self.sampleFileBuffer = ""
                }

            }) { (error) in
                recordingHandler(error)

            }
        } else
        {
            // Fallback on earlier versions
        }
    }

    func stopRecording(handler: @escaping (Error?) -> Void)
    {
        if #available(iOS 11.0, *)
        {
            RPScreenRecorder.shared().stopCapture { (error) in
                handler(error)
                self.assetWriter.finishWriting {
                    print(ReplayFileUtil.fetchAllReplays())
                }
            }
        } 
    }


}

2 Answers


In your code, `sample` is a CMSampleBuffer. Call CMSampleBufferGetImageBuffer() to get a CVImageBuffer. To lock the frame buffer, call CVPixelBufferLockBaseAddress(imageBuffer). In my case, imageBuffer has 2 planes, Y and UV. Call CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0) to get the address of the Y plane. Call the same API with planeIndex=1 to get the address of the UV plane.

Once you have the base addresses of the planes, you can read them as uint8*. Call the CVPixelBufferGetXXX APIs to get the width, height, and bytes per row. Don't forget to call CVPixelBufferUnlockBaseAddress.
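
Put together, a minimal sketch of that sequence might look like the following (assuming a bi-planar YUV pixel format, which is what ReplayKit typically delivers; the function name and the per-row read are purely illustrative):

import CoreMedia
import CoreVideo

// Illustrative helper: read the Y and UV planes of a captured frame.
func readPixelPlanes(from sample: CMSampleBuffer) {
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sample) else { return }

    // Lock the buffer before touching its memory, and always unlock it again.
    CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
    defer { CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly) }

    // Plane 0 is Y (luma), plane 1 is interleaved UV (chroma) in bi-planar formats.
    guard let yBase = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0),
          let uvBase = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1) else { return }

    let yPlane = yBase.assumingMemoryBound(to: UInt8.self)
    let uvPlane = uvBase.assumingMemoryBound(to: UInt8.self)

    let width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
    let height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
    let yBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
    let uvBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1)

    // Example: read the first luma byte of every row.
    for row in 0..<height {
        let firstLuma = yPlane[row * yBytesPerRow]
        _ = firstLuma
    }
    _ = (uvPlane, width, uvBytesPerRow)
}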

answered 2018-09-19T23:32:02.050

It turns out there is a very simple way to do this:

import CoreGraphics
import CoreImage
import CoreMedia
import Foundation
import QuartzCore
import UIKit

private func createImage(from sampleBuffer: CMSampleBuffer) -> UIImage? {
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        return nil
    }

    let ciImage = CIImage(cvPixelBuffer: imageBuffer)
    let context = CIContext()

    guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
        return nil
    }

    return UIImage(cgImage: cgImage)
}
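
For reference, this is roughly how that helper could be plugged into the capture handler from the question; displayFrame is a hypothetical stand-in for whatever the preview or streaming code does with each UIImage:

RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
    guard error == nil, bufferType == .video else { return }

    // Convert each video sample buffer to a UIImage and hand it off.
    if let frame = createImage(from: sample) {
        DispatchQueue.main.async {
            displayFrame(frame)   // hypothetical per-frame consumer
        }
    }
}) { (error) in
    if let error = error {
        print("startCapture failed: \(error.localizedDescription)")
    }
}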

Discovered this after wasting an afternoon figuring out how to do it the manual way.

answered 2020-06-07T00:07:26.617