I receive a CMSampleBuffer from the iPhone's front camera. It is currently 1920x1080, and I want to scale it down to 1280x720. I want to use the vImageScale functions, but I can't get them working correctly. The camera's pixel format is kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, so I tried the following, but it outputs an incorrect, strangely green image:

private var scaleBuffer: vImage_Buffer = {
  var scaleBuffer: vImage_Buffer = vImage_Buffer()
  let newHeight = 720
  let newWidth = 1280
  scaleBuffer.data = UnsafeMutableRawPointer.allocate(byteCount: Int(newWidth * newHeight * 4), alignment: MemoryLayout<UInt>.size)
  scaleBuffer.width = vImagePixelCount(newWidth)
  scaleBuffer.height = vImagePixelCount(newHeight)
  scaleBuffer.rowBytes = Int(newWidth * 4)
  return scaleBuffer
}()

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

  guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
    return
  }

  CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))

  // create vImage_Buffer out of CVImageBuffer
  var inBuff: vImage_Buffer = vImage_Buffer()
  inBuff.width = UInt(CVPixelBufferGetWidth(imageBuffer))
  inBuff.height = UInt(CVPixelBufferGetHeight(imageBuffer))
  inBuff.rowBytes = CVPixelBufferGetBytesPerRow(imageBuffer)
  inBuff.data = CVPixelBufferGetBaseAddress(imageBuffer)

  // perform scale
  var err = vImageScale_CbCr8(&inBuff, &scaleBuffer, nil, 0)
  if err != kvImageNoError {
      print("Can't scale a buffer")
      return
  }
  CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))

  var newBuffer: CVPixelBuffer?
  let attributes : [NSObject:AnyObject] = [
    kCVPixelBufferCGImageCompatibilityKey : true as AnyObject,
    kCVPixelBufferCGBitmapContextCompatibilityKey : true as AnyObject
  ]

  let status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                  Int(scaleBuffer.width), Int(scaleBuffer.height),
                                                  kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, scaleBuffer.data,
                                                  Int(scaleBuffer.width) * 4,
                                                  nil, nil,
                                                  attributes as CFDictionary?, &newBuffer)

  guard status == kCVReturnSuccess,
        let b = newBuffer else {
    return
  }

  // Do something with the buffer to output it
}

What is going wrong here? Looking at this answer, it seems I need to scale the "Y" and "UV" planes separately. How can I do that efficiently, and then combine them back into a single CVPixelBuffer?

1 Answer

The imageBuffer returned by CMSampleBufferGetImageBuffer actually contains two discrete planes: a luminance (Y) plane and a chrominance (CbCr) plane (note that for 4:2:0 content, the chroma plane is half the size of the luma plane in each dimension). This is discussed in this sample code project.
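As a quick sanity check (not from the original answer), you can confirm the biplanar layout at runtime; pixelBuffer here is the CVPixelBuffer from the capture callback, locked with CVPixelBufferLockBaseAddress:

print(CVPixelBufferGetPlaneCount(pixelBuffer))       // 2 for 420YpCbCr8BiPlanar
print(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0),
      CVPixelBufferGetHeightOfPlane(pixelBuffer, 0)) // 1920 1080 (luma)
print(CVPixelBufferGetWidthOfPlane(pixelBuffer, 1),
      CVPixelBufferGetHeightOfPlane(pixelBuffer, 1)) // 960 540 (chroma pairs)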

That gets you almost there. I don't have experience with Core Video's CVPixelBufferCreateWithBytes, but the code below will create the scaled YpCbCr buffers and convert them to an interleaved ARGB buffer.
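One detail the code leaves implicit is the infoYpCbCrToARGB conversion record passed to vImageConvert_420Yp8_CbCr8ToARGB8888; it has to be generated once before the conversion. A minimal sketch, assuming BT.601 video-range input (which is what kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange carries):

var infoYpCbCrToARGB = vImage_YpCbCrToARGB()

// Video-range 8-bit YpCbCr: Y in [16, 235], CbCr in [16, 240].
var pixelRange = vImage_YpCbCrPixelRange(Yp_bias: 16,
                                         CbCr_bias: 128,
                                         YpRangeMax: 235,
                                         CbCrRangeMax: 240,
                                         YpMax: 235,
                                         YpMin: 16,
                                         CbCrMax: 240,
                                         CbCrMin: 16)

vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
                                              &pixelRange,
                                              &infoYpCbCrToARGB,
                                              kvImage420Yp8_CbCr8,
                                              kvImageARGB8888,
                                              vImage_Flags(kvImageNoFlags))

With that record in place, the scaling and conversion looks like this: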

// pixelBuffer must be locked with CVPixelBufferLockBaseAddress before
// the plane base addresses are read.
let lumaBaseAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0)
let lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0)
let lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0)
let lumaRowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0)

var sourceLumaBuffer = vImage_Buffer(data: lumaBaseAddress,
                                     height: vImagePixelCount(lumaHeight),
                                     width: vImagePixelCount(lumaWidth),
                                     rowBytes: lumaRowBytes)

let chromaBaseAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1)
let chromaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1)
let chromaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1)
let chromaRowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1)

var sourceChromaBuffer = vImage_Buffer(data: chromaBaseAddress,
                                       height: vImagePixelCount(chromaHeight),
                                       width: vImagePixelCount(chromaWidth),
                                       rowBytes: chromaRowBytes)

// The destination buffers here are 1/4 of the source size; for the
// question's 1920x1080 -> 1280x720 target, scale both planes by 2/3 instead.
var destLumaBuffer = try! vImage_Buffer(size: CGSize(width: Int(sourceLumaBuffer.width / 4),
                                                     height: Int(sourceLumaBuffer.height / 4)),
                                        bitsPerPixel: 8)

var destChromaBuffer = try! vImage_Buffer(size: CGSize(width: Int(sourceChromaBuffer.width / 4),
                                                       height: Int(sourceChromaBuffer.height / 4)),
                                          bitsPerPixel: 8 * 2)

// Scale the interleaved two-channel chroma plane and the single-channel
// luma plane separately.
vImageScale_CbCr8(&sourceChromaBuffer, &destChromaBuffer, nil, 0)
vImageScale_Planar8(&sourceLumaBuffer, &destLumaBuffer, nil, 0)

// Interleaved ARGB8888 destination at the scaled luma size.
var argbBuffer = try! vImage_Buffer(size: destLumaBuffer.size,
                                    bitsPerPixel: 8 * 4)

vImageConvert_420Yp8_CbCr8ToARGB8888(&destLumaBuffer,
                                     &destChromaBuffer,
                                     &argbBuffer,
                                     &infoYpCbCrToARGB,
                                     nil,
                                     255,
                                     vImage_Flags(kvImagePrintDiagnosticsToConsole))

// Use argbBuffer here (for example, to create a CGImage) before
// releasing the intermediate buffers.
destLumaBuffer.free()
destChromaBuffer.free()
argbBuffer.free()
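The question also asked how to combine the scaled planes back into a single CVPixelBuffer, which the code above doesn't cover. One approach (a sketch, not from the original answer; it has to run before the free() calls above) is to create a new biplanar buffer and copy each scaled plane into it row by row, because the new buffer's rows may be padded:

var scaledPixelBuffer: CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                 Int(destLumaBuffer.width),
                                 Int(destLumaBuffer.height),
                                 kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                 nil,
                                 &scaledPixelBuffer)

if status == kCVReturnSuccess, let scaled = scaledPixelBuffer {
    CVPixelBufferLockBaseAddress(scaled, [])

    // Plane 0 is luma, plane 1 is interleaved CbCr.
    for (plane, source) in [(0, destLumaBuffer), (1, destChromaBuffer)] {
        let destBase = CVPixelBufferGetBaseAddressOfPlane(scaled, plane)!
        let destRowBytes = CVPixelBufferGetBytesPerRowOfPlane(scaled, plane)
        // Copy row by row; source and destination rowBytes can differ.
        for row in 0..<Int(source.height) {
            memcpy(destBase + row * destRowBytes,
                   source.data + row * source.rowBytes,
                   min(source.rowBytes, destRowBytes))
        }
    }

    CVPixelBufferUnlockBaseAddress(scaled, [])
    // scaled now holds the 420YpCbCr8BiPlanarVideoRange result.
}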
answered 2021-07-08