I receive a CMSampleBuffer from my iPhone camera. It is 1920x1080 pixels, but I want it to be 1280x720 pixels.
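For context, the capture output is configured to deliver bi-planar 4:2:0 video-range frames (plane 0 is the Y plane, plane 1 is the interleaved CbCr plane). The setup is roughly the sketch below; it is simplified, and `cameraSession` and the queue label are placeholders rather than my exact code:

import AVFoundation

let videoOutput = AVCaptureVideoDataOutput()
// Ask for NV12-style bi-planar YCbCr frames, which is what the scaling code below assumes.
videoOutput.videoSettings = [
    kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
]
// `self` implements AVCaptureVideoDataOutputSampleBufferDelegate and receives captureOutput(_:didOutput:from:).
videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera.frames"))
cameraSession.addOutput(videoOutput)

I have the following code to perform the scale: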
private var yScaleBuffer: vImage_Buffer = {
    var scaleBuffer: vImage_Buffer = vImage_Buffer()
    let newHeight = 720
    let newWidth = 1280
    scaleBuffer.data = UnsafeMutableRawPointer.allocate(byteCount: Int(newWidth * newHeight * 8), alignment: MemoryLayout<UInt>.size)
    scaleBuffer.width = vImagePixelCount(newWidth)
    scaleBuffer.height = vImagePixelCount(newHeight)
    scaleBuffer.rowBytes = Int(newWidth * 8)
    return scaleBuffer
}()
private var uvScaleBuffer: vImage_Buffer = {
    var scaleBuffer: vImage_Buffer = vImage_Buffer()
    let newHeight = 720
    let newWidth = 1280
    scaleBuffer.data = UnsafeMutableRawPointer.allocate(byteCount: Int(newWidth * newHeight * 8), alignment: MemoryLayout<UInt>.size)
    scaleBuffer.width = vImagePixelCount(newWidth)
    scaleBuffer.height = vImagePixelCount(newHeight)
    scaleBuffer.rowBytes = Int(newWidth * 8)
    return scaleBuffer
}()
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
{
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        print("Error acquiring sample buffer")
        return
    }

    CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))

    // create vImage_Buffer out of CVImageBuffer
    var yInBuff: vImage_Buffer = vImage_Buffer()
    yInBuff.width = UInt(CVPixelBufferGetWidth(imageBuffer))
    yInBuff.height = UInt(CVPixelBufferGetHeight(imageBuffer))
    yInBuff.rowBytes = CVPixelBufferGetBytesPerRow(imageBuffer)
    yInBuff.data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)

    // perform scale of Y plane
    let yError = vImageScale_Planar8(&yInBuff, &yScaleBuffer, nil, 0)
    if yError != kvImageNoError {
        print("Can't scale a buffer")
        return
    }

    // create vImage_Buffer out of CVImageBuffer
    var uvInBuff: vImage_Buffer = vImage_Buffer()
    uvInBuff.width = UInt(CVPixelBufferGetWidth(imageBuffer))
    uvInBuff.height = UInt(CVPixelBufferGetHeight(imageBuffer))
    uvInBuff.rowBytes = CVPixelBufferGetBytesPerRow(imageBuffer)
    uvInBuff.data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1)

    // perform scale of UV plane
    let uvError = vImageScale_CbCr8(&yInBuff, &uvScaleBuffer, nil, 0)
    if uvError != kvImageNoError {
        print("Can't scale a buffer")
        return
    }

    CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))

    var newBuffer: CVPixelBuffer?
    let attributes : [NSObject:AnyObject] = [
        kCVPixelBufferCGImageCompatibilityKey : true as AnyObject,
        kCVPixelBufferCGBitmapContextCompatibilityKey : true as AnyObject
    ]

    var planeBaseAddresses = [UnsafeMutableRawPointer?]()
    planeBaseAddresses.append(&yScaleBuffer.data)
    planeBaseAddresses.append(&uvScaleBuffer.data)

    var planeWidths = [Int]()
    planeWidths.append(Int(yScaleBuffer.width))
    planeWidths.append(Int(uvScaleBuffer.width))

    var planeHeights = [Int]()
    planeHeights.append(Int(yScaleBuffer.height))
    planeHeights.append(Int(uvScaleBuffer.height))

    var planeBytesPerRows = [Int]()
    planeWidths.append(yScaleBuffer.rowBytes)
    planeWidths.append(uvScaleBuffer.rowBytes)

    let status = CVPixelBufferCreateWithPlanarBytes(
        kCFAllocatorDefault,
        1280,
        720,
        kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
        nil,
        0,
        2,
        &planeBaseAddresses,
        &planeWidths,
        &planeHeights,
        &planeBytesPerRows,
        nil,
        nil,
        attributes as CFDictionary?,
        &newBuffer
    )

    guard status == kCVReturnSuccess,
          let b = newBuffer else {
        print("Can't create new CVPixelBuffer")
        return
    }

    // ... Then output the buffer to the screen (this uses a framework which accepts a CVPixelBuffer which I know works)
}
This does not produce any errors, but my output is just green. What is going wrong here?