在开发视频编辑应用程序时,我(和不同机器上的一些用户)注意到使用 ProRes422 编解码器编码的奇数宽度的 QuickTime 文件有一些不寻常之处。在标准 AVPlayer 中打开时,它们的右边缘包含一个伪影(一条粉红色的细线)。使用标准 AVAssetReader 读取文件得到的 CMSampleBuffer 中也存在相同的伪影。偶数宽度的视频或使用 ProRes4444 编码器时不会发生这种情况。这是它的样子。

我尝试在其他应用程序(Elmedia Player、Camera Bag Pro、VLC)中打开这两个视频,我测试的每个非 Apple 应用程序在呈现奇数宽度的视频时都带有该伪影。标准的 QuickTime Player 不显示该伪影,将文件拖放到 Safari 中时也不显示。我尝试过的其他 Apple 应用程序(照片、预览、Apple TV)也没有粉红色的线条。
我真的很想知道为什么会发生这种情况,以及如果 ProRes422 需要一些特殊处理,如何才能避免该伪影:
- 在使用 AVAssetReader 读取视频的 CMSampleBuffer 时;
- 以及在标准 AVPlayer 控件中播放视频时。
可以使用在 macOS Xcode Playground 中运行的以下代码生成上述屏幕截图中的偶数和奇数宽度视频:
// Produce two QuickTime MOV (ProRes422) files of odd and even width. Odd one will contain
// a pink line artifact on the right edge in some players. Even one won't. Why?
import AVFoundation
import CoreImage
import CoreMedia
// Even- and odd-width frame sizes: only the odd-width ProRes422 output
// shows the pink-line artifact on its right edge.
let sizeEven = CGSize(width: 500, height: 250)
let sizeOdd = CGSize(width: 501, height: 250)
// Output destinations for the two test movies.
let urlEven = URL(fileURLWithPath: "/Users/ianbytchek/Downloads/blue-even.mov")
let urlOdd = URL(fileURLWithPath: "/Users/ianbytchek/Downloads/blue-odd.mov")
/// Writes a ten-frame, one-second QuickTime movie (ProRes422) of solid blue
/// frames at `size` to `url`, replacing any file already at that path.
func createQuickTime(url: URL, size: CGSize) {
    try? FileManager.default.removeItem(at: url)
    let settings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.proRes422,
        AVVideoWidthKey: size.width,
        AVVideoHeightKey: size.height,
    ]
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
    let writer = AVAssetWriter(url: url, fileType: .mov, inputs: [input])
    precondition(writer.startWriting())
    writer.startSession(atSourceTime: .zero)
    // One solid-blue frame every 0.1 s.
    for frame in 0 ..< 10 {
        // Poll until the input is willing to take another sample.
        while !input.isReadyForMoreMediaData { Thread.sleep(forTimeInterval: 1 / 100) }
        let blue = CIImage.blue.cropped(to: CGRect(origin: .zero, size: size))
        let sample = CMSampleBuffer.create(size: size, image: blue, timestamp: Double(frame) / 10)
        precondition(input.append(sample))
    }
    input.markAsFinished()
    // Block until the writer has flushed the movie to disk.
    let done = DispatchSemaphore(value: 0)
    writer.finishWriting(completionHandler: { done.signal() })
    done.wait()
}
// Generate both movies; compare the right edges of the two results in AVPlayer.
createQuickTime(url: urlOdd, size: sizeOdd)
createQuickTime(url: urlEven, size: sizeEven)
extension AVAssetWriter {
    /// Creates a writer for `url`/`fileType` and attaches every input up front,
    /// trapping if construction fails or the writer refuses an input.
    convenience init(url: URL, fileType: AVFileType, inputs: [AVAssetWriterInput]) {
        try! self.init(url: url, fileType: fileType)
        for input in inputs {
            precondition(self.canAdd(input))
            self.add(input)
        }
    }
}
extension CMSampleBuffer {
    /// Shared Core Image context. Creating a `CIContext` is expensive, so one
    /// instance is reused instead of allocating a new context for every frame.
    private static let ciContext = CIContext()

    /// Wraps `imageBuffer` in a ready-to-render sample buffer carrying the
    /// given timing. Traps with a diagnostic if Core Media reports an error.
    static func create(imageBuffer: CVImageBuffer, sampleTiming: CMSampleTimingInfo) -> CMSampleBuffer {
        let formatDescription = CMVideoFormatDescription.create(imageBuffer: imageBuffer)
        var sampleTiming = sampleTiming
        var sampleBuffer: CMSampleBuffer?
        let status = CMSampleBufferCreateReadyWithImageBuffer(allocator: nil, imageBuffer: imageBuffer, formatDescription: formatDescription, sampleTiming: &sampleTiming, sampleBufferOut: &sampleBuffer)
        // Surface the OSStatus at the call site rather than failing later on a
        // bare force-unwrap with no indication of what went wrong.
        precondition(status == noErr, "CMSampleBufferCreateReadyWithImageBuffer failed: \(status)")
        return sampleBuffer!
    }

    /// Convenience overload: builds the timing info from a timestamp in
    /// seconds (nanosecond timescale, no explicit duration).
    static func create(imageBuffer: CVImageBuffer, timestamp: Double) -> CMSampleBuffer {
        let scale = CMTimeScale(NSEC_PER_SEC)
        let presentationTimestamp = CMTime(value: CMTimeValue(timestamp * Double(scale)), timescale: scale)
        let sampleTiming = CMSampleTimingInfo(duration: CMTime.invalid, presentationTimeStamp: presentationTimestamp, decodeTimeStamp: CMTime.invalid)
        return self.create(imageBuffer: imageBuffer, sampleTiming: sampleTiming)
    }

    /// Renders `image` into a fresh 32ARGB pixel buffer of `size` (sRGB) and
    /// wraps it in a sample buffer stamped with `timestamp` seconds.
    static func create(size: CGSize, image: CIImage, timestamp: Double) -> CMSampleBuffer {
        let colorSpace = CGColorSpace(name: CGColorSpace.sRGB)!
        let pixelBuffer = CVPixelBuffer.create(size: size, pixelFormat: kCMPixelFormat_32ARGB, attributes: [kCVImageBufferCGColorSpaceKey: colorSpace])
        // NOTE(review): only `image.extent` (the declared width) is rendered.
        // For odd widths the buffer's extended/padding pixels to the right are
        // left uninitialized, which looks like a plausible source of the
        // right-edge artifact after 4:2:2 chroma subsampling — TODO confirm.
        self.ciContext.render(image, to: pixelBuffer, bounds: image.extent, colorSpace: colorSpace)
        return self.create(imageBuffer: pixelBuffer, timestamp: timestamp)
    }
}
extension CMVideoFormatDescription {
    /// Builds a video format description matching `imageBuffer`. Traps with the
    /// failing OSStatus instead of crashing later on an unexplained unwrap.
    static func create(imageBuffer: CVImageBuffer) -> CMVideoFormatDescription {
        var formatDescription: CMVideoFormatDescription?
        let status = CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: imageBuffer, formatDescriptionOut: &formatDescription)
        precondition(status == noErr, "CMVideoFormatDescriptionCreateForImageBuffer failed: \(status)")
        return formatDescription!
    }
}
extension CVPixelBuffer {
    /// Allocates a pixel buffer of `size`, defaulting to 32ARGB. Traps with the
    /// failing CVReturn code instead of crashing later on a bare unwrap.
    /// - Parameters:
    ///   - pixelFormat: Pixel format; `kCVPixelFormatType_32ARGB` when `nil`.
    ///   - attributes: Optional pixel buffer attributes passed straight through
    ///     to `CVPixelBufferCreate`.
    static func create(size: CGSize, pixelFormat: OSType? = nil, attributes: [CFString: Any]? = nil) -> CVPixelBuffer {
        var pixelBuffer: CVPixelBuffer?
        let status = CVPixelBufferCreate(nil, Int(size.width), Int(size.height), pixelFormat ?? kCVPixelFormatType_32ARGB, attributes as CFDictionary?, &pixelBuffer)
        precondition(status == kCVReturnSuccess, "CVPixelBufferCreate failed: \(status)")
        return pixelBuffer!
    }
}