Here's an app I wrote that streams live video from one iOS device to another:
https://app.box.com/s/94dcm9qjk8giuar08305qspdbe0pc784
Built with Xcode 9; runs on iOS 11.
Tap the camera icon on either of the two devices to start streaming video to the other.
By the way, it sounds like your grounding in video playback on iOS isn't very solid. There's really no point in using MPMoviePlayerController, or MP-anything for that matter. Once you realize that and put your effort into AVFoundation instead, it will serve you very well.
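To give a sense of scale: a minimal AVFoundation playback sketch looks like this (videoURL and the hosting view are placeholders, not from the app above):

    #import <AVFoundation/AVFoundation.h>

    // Assumes videoURL is an NSURL to a local or remote asset.
    AVPlayer *player = [AVPlayer playerWithURL:videoURL];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:playerLayer];
    [player play];

That's the whole thing; everything MPMoviePlayerController did, and far more, is reachable from AVPlayer and its friends.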
Here's the relevant part of the event handler for the NSStream subclass NSInputStream:
    case NSStreamEventHasBytesAvailable: {
        NSLog(@"NSStreamEventHasBytesAvailable");
        // Buffer for the incoming chunk (an array of bytes, not of pointers).
        uint8_t mbuf[DATA_LENGTH];
        mlen = [(NSInputStream *)stream read:mbuf maxLength:DATA_LENGTH];
        NSLog(@"mlen == %lu", (unsigned long)mlen);
        [mdata appendBytes:(const void *)mbuf length:mlen];
        NSLog(@"mdata length == %lu", (unsigned long)mdata.length);
        // A short read marks the end of a frame: the sender writes each
        // JPEG in DATA_LENGTH-sized chunks, so only the last chunk is short.
        if (mlen < DATA_LENGTH) {
            NSLog(@"displayImage");
            UIImage *image = [UIImage imageWithData:mdata];
            // Layer contents must be set on the main thread.
            dispatch_async(dispatch_get_main_queue(), ^{
                [self.peerConnectionViewController.view.subviews[0].layer setContents:(__bridge id)image.CGImage];
            });
            // Reset state for the next frame.
            mlen = DATA_LENGTH;
            mdata = [[NSMutableData alloc] init];
        }
    } break;
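The handler above assumes the input stream has already been opened and scheduled on a run loop. If the streams come from Multipeer Connectivity (a guess based on the peerConnectionViewController name; any NSStream pair works the same way), the wiring might look like this sketch:

    #import <MultipeerConnectivity/MultipeerConnectivity.h>

    // Sender side: open an output stream to a connected peer.
    NSError *error = nil;
    self.outputStream = [session startStreamWithName:@"video"
                                              toPeer:session.connectedPeers.firstObject
                                               error:&error];
    [self.outputStream setDelegate:self];
    [self.outputStream scheduleInRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
    [self.outputStream open];

    // Receiver side: the MCSessionDelegate hands over the matching input stream.
    - (void)session:(MCSession *)session didReceiveStream:(NSInputStream *)stream
           withName:(NSString *)streamName fromPeer:(MCPeerID *)peerID {
        [stream setDelegate:self];
        [stream scheduleInRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
        [stream open];
    }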
And here's the event handler for your video output, whether it comes from the camera or from a video file:
    - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {
        // Render the pixel buffer into a CGImage, then compress it as JPEG.
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);
        UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp];
        CGImageRelease(newImage);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        NSData *data = UIImageJPEGRepresentation(image, 0.25);

        // Write the JPEG to the output stream in DATA_LENGTH-sized chunks.
        // The recursion is driven through the main queue so each write is
        // deferred to the run loop rather than blocking this capture callback.
        __block BOOL baseCaseCondition = NO; // obviously this should be data driven, not hardcoded
        __block NSInteger _len = DATA_LENGTH;
        __block NSInteger _byteIndex = 0;
        typedef void (^RecursiveBlock)(void (^)());
        RecursiveBlock aRecursiveBlock;
        aRecursiveBlock = ^(RecursiveBlock block) {
            NSLog(@"Block called...");
            baseCaseCondition = (data.length > 0 && _byteIndex < data.length);
            if (baseCaseCondition && block)
            {
                // Write at most DATA_LENGTH bytes; the final chunk is whatever remains.
                _len = (data.length - _byteIndex) < DATA_LENGTH ? (data.length - _byteIndex) : DATA_LENGTH;
                NSLog(@"START | byteIndex: %ld/%lu writing len: %ld", (long)_byteIndex, (unsigned long)data.length, (long)_len);
                uint8_t bytes[_len]; // an array of bytes, not of pointers
                [data getBytes:bytes range:NSMakeRange(_byteIndex, _len)];
                NSInteger written = [self.outputStream write:bytes maxLength:_len];
                if (written <= 0) return; // stream error or no space; stop recursing
                _byteIndex += written;
                NSLog(@"END | byteIndex: %ld/%lu wrote len: %ld", (long)_byteIndex, (unsigned long)data.length, (long)written);
                dispatch_async(dispatch_get_main_queue(), ^{
                    block(block);
                });
            }
        };
        if (self.outputStream.hasSpaceAvailable)
            aRecursiveBlock(aRecursiveBlock);
    }
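For completeness: that delegate method only fires once a capture session is feeding it. A minimal sketch of the session setup (the queue name and session preset are my assumptions, not taken from the app):

    #import <AVFoundation/AVFoundation.h>

    AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
    captureSession.sessionPreset = AVCaptureSessionPresetMedium;

    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
    if (input) [captureSession addInput:input];

    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    // BGRA matches the kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
    // bitmap context used in captureOutput:didOutputSampleBuffer:fromConnection: above.
    videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    [videoOutput setSampleBufferDelegate:self
                                   queue:dispatch_queue_create("video.capture", DISPATCH_QUEUE_SERIAL)];
    [captureSession addOutput:videoOutput];

    [captureSession startRunning];

Pinning the pixel format to BGRA is what keeps the bitmap-context parameters in the delegate valid for every frame.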