
I am trying to display a video built from a composition of both video and audio. However, I have a problem: the player status never reaches AVPlayerStatusReadyToPlay.

If I put either the video asset or the audio asset directly into the player item, it works. So I know there is nothing wrong with the assets themselves.
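For reference, the direct, single-asset setup that does work looks roughly like this (a minimal sketch inside the same view controller; someURL is a placeholder, not the exact code from my project):

    // Works: a player item built directly from one asset (video or audio).
    // someURL: placeholder for the same video or audio URL used below.
    AVURLAsset *singleAsset = [AVURLAsset URLAssetWithURL:someURL options:nil];
    AVPlayerItem *singleItem = [AVPlayerItem playerItemWithAsset:singleAsset];
    AVPlayer *singlePlayer = [AVPlayer playerWithPlayerItem:singleItem];
    // singlePlayer.status reaches AVPlayerStatusReadyToPlay shortly afterwards.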

Here is my code:

    - (void)loadPlayer {
        NSURL *videoURL = **;
        AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];

        NSURL *audioURL = **;
        AVURLAsset *audioAsset = [AVURLAsset URLAssetWithURL:audioURL options:nil];

        // Load the duration of each asset asynchronously; whichever completion
        // handler finishes second builds the composition.
        NSArray *keys = [NSArray arrayWithObject:@"duration"];
        [videoAsset loadValuesAsynchronouslyForKeys:keys completionHandler:^() {

            NSError *error = nil;
            AVKeyValueStatus durationStatus = [videoAsset statusOfValueForKey:@"duration" error:&error];

            switch (durationStatus) {
                case AVKeyValueStatusLoaded:
                    _videoDuration = videoAsset.duration;
                    if (_audioDuration.flags == kCMTimeFlags_Valid) {
                        [self loadPlayWithVideoAsset:videoAsset withDuration:_videoDuration andAudioAsset:audioAsset withDuration:_audioDuration];
                    }
                    break;
                default:
                    // Other statuses (failed, cancelled, ...) are ignored here.
                    break;
            }
        }];

        [audioAsset loadValuesAsynchronouslyForKeys:keys completionHandler:^() {

            NSError *error = nil;
            AVKeyValueStatus durationStatus = [audioAsset statusOfValueForKey:@"duration" error:&error];

            switch (durationStatus) {
                case AVKeyValueStatusLoaded:
                    _audioDuration = audioAsset.duration;
                    if (_videoDuration.flags == kCMTimeFlags_Valid) {
                        [self loadPlayWithVideoAsset:videoAsset withDuration:_videoDuration andAudioAsset:audioAsset withDuration:_audioDuration];
                    }
                    break;
                default:
                    // Other statuses (failed, cancelled, ...) are ignored here.
                    break;
            }
        }];
    }

    - (void)loadPlayWithVideoAsset:(AVURLAsset *)videoAsset withDuration:(CMTime)videoDuration andAudioAsset:(AVURLAsset *)audioAsset withDuration:(CMTime)audioDuration {

        AVMutableComposition *composition = [AVMutableComposition composition];

        // Video
        AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
        NSError *videoError = nil;
        if (![compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
                                            ofTrack:videoTrack
                                             atTime:kCMTimeZero
                                              error:&videoError]) {
            NSLog(@"videoError: %@", videoError);
        }

        // Audio
        AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        AVAssetTrack *audioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] lastObject];
        NSError *audioError = nil;
        if (![compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioDuration)
                                            ofTrack:audioTrack
                                             atTime:kCMTimeZero
                                              error:&audioError]) {
            NSLog(@"audioError: %@", audioError);
        }

        // Pad the shorter track with empty time so both tracks end together.
        NSInteger compare = CMTimeCompare(videoDuration, audioDuration);
        if (compare == 1) {
            // The video is longer
            CMTime timeDiff = CMTimeSubtract(videoDuration, audioDuration);
            [compositionAudioTrack insertEmptyTimeRange:CMTimeRangeMake(audioDuration, timeDiff)];
        }
        else {
            CMTime timeDiff = CMTimeSubtract(audioDuration, videoDuration);
            [compositionVideoTrack insertEmptyTimeRange:CMTimeRangeMake(videoDuration, timeDiff)];
        }

        AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:composition];
        self.mPlayer = [AVPlayer playerWithPlayerItem:playerItem];
        self.mPlaybackView = [[AVPlayerPlaybackView alloc] initWithFrame:CGRectZero];
        [self.view addSubview:self.mPlaybackView];
        [self.mPlayer addObserver:self forKeyPath:@"status" options:0 context:AVPlayerPlaybackViewControllerStatusObservationContext];
    }
    - (void)observeValueForKeyPath:(NSString *)path ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
        if (self.mPlayer.status == AVPlayerStatusReadyToPlay) {
            [self.mPlaybackView setPlayer:self.mPlayer];
            isReadyToPlay = YES;
            _playVideoBtn.hidden = NO;
        }
    }

    - (void)playVideo {
        if (YES || isReadyToPlay) {
            [self.mPlayer play];
        }
    }

1 Answer


In my experience, AVPlayer only works with an AVMutableComposition when the assets/videos are bundled with the app. If the video assets live on the network, AVPlayer will not play the AVMutableComposition, even though the AVPlayerItem and AVPlayer report their status as ready to play.
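One way to sanity-check this (a rough sketch; the file names below are hypothetical placeholders, not files from the project) is to point both AVURLAssets at copies of the media shipped in the app bundle and run the exact same composition code from the question:

    // Hypothetical local copies of the same media, added to the app bundle.
    NSURL *videoURL = [[NSBundle mainBundle] URLForResource:@"sample_video" withExtension:@"mp4"];
    NSURL *audioURL = [[NSBundle mainBundle] URLForResource:@"sample_audio" withExtension:@"m4a"];

    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
    AVURLAsset *audioAsset = [AVURLAsset URLAssetWithURL:audioURL options:nil];
    // Feed these into the same loadPlayer / composition code as above; if the
    // status now reaches AVPlayerStatusReadyToPlay, the network-hosted assets were the issue.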

Answered 2014-12-02T16:12:32.730