I am using ELCImagePicker to select multiple videos from the library, and I get the error "The connection to assetsd was interrupted or assetsd died" when I try to export several recorded videos picked from the library. However, if I select only downloaded videos with ELCImagePicker, or pick those same recorded videos with UIImagePicker, everything works fine. Is there any solution to this kind of problem? (One workaround I am considering is sketched after my code below.)

My code:

- (void)elcImagePickerController:(ELCImagePickerController *)picker didFinishPickingMediaWithInfo:(NSArray *)info
{
    [self dismissViewControllerAnimated:YES completion:nil];

    for (NSDictionary *dict in info) {
        // The media type is a string constant, so compare with isEqualToString:, not ==
        if ([[dict objectForKey:UIImagePickerControllerMediaType] isEqualToString:ALAssetTypeVideo]) {
            if ([dict objectForKey:UIImagePickerControllerOriginalImage]) {

                videoUrl = [dict objectForKey:UIImagePickerControllerReferenceURL];
                [self InsertVideoAsset];
            }
        }
    }
    [self GetMargedVideo];
}
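For the assetsd error above, one workaround I am considering (not verified yet) is to stop reading the assets-library:// reference URLs directly while composing, and instead copy each picked asset into the app sandbox first, one export at a time, then build the composition from the local file URLs. A rough sketch; exportPickedAsset:completion: is just a name I made up, not an existing method of my project:

- (void)exportPickedAsset:(NSURL *)referenceURL completion:(void (^)(NSURL *localURL))completion
{
    // Passthrough export: copies the media without re-encoding
    AVAsset *asset = [AVAsset assetWithURL:referenceURL];
    AVAssetExportSession *session =
        [[AVAssetExportSession alloc] initWithAsset:asset
                                         presetName:AVAssetExportPresetPassthrough];

    NSString *fileName = [NSString stringWithFormat:@"%@.mov", [[NSUUID UUID] UUIDString]];
    session.outputURL = [NSURL fileURLWithPath:
        [NSTemporaryDirectory() stringByAppendingPathComponent:fileName]];
    session.outputFileType = AVFileTypeQuickTimeMovie;

    [session exportAsynchronouslyWithCompletionHandler:^{
        if (session.status == AVAssetExportSessionStatusCompleted) {
            completion(session.outputURL);
        } else {
            NSLog(@"Export failed: %@", session.error);
            completion(nil);
        }
    }];
}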

Sometimes my merged composition plays only the audio, not the video, but at other times both audio and video work fine. Is there a problem with the code below? Please help me. (A sketch of the playback wiring I suspect is involved follows the method.)

- (void)GetMargedVideo {

    LastTime = kCMTimeZero;
    TotalTime = kCMTimeZero;

    // Rebuild the composition from scratch on every call
    mixComposition = nil;       // AVMutableComposition
    mainCompositionInst = nil;  // AVMutableVideoComposition

    mixComposition = [AVMutableComposition composition];
    mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

    renderWidth = 1280;
    renderHeight = 1280;

    [Objects removeAllObjects];


    // Layer instruction that carries each clip's transform and opacity
    AVMutableVideoCompositionLayerInstruction *firstlayerInstruction;

    self.stokeimage.hidden = YES;
    for (int i = 0; i < [VideoInfo count]; i++)
    {
        self.stokeimage.hidden = NO;
        TargetVideo = i;


        VideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                 preferredTrackID:kCMPersistentTrackID_Invalid];

        AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                 preferredTrackID:kCMPersistentTrackID_Invalid];

        VideoProperty *vp = [VideoInfo objectAtIndex:i];

        STime = vp.startTime;
        ETime = vp.endTime;
        TimeDiff = CMTimeSubtract(ETime, STime);

        LastTime = TotalTime;
        TotalTime = CMTimeAdd(TotalTime, TimeDiff);
        vp.appearTime = LastTime;

        TargetTime = LastTime;

        avasset = [AVAsset assetWithURL:vp.Url];

        // Insert the clip's video and audio into the composition "mixComposition".
        // Passing an NSError instead of nil makes silent insertion failures visible.
        NSError *insertError = nil;
        [VideoTrack insertTimeRange:CMTimeRangeMake(STime, TimeDiff)
                            ofTrack:[[avasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                             atTime:LastTime
                              error:&insertError];
        if (insertError) {
            NSLog(@"Video insert failed: %@", insertError);
        }

        if ([[avasset tracksWithMediaType:AVMediaTypeAudio] count])
        {
            if (!GetMusic)
            {
                insertError = nil;
                [AudioTrack insertTimeRange:CMTimeRangeMake(STime, TimeDiff)
                                    ofTrack:[[avasset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:LastTime
                                      error:&insertError];
                if (insertError) {
                    NSLog(@"Audio insert failed: %@", insertError);
                }
            }
        }


        // Add layer instructions

        if (vp.GetInstuction)
        {
            // GET INSTRUCTION: the clip already has a cached transform, reuse it

            firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:VideoTrack];

            [firstlayerInstruction setTransform:vp.LayerInstruction atTime:LastTime];
            [firstlayerInstruction setOpacity:0 atTime:TotalTime];
            [Objects addObject:firstlayerInstruction];
        }
        else
        {
            // GET INSTRUCTION: the clip is added to the composition for the first time

            AVAssetTrack *assetTrack = [[avasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

            AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:VideoTrack];

            CGAffineTransform videoTransform = assetTrack.preferredTransform;
            CGSize naturalSize = assetTrack.naturalSize;

            BOOL bLandscape = NO;
            CGSize renderSize = CGSizeMake(renderWidth, renderHeight);

            // Swap the render and natural dimensions when the player frame's aspect
            // and the video's orientation disagree (both branches of the original
            // check did the same thing, so they are merged here)
            if ((self.videoplayer.frame.size.width > self.videoplayer.frame.size.height && bIsVideoPortrait) ||
                (self.videoplayer.frame.size.height > self.videoplayer.frame.size.width && !bIsVideoPortrait))
            {
                bLandscape = YES;
                renderSize = CGSizeMake(renderSize.height, renderSize.width);
                naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);
            }


            // Orientation check: a ±90° rotation in preferredTransform means portrait
            BOOL PotraitVideo = NO;
            if ((videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) ||
                (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0)) {
                PotraitVideo = YES;
            }
            // Orientation check finished

            if (bIsVideoPortrait)
                naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);



            scaleValue = 1;

            translationPoint = CGPointMake(self.videoplayer.frame.origin.x, self.videoplayer.frame.origin.y);

            // The offsets derived from translationPoint were forced to zero in the
            // original code, so the clip is not shifted inside the render rect
            CGFloat pointX = 0;
            CGFloat pointY = 0;


            CGAffineTransform new = CGAffineTransformConcat(videoTransform, CGAffineTransformMakeScale(scaleValue, scaleValue));
            CGAffineTransform newer = CGAffineTransformConcat(new, CGAffineTransformMakeTranslation(pointX, pointY));

            CGFloat rotateTranslateX = 0;
            CGFloat rotateTranslateY = 0;

            if (rotationValue - 0.0f > 0.01f && rotationValue - 180.f < 0.01)
                rotateTranslateX = MIN((naturalSize.width * rotationValue) / 90.0f, naturalSize.width);
            if (rotationValue - 90.0f > 0.01f && rotationValue < 360.0f)
                rotateTranslateY = MIN((naturalSize.height * rotationValue) / 180.0f, naturalSize.height);

            CGAffineTransform rotationT = CGAffineTransformConcat(newer, CGAffineTransformMakeRotation(DEGREES_TO_RADIANS(rotationValue)));
            CGAffineTransform rotateTranslate = CGAffineTransformConcat(rotationT, CGAffineTransformMakeTranslation(rotateTranslateX, rotateTranslateY));

            // Size of the clip after its preferred transform is applied
            // (fabs, not fabsf: CGFloat is a double on 64-bit)
            CGSize temp = CGSizeApplyAffineTransform(assetTrack.naturalSize, videoTransform);
            CGSize size = CGSizeMake(fabs(temp.width), fabs(temp.height));
            if (bLandscape)
            {
                size = CGSizeMake(size.height, size.width);
            }

            // Scale to fit the render size, then center the clip in the render rect
            float s1 = renderSize.width / size.width;
            float s2 = renderSize.height / size.height;
            float s = MIN(s1, s2);
            CGAffineTransform new2 = CGAffineTransformConcat(rotateTranslate, CGAffineTransformMakeScale(s, s));
            float x = (renderSize.width - size.width * s) / 2;
            float y = (renderSize.height - size.height * s) / 2;
            // Both branches of the original landscape check were identical, so the
            // branch is unnecessary here
            newer2 = CGAffineTransformConcat(new2, CGAffineTransformMakeTranslation(x, y));

            // Store the layer instruction in the array "Objects"
            [layerInstruction setTransform:newer2 atTime:LastTime];
            [layerInstruction setOpacity:0.0 atTime:TotalTime];
            [Objects addObject:layerInstruction];

            // Cache the computed values so they can be reused next time
            vp.GetInstuction = YES;
            vp.LayerInstruction = newer2;
            vp.Portrait = PotraitVideo;

            [VideoInfo replaceObjectAtIndex:i withObject:vp];
        }
    }


    if (GetMusic)
    {
        // Replace the clips' own audio with the selected music track
        OriginalAsset = mixComposition;
        AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                 preferredTrackID:kCMPersistentTrackID_Invalid];

        NSError *musicError = nil;
        [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, TotalTime)
                            ofTrack:[[MusicAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                             atTime:kCMTimeZero
                              error:&musicError];
        if (musicError) {
            NSLog(@"Music insert failed: %@", musicError);
        }
    }


    // Apply all the instructions to the video composition "mainCompositionInst"
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, TotalTime);
    mainInstruction.layerInstructions = Objects;
    mainCompositionInst = [AVMutableVideoComposition videoComposition];
    mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30);
    mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);

    [self PlayVideo];
}
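In case the audio-only symptom comes from the playback wiring rather than the composition itself: my understanding is that the AVMutableVideoComposition must be attached to the AVPlayerItem, otherwise the player renders only the composition's audio tracks. A sketch of what I assume PlayVideo should contain (the method body here is my guess, not my actual code); the same applies to the videoComposition property of AVAssetExportSession when exporting:

- (void)PlayVideo
{
    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:mixComposition];

    // Without this line the layer instructions are ignored and only audio plays
    playerItem.videoComposition = mainCompositionInst;

    AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = self.videoplayer.bounds;
    [self.videoplayer.layer addSublayer:playerLayer];
    [player play];
}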