
I'm using ELCImagePicker to pick multiple videos from the library, but when I try to export several recorded videos picked from the library, I get the error "The connection to assetsd was interrupted or assetsd died". However, when I pick only downloaded videos with ELCImagePicker, or pick the same recorded videos from the library with UIImagePicker, everything works fine. Is there any solution to this kind of problem?

My code:

- (void)elcImagePickerController:(ELCImagePickerController *)picker didFinishPickingMediaWithInfo:(NSArray *)info
{
    [self dismissViewControllerAnimated:YES completion:nil];

    for (NSDictionary *dict in info) {
        // Compare the media type with isEqualToString:; pointer equality (==)
        // is not reliable for string values coming out of the picker's info dictionary.
        if ([[dict objectForKey:UIImagePickerControllerMediaType] isEqualToString:ALAssetTypeVideo]) {
            if ([dict objectForKey:UIImagePickerControllerOriginalImage]) {

                // Keep the assets-library reference URL of the picked video.
                videoUrl = [dict objectForKey:UIImagePickerControllerReferenceURL];

                [self InsertVideoAsset];
            }
        }
    }
    [self GetMargedVideo];
}
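For reference, one frequent trigger for the assetsd interruption is synchronously touching many assets-library URLs in a row right after the picker is dismissed. Below is a minimal sketch (not code from the question) of loading each picked asset's tracks asynchronously and only calling GetMargedVideo once everything is ready; the method name loadPickedAssets: and the pickedUrls parameter are hypothetical:

// Sketch: preload all picked assets before merging, assuming the reference
// URLs collected in the delegate above were gathered into `pickedUrls`.
- (void)loadPickedAssets:(NSArray *)pickedUrls
{
    dispatch_group_t group = dispatch_group_create();

    for (NSURL *url in pickedUrls) {
        AVAsset *asset = [AVAsset assetWithURL:url];
        dispatch_group_enter(group);
        // Load the keys the merge code reads later, instead of forcing a
        // synchronous fetch from assetsd for every asset in the loop.
        [asset loadValuesAsynchronouslyForKeys:@[@"tracks", @"duration"]
                             completionHandler:^{
            dispatch_group_leave(group);
        }];
    }

    // Merge only after every asset has finished loading.
    dispatch_group_notify(group, dispatch_get_main_queue(), ^{
        [self GetMargedVideo];
    });
}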

Sometimes the merged composition plays only audio and no video, while other times both audio and video work fine. Is there anything wrong with the code below? Please help me...

-(void)GetMargedVideo{

    LastTime=kCMTimeZero;
    TotalTime=kCMTimeZero;

    mixComposition=nil; // AVMutableComposition
    mainCompositionInst=nil; // AVMutableVideoComposition


    mixComposition=[AVMutableComposition composition];

    mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

    renderWidth=1280;
    renderHeight=1280;

    [Objects removeAllObjects];


    // Layer instructions hold each video layer's transform and opacity settings

    AVMutableVideoCompositionLayerInstruction *firstlayerInstruction;

    self.stokeimage.hidden=YES;
    for(int i=0; i<[VideoInfo count];i++)
    {
        self.stokeimage.hidden=NO;
        TargetVideo=i;


        VideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                 preferredTrackID:kCMPersistentTrackID_Invalid];

        AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                 preferredTrackID:kCMPersistentTrackID_Invalid];



        VideoProperty *vp =[VideoInfo objectAtIndex:i];


        STime=vp.startTime;
        ETime=vp.endTime;
        TimeDiff=CMTimeSubtract(ETime, STime);


        LastTime=TotalTime;

        TotalTime=CMTimeAdd(TotalTime, TimeDiff);
        vp.appearTime=LastTime;



        TargetTime=LastTime;


        avasset=[AVAsset assetWithURL:vp.Url];


        // Insert the video and audio tracks into the composition "mixComposition"

        [VideoTrack insertTimeRange:CMTimeRangeMake(STime, TimeDiff)
                            ofTrack:[[avasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                             atTime:LastTime error:nil];

        if ([[avasset tracksWithMediaType:AVMediaTypeAudio] count])
        {
            if (!GetMusic)
            {
                [AudioTrack insertTimeRange:CMTimeRangeMake(STime, TimeDiff)
                                    ofTrack:[[avasset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:LastTime error:nil];
            }
        }


        // Add instructions



        if(vp.GetInstuction)
        {
            // GET INSTRUCTION: this video already has a stored instruction

            firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:VideoTrack];

            [firstlayerInstruction setTransform:vp.LayerInstruction atTime:LastTime];
            [firstlayerInstruction setOpacity:0 atTime:TotalTime];
            [Objects addObject:firstlayerInstruction];

        }
      else
      {

          // GET INSTRUCTION: the video is being added to the composition for the first time


           AVAssetTrack *assetTrack = [[avasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];


            AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:VideoTrack];

            CGAffineTransform videoTransform = assetTrack.preferredTransform;
            CGSize naturalSize = assetTrack.naturalSize;

            BOOL bLandscape = NO;
            // The player-frame-based size was immediately overwritten, so use
            // the fixed render size directly.
            CGSize renderSize = CGSizeMake(renderWidth, renderHeight);
            // Both orientation-mismatch cases swap the sides the same way,
            // so the two identical branches are merged into one condition.
            if ((self.videoplayer.frame.size.width > self.videoplayer.frame.size.height && bIsVideoPortrait) ||
                (self.videoplayer.frame.size.height > self.videoplayer.frame.size.width && !bIsVideoPortrait))
            {
                bLandscape = YES;
                renderSize = CGSizeMake(renderSize.height, renderSize.width);
                naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);
            }


          // Orientation check: a preferredTransform of (0, 1, -1, 0) or
          // (0, -1, 1, 0) means the track was recorded in portrait.
          CGAffineTransform firstTransform = assetTrack.preferredTransform;
          BOOL PotraitVideo = NO;
          if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
              PotraitVideo = YES;
          }
          if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
              PotraitVideo = YES;
          }
          // Orientation check finished


            if(bIsVideoPortrait)
                naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);



            scaleValue = 1;

            translationPoint = CGPointMake(self.videoplayer.frame.origin.x, self.videoplayer.frame.origin.y);



            CGFloat pointX = translationPoint.x * naturalSize.width / self.videoplayer.frame.size.width;
            CGFloat pointY = translationPoint.y * naturalSize.height / self.videoplayer.frame.size.height;
            // The computed offsets are deliberately forced back to zero here.
            pointX = 0;
            pointY = 0;


            CGAffineTransform new = CGAffineTransformConcat(videoTransform, CGAffineTransformMakeScale(scaleValue, scaleValue));
            CGAffineTransform newer = CGAffineTransformConcat(new, CGAffineTransformMakeTranslation(pointX, pointY));

            CGFloat rotateTranslateX = 0;
            CGFloat rotateTranslateY = 0;

            if(rotationValue - 0.0f > 0.01f && rotationValue - 180.f < 0.01)
                rotateTranslateX = MIN((naturalSize.width * rotationValue) / 90.0f, naturalSize.width);
            if(rotationValue - 90.0f > 0.01f && rotationValue < 360.0f)
                rotateTranslateY = MIN((naturalSize.height * rotationValue) / 180.0f, naturalSize.height);

            CGAffineTransform rotationT = CGAffineTransformConcat(newer, CGAffineTransformMakeRotation(DEGREES_TO_RADIANS(rotationValue)));
            CGAffineTransform rotateTranslate = CGAffineTransformConcat(rotationT, CGAffineTransformMakeTranslation(rotateTranslateX, rotateTranslateY));


            CGSize temp = CGSizeApplyAffineTransform(assetTrack.naturalSize, videoTransform);
            CGSize size = CGSizeMake(fabs(temp.width), fabs(temp.height)); // fabs, not fabsf: CGFloat is a double on 64-bit
            if(bLandscape)
            {
                size = CGSizeMake(size.height, size.width);
            }
            float s1 = renderSize.width/size.width;
            float s2 = renderSize.height/size.height;
            float s = MIN(s1, s2);
            CGAffineTransform new2 = CGAffineTransformConcat(rotateTranslate, CGAffineTransformMakeScale(s,s));
            float x = (renderSize.width - size.width*s)/2;
            float y = (renderSize.height - size.height*s)/2;
            // Both landscape and portrait branches applied the same translation,
            // so the bLandscape check is unnecessary here.
            newer2 = CGAffineTransformConcat(new2, CGAffineTransformMakeTranslation(x, y));

            // Store the layer instruction in the "Objects" array

            [layerInstruction setTransform:newer2 atTime:LastTime];

            [layerInstruction setOpacity:0.0 atTime: TotalTime];

            [Objects addObject:layerInstruction];


            vp.GetInstuction=YES;
            vp.LayerInstruction=newer2;
            vp.Portrait=PotraitVideo;


            [VideoInfo replaceObjectAtIndex:i withObject:vp];

        }



    }


    if(GetMusic)
    {


        OriginalAsset=mixComposition;
        AudioTrack=nil;
        AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
        [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, TotalTime)
                            ofTrack:[[MusicAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    }


   // Apply all the instructions to the video composition "mainCompositionInst"

    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, TotalTime);
    mainInstruction.layerInstructions = Objects;
    mainCompositionInst = [AVMutableVideoComposition videoComposition];
    mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30);
    mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);

    [self PlayVideo];    

}
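
PlayVideo is not shown in the question, but the "audio plays, no video" symptom is typical when the AVMutableVideoComposition is never attached to the AVPlayerItem; the layer instructions built above are then simply ignored. Here is a minimal sketch of what PlayVideo might need to do, assuming self.videoplayer is a plain UIView hosting an AVPlayerLayer (those details are guesses):

// Sketch of PlayVideo: without the videoComposition assignment the player
// renders the raw composition tracks, so the transforms and opacity ramps
// set above never apply and playback can appear audio-only.
- (void)PlayVideo
{
    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
    playerItem.videoComposition = mainCompositionInst; // essential for video to show

    AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = self.videoplayer.bounds;
    [self.videoplayer.layer addSublayer:playerLayer];
    [player play];
}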
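The same assignment matters on the export path mentioned at the top of the question. A sketch, where outputUrl is a hypothetical destination file URL:

// Sketch: AVAssetExportSession also needs the video composition, otherwise
// the exported movie can come out untransformed or apparently video-less.
AVAssetExportSession *exporter =
    [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                     presetName:AVAssetExportPresetHighestQuality];
exporter.videoComposition = mainCompositionInst;
exporter.outputURL = outputUrl; // hypothetical destination file URL
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    if (exporter.status == AVAssetExportSessionStatusFailed) {
        NSLog(@"Export failed: %@", exporter.error);
    }
}];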
