I get AVAssetExportSessionStatusFailed when trying to concatenate video fragments, with this message:

Export Failed with error message: Error 
Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" 
UserInfo=0x170675cc0 {NSLocalizedDescription=Operation Stopped, 
NSLocalizedFailureReason=The video could not be composed.}, Operation 
Stopped 

Here is my code:

self.finalComposition = [AVMutableComposition composition]; 
self.finalCompositionTrack = [_finalComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
self.finalCompositionAudioTrack = [_finalComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 

CMTime currentTime = kCMTimeZero; 

AVURLAsset *asset = nil; 
NSMutableArray *audioTracks = [[NSMutableArray alloc] init]; 
NSMutableArray *videos = [[NSMutableArray alloc] init]; 

for (int videoCounter = 0; videoCounter < _videoArray.count ; videoCounter++) 
{ 
    id object = [_videoArray objectAtIndex:videoCounter]; 

    if ([object isKindOfClass:[MVideoRecord class]]) 
    { 
     MVideoRecord *video = object; 
     NSURL *url = [NSURL fileURLWithPath:video.pathToVideo]; 

     NSFileManager *fileManager = [NSFileManager defaultManager]; 
     if (![fileManager fileExistsAtPath:video.pathToVideo]) 
     { 
      [self showError:@"Invalid video"]; 
      continue; // skip this record rather than composing from a missing file 
     } 

     NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]; 

     asset = [AVURLAsset URLAssetWithURL:url options:options]; 

     NSError *error = nil; 
     if (!self.videoCompostion) 
     { 
      self.videoCompostion = [AVMutableVideoComposition videoComposition]; 
      self.videoCompostion.frameDuration = CMTimeMake(1, 30); 
      self.videoCompostion.renderSize = CGSizeMake(640, 360); 
      self.videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
      // totalTime is assumed to be the precomputed duration of all clips combined 
      self.videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime); 
      self.videoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:self.finalCompositionTrack]; 
     } 

     for (AVAssetTrack *track in asset.tracks) 
     { 
      CGSize size = track.naturalSize; 

      if (track.naturalTimeScale == 600) // assume a 600 timescale means the video track 
      { 
       CGAffineTransform transform = [track preferredTransform]; 
       int orientation = [self orientationForTrack: asset]; 

       if (orientation < 2) 
       { 
        float x = 640/size.width; 
        float y = 360/size.height; 


        CGAffineTransform videoScale = CGAffineTransformMakeScale(x, y); 

        [_videoCompositionLayerInstruction setTransform:CGAffineTransformConcat(transform, videoScale) atTime:currentTime]; 
       } 
       else 
       { 
        float s = 480/size.height; 
        CGAffineTransform scaled = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s, s)); 
        [_videoCompositionLayerInstruction setTransform:scaled atTime:currentTime]; 
       } 

       // note: CMTimeRangeMake takes (start, duration); passing videoEnd as the 
       // duration is only correct if videoStart is always zero 
       if (![_finalCompositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(video.videoStart.doubleValue, 600), CMTimeMakeWithSeconds(video.videoEnd.doubleValue, 600)) ofTrack:track atTime:currentTime error:&error]) 
       { 
        [self showError:error.localizedFailureReason]; 
       } 
      } 
      else if (track.naturalTimeScale == 44100) // assume a 44.1 kHz timescale means the audio track 
      { 
       CMTime start = kCMTimeZero; 
       CMTime duration = CMTimeMakeWithSeconds(video.videoEnd.doubleValue, 600); 

       NSError *error = nil; 
       [_finalCompositionAudioTrack insertTimeRange:CMTimeRangeMake(start, duration) 
            ofTrack:[[track.asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:currentTime error:&error]; 

       NSLog(@"%@", error); 
      } 
     } 
     currentTime = CMTimeAdd(currentTime, CMTimeMake(video.videoEnd.doubleValue*600, 600)); 
    } 
} 

//apply the translation to video composition 
_videoCompositionInstruction.layerInstructions = [NSArray arrayWithObject: _videoCompositionLayerInstruction]; 
_videoCompostion.instructions = [NSArray arrayWithObject:_videoCompositionInstruction]; 

//get the file path of the second-to-last object in the array... 
MVideoRecord *lastRecord = [_videoArray objectAtIndex:_videoArray.count - 2]; 

NSString *finalExportURLString = [lastRecord.pathToVideo stringByReplacingOccurrencesOfString:@".MOV" withString:@"_finalExport.mp4"]; 

//testing fix for video missing audio after final export 
//string = [exportURL.absoluteString stringByReplacingOccurrencesOfString:@".MOV" withString:@"_finalExport.MOV"]; 


// File Management 
NSFileManager *fileManager = [NSFileManager defaultManager]; 

self.finalExportURL = [NSURL fileURLWithPath:finalExportURLString]; 
self.finalExportSession = [[AVAssetExportSession alloc] initWithAsset:_finalComposition presetName:TEST_EXPORT_SESSION_QUALITY]; 
if ([fileManager fileExistsAtPath:self.finalExportURL.path]) 
{ 
    NSError *fileError = nil; 
    if (![fileManager removeItemAtPath:finalExportURLString error:&fileError]) 
    { 
     DCLog(@"Error removing old path: %@", fileError.localizedDescription); 
    } 
} 

_finalExportSession.outputURL = self.finalExportURL; 
_finalExportSession.outputFileType = AVFileTypeMPEG4; // same as @"public.mpeg-4" 
_finalExportSession.videoComposition = self.videoCompostion; 

[self.finalExportSession exportAsynchronouslyWithCompletionHandler:^{ 
    switch (_finalExportSession.status) 
    { 
     case AVAssetExportSessionStatusFailed: 
     { 
      DCLog(@"Export Failed with error message: %@, %@", _finalExportSession.error, _finalExportSession.error.localizedDescription); 
      break; 
     } 
     case AVAssetExportSessionStatusCompleted: 
     { 
      DCLog(@"Export Success"); 
      break; 
     } 
     default: 
      break; 
    } 
}]; 

What am I doing wrong?

The strangest part is that if I change:

[_finalCompositionAudioTrack insertTimeRange:CMTimeRangeMake(start, duration) 
             ofTrack:[[track.asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:currentTime error:&error]; 

to:

[_finalCompositionAudioTrack insertTimeRange:CMTimeRangeMake(start, duration) 
             ofTrack:[[track.asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:&error]; 

This works, but of course the audio plays incorrectly: the first video's audio plays over the second video.
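
For reference, atTime: is the point on the composition's own timeline at which the inserted segment begins, so with kCMTimeZero every clip's audio is stacked at the very start. A minimal sketch, with illustrative names (compositionAudioTrack, sourceAudioTrack, clipDuration):

// atTime: positions the inserted segment on the composition's timeline. 
// Inserting every clip at kCMTimeZero stacks all the audio at the beginning, 
// which is why the first clip's audio ends up playing under the second clip. 
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, clipDuration) 
           ofTrack:sourceAudioTrack atTime:kCMTimeZero error:&error]; 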

Does anyone have any ideas?

Answer

I fixed my problem by creating an AVMutableCompositionTrack for each audio track. I moved the code below inside the loop, and it worked.

compositionAudioTrack = [_finalComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
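
For context, here is a minimal sketch of what the fixed loop body might look like (the names follow the question's code and are otherwise illustrative; the surrounding loop and error handling are elided):

// Inside the loop over the video records: give each clip its own audio track. 
AVMutableCompositionTrack *compositionAudioTrack = [_finalComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 

AVAssetTrack *sourceAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject]; 
if (sourceAudioTrack) 
{ 
    NSError *audioError = nil; 
    CMTime clipDuration = CMTimeMakeWithSeconds(video.videoEnd.doubleValue, 600); 
    // Insert at currentTime so each clip's audio stays aligned with its video. 
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, clipDuration) 
               ofTrack:sourceAudioTrack atTime:currentTime error:&audioError]; 
    if (audioError) NSLog(@"%@", audioError); 
} 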