2014-08-29 2 views
4

Как я могу сжать видео с использованием битрейта?Как я могу сжать видео в ios, используя бит-скорость?

Я попытался ниже кода для сжатия видео, но он не работает, потому что он дает мне ошибку, как

****** Нагрузочному приложение из-за неперехваченное исключение «NSInvalidArgumentException», причины: «* - [AVAssetReader startReading] не может быть вызвана снова после прочтения уже начал»****

 - (void) imagePickerController: (UIImagePickerController *) picker 
      didFinishPickingMediaWithInfo: (NSDictionary *) info 
    { 


     // Handle movie capture 
     NSURL *movieURL = [info objectForKey: 
          UIImagePickerControllerMediaURL]; 

     NSData *data = [NSData dataWithContentsOfURL:movieURL]; 

     NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,  NSUserDomainMask, YES); 
     NSString *documentsDirectory = [paths objectAtIndex:0]; 
     NSString *tempPath = [documentsDirectory stringByAppendingFormat:@"/vid1.mp4"]; 

     BOOL success = [data writeToFile:tempPath atomically:NO]; 

     if (success) 
     { 
         NSLog(@"VIdeo Successfully written"); 
     } 
     else 
     { 
         NSLog(@"VIdeo Wrting failed"); 
     } 


     NSURL *uploadURL = [NSURL fileURLWithPath:[[NSTemporaryDirectory() stringByAppendingPathComponent:@"1234"] stringByAppendingString:@".mp4"]]; 

     // Compress movie first 
     [self convertVideoToLowQuailtyWithInputURL:movieURL outputURL:uploadURL]; 
    } 




- (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL 
             outputURL:(NSURL*)outputURL 
    { 
     //setup video writer 
     AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil]; 

     AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 

     CGSize videoSize = videoTrack.naturalSize; 

     NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil]; 

     NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey, [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil]; 

     AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput 
               assetWriterInputWithMediaType:AVMediaTypeVideo 
               outputSettings:videoWriterSettings]; 

     videoWriterInput.expectsMediaDataInRealTime = YES; 

     videoWriterInput.transform = videoTrack.preferredTransform; 

     AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:nil]; 

     [videoWriter addInput:videoWriterInput]; 

     //setup video reader 
     NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 

     AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings]; 

     AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil]; 

     [videoReader addOutput:videoReaderOutput]; 

     //setup audio writer 
     AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput 
               assetWriterInputWithMediaType:AVMediaTypeAudio 
               outputSettings:nil]; 

     audioWriterInput.expectsMediaDataInRealTime = NO; 

     [videoWriter addInput:audioWriterInput]; 

     //setup audio reader 
     AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 

     AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil]; 

     AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil]; 

     [audioReader addOutput:audioReaderOutput]; 

     [videoWriter startWriting]; 

     //start writing from video reader 
     [videoReader startReading]; 

     [videoWriter startSessionAtSourceTime:kCMTimeZero]; 

     dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL); 

     [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock: 
     ^{ 

      while ([videoWriterInput isReadyForMoreMediaData]) 
      { 

       CMSampleBufferRef sampleBuffer; 

       if ([videoReader status] == AVAssetReaderStatusReading && 
        (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) 
       { 

        [videoWriterInput appendSampleBuffer:sampleBuffer]; 
        CFRelease(sampleBuffer); 
       } 

       else 
       { 
        [videoWriterInput markAsFinished]; 

        if ([videoReader status] == AVAssetReaderStatusCompleted) 
        { 
          [audioReader startReading]; 

          [videoWriter startSessionAtSourceTime:kCMTimeZero]; 

          dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL); 

          [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{ 

           while (audioWriterInput.readyForMoreMediaData) 
           { 
            CMSampleBufferRef sampleBuffer; 

            if ([audioReader status] == AVAssetReaderStatusReading && 
             (sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) { 

             [audioWriterInput appendSampleBuffer:sampleBuffer]; 
             CFRelease(sampleBuffer); 
            } 

            else 
            { 

             [audioWriterInput markAsFinished]; 

             if ([audioReader status] == AVAssetReaderStatusCompleted) 
             { 
              [videoWriter finishWritingWithCompletionHandler:^() 
              { 
               NSLog(@"Output URl : %@",outputURL); 
              }]; 
             } 
            } 
           } 

          } 
           ];      
        } 
       } 
      } 
     } 

     ]; 


    } 
+0

Пожалуйста, помогите мне решить эту проблему как можно скорее – Dipen

+0

Заканчивать этот ответ: https://stackoverflow.com/questions/27075391/need-assistance-regarding-video-compression-in-ios/27098739#27098739 –

ответ

2

Вы можете использовать приведенные ниже параметры, чтобы сжать видео по качествам.

  • AVAssetExportPresetLowQuality
  • AVAssetExportPresetMediumQuality
  • AVAssetExportPresetHighestQuality

Код:

- (void)CompressVideo 
{ 
    if(firstAsset !=nil) 
    { 
     //Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack. 
     AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init]; 

     //  http://stackoverflow.com/questions/22715881/merge-video-files-with-their-original-audio-in-ios 

     //VIDEO TRACK 
     AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
     [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil]; 

     //For Audio Track inclusion 
     //============================================================================================ 
     NSArray *arr = [firstAsset tracksWithMediaType:AVMediaTypeAudio]; 
     AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
     [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[arr lastObject] atTime:kCMTimeZero error:nil]; 
     //=============================================================================================== 

     AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
     MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration); 

     //FIXING ORIENTATION// 
     AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack]; 
     AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
     UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp; 
     BOOL isFirstAssetPortrait_ = NO; 

     CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform; 
     if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;} 
     if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;} 
     if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {FirstAssetOrientation_ = UIImageOrientationUp;} 
     if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown; 
     } 
     CGFloat FirstAssetScaleToFitRatio = VideoWidth/FirstAssetTrack.naturalSize.width; 

     if(isFirstAssetPortrait_) 
     { 
      FirstAssetScaleToFitRatio = VideoWidth/FirstAssetTrack.naturalSize.height; 
      CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
      [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero]; 
     } 
     else 
     { 
      CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
      [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero]; 
     } 
     [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration]; 

     MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,nil]; 

     AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition]; 
     MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction]; 
     MainCompositionInst.frameDuration = CMTimeMake(1, 30); 
     //  MainCompositionInst.renderSize = CGSizeMake(VideoWidth, 900); 
     MainCompositionInst.renderSize = CGSizeMake(VideoWidth, [UIScreen mainScreen].bounds.size.height); 

     NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
     NSString *documentsDirectory = [paths objectAtIndex:0]; 
     NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"CompressedVideo.mov"]; 

     NSLog(@"myPath Docs : %@",myPathDocs); 

     NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 

     if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs]) 
     { 
      NSError *error; 
      [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:&error]; 
     } 

     //Movie Quality 
     //================================================== 
     AVAssetExportSession *exporter = [[AVAssetExportSession alloc]  initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality]; 
     //================================================== 

     exporter.outputURL=url; 

     //Movie Type 
     //================================================== 
     exporter.outputFileType = AVFileTypeQuickTimeMovie; 
     //================================================== 
     exporter.videoComposition = MainCompositionInst; 
     exporter.shouldOptimizeForNetworkUse = YES; 
     [exporter exportAsynchronouslyWithCompletionHandler:^ 
     { 
      dispatch_async(dispatch_get_main_queue(),^
          { 
           videoUrToUload = url; 
           [self exportDidFinish:exporter]; 
          }); 
     }]; 
    } 
} 

- (void)exportDidFinish:(AVAssetExportSession *)session 
{ 
    if(session.status == AVAssetExportSessionStatusCompleted) 
    { 
     //Store URL Somewhere using session.url 
    } 
} 
+0

Этот код работает отлично. Он также преобразует видео в более низкое качество. – Dipen

1

меня такая же проблема, но после аварии сделать некоторые изменения, этот метод работал для меня .. Просто замените этот метод с ваш метод выше ...

- (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL 
            outputURL:(NSURL*)outputURL 
{ 
    //setup video writer 
    AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil]; 

    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 

    CGSize videoSize = videoTrack.naturalSize; 

    NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil]; 

    NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey, [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil]; 

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput 
              assetWriterInputWithMediaType:AVMediaTypeVideo 
              outputSettings:videoWriterSettings]; 

    videoWriterInput.expectsMediaDataInRealTime = YES; 

    videoWriterInput.transform = videoTrack.preferredTransform; 

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:nil]; 

    [videoWriter addInput:videoWriterInput]; 

    //setup video reader 
    NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 

    AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings]; 

    AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil]; 

    [videoReader addOutput:videoReaderOutput]; 

    //setup audio writer 
    AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput 
              assetWriterInputWithMediaType:AVMediaTypeAudio 
              outputSettings:nil]; 

    audioWriterInput.expectsMediaDataInRealTime = NO; 

    [videoWriter addInput:audioWriterInput]; 

    //setup audio reader 
    AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 

    AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil]; 

    AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil]; 

    [audioReader addOutput:audioReaderOutput]; 

    [videoWriter startWriting]; 

    //start writing from video reader 
    [videoReader startReading]; 

    [videoWriter startSessionAtSourceTime:kCMTimeZero]; 

    dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL); 

    [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock: 
    ^{ 

     while ([videoWriterInput isReadyForMoreMediaData]) { 

      CMSampleBufferRef sampleBuffer; 

      if ([videoReader status] == AVAssetReaderStatusReading && 
       (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) { 

       [videoWriterInput appendSampleBuffer:sampleBuffer]; 
       CFRelease(sampleBuffer); 
      } 

      else { 

       [videoWriterInput markAsFinished]; 

       if ([videoReader status] == AVAssetReaderStatusCompleted) { 
        if ([audioReader status] == AVAssetReaderStatusReading || [audioReader status] == AVAssetReaderStatusCompleted) { 

        } 
        else{ 
         //start writing from audio reader 
         [audioReader startReading]; 

         [videoWriter startSessionAtSourceTime:kCMTimeZero]; 

         dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL); 

         [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{ 

          while (audioWriterInput.readyForMoreMediaData) { 

           CMSampleBufferRef sampleBuffer; 

           if ([audioReader status] == AVAssetReaderStatusReading && 
            (sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) { 

            [audioWriterInput appendSampleBuffer:sampleBuffer]; 
            CFRelease(sampleBuffer); 
           } 
           else { 

            [audioWriterInput markAsFinished]; 

            if ([audioReader status] == AVAssetReaderStatusCompleted) { 

             [videoWriter finishWritingWithCompletionHandler:^(){ 
              //           [self sendMovieFileAtURL:outputURL]; 
              NSString *moviePath = [outputURL path]; 
              if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(moviePath)) { 
               UISaveVideoAtPathToSavedPhotosAlbum(moviePath, self, 
                        @selector(video:didFinishSavingWithError:contextInfo:), nil); 

              } 
             }]; 
             break; 
            } 
           } 
          } 

         } 
          ]; 
        } 
       } 

      } 
     } 
    } 
    ]; 
} 
+3

ваш код работает, но он генерирует поврежденное видео, что он не может быть запущен на любой медиаплеер и после некоторого времени появляется предупреждение о сохранении памяти, и приложение прекращается. –

+0

По какой-то причине этот код работает отлично для меня в первый раз, когда я его вызываю. Если я назову его второй раз, он сработает в [audioReader startReading]; говоря «не может быть вызвано снова после того, как чтение уже началось» – Jan

+0

Все еще проблемы с сбоем. – Dipen

Смежные вопросы