2015-06-18 1 views
0

По какой-то нечетной причине AVCaptureVideoDataOutputSampleBufferDelegate не запускается. Я добавил делегата и все такое, я не уверен, почему он не запускается в моем коде. Может ли кто-нибудь помочь мне понять, почему?SampleBufferDelegate не работает

Делегаты в моей .h

@class AVPlayer; 
@class AVPlayerClass; 

@interface Camera : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureFileOutputRecordingDelegate> { 

.m кода (initializeCamera вызывается в ViewDidLoad)

 -(void)initializeCamera { 


Session = [[AVCaptureSession alloc]init]; 
[Session setSessionPreset:AVCaptureSessionPresetPhoto]; 



AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; 
NSError *error = nil; 
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error]; 

[Session addInput:audioInput]; 

// Preview Layer*************** 

AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:Session]; 
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 
CALayer *rootLayer = [[self view] layer]; 
[rootLayer setMasksToBounds:YES]; 
CGRect frame = self.CameraView.frame; 

[previewLayer setFrame:frame]; 

[rootLayer insertSublayer:previewLayer atIndex:0]; 



[Session beginConfiguration]; 

//Remove existing input 
[Session removeInput:newVideoInput]; 

newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack]; 
// FrontCamera = NO; 


[Session setSessionPreset:AVCaptureSessionPresetHigh]; 
if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080]) 

    //Check size based configs are supported before setting them 
    [Session setSessionPreset:AVCaptureSessionPreset1920x1080]; 



//Add input to session 
NSError *err = nil; 
newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err]; 
if(!newVideoInput || err) 
{ 
    NSLog(@"Error creating capture device input: %@", err.localizedDescription); 
} 
else if ([Session canAddInput:newVideoInput]) 
{ 
    [Session addInput:newVideoInput]; 
} 
[Session commitConfiguration]; 



stillImageOutput = [[AVCaptureStillImageOutput alloc]init]; 
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil]; 
[stillImageOutput setOutputSettings:outputSettings]; 

[Session addOutput:stillImageOutput]; 


MovieFileOutput = [[AVCaptureMovieFileOutput alloc]init]; 
Float64 TotalSeconds = 10; 

int32_t preferredTimeScale = 60; 

CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); 
MovieFileOutput.maxRecordedDuration = maxDuration; 
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; 


if ([Session canAddOutput:MovieFileOutput]) 
    [Session addOutput:MovieFileOutput]; 






// Create a VideoDataOutput and add it to the session 

// AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init]; 
// 
// [Session addOutput:output]; 
// 
// // Configure your output. 
// 
// dispatch_queue_t queue = dispatch_get_main_queue(); 
// 
// [output setSampleBufferDelegate:self queue:queue]; 
// 
// // dispatch_release(queue); 
// 
// // Specify the pixel format 
// 
// output.videoSettings = [NSDictionary dictionaryWithObject: 
// 
//  [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] 
// 
//        forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 
// 
// 
// 
// 
// 



// AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init]; 
// 
// [dataOutput setAlwaysDiscardsLateVideoFrames:YES]; 
// [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] 
//                forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; 
// [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()]; 
// 
// if ([Session canAddOutput:dataOutput]) 
//  [Session addOutput:dataOutput]; 

// sessionに追加 


// [self setupVideoOutput]; 

[Session setSessionPreset:AVCaptureSessionPresetHigh]; 
if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080]) 

    //Check size based configs are supported before setting them 
    [Session setSessionPreset:AVCaptureSessionPreset1920x1080]; 



[Session startRunning]; 


} 

-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connections { 

NSLog(@"Buff"); 

pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer); 

VideoBuffer = pixelBuffer; 

} 

    -(void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { 


NSLog(@"The drop"); 
} 

ответ

0

Мой код не запускающие AVCaptureVideoDataOutputSampleBufferDelegate, потому что я использую AVCaptureMovieFileOutput вместо AVCaptureVideoDataOutput. AVCaptureMovieFileOutput, по-видимому, не использует пробные буферы. Как только я сейчас, как правильно настроить AVCaptureVideoDataOutput для использования буферов примеров, я отправлю свой код. Надеюсь, это поможет кому-то.

Смежные вопросы