2013-08-16 2 views
1

У меня есть uiview, который реализует openGL для рисования. Я пытаюсь реализовать базовую функцию записи, как показано здесь.Живая запись OpenGL не работает.

Все настройки прекрасно работают, как и рисунок openGL. Но я получаю ошибку на третьем или четвертом кадре, который AVAdaptor пытается добавить (первые несколько идут хорошо).

Я думаю, что это проблема с вызовом CVPixelBufferPoolCreatePixelBuffer, но я не могу отслеживать его вниз ...

Вот код. Код записи в нижней части:

#import "ochrDrawingView.h" 

#define POINT_FREQUENCY_CONSTANT 0.3 // 1 is a point for every pixel of distance. Note: performance drops above 0.5 on long lines. 
#define ARBITRARY_BUFFER_SIZE 2048 

typedef struct { 
    GLfloat Position[2]; 
    GLfloat TextureCoordinates[2]; 
} TexturedVertex; 

typedef struct { 
    TexturedVertex BottomLeft; 
    TexturedVertex BottomRight; 
    TexturedVertex TopLeft; 
    TexturedVertex TopRight; 
} TexturedSquare; 

#pragma mark - Implementation 

@implementation ochrDrawingView { 

    // OpenGL 
    EAGLContext *context; 
    CAEAGLLayer *eaglLayer; 
    GLuint renderBuffer; 
    GLuint frameBuffer; 
    GLKBaseEffect *effect; 
    GLint backingWidth; 
    GLint backingHeight; 

    // Drawing 
    CGSize brushSize; 
    float brushScale; 
    BOOL isFirstTouch; 
    CGPoint origin; 
    CGPoint midpoint; 
    CGPoint destination; 

    // Concurrency 
    dispatch_queue_t recordingQueue; 

    // Video Recording 
    AVAssetWriter *videoWriter; 
    AVAssetWriterInput *videoWriterInput; 
    AVAssetWriterInputPixelBufferAdaptor *avAdaptor; 
    BOOL isRecording; 
    BOOL hasFinishedRecording; 
    NSDate *recordingBeganAt; 
    void* bitmapData; 
} 

#pragma mark - Setup 
- (id)initWithCoder:(NSCoder *)aDecoder { 
    NSLog(@"About to initWithCoder..."); 
    self = [super initWithCoder:aDecoder]; 
    if (self) { 
     isRecording = NO; 
     [self setupLayer]; 
     [self setupContext]; 
     [self setupRenderBuffer]; 
     [self setupFrameBuffer]; 
     [self setViewportParameters]; 
     [self setupBaseEffect]; 
     [self eraseScreen]; 
     recordingQueue = dispatch_queue_create("recordingQueue", NULL); 
    } 
    return self; 
} 

+ (Class) layerClass { 
    return [CAEAGLLayer class]; 
} 

- (void) setupContext { 
    context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; 
    [EAGLContext setCurrentContext:context]; 
} 

- (void) setupLayer { 
    eaglLayer = (CAEAGLLayer *) self.layer; 
    [eaglLayer setOpaque:YES]; 
    [eaglLayer setDrawableProperties: [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil]]; 
} 

- (void)setupRenderBuffer { 
    glGenRenderbuffers(1, &renderBuffer); 
    glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer); 
    [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:eaglLayer]; 
} 

- (void) setViewportParameters { 
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth); 
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight); 
    NSLog(@"Width: %d, Height: %d", backingWidth, backingHeight); 
    glViewport(0, 0, backingWidth, backingHeight); 

} 

- (void) setupFrameBuffer { 
    glGenFramebuffers(1, &frameBuffer); 
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer); 
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBuffer); 
} 

- (void) setupBaseEffect { 
    effect = [[GLKBaseEffect alloc] init]; 

    // Load the texture into the effect 
    NSError *error; 
    NSDictionary *options = @{ GLKTextureLoaderOriginBottomLeft: @YES }; 
    NSString *path = [[NSBundle mainBundle] pathForResource:@"brush.png" ofType:nil]; 
    GLKTextureInfo *texture = [GLKTextureLoader textureWithContentsOfFile:path options:options error:&error]; 
    if (texture == nil) NSLog(@"Texture failed to load. Error: %@", error.localizedDescription); 
    effect.texture2d0.name = texture.name; 
    effect.texture2d0.enabled = GL_TRUE; 

    // Set the brushSize (used later, in drawing method) 
    brushSize = CGSizeMake(texture.width, texture.height); 
    brushScale = 0.6; 

    // Set up a project and modelview matrix. maybe: 
    GLKMatrix4 projectionMatrix = GLKMatrix4MakeOrtho(0, backingWidth, 0, backingHeight, 0, 1.0f); 
    effect.transform.projectionMatrix = projectionMatrix; 
    [effect prepareToDraw]; 

} 

- (void) eraseScreen { 
    [EAGLContext setCurrentContext:context]; 

    // Clear the buffer 
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer); 
    glClearColor(1.0, 1.0, 1.0, 1.0); 
    glClear(GL_COLOR_BUFFER_BIT); 

    // Display the buffer 
    glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer); 
    [context presentRenderbuffer:GL_RENDERBUFFER]; 
} 


#pragma mark - Touch Response 

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event { 
    isFirstTouch = YES; 
    origin = [[touches anyObject] locationInView:self]; 
    origin.y = backingHeight - origin.y; 
    [self drawPointAtPoint:origin]; 
} 

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event { 
    if (isFirstTouch) { 
     isFirstTouch = NO; 
     midpoint = [[touches anyObject] locationInView:self]; 
     midpoint.y = backingHeight - midpoint.y; 
    } else { 
     destination = [[touches anyObject] locationInView:self]; 
     destination.y = backingHeight - destination.y; 
     [self drawCubicLineFromPoint:origin HalfwayToPoint:destination WithControl:midpoint]; 
    } 
} 

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event { 
    [self touchesMoved:touches withEvent:event]; 
} 

#pragma mark - Drawing Algorithms 


- (void)drawCubicLineFromPoint:(CGPoint)start HalfwayToPoint:(CGPoint)end WithControl:(CGPoint)control { 

    static GLfloat tempVertexBuffer[ARBITRARY_BUFFER_SIZE]; 
    static GLubyte tempIndexBuffer[ARBITRARY_BUFFER_SIZE]; 

    int vertexCount = 0; 
    int indexCount = 0; 
    float pointCount; 

    // [EAGLContext setCurrentContext:context]; 
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer); 

    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); 
    glEnable(GL_BLEND); 

    // Get the number of points to be drawn between the two points 
    float distance = sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)); 
    pointCount = MAX(ceilf(distance * POINT_FREQUENCY_CONSTANT), 1); 

    // Adjust the size of the brush based on the (rough) speed of the stroke 
    if (distance > 20) { 
     brushScale = 0.5; 
    } else { 
     brushScale = 0.6; 
    } 

    // Get the adjustment value used to center each texture (the brush image is a square, so here I use only width) 
    float positionAdjustmentToCenterTexture = brushScale * brushSize.width/2.0; 

    // Iterate through the points to be drawn, drawing a TexturedSquare (more or less) for each. 
    float t = 0.0, x, y; 
    for (float i = 0; i < pointCount; i++) { 

     x = powf(1 - t, 2) * start.x + 2 * (1 - t) * t * control.x + t * t * end.x; 
     y = powf(1 - t, 2) * start.y + 2 * (1 - t) * t * control.y + t * t * end.y; 
     t += 0.5/pointCount; 

     // Bottom-left vertex 
     tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 2] = 0; 
     tempVertexBuffer[4 * vertexCount + 3] = 0; 
     vertexCount++; 

     // Bottom-right vertex 
     tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 2] = 1; 
     tempVertexBuffer[4 * vertexCount + 3] = 0; 
     vertexCount++; 

     // Top-left vertex 
     tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 2] = 0; 
     tempVertexBuffer[4 * vertexCount + 3] = 1; 
     vertexCount++; 

     // Top-right vertex 
     tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 2] = 1; 
     tempVertexBuffer[4 * vertexCount + 3] = 1; 
     vertexCount++; 

     // Add the indices for the triangles 
     tempIndexBuffer[indexCount++] = vertexCount - 4; 
     tempIndexBuffer[indexCount++] = vertexCount - 3; 
     tempIndexBuffer[indexCount++] = vertexCount - 2; 
     tempIndexBuffer[indexCount++] = vertexCount - 3; 
     tempIndexBuffer[indexCount++] = vertexCount - 2; 
     tempIndexBuffer[indexCount++] = vertexCount - 1; 
    } 

    origin = CGPointMake(x, y); // sets the origin to the last point drawn 
    midpoint = end; // sets the midpoint to the previous destination. 

    long pointer = (long)&tempVertexBuffer; 
    glEnableVertexAttribArray(GLKVertexAttribPosition); 
    glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, Position)); 

    glEnableVertexAttribArray(GLKVertexAttribTexCoord0); 
    glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, TextureCoordinates)); 

    glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_BYTE, tempIndexBuffer); 

    // Display the buffer 
    glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer); 
    [context presentRenderbuffer:GL_RENDERBUFFER]; 

    // If recording, record this frame 
    if (isRecording) { 
     dispatch_async(recordingQueue, ^{ 
      [self writeCurrentFrame]; 
     }); 
    } 
} 


- (void)drawStraightLineFromPoint:(CGPoint)start ToPoint:(CGPoint)end { 

    GLfloat tempVertexBuffer[128]; 
    GLubyte tempIndexBuffer[128]; 

    int vertexCount = 0; 
    int indexCount = 0; 
    float pointCount; 

    // Dumb setup stuff. 
    [EAGLContext setCurrentContext:context]; 
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer); 

    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); 
    glEnable(GL_BLEND); 

    // Get the number of points to be drawn between the two points 
    pointCount = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) * POINT_FREQUENCY_CONSTANT), 1); 

    // Get the adjustment value used to center each texture (the brush image is a square, so here I use only width) 
    float positionAdjustmentToCenterTexture = brushScale * brushSize.width/2.0; 

    // Iterate through the points to be drawn, drawing a TexturedSquare (more or less) for each. 
    float x; 
    float y; 
    for (float i = 0; i < pointCount; i++) { 

     // Set the x and y coordinates for each points, interpolating based on the distance 
     x = start.x + ((end.x - start.x) * (i/pointCount)); 
     y = start.y + (end.y - start.y) * (i/pointCount); 

     // Bottom-left vertex 
     tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 2] = 0; 
     tempVertexBuffer[4 * vertexCount + 3] = 0; 
     vertexCount++; 

     // Bottom-right vertex 
     tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 2] = 1; 
     tempVertexBuffer[4 * vertexCount + 3] = 0; 
     vertexCount++; 

     // Top-left vertex 
     tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 2] = 0; 
     tempVertexBuffer[4 * vertexCount + 3] = 1; 
     vertexCount++; 

     // Top-right vertex 
     tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture; 
     tempVertexBuffer[4 * vertexCount + 2] = 1; 
     tempVertexBuffer[4 * vertexCount + 3] = 1; 
     vertexCount++; 

     // Add the indices for the triangles 
     tempIndexBuffer[indexCount++] = vertexCount - 4; 
     tempIndexBuffer[indexCount++] = vertexCount - 3; 
     tempIndexBuffer[indexCount++] = vertexCount - 2; 
     tempIndexBuffer[indexCount++] = vertexCount - 3; 
     tempIndexBuffer[indexCount++] = vertexCount - 2; 
     tempIndexBuffer[indexCount++] = vertexCount - 1; 
    } 

    long pointer = (long)&tempVertexBuffer; 
    glEnableVertexAttribArray(GLKVertexAttribPosition); 
    glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, Position)); 

    glEnableVertexAttribArray(GLKVertexAttribTexCoord0); 
    glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, TextureCoordinates)); 

    // Last parameter may be wrong... 
    glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_BYTE, tempIndexBuffer); 

    // Display the buffer 
    glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer); 
    [context presentRenderbuffer:GL_RENDERBUFFER]; 
} 

- (void)drawPointAtPoint:(CGPoint)start { 

    GLfloat tempVertexBuffer[16]; 
    GLubyte tempIndexBuffer[6]; 

    int vertexCount = 0; 
    int indexCount = 0; 

    // Dumb setup stuff. 
    // [EAGLContext setCurrentContext:context]; 
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer); 

    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); 
    glEnable(GL_BLEND); 

    // Get the adjustment value used to center each texture (the brush image is a square, so here I use only width) 
    float positionAdjustmentToCenterTexture = brushScale * brushSize.width/2.0; 

    // Iterate through the points to be drawn, drawing a TexturedSquare (more or less) for each. 
    float x = start.x; 
    float y = start.y; 

    // Bottom-left vertex 
    tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture; 
    tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture; 
    tempVertexBuffer[4 * vertexCount + 2] = 0; 
    tempVertexBuffer[4 * vertexCount + 3] = 0; 
    vertexCount++; 

    // Bottom-right vertex 
    tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture; 
    tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture; 
    tempVertexBuffer[4 * vertexCount + 2] = 1; 
    tempVertexBuffer[4 * vertexCount + 3] = 0; 
    vertexCount++; 

    // Top-left vertex 
    tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture; 
    tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture; 
    tempVertexBuffer[4 * vertexCount + 2] = 0; 
    tempVertexBuffer[4 * vertexCount + 3] = 1; 
    vertexCount++; 

    // Top-right vertex 
    tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture; 
    tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture; 
    tempVertexBuffer[4 * vertexCount + 2] = 1; 
    tempVertexBuffer[4 * vertexCount + 3] = 1; 
    vertexCount++; 

    // Add the indices for the triangles 
    tempIndexBuffer[indexCount++] = vertexCount - 4; 
    tempIndexBuffer[indexCount++] = vertexCount - 3; 
    tempIndexBuffer[indexCount++] = vertexCount - 2; 
    tempIndexBuffer[indexCount++] = vertexCount - 3; 
    tempIndexBuffer[indexCount++] = vertexCount - 2; 
    tempIndexBuffer[indexCount++] = vertexCount - 1; 

    long pointer = (long)&tempVertexBuffer; 
    glEnableVertexAttribArray(GLKVertexAttribPosition); 
    glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, Position)); 

    glEnableVertexAttribArray(GLKVertexAttribTexCoord0); 
    glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, TextureCoordinates)); 

    // Last parameter may be wrong... 
    glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_BYTE, tempIndexBuffer); 

    // Display the buffer 
    glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer); 
    [context presentRenderbuffer:GL_RENDERBUFFER]; 
} 


#pragma mark - Video Recorder Functions 

- (BOOL) setupVideoWriter { 
    NSError* error = nil; 
    videoWriter = [[AVAssetWriter alloc] initWithURL:[self temporaryFileURL] fileType:AVFileTypeQuickTimeMovie error:&error]; 
    NSParameterAssert(videoWriter); 

    //Configure video 
    NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys: 
              [NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey, 
              nil ]; 

    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
            AVVideoCodecH264, AVVideoCodecKey, 
            [NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey, 
            [NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey, 
            videoCompressionProps, AVVideoCompressionPropertiesKey, 
            nil]; 

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings]; 
    NSParameterAssert(videoWriterInput); 

    videoWriterInput.expectsMediaDataInRealTime = YES; 
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: 
             [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil]; 

    avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput 
                       sourcePixelBufferAttributes:bufferAttributes]; 

    //add input 
    [videoWriter addInput:videoWriterInput]; 
    [videoWriter startWriting]; 
    [videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)]; 

    return YES; 
} 

- (CGContextRef) getContextOfSize:(CGSize)size { 
    CGContextRef tempContext = NULL; 
    CGColorSpaceRef colorSpace; 
    int    bitmapByteCount; 
    int    bitmapBytesPerRow; 

    bitmapBytesPerRow = size.width * 4; 
    bitmapByteCount  = bitmapBytesPerRow * size.height; 
    colorSpace = CGColorSpaceCreateDeviceRGB(); 

    if (bitmapData != NULL) { 
     free(bitmapData); 
    } 

    bitmapData = malloc(bitmapByteCount); 

    if (bitmapData == NULL) { 
     fprintf (stderr, "Memory not allocated!"); 
     return NULL; 
    } 

    tempContext = CGBitmapContextCreate (bitmapData, 
             size.width, 
             size.height, 
             8, 
             bitmapBytesPerRow, 
             colorSpace, 
             kCGImageAlphaNoneSkipFirst); 

    CGContextSetAllowsAntialiasing(tempContext, NO); 
    if (tempContext == NULL) { 
     free (bitmapData); 
     fprintf (stderr, "Context not created!"); 
     return NULL; 
    } 
    CGColorSpaceRelease(colorSpace); 

    return tempContext; 
} 

- (void) writeCurrentFrame { 

    NSLog(@"writeCurrentFrame called"); 

    // Get a context 
    CGContextRef videoContext = [self getContextOfSize:self.frame.size]; 

    // Render the current screen into that context 
    [self.layer renderInContext:videoContext]; 

    // Get a CGImage from the context 
    CGImageRef cgImage = CGBitmapContextCreateImage(videoContext); 

    // Check if the AVAssetWriterInput is ready for more data 
    if (![videoWriterInput isReadyForMoreMediaData]) { 

     NSLog(@"Not ready for video data"); 

    } else { 

     // If it is, convert the CGImage into a CVPixelBufferReference 
     CVPixelBufferRef pixelBuffer = NULL; 
     CFDataRef cfImage = CGDataProviderCopyData(CGImageGetDataProvider(cgImage)); 
     int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer); 

     if (status != 0) { 
      NSLog(@"Error creating pixel buffer. Status: %d", status); 
     } else { 
      NSLog(@"No error creating the pixel buffer..."); 
     } 

     // Set image data into pixel buffer 
     CVPixelBufferLockBaseAddress(pixelBuffer, 0); 
     uint8_t* destPixels = CVPixelBufferGetBaseAddress(pixelBuffer); 
     CFDataGetBytes(cfImage, CFRangeMake(0, CFDataGetLength(cfImage)), destPixels); 

     // If all's well so far, append the pixelbuffer to the adaptor 
     if (status == 0) { 
      float millisecondsSinceStart = [[NSDate date] timeIntervalSinceDate:recordingBeganAt]; 
      BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMake((int) millisecondsSinceStart, 1000)]; 
      if (!success) { 
       NSLog(@"Warning: Unable to write buffer to video."); 
      } else { 
       NSLog(@"Success! Was able to write buffer to video."); 
      } 
     } 

     // Clean up 
     CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); 
     // CVPixelBufferRelease(pixelBuffer); 
     CFRelease(cfImage); 
     CGImageRelease(cgImage); 
     CGContextRelease(videoContext); 
    } 
} 

- (BOOL) completeRecordingSession { 
    return YES; 
} 

- (NSURL *) temporaryFileURL { 
    NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"]; 
    NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath]; 
    NSLog(@"Will try to store the file at %@", outputURL); 
    NSFileManager* fileManager = [NSFileManager defaultManager]; 
    if ([fileManager fileExistsAtPath:outputPath]) { 
     NSLog(@"There is already a file there - trying to delete it..."); 
     NSError* error; 
     if ([fileManager removeItemAtPath:outputPath error:&error] == NO) { 
      NSLog(@"Could not delete old recording file at path: %@", outputPath); 
     } else { 
      NSLog(@"Sucessfully deleted file. The new file can be stored at %@", outputURL); 
     } 
    } else { 
     NSLog(@"File can be stored at %@", outputURL); 
    } 
    return outputURL; 
} 

- (BOOL) startRecording { 
    isRecording = YES; 
    return [self setupVideoWriter];; 
} 

- (BOOL) stopRecording { 
    return YES; 
} 





#pragma mark - Helper Functions 

- (void) logLocationOfPoint:(CGPoint)point { 
    NSLog(@"point at { %d, %d }", (int) point.x, (int) point.y); 
} 


@end 

ответ

2

Для любого любопытного:

выше метод принимает некорректный подход. В общем, гораздо лучше сделать текстуру, а затем текстуру в renderbuffer. Производительность, которую вы нанесете на дополнительный шаг, минимальна, и этот подход гораздо более расширяем с точки зрения потоковой передачи, записи, обработки и т. Д.

Лучший. S

Смежные вопросы