Problem description:

I am generating a movie by rendering OpenGL textures. Some frames of the resulting movie look incompletely rendered: they show part of the previous frame.

If I add [NSThread sleepForTimeInterval:0.05]; the problem does not show up, but I can't rely on that call.

This is the result without [NSThread sleepForTimeInterval:0.05]

This is the result when adding [NSThread sleepForTimeInterval:0.05]

The code I use is:

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{

    // Video writer
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:tempVideoPath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:MOVsize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:MOVsize.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];
    videoWriterInput.expectsMediaDataInRealTime = NO;
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    [videoWriter addInput:videoWriterInput];

    NSDictionary *pixelAttributesDict = [NSDictionary dictionaryWithObjectsAndKeys:
                                         [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                         [NSNumber numberWithInt:1024], kCVPixelBufferWidthKey,
                                         [NSNumber numberWithInt:768], kCVPixelBufferHeightKey,
                                         nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                         sourcePixelBufferAttributes:pixelAttributesDict];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    if ([EAGLContext currentContext] != glContext)
        [EAGLContext setCurrentContext:glContext];

    [self createDataFBO:adaptor];

    for (int frame = 0; frame < samplesNumber; frame++) {

        while (!videoWriterInput.readyForMoreMediaData) {
            NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

        //glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

        // Render current frame with OpenGL
        [self renderFrameAt:frame];

        CVReturn lockResult = CVPixelBufferLockBaseAddress(renderTarget, 0);
        BOOL test = (lockResult == kCVReturnSuccess) &&
                    [adaptor appendPixelBuffer:renderTarget withPresentationTime:CMTimeMake(frame, kFps)];
        if (!test) {
            NSLog(@"append failed!");
        }

        // If I add [NSThread sleepForTimeInterval:0.05]; here, the rendering works fine

        CVPixelBufferUnlockBaseAddress(renderTarget, 0);

        dispatch_async(dispatch_get_main_queue(), ^{
            [self updateProgressBar];
        });
    }

    [videoWriterInput markAsFinished];
    [videoWriter endSessionAtSourceTime:CMTimeMake(samplesNumber, kFps)];
    [videoWriter finishWritingWithCompletionHandler:^{
        [self destroyDataFBO];
        // End of movie generation
    }];
});
}

This is the code I use to create the texture cache:

- (void)createDataFBO:(AVAssetWriterInputPixelBufferAdaptor *)adaptor;
{
    glActiveTexture(GL_TEXTURE1);
    glGenFramebuffers(1, &movieFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

#if defined(__IPHONE_6_0)
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [EAGLContext currentContext], NULL, &coreVideoTextureCache);
#else
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[EAGLContext currentContext], NULL, &coreVideoTextureCache);
#endif

    if (err)
    {
        NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
    }

    // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
    CVPixelBufferPoolCreatePixelBuffer(NULL, [adaptor pixelBufferPool], &renderTarget);

    size_t frameWidth  = CVPixelBufferGetWidth(renderTarget);
    size_t frameHeight = CVPixelBufferGetHeight(renderTarget);

    CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
                                                 NULL,              // texture attributes
                                                 GL_TEXTURE_2D,
                                                 GL_RGBA,           // opengl format
                                                 frameWidth,
                                                 frameHeight,
                                                 GL_BGRA,           // native iOS format
                                                 GL_UNSIGNED_BYTE,
                                                 0,
                                                 &renderTexture);

    glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);

    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}

Answer:

Have you tried glFlush() after doing your rendering?
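
For reference, a minimal sketch of how that suggestion could be applied to the per-frame loop in the question (the identifiers renderFrameAt:, renderTarget, adaptor and kFps are taken from the question's code; whether glFlush() alone is enough or a blocking glFinish() is needed is driver-dependent, so this is only one way to try it):

    // After issuing the GL draw calls and before handing the pixel buffer to AVFoundation:
    [self renderFrameAt:frame];

    // glFlush() submits any queued GL commands; glFinish() additionally blocks until the GPU
    // has finished executing them, so the texture-cache-backed pixel buffer is fully written
    // before it is appended to the movie.
    glFlush();
    // glFinish();   // stronger, blocking alternative if glFlush() is not sufficient

    CVReturn lockResult = CVPixelBufferLockBaseAddress(renderTarget, 0);
    BOOL appended = (lockResult == kCVReturnSuccess) &&
                    [adaptor appendPixelBuffer:renderTarget
                          withPresentationTime:CMTimeMake(frame, kFps)];
    CVPixelBufferUnlockBaseAddress(renderTarget, 0);

This would also be consistent with the sleepForTimeInterval workaround hiding the problem: the sleep presumably just gives the GPU time to finish the frame, whereas an explicit synchronization point makes that guarantee deterministic.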
