I'm writing an OpenGL application for iOS and I need to take an in-app screenshot of the rendered scene. Everything works fine as long as I don't use multisampling. But as soon as I turn multisampling on, glReadPixels no longer returns the correct data (the scene itself is drawn correctly; the image quality is noticeably better with multisampling).
I've already gone through a bunch of similar questions on SO and elsewhere, but none of them solve my problem, because I'm already doing things the way they suggest:
- I take the screenshot after resolving the buffers, but before presenting the renderbuffer.
- glReadPixels does not return an error.
- I even tried setting kEAGLDrawablePropertyRetainedBacking to YES and taking the screenshot after the renderbuffer is presented; that doesn't work either (the layer/context setup is sketched right after this list).
- I'm targeting the OpenGL ES 1.x rendering API (the context is initialized with kEAGLRenderingAPIOpenGLES1).
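For reference, this is roughly how the layer and context are set up (a trimmed sketch; the ivar names match the code below, and kEAGLColorFormatRGBA8 reflects the RGBA8 format used for the multisample buffer). kEAGLDrawablePropertyRetainedBacking is normally NO here; I only flipped it to YES for the experiment mentioned in the list.

CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                [NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking,
                                kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];

context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
    NSLog(@"Failed to create or activate the ES 1.x context");
}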
Basically I have no idea what could be wrong. Posting a question on SO is my last resort.
Here is the relevant source code:
Creating the framebuffer
- (BOOL)createFramebuffer
{
    glGenFramebuffersOES(1, &viewFramebuffer);
    glGenRenderbuffersOES(1, &viewRenderbuffer);

    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
    [context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
    glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);

    glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
    glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);

    // Multisample support
    glGenFramebuffersOES(1, &sampleFramebuffer);
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, sampleFramebuffer);

    glGenRenderbuffersOES(1, &sampleColorRenderbuffer);
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, sampleColorRenderbuffer);
    glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_RGBA8_OES, backingWidth, backingHeight);
    glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, sampleColorRenderbuffer);

    glGenRenderbuffersOES(1, &sampleDepthRenderbuffer);
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, sampleDepthRenderbuffer);
    glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
    glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, sampleDepthRenderbuffer);
    // End of multisample support

    if (glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
        NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
        return NO;
    }

    return YES;
}
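For completeness, every frame is rendered into sampleFramebuffer (not viewFramebuffer) before the resolve below. This is a simplified sketch of my per-frame setup, where drawScene is just a placeholder for the actual scene rendering:

glBindFramebufferOES(GL_FRAMEBUFFER_OES, sampleFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
[self drawScene]; // placeholder for the actual drawing code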
Resolving the buffers and taking the snapshot
glBindFramebufferOES(GL_DRAW_FRAMEBUFFER_APPLE, viewFramebuffer);
glBindFramebufferOES(GL_READ_FRAMEBUFFER_APPLE, sampleFramebuffer);
glResolveMultisampleFramebufferAPPLE();
[self checkGlError];
//glFinish();
if (capture)
captureImage = [self snapshot:self];
const GLenum discards[] = {GL_COLOR_ATTACHMENT0_OES,GL_DEPTH_ATTACHMENT_OES};
glDiscardFramebufferEXT(GL_READ_FRAMEBUFFER_APPLE,2,discards);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
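checkGlError isn't shown above; it is just a thin wrapper around glGetError, roughly:

- (void)checkGlError
{
    GLenum err = glGetError();
    if (err != GL_NO_ERROR)
        NSLog(@"GL error: 0x%x", err);
}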
The snapshot method (basically copied from Apple's docs)
- (UIImage*)snapshot:(UIView*)eaglview
{
    // Bind the color renderbuffer used to render the OpenGL ES view
    // If your application only creates a single color renderbuffer which is already bound at this point,
    // this call is redundant, but it is needed if you're dealing with multiple renderbuffers.
    // Note, replace "_colorRenderbuffer" with the actual name of the renderbuffer object defined in your class.
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);

    NSInteger x = 0, y = 0, width = backingWidth, height = backingHeight;
    NSInteger dataLength = width * height * 4;
    GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));

    // Read pixel data from the framebuffer
    glPixelStorei(GL_PACK_ALIGNMENT, 4);
    [self checkGlError];
    glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
    [self checkGlError];

    // Create a CGImage with the pixel data
    // If your OpenGL ES content is opaque, use kCGImageAlphaNoneSkipLast to ignore the alpha channel
    // otherwise, use kCGImageAlphaPremultipliedLast
    CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
    CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
    CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
                                    ref, NULL, true, kCGRenderingIntentDefault);

    // OpenGL ES measures data in PIXELS
    // Create a graphics context with the target size measured in POINTS
    NSInteger widthInPoints, heightInPoints;
    if (NULL != UIGraphicsBeginImageContextWithOptions) {
        // On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
        // Set the scale parameter to your OpenGL ES view's contentScaleFactor
        // so that you get a high-resolution snapshot when its value is greater than 1.0
        CGFloat scale = eaglview.contentScaleFactor;
        widthInPoints = width / scale;
        heightInPoints = height / scale;
        UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
    }
    else {
        // On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
        widthInPoints = width;
        heightInPoints = height;
        UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
    }

    CGContextRef cgcontext = UIGraphicsGetCurrentContext();

    // UIKit coordinate system is upside down to GL/Quartz coordinate system
    // Flip the CGImage by rendering it to the flipped bitmap context
    // The size of the destination area is measured in POINTS
    CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
    CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);

    // Retrieve the UIImage from the current context
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    // Clean up
    free(data);
    CFRelease(ref);
    CFRelease(colorspace);
    CGImageRelease(iref);

    return image;
}
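And this is how the captured image is consumed afterwards (illustrative only; in the real app captureImage goes into a sharing flow, and saving to the photo album is just the simplest way I verify what was read back):

if (captureImage) {
    // Saving to the photo album just to inspect what glReadPixels actually returned
    UIImageWriteToSavedPhotosAlbum(captureImage, nil, nil, NULL);
}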