How can I dump an OpenGL renderbuffer to a PNG or JPG image? (iPhone)

I'm working on a project that uses an OpenGL framebuffer/renderbuffer to draw freehand lines. The app uses OpenGL ES 1.x, and I can't figure out how to dump the current screen to a PNG or JPG image.
Thanks for reading this question.
From an OpenGL newbie.
Here is the code used to create the framebuffer and renderbuffer:
// Generate IDs for a framebuffer object and a color renderbuffer
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
// This call associates the storage for the current render buffer with the EAGLDrawable (our CAEAGLLayer)
// allowing us to draw into a buffer that will later be rendered to screen wherever the layer is (which corresponds with our view).
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(id<EAGLDrawable>)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
// For this sample, we also need a depth buffer, so we'll create and attach one via another renderbuffer.
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);

-(UIImage *)dumpImage
{
GLubyte *buffer = (GLubyte *) malloc(backingWidth * backingHeight * 4);
GLubyte *buffer2 = (GLubyte *) malloc(backingWidth * backingHeight * 4);
glReadPixels(0, 0, backingWidth, backingHeight, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid *)buffer);
/* make upside down */
for (int y=0; y<backingHeight; y++) {
for (int x=0; x<backingWidth*4; x++) {
buffer2[y * 4 * backingWidth + x] = buffer[(backingHeight - y - 1) * backingWidth * 4 + x];
}
}
// make data provider from buffer
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, backingWidth * backingHeight * 4, freeImageData);
// set up for CGImage creation
int bitsPerComponent = 8;
int bitsPerPixel = 32;
int bytesPerRow = 4 * backingWidth;
CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
// Use this to retain alpha
//CGBitmapInfo bitmapInfo = kCGImageAlphaPremultipliedLast;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
CGImageRef imageRef = CGImageCreate(backingWidth, backingHeight, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
// make UIImage from CGImage
UIImage *newUIImage = [UIImage imageWithCGImage:imageRef];
// clean up; buffer2 is freed later by the data provider via freeImageData()
CGImageRelease(imageRef);
CGColorSpaceRelease(colorSpaceRef);
CGDataProviderRelease(provider);
free(buffer);
return newUIImage;
}
freeImageData() frees the pixel data later, once the data provider is finished with it:
void freeImageData(void *info, const void *data, size_t size)
{
//printf("freeImageData called");
free((void*)data);
}
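To answer the title question: once -dumpImage returns a UIImage, UIKit can encode it as PNG or JPG data. A minimal sketch, assuming the Documents directory as the destination and example file names:
// Hedged sketch: encode the UIImage from -dumpImage and write it to disk.
// "screen.png" / "screen.jpg" are example file names, not fixed API names.
UIImage *image = [self dumpImage];
NSData *pngData = UIImagePNGRepresentation(image); // lossless PNG
NSData *jpgData = UIImageJPEGRepresentation(image, 0.9f); // JPEG at 90% quality
NSString *docsDir = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
[pngData writeToFile:[docsDir stringByAppendingPathComponent:@"screen.png"] atomically:YES];
[jpgData writeToFile:[docsDir stringByAppendingPathComponent:@"screen.jpg"] atomically:YES];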

Related

iOS OpenGL ES 2.0: Offscreen render and save the result to a UIImage

I'm using OpenGL ES to render some special effects. I don't want to show this to the user; I just want to save the result as a UIImage. Can anybody please help me?
This is the code I'm using. I can get an image, but it only contains the red clear color; none of the geometry I draw shows up.
#import "RendererGL.h"
#import <GLKit/GLKit.h>
#import <UIKit/UIKit.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/EAGLDrawable.h>
#import <OpenGLES/ES2/glext.h>
#import <QuartzCore/QuartzCore.h>
static NSInteger WIDTH_IN_PIXEL = 400;
static NSInteger HEIGHT_IN_PIXEL = 300;
typedef struct {
GLKVector3 positionCoords;
} SceneVertex;
static const SceneVertex vertices[] =
{
{{-0.5f, -0.5f, 0.0}}, // lower left corner
{{ 0.5f, -0.5f, 0.0}}, // lower right corner
{{-0.5f, 0.5f, 0.0}} // upper left corner
};
@implementation RendererGL
{
EAGLContext* _myContext;
GLuint _framebuffer;
GLuint _colorRenderbuffer;
GLuint _depthRenderbuffer;
GLuint _vertexBufferID;
GLKBaseEffect *_baseEffect;
}
- (id) init
{
self = [super init];
if (self)
{
_myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:_myContext];
[self setupOffscreenBuffer];
[self setUpEffect];
[self renderImage];
[self saveImage]; // this does work, in that I get an image, but it only contains the red clear color
}
return self;
}
-(void)setUpEffect
{
_baseEffect = [[GLKBaseEffect alloc] init];
_baseEffect.useConstantColor = GL_TRUE;
_baseEffect.constantColor = GLKVector4Make(0.0f, 0.0f, 1.0f, 1.0f);
}
// this code is from Apple's documentation
-(void)setupOffscreenBuffer
{
glGenFramebuffers(1, &_framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
glGenRenderbuffers(1, &_colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorRenderbuffer);
glGenRenderbuffers(1, &_depthRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthRenderbuffer);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if(status != GL_FRAMEBUFFER_COMPLETE) {
NSLog(@"failed to make complete framebuffer object %x", status);
}
}
- (void) renderImage
{
GLenum error = GL_NO_ERROR;
glClearColor(1, 0, 0, 1); //red clear color, this can be seen
glClear(GL_COLOR_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
[_baseEffect prepareToDraw];
glGenBuffers(1, &_vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(@"error happened, error is %d, line %d", error, __LINE__);
}
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition,3,GL_FLOAT, GL_FALSE, sizeof(SceneVertex), NULL);
glDrawArrays(GL_TRIANGLES,0,3);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(@"error happened, error is %d, line %d", error, __LINE__);
}
glFinish();
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(@"error happened, error is %d, line %d", error, __LINE__);
}
}
-(void)saveImage
{
GLenum error = GL_NO_ERROR;
NSInteger x = 0, y = 0;
NSInteger dataLength = WIDTH_IN_PIXEL * HEIGHT_IN_PIXEL * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, GL_RGBA, GL_UNSIGNED_BYTE, data);
NSData *pixelsRead = [NSData dataWithBytes:data length:dataLength];
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(@"error happened, error is %d, line %d", error, __LINE__);
}
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, 8, 32, WIDTH_IN_PIXEL * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
NSData *d = UIImageJPEGRepresentation(image, 1);
NSString *documentDirPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
static NSInteger imageNO = 1;
imageNO++;
NSString *savingPath = [documentDirPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%ld.jpg", (long)imageNO]];
BOOL succ = [d writeToFile:savingPath atomically:NO]; // this succeeds
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
}
@end
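One detail in -renderImage worth checking before anything else: the framebuffer has a depth attachment and GL_DEPTH_TEST is enabled, but glClear is only called with GL_COLOR_BUFFER_BIT, so the depth buffer holds undefined data that can reject every fragment. A hedged, one-line candidate fix:
// Clear depth as well as color; the FBO has a depth attachment and the
// depth test is enabled, so stale depth values can discard all geometry.
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);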
I had a very similar problem: render some lines and get a UIImage. I used OpenGL ES 1.1 and multisampling. I removed some additional code that does not relate to rendering, as well as some OpenGL error checks. You can find the full code here: OSPRendererGL. Also, sorry for my one-for-all method.
@interface OSPRendererGL
{
EAGLContext* myContext;
GLuint framebuffer;
GLuint colorRenderbuffer;
GLuint depthRenderbuffer;
GLuint _vertexArray;
GLuint _vertexBuffer;
GLuint resolveFramebuffer;
GLuint msaaFramebuffer, msaaRenderbuffer, msaaDepthbuffer;
int width;
int height;
}
@implementation OSPRendererGL
- (id) init
{
self = [super init];
if (self)
{
myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
[EAGLContext setCurrentContext:myContext];
[self setupOpenGL];
[EAGLContext setCurrentContext:nil];
width = 256;
height = 256;
}
return self;
}
-(void) setupOpenGL
{
glGenFramebuffersOES(1, &framebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glGenRenderbuffersOES(1, &colorRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, colorRenderbuffer);
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
glGenFramebuffersOES(1, &msaaFramebuffer);
glGenRenderbuffersOES(1, &msaaRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, msaaRenderbuffer);
glGenRenderbuffersOES(1, &msaaDepthbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaDepthbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, msaaDepthbuffer);
}
-(UIImage *) renderImageAtZoom:(int)zoom
{
CGRect b = CGRectMake(0, 0, width, height);
OSPCoordinateRect r = OSPRectForMapAreaInRect([self mapArea], b);
double _scale = b.size.width / r.size.x;
double scale = 1.0/_scale;
[EAGLContext setCurrentContext:myContext];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(0.0f, 256.0f, 256.0f, 0.0f, 1.0f, -1.0f);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glScalef(_scale, _scale, 1);
glTranslatef(-r.origin.x, -r.origin.y, 0);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable(GL_LINE_SMOOTH);
glEnable(GL_POINT_SMOOTH);
glEnable(GL_BLEND);
glClearColor(1, 1, 1, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// rendering here
glPopMatrix();
// msaa
glBindFramebufferOES(GL_READ_FRAMEBUFFER_APPLE, msaaFramebuffer);
glBindFramebufferOES(GL_DRAW_FRAMEBUFFER_APPLE, framebuffer);
glResolveMultisampleFramebufferAPPLE();
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
// grabbing image from FBO
GLint backingWidth, backingHeight;
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSInteger x = 0, y = 0;
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(width, height));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, width, height), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
[EAGLContext setCurrentContext:nil];
return image;
}
I don't see a call to eglSwapBuffers(). That is required to start rendering the frame on PowerVR, even when rendering to a renderbuffer. See the example code at:
http://processors.wiki.ti.com/index.php/Render_to_Texture_with_OpenGL_ES
(Note that iOS exposes EAGL rather than EGL, so there is no eglSwapBuffers() to call there; the closest equivalents are glFinish() before glReadPixels(), which the code above already does, or presenting the renderbuffer for a layer-backed buffer.)

Drawing into OpenGL ES framebuffer and getting UIImage from it on iPhone

I'm trying to do offscreen rendering of some primitives in OpenGL ES on iOS. The code is as follows:
// context and neccesary buffers
@interface RendererGL
{
EAGLContext* myContext;
GLuint framebuffer;
GLuint colorRenderbuffer;
GLuint depthRenderbuffer;
}
.m file:
- (id) init
{
self = [super init];
if (self)
{
// initializing context
myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:myContext];
[self setupOpenGL]; // creating buffers
}
return self;
}
-(void) setupOpenGL
{
int width = 256;
int height = 256;
// generating buffers and binding them, as Apple's tutorial says
glGenFramebuffersOES(1, &framebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glGenRenderbuffersOES(1, &colorRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, colorRenderbuffer);
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
GLenum status = glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES);
if(status != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(@"failed to make complete framebuffer object %x", status);
}
}
- (UIImage *) renderImage
{
int width = 256;
int height = 256;
[EAGLContext setCurrentContext:myContext];
glEnable(GL_DEPTH_TEST);
// clear color - cyan
glClearColor(0.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// trying to draw some primitive - red line
float line[] = {-0.5f, -0.5f, 0.5f, 0.5f};
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer); //should I do this?
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(2, GL_FLOAT, 0, line);
glColor4f(1.0, 0.0, 0.0, 1.0);
glLineWidth(10);
glDrawArrays(GL_LINES, 0, 2); // draw line with two points
[myContext presentRenderbuffer:GL_RENDERBUFFER_OES]; // and this?
// then I grab image from _frameBuffer and return it as UIImage - this part is working
NSInteger x = 0, y = 0;
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(width, height));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, width, height), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
return image;
}
The problem is that -renderImage returns a completely cyan image, without the red line. What can it be? Did I miss some step before drawing the line?
The first problem is that I was creating an EAGLContext for OpenGL ES 2.0, but the functions I used were for OpenGL ES 1.1. The solution is to set the constant to kEAGLRenderingAPIOpenGLES1.
The second: I did not set the model and projection matrices (required when using 1.1):
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(-1.0f, 1.0f, -1.5f, 1.5f, -1.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
After this, everything works :)

Draw texture on touch in OpenGL (iPhone)

I recently started with OpenGL. I have managed to draw a background image on screen and into the framebuffer, and all works fine. The problem comes when I create a new texture and add it to the framebuffer.
{
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
// Clear screen
glClear(GL_COLOR_BUFFER_BIT);
// Render player ship
[playerShip drawAtPoint:CGPointMake(160, 240)];
// glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
// [context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
If I uncomment the above two lines,
// glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
// [context presentRenderbuffer:GL_RENDERBUFFER_OES];
I don't get the new texture that I am drawing with the code below:
NSInteger myDataLength = 20 * 20 * 4;
GLubyte *buffer = (GLubyte *) malloc(myDataLength);
glReadPixels(location.x-10, location.y-10, 20, 20, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
// gl renders "upside down" so swap top to bottom into new array.
// there's gotta be a better way, but this works.
GLubyte *buffer2 = (GLubyte *) malloc(myDataLength);
for(int y = 0; y < 20; y++)
{
for(int x = 0; x < 20 * 4; x++)
{
buffer2[(19 - y) * 20 * 4 + x] = buffer[y * 4 * 20 + x];
}
}
// make data provider with data.
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, NULL);
// prep the ingredients
int bitsPerComponent = 8;
int bitsPerPixel = 32;
int bytesPerRow = 4 * 20;
CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
// make the cgimage
CGImageRef imageRef = CGImageCreate(20, 20, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
// then make the uiimage from that
UIImage *myImage = [UIImage imageWithCGImage:imageRef];
Texture2D *texture = [[Texture2D alloc] initWithImage:myImage];
UIImageWriteToSavedPhotosAlbum(myImage, self, nil, nil);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glLoadIdentity();
[texture drawAtPoint:location];
glPopMatrix();
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glPushMatrix();
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
I don't know where I'm making a mistake. I am new to this.
If you are still rendering the background texture in your code and then render the spaceship on top of it, the spaceship might fail the depth buffer test.
Have you tried:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
to clear the z-buffer before rendering the spaceship?
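In the touch-drawing snippet above, that would mean clearing both buffers at the top of the frame, before the ship and the grabbed texture are drawn. A sketch using the question's own names:
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
// Clear color AND depth so stale depth values can't reject the new sprites
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
[playerShip drawAtPoint:CGPointMake(160, 240)];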

(iOS) OpenGL ES (2.0) Application, how to move the object in Z?

I'm playing with the OpenGL ES (2.0) Application (iOS) template source code in Xcode and was trying to make the colorful square move along the Z coordinate, so I tried to change the shader code from
gl_Position.y += sin(translate) / 2.0;
to
gl_Position.z += sin(translate) / 2.0;
with no success.
The square doesn't move at all.
It moves well in X and Y, though...
Is there some option I need to activate when initializing OpenGL? Thanks!
UPDATE:
I've uploaded an example. This is roughly the OpenGL ES template that Xcode generates; I just added the calls to create the depth buffer and changed the gl_Position line to gl_Position.z += sin(translate) / 2.0f in Shader.vsh.
I want to see the square move sinusoidally along the Z coordinate, but it just won't. Either it keeps still or, if I multiply the sin(), it appears and disappears in a cycle.
If anyone can help me out I'll be most grateful, since I sincerely do not know what else to do, and believe me, I've tried a lot...
The source is in a zip at: http://cl.ly/24240x2D1t2A3I0c1l1P
Thank you!
The example you are looking at has no depth buffer, and its projection is intended for 2D GL. Look at the aurioTouch example instead. In the EAGLView class you will notice an option to implement the depth buffer. The two combined (since aurioTouch doesn't implement shaders) should give a better understanding.
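Also worth checking: the stock template's vertex shader applies no projection matrix, so with w = 1 a change in Z never affects the on-screen position; it only pushes the vertex toward the near/far clip planes, which matches the square vanishing in a cycle. A minimal sketch of adding a perspective projection with GLKit; program, projectionUniform, and the "projection" uniform name are assumptions, not part of the template:
// Build a 45-degree perspective matrix and upload it to the vertex shader,
// so Z offsets become visible as foreshortening.
GLint projectionUniform = glGetUniformLocation(program, "projection"); // assumed names
GLKMatrix4 projection = GLKMatrix4MakePerspective(GLKMathDegreesToRadians(45.0f),
fabsf(self.view.bounds.size.width / self.view.bounds.size.height),
0.1f, 100.0f);
glUniformMatrix4fv(projectionUniform, 1, GL_FALSE, projection.m);
// ...and in Shader.vsh, apply the Z offset before projecting:
// uniform mat4 projection;
// vec4 p = position;
// p.z += sin(translate) / 2.0 - 2.0; // keep the square in front of the camera
// gl_Position = projection * p;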
I think the order of operations you have in your method is causing the problem.
Here's the code I use in my app "Live Effects Cam", which places the live camera image as a GL texture on shapes:
#define DEGREES_TO_RADIANS(__ANGLE__) ((__ANGLE__) / 180.0 * M_PI)
@interface GLView : UIView
{
@private
/* The pixel dimensions of the backbuffer */
GLint backingWidth;
GLint backingHeight;
EAGLContext *context;
/* OpenGL names for the renderbuffer and framebuffers used to render to this view */
GLuint viewRenderbuffer;
GLuint viewFramebuffer;
GLuint depthRenderbuffer;
/* OpenGL name for the sprite texture */
GLuint spriteTexture;
}
@property (readonly) GLint backingWidth;
@property (readonly) GLint backingHeight;
@property (readonly) EAGLContext *context;
- (void) drawView;
- (BOOL) createFramebuffer;
- (void) destroyFramebuffer;
+ (UIImage *) snapshot:(GLView *)eaglview;
@end
@implementation GLView
@synthesize backingWidth;
@synthesize backingHeight;
@synthesize context;
+ (Class) layerClass
{
return [CAEAGLLayer class];
}
- (id)init
{
self = [super initWithFrame:CGRectMake(0.0, 0.0, 480.0, 640.0)]; // size of the camera image being captured
if ( self==nil )
return self;
// Set Content Scaling
//
if ( HIRESDEVICE )
{
self.contentScaleFactor = (CGFloat)2.0;
}
// Get our backing layer
//
CAEAGLLayer *eaglLayer = (CAEAGLLayer*) self.layer;
// Configure it so that it is opaque, does not retain the contents of the backbuffer when displayed, and uses RGBA8888 color.
//
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking,
kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
nil];
// Create our EAGLContext, and if successful make it current and create our framebuffer.
//
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if(!context || ![EAGLContext setCurrentContext:context] || ![self createFramebuffer])
{
[self release];
return nil;
}
// Final View Settings
//
[self setOpaque:YES];
self.multipleTouchEnabled = YES;
self.backgroundColor = [UIColor clearColor];
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
GLfloat zNear = 1.0;
GLfloat zFar = 1000.0;
GLfloat fieldOfView = 90; // Lens Angle of View
GLfloat size = zNear * tanf(DEGREES_TO_RADIANS(fieldOfView) / 2.0);
CGRect rect = CGRectMake( (CGFloat)0.0, (CGFloat)0.0, backingWidth, backingHeight);
glFrustumf(-size, size, -size / (rect.size.width / rect.size.height), size / (rect.size.width / rect.size.height), zNear, zFar);
glViewport(0, 0, backingWidth, backingHeight);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
glEnable(GL_MULTISAMPLE);
glEnable(GL_LINE_SMOOTH);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
glHint(GL_POINT_SMOOTH_HINT, GL_NICEST);
glDisable(GL_ALPHA_TEST);
// Turn Translucent Textures: OFF
//
glDisable(GL_BLEND);
// // Turn Translucent Textures: ON
// //
// glEnable(GL_BLEND);
// glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
return self;
}
- (void) drawView
{
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (BOOL)createFramebuffer
{
// Generate IDs for a framebuffer object and a color renderbuffer
//
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
// This call associates the storage for the current render buffer with the EAGLDrawable (our CAEAGLLayer)
// allowing us to draw into a buffer that will later be rendered to screen wherever the layer is (which corresponds with our view).
//
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
// If this app uses a depth buffer, we'll create and attach one via another renderbuffer.
//
if ( YES )
{
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
{
NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
- (void) destroyFramebuffer
{
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
if(depthRenderbuffer)
{
glDeleteRenderbuffersOES(1, &depthRenderbuffer);
depthRenderbuffer = 0;
}
}
+ (UIImage *) snapshot:(GLView *)eaglview
{
NSInteger x = 0;
NSInteger y = 0;
NSInteger width = [eaglview backingWidth];
NSInteger height = [eaglview backingHeight];
NSInteger dataLength = width * height * 4;
// Need to do this to get it to flush before taking the snapshot
//
NSUInteger i;
for ( i=0; i<100; i++ )
{
glFlush();
CFRunLoopRunInMode(kCFRunLoopDefaultMode, (float)1.0/(float)60.0, FALSE);
}
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
// Read pixel data from the framebuffer
//
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
// Create a CGImage with the pixel data
// If your OpenGL ES content is opaque, use kCGImageAlphaNoneSkipLast to ignore the alpha channel
// otherwise, use kCGImageAlphaPremultipliedLast
//
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast, ref, NULL, true, kCGRenderingIntentDefault);
// OpenGL ES measures data in PIXELS
// Create a graphics context with the target size measured in POINTS
//
NSInteger widthInPoints;
NSInteger heightInPoints;
if (NULL != UIGraphicsBeginImageContextWithOptions)
{
// On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
// Set the scale parameter to your OpenGL ES view's contentScaleFactor
// so that you get a high-resolution snapshot when its value is greater than 1.0
//
CGFloat scale = eaglview.contentScaleFactor;
widthInPoints = width / scale;
heightInPoints = height / scale;
UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
}
else
{
// On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
//
widthInPoints = width;
heightInPoints = height;
UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
}
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
// UIKit coordinate system is upside down to GL/Quartz coordinate system
// Flip the CGImage by rendering it to the flipped bitmap context
// The size of the destination area is measured in POINTS
//
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);
// Retrieve the UIImage from the current context
UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); // autoreleased image
UIGraphicsEndImageContext();
// Clean up
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
return image;
}
@end
// Create default framebuffer object.
glGenFramebuffers(1, &defaultFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
// Create color render buffer and allocate backing store.
glGenRenderbuffers(1, &depthRenderbuffer); <----
glGenRenderbuffers(1, &colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer); <----
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &framebufferWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &framebufferHeight);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, framebufferWidth, framebufferHeight); <----
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer); <----
I've added that to my code (the lines marked <----) and now I get a pinkish screen (lol). Any ideas? This is really frustrating. Shouldn't I do something in the setFrame.. / presentFrame.. methods?
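One likely cause, judging only from the snippet above: glRenderbufferStorage allocates storage for whichever renderbuffer is currently bound, and at the depth storage call (the third arrow) the color renderbuffer is still bound, so the depth allocation replaces the color buffer's drawable storage. A sketch of the intended order, reusing the names above:
// Color buffer: bind it, take storage from the layer, then attach it.
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &framebufferWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &framebufferHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
// Depth buffer: bind it BEFORE glRenderbufferStorage, so the depth storage
// goes to the depth renderbuffer rather than clobbering the color one.
glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, framebufferWidth, framebufferHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer);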

Set Renderbuffer Width and Height (OpenGL ES)

I'm currently experiencing an issue with an OpenGL ES renderbuffer where the backing width and height are both set to 15. Is there any way to set them to a width of 320 and a height of 480?
My project is built on Apple's EAGLView class and ES1Renderer, but I've moved it from the app delegate to a controller. I also moved the CADisplayLink outside of it (I update my game logic with the timestamp from this).
Any help would be greatly appreciated.
I add the glview to the window as follows:
CGRect applicationFrame = [[UIScreen mainScreen] applicationFrame];
[window addSubview:gameController.glview];
[window makeKeyAndVisible];
I synthesize the controller and the glview within it. The EAGLView and Renderer are otherwise unmodified.
Renderer Initialization:
// Get the layer
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = TRUE;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
renderer = [[ES1Renderer alloc] init];
The renderer's "resize from layer" method:
- (BOOL)resizeFromLayer:(CAEAGLLayer *)layer
{
// Allocate color buffer backing based on the current layer size
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:layer];
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSLog(@"Backing Width: %i and Height: %i", backingWidth, backingHeight);
if (glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
{
NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
At the time you call renderbufferStorage:fromDrawable:, what is layer.bounds? The pixel width and height of your renderbuffer are allocated to match layer.bounds.size.
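If the view is created with a default frame and only sized later, the layer can still have the wrong bounds when the renderbuffer storage is allocated. A minimal sketch, reusing the names from the question (where exactly to set the frame depends on how the controller creates glview):
// Give the view its real bounds BEFORE the framebuffer is (re)created,
// so renderbufferStorage:fromDrawable: sees the correct layer size.
gameController.glview.frame = [[UIScreen mainScreen] applicationFrame];
[window addSubview:gameController.glview];
[window makeKeyAndVisible];
// Then (re)allocate the renderbuffer storage from the resized layer:
[renderer resizeFromLayer:(CAEAGLLayer *)gameController.glview.layer];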
From Apple's documentation itself. Worked perfectly for me:
- (UIImage*)snapshot:(UIView*)eaglview {
GLint backingWidth, backingHeight;
// Bind the color renderbuffer used to render the OpenGL ES view
// If your application only creates a single color renderbuffer which is already bound at this point,
// this call is redundant, but it is needed if you're dealing with multiple renderbuffers.
// Note, replace "_colorRenderbuffer" with the actual name of the renderbuffer object defined in your class.
glBindRenderbufferOES(GL_RENDERBUFFER_OES, _colorRenderbuffer);
// Get the size of the backing CAEAGLLayer
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSInteger x = 0, y = 0, width = backingWidth, height = backingHeight;
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
// Read pixel data from the framebuffer
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
// Create a CGImage with the pixel data
// If your OpenGL ES content is opaque, use kCGImageAlphaNoneSkipLast to ignore the alpha channel
// otherwise, use kCGImageAlphaPremultipliedLast
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
// OpenGL ES measures data in PIXELS
// Create a graphics context with the target size measured in POINTS
NSInteger widthInPoints, heightInPoints;
if (NULL != UIGraphicsBeginImageContextWithOptions) {
// On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
// Set the scale parameter to your OpenGL ES view's contentScaleFactor
// so that you get a high-resolution snapshot when its value is greater than 1.0
CGFloat scale = eaglview.contentScaleFactor;
widthInPoints = width / scale;
heightInPoints = height / scale;
UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
}
else {
// On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
widthInPoints = width;
heightInPoints = height;
UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
}
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
// UIKit coordinate system is upside down to GL/Quartz coordinate system
// Flip the CGImage by rendering it to the flipped bitmap context
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);
// Retrieve the UIImage from the current context
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// Clean up
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
return image;
}