I'm trying to do offscreen rendering of some primitives in OpenGL ES on iOS. The code is as follows:
// context and necessary buffers
@interface RendererGL : NSObject
{
EAGLContext* myContext;
GLuint framebuffer;
GLuint colorRenderbuffer;
GLuint depthRenderbuffer;
}
.m file:
- (id) init
{
self = [super init];
if (self)
{
// initializing context
myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:myContext];
[self setupOpenGL]; // creating buffers
}
return self;
}
-(void) setupOpenGL
{
int width = 256;
int height = 256;
// generating buffers and binding them as Apple's tutorial says
glGenFramebuffersOES(1, &framebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glGenRenderbuffersOES(1, &colorRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, colorRenderbuffer);
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
GLenum status = glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES);
if(status != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(#"failed to make complete framebuffer object %x", status);
}
}
- (UIImage *) renderImage
{
int width = 256;
int height = 256;
[EAGLContext setCurrentContext:myContext];
glEnable(GL_DEPTH_TEST);
// clear color - cyan
glClearColor(0.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// trying to draw some primitive - red line
float line[] = {-0.5f, -0.5f, 0.5f, 0.5f};
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer); //should I do this?
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(2, GL_FLOAT, 0, line);
glColor4f(1.0, 0.0, 0.0, 1.0);
glLineWidth(10);
glDrawArrays(GL_LINES, 0, 2); // draw line with two points
[myContext presentRenderbuffer:GL_RENDERBUFFER_OES]; // and this?
// then I grab the image from the framebuffer and return it as a UIImage - this part is working
NSInteger x = 0, y = 0;
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(width, height));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, width, height), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
return image;
}
The problem is that -renderImage returns a completely cyan image, without the red line. What could it be? Did I miss some steps before drawing the line?
The first problem was that I was creating an EAGLContext for OpenGL ES 2.0, but the functions I used are from OpenGL ES 1.1. The solution is to create the context with the kEAGLRenderingAPIOpenGLES1 constant instead.
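In code, that is the single changed line:
myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];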
The second: I did not set the model-view and projection matrices (required when using 1.1):
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(-1.0f, 1.0f, -1.5f, 1.5f, -1.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
After this, everything works.
I'm using OpenGL ES to render some special effects. I don't want to show this to the user; I just want to save the result as a UIImage. Can anybody please help me?
This is the code I'm using. I can get an image, which contains the red clear color I use, but no geometry drawing is shown.
#import "RendererGL.h"
#import <GLKit/GLKit.h>
#import <UIKit/UIKit.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/EAGLDrawable.h>
#import <OpenGLES/ES2/glext.h>
#import <QuartzCore/QuartzCore.h>
static NSInteger WIDTH_IN_PIXEL = 400;
static NSInteger HEIGHT_IN_PIXEL = 300;
typedef struct {
GLKVector3 positionCoords;
} SceneVertex;
static const SceneVertex vertices[] =
{
{{-0.5f, -0.5f, 0.0}}, // lower left corner
{{ 0.5f, -0.5f, 0.0}}, // lower right corner
{{-0.5f, 0.5f, 0.0}} // upper left corner
};
@implementation RendererGL
{
EAGLContext* _myContext;
GLuint _framebuffer;
GLuint _colorRenderbuffer;
GLuint _depthRenderbuffer;
GLuint _vertexBufferID;
GLKBaseEffect *_baseEffect;
}
- (id) init
{
self = [super init];
if (self)
{
_myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:_myContext];
[self setupOffscreenBuffer];
[self setUpEffect];
[self renderImage];
[self saveImage]; // this does work, since I get an image, but the image only contains the red color I used to clear
}
return self;
}
-(void)setUpEffect
{
_baseEffect = [[GLKBaseEffect alloc] init];
_baseEffect.useConstantColor = GL_TRUE;
_baseEffect.constantColor = GLKVector4Make(0.0f, 0.0f, 1.0f, 1.0f);
}
// this code is from Apple's documentation
-(void)setupOffscreenBuffer
{
glGenFramebuffers(1, &_framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
glGenRenderbuffers(1, &_colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorRenderbuffer);
glGenRenderbuffers(1, &_depthRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthRenderbuffer);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if(status != GL_FRAMEBUFFER_COMPLETE) {
NSLog(#"failed to make complete framebuffer object %x", status);
}
}
- (void) renderImage
{
GLenum error = GL_NO_ERROR;
glClearColor(1, 0, 0, 1); //red clear color, this can be seen
glClear(GL_COLOR_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
[_baseEffect prepareToDraw];
glGenBuffers(1, &_vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(SceneVertex), NULL);
glDrawArrays(GL_TRIANGLES,0,3);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
glFinish();
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
}
-(void)saveImage
{
GLenum error = GL_NO_ERROR;
NSInteger x = 0, y = 0;
NSInteger dataLength = WIDTH_IN_PIXEL * HEIGHT_IN_PIXEL * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, GL_RGBA, GL_UNSIGNED_BYTE, data);
NSData *pixelsRead = [NSData dataWithBytes:data length:dataLength];
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, 8, 32, WIDTH_IN_PIXEL * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
NSData *d = UIImageJPEGRepresentation(image, 1);
NSString *documentDirPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
static NSInteger imageNO = 1;
imageNO++;
NSString *savingPath = [documentDirPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%ld.jpg", (long)imageNO]];
BOOL succ = [d writeToFile:savingPath atomically:NO]; // this succeeds
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
}
@end
I had a very similar problem: to render some lines and get a UIImage. I used OpenGL ES 1.1 and multisampling. I removed some additional code that does not relate to rendering, plus some OpenGL error checks. You can find the full code here: OSPRendererGL. Also, sorry for my one-for-all method.
@interface OSPRendererGL : NSObject
{
EAGLContext* myContext;
GLuint framebuffer;
GLuint colorRenderbuffer;
GLuint depthRenderbuffer;
GLuint _vertexArray;
GLuint _vertexBuffer;
GLuint resolveFramebuffer;
GLuint msaaFramebuffer, msaaRenderbuffer, msaaDepthbuffer;
int width;
int height;
}
@implementation OSPRendererGL
- (id) init
{
self = [super init];
if (self)
{
myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
[EAGLContext setCurrentContext:myContext];
[self setupOpenGL];
[EAGLContext setCurrentContext:nil];
width = 256;
height = 256;
}
return self;
}
-(void) setupOpenGL
{
glGenFramebuffersOES(1, &framebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glGenRenderbuffersOES(1, &colorRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, colorRenderbuffer);
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
glGenFramebuffersOES(1, &msaaFramebuffer);
glGenRenderbuffersOES(1, &msaaRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, msaaRenderbuffer);
glGenRenderbuffersOES(1, &msaaDepthbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaDepthbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, msaaDepthbuffer);
}
-(UIImage *) renderImageAtZoom:(int)zoom
{
CGRect b = CGRectMake(0, 0, width, height);
OSPCoordinateRect r = OSPRectForMapAreaInRect([self mapArea], b);
double _scale = b.size.width / r.size.x;
double scale = 1.0/_scale;
[EAGLContext setCurrentContext:myContext];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(0.0f, 256.0f, 256.0f, 0.0f, 1.0f, -1.0f);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glScalef(_scale, _scale, 1);
glTranslatef(-r.origin.x, -r.origin.y, 0);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable(GL_LINE_SMOOTH);
glEnable(GL_POINT_SMOOTH);
glEnable(GL_BLEND);
glClearColor(1, 1, 1, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// rendering here
glPopMatrix();
// msaa
glBindFramebufferOES(GL_READ_FRAMEBUFFER_APPLE, msaaFramebuffer);
glBindFramebufferOES(GL_DRAW_FRAMEBUFFER_APPLE, framebuffer);
glResolveMultisampleFramebufferAPPLE();
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
// grabbing image from FBO
GLint backingWidth, backingHeight;
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSInteger x = 0, y = 0;
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(width, height));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, width, height), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
[EAGLContext setCurrentContext:nil];
return image;
}
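A hypothetical call site for this renderer is then simply:
OSPRendererGL *renderer = [[OSPRendererGL alloc] init];
UIImage *image = [renderer renderImageAtZoom:15]; // the zoom value here is illustrative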
I don't see a call to eglSwapBuffers(). That is required to start rendering the frame on PowerVR, even when rendering to a Renderbuffer. See the example code at:
http://processors.wiki.ti.com/index.php/Render_to_Texture_with_OpenGL_ES
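Note that iOS uses EAGL rather than EGL, so there is no eglSwapBuffers() to call directly; the closest equivalents are -presentRenderbuffer: for onscreen drawables, or forcing the pipeline to finish before reading back an offscreen FBO. A minimal sketch of the offscreen case:
glFinish(); // block until all queued commands have executed
glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data); // readback itself also implies a sync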
Having recently started with OpenGL, I have managed to draw a background image on screen and into the framebuffer. All works fine. The problem comes when I create a new texture and add it to the framebuffer.
{
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
// Clear screen
glClear(GL_COLOR_BUFFER_BIT);
// Render player ship
[playerShip drawAtPoint:CGPointMake(160, 240)];
// glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
// [context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
If I uncomment the above two lines
// glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
// [context presentRenderbuffer:GL_RENDERBUFFER_OES];
I don't get the new texture that I am drawing with the code below:
NSInteger myDataLength = 20 * 20 * 4;
GLubyte *buffer = (GLubyte *) malloc(myDataLength);
glReadPixels(location.x-10, location.y-10, 20, 20, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
// gl renders "upside down" so swap top to bottom into new array.
// there's gotta be a better way, but this works.
GLubyte *buffer2 = (GLubyte *) malloc(myDataLength);
for(int y = 0; y < 20; y++)
{
for(int x = 0; x < 20 * 4; x++)
{
buffer2[(19 - y) * 20 * 4 + x] = buffer[y * 4 * 20 + x];
}
}
// make data provider with data.
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, NULL);
// prep the ingredients
int bitsPerComponent = 8;
int bitsPerPixel = 32;
int bytesPerRow = 4 * 20;
CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
// make the cgimage
CGImageRef imageRef = CGImageCreate(20, 20, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
// then make the uiimage from that
UIImage *myImage = [UIImage imageWithCGImage:imageRef];
Texture2D *texture = [[Texture2D alloc] initWithImage:myImage];
UIImageWriteToSavedPhotosAlbum(myImage, self, nil, nil);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glLoadIdentity();
[texture drawAtPoint:location];
glPopMatrix();
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glPushMatrix();
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
I don't know where I am making a mistake. I am new to this.
If you are still rendering the background texture in your code and then render the space ship on top of it, the space ship might fail the depth buffer test.
Have you tried out:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
to clear the z-buffer and then rendering the spaceship?
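Alternatively, if the scene is purely 2D, it may be simpler not to depth-test at all; a minimal sketch, assuming sprites are drawn back to front:
glDisable(GL_DEPTH_TEST); // composite sprites purely in draw order
[playerShip drawAtPoint:CGPointMake(160, 240)];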
I'm using CoreGraphics to create a text texture. Unfortunately the text renders like this (the text color is the same as the background, to demonstrate the strange border).
I've tried playing with stroke colors and borders, so I think it is due to OpenGL ES 2.0 and not CoreGraphics.
// Create default framebuffer object. The backing will be allocated for the current layer in -resizeFromLayer
glGenFramebuffers(1, &defaultFramebuffer);
glGenRenderbuffers(1, &colorRenderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
glActiveTexture(GL_TEXTURE0);
glUniform1i(uniforms[UNIFORM_SAMPLER], 0);
// Set up the texture state.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
texture = [[FW2Texture alloc] initWithString:@"Text"];
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, texture.width, texture.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, texture.imageData);
And the core graphics bit:
-(id)initWithString:(NSString*)str {
if((self = [super init])) {
UIFont *font = [UIFont systemFontOfSize:17];
CGSize size = [str sizeWithFont:font];
NSInteger i;
width = size.width;
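// round width up to the next power of two (the same is done for height below);
// non-power-of-two texture sizes are restricted in OpenGL ES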
if((width != 1) && (((int)width) & (((int)width) - 1))) {
i = 1;
while(i < width)
i *= 2;
width = i;
}
height = size.height;
if((height != 1) && (((int)height) & (((int)height) - 1))) {
i = 1;
while(i < height)
i *= 2;
height = i;
}
NSInteger BitsPerComponent = 8;
int bpp = 4; // bytes per pixel (RGBA8888)
int byteCount = width * height * bpp;
uint8_t *data = (uint8_t*) calloc(byteCount, 1);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
CGContextRef context = CGBitmapContextCreate(data,
width,
height,
BitsPerComponent,
bpp * width,
colorSpace,
bitmapInfo);
CGColorSpaceRelease(colorSpace);
CGContextSetGrayFillColor(context, 0.5f, 1.0f);
CGContextTranslateCTM(context, 0.0f, height);
CGContextScaleCTM(context, 1.0f, -1.0f);
UIGraphicsPushContext(context);
[str drawInRect:CGRectMake(0,
0,
size.width,
size.height)
withFont:font
lineBreakMode:UILineBreakModeWordWrap
alignment:UITextAlignmentCenter];
UIGraphicsPopContext();
CGContextRelease(context);
imageData = (uint8_t*)[[NSData dataWithBytesNoCopy:data length:byteCount freeWhenDone:YES] bytes];
}
return self;
}
What's your glBlendFunc? You're taking premultiplied alpha from CoreGraphics, so e.g. instead of a border pixel being (r, g, b, 0.5) it'll be (0.5*r, 0.5*g, 0.5*b, 0.5). That means you should composite with blending enabled, using glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA) so that you get srcColour + (1 - alpha of srcColour)*dstColour.
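In GL terms, that is just (a minimal sketch of the blend state):
glEnable(GL_BLEND);
// premultiplied alpha: result = srcColour + (1 - srcAlpha) * dstColour
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);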
I'm playing with the OpenGL ES (2.0) Application (iOS) template source code in Xcode and was trying to make the colorful square move along the Z coordinate, so I tried to change the shader code from
gl_Position.y += sin(translate) / 2.0;
to
gl_Position.z += sin(translate) / 2.0;
with no success.
The square doesn't move at all.
It moves well in X and Y though...
Is there some option I need to activate when initializing OpenGL? Thanks!
UPDATE:
I've uploaded an example. This is roughly the OpenGL ES template that Xcode generates; I just added the calls to create the depth buffer and changed gl_Position.x to gl_Position.z += sin(translate) / 2.0f in Shader.vsh.
I wish to see the square move in a sinusoidal way on the Z coordinate, but it just won't. Either it keeps still or, if I multiply the sin(), it appears and disappears in a cycle.
If anyone can help me out I'll be most grateful, since I sincerely do not know what else to do, and believe me, I have tried a lot...
The source is in a zip at: http://cl.ly/24240x2D1t2A3I0c1l1P
Thank you!
The example you are looking at has no depth buffer and uses a projection intended for 2D GL. Look at the aurioTouch example instead. In the EAGLView class you will notice an option to implement the depth buffer. The two combined (since aurioTouch doesn't implement shaders) should give a better understanding.
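One detail worth spelling out: with the template's default setup there is effectively no perspective projection, so changing z only changes the depth value, not the on-screen position, which is why the square appears to stand still (and gets clipped away entirely once z leaves the [-1, 1] range). A hedged sketch of feeding a perspective matrix to the ES 2.0 shader using GLKit math (mvpUniformLocation, backingWidth and backingHeight are assumed names; the shader would multiply this uniform into gl_Position):
#import <GLKit/GLKit.h>
GLKMatrix4 projection = GLKMatrix4MakePerspective(GLKMathDegreesToRadians(65.0f), backingWidth / (float)backingHeight, 0.1f, 100.0f);
GLKMatrix4 modelView = GLKMatrix4MakeTranslation(0.0f, 0.0f, -4.0f); // push the square in front of the camera
GLKMatrix4 mvp = GLKMatrix4Multiply(projection, modelView);
glUniformMatrix4fv(mvpUniformLocation, 1, GL_FALSE, mvp.m);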
I think the order of operations you have in your method is causing the problem.
Here's the code I use in my app "Live Effects Cam", which places the live camera image as a GL texture on shapes:
#define DEGREES_TO_RADIANS(__ANGLE__) ((__ANGLE__) / 180.0 * M_PI)
@interface GLView : UIView
{
@private
/* The pixel dimensions of the backbuffer */
GLint backingWidth;
GLint backingHeight;
EAGLContext *context;
/* OpenGL names for the renderbuffer and framebuffers used to render to this view */
GLuint viewRenderbuffer;
GLuint viewFramebuffer;
GLuint depthRenderbuffer;
/* OpenGL name for the sprite texture */
GLuint spriteTexture;
}
@property (readonly) GLint backingWidth;
@property (readonly) GLint backingHeight;
@property (readonly) EAGLContext *context;
- (void) drawView;
- (BOOL) createFramebuffer;
- (void) destroyFramebuffer;
+ (UIImage *) snapshot:(GLView *)eaglview;
@end
@implementation GLView
@synthesize backingWidth;
@synthesize backingHeight;
@synthesize context;
+ (Class) layerClass
{
return [CAEAGLLayer class];
}
- (id)init
{
self = [super initWithFrame:CGRectMake(0.0, 0.0, 480.0, 640.0)]; // size of the camera image being captured
if ( self==nil )
return self;
// Set Content Scaling
//
if ( HIRESDEVICE )
{
self.contentScaleFactor = (CGFloat)2.0;
}
// Get our backing layer
//
CAEAGLLayer *eaglLayer = (CAEAGLLayer*) self.layer;
// Configure it so that it is opaque, does not retain the contents of the backbuffer when displayed, and uses RGBA8888 color.
//
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking,
kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
nil];
// Create our EAGLContext, and if successful make it current and create our framebuffer.
//
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if(!context || ![EAGLContext setCurrentContext:context] || ![self createFramebuffer])
{
[self release];
return nil;
}
// Final View Settings
//
[self setOpaque:YES];
self.multipleTouchEnabled = YES;
self.backgroundColor = [UIColor clearColor];
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
GLfloat zNear = 1.0;
GLfloat zFar = 1000.0;
GLfloat fieldOfView = 90; // Lens Angle of View
GLfloat size = zNear * tanf(DEGREES_TO_RADIANS(fieldOfView) / 2.0);
CGRect rect = CGRectMake( (CGFloat)0.0, (CGFloat)0.0, backingWidth, backingHeight);
glFrustumf(-size, size, -size / (rect.size.width / rect.size.height), size / (rect.size.width / rect.size.height), zNear, zFar);
glViewport(0, 0, backingWidth, backingHeight);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
glEnable(GL_MULTISAMPLE);
glEnable(GL_LINE_SMOOTH);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
glHint(GL_POINT_SMOOTH_HINT, GL_NICEST);
glDisable(GL_ALPHA_TEST);
// Turn Translucent Textures: OFF
//
glDisable(GL_BLEND);
// // Turn Translucent Textures: ON
// //
// glEnable(GL_BLEND);
// glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
return self;
}
- (void) drawView
{
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (BOOL)createFramebuffer
{
// Generate IDs for a framebuffer object and a color renderbuffer
//
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
// This call associates the storage for the current render buffer with the EAGLDrawable (our CAEAGLLayer)
// allowing us to draw into a buffer that will later be rendered to screen whereever the layer is (which corresponds with our view).
//
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
// If this app uses a depth buffer, we'll create and attach one via another renderbuffer.
//
if ( YES )
{
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
{
NSLog(#"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
- (void) destroyFramebuffer
{
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
if(depthRenderbuffer)
{
glDeleteRenderbuffersOES(1, &depthRenderbuffer);
depthRenderbuffer = 0;
}
}
+ (UIImage *) snapshot:(GLView *)eaglview
{
NSInteger x = 0;
NSInteger y = 0;
NSInteger width = [eaglview backingWidth];
NSInteger height = [eaglview backingHeight];
NSInteger dataLength = width * height * 4;
// Need to do this to get it to flush before taking the snapshot
//
NSUInteger i;
for ( i=0; i<100; i++ )
{
glFlush();
CFRunLoopRunInMode(kCFRunLoopDefaultMode, (float)1.0/(float)60.0, FALSE);
}
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
// Read pixel data from the framebuffer
//
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
// Create a CGImage with the pixel data
// If your OpenGL ES content is opaque, use kCGImageAlphaNoneSkipLast to ignore the alpha channel
// otherwise, use kCGImageAlphaPremultipliedLast
//
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast, ref, NULL, true, kCGRenderingIntentDefault);
// OpenGL ES measures data in PIXELS
// Create a graphics context with the target size measured in POINTS
//
NSInteger widthInPoints;
NSInteger heightInPoints;
if (NULL != UIGraphicsBeginImageContextWithOptions)
{
// On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
// Set the scale parameter to your OpenGL ES view's contentScaleFactor
// so that you get a high-resolution snapshot when its value is greater than 1.0
//
CGFloat scale = eaglview.contentScaleFactor;
widthInPoints = width / scale;
heightInPoints = height / scale;
UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
}
else
{
// On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
//
widthInPoints = width;
heightInPoints = height;
UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
}
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
// UIKit coordinate system is upside down to GL/Quartz coordinate system
// Flip the CGImage by rendering it to the flipped bitmap context
// The size of the destination area is measured in POINTS
//
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);
// Retrieve the UIImage from the current context
UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); // autoreleased image
UIGraphicsEndImageContext();
// Clean up
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
return image;
}
@end
// Create default framebuffer object.
glGenFramebuffers(1, &defaultFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
// Create color render buffer and allocate backing store.
glGenRenderbuffers(1, &depthRenderbuffer); <----
glGenRenderbuffers(1, &colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer); <----
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &framebufferWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &framebufferHeight);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, framebufferWidth, framebufferHeight); <----
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer); <----
I've added the marked lines (<----) to my code and now I get a pinkish screen (lol). Any ideas? This is really frustrating. Shouldn't I do something in setFramebuffer / presentFramebuffer?
For some reason my textures are not drawing, even though my code looks exactly the same as an old project that worked. So far, the vertices and TexCoords look fine: white squares are being drawn where the texture should be drawn instead.
The process so far goes:
I load up a Controller and, in loadView, I call:
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
Then my renderer is loaded up, which does nothing on construction. After that I load my texture into GL. This code is a direct copy from my old project and I know it works.
- (GLuint)textureFromPath:(NSString *)path
{
GLuint texture;
glGenTextures(1, &texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:path];
if (!img) {
NSLog(#"Image \"%#\" could not be loaded and was not bound", path);
return 0;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef context = CGBitmapContextCreate(image, width, height, 8, 4 * width, colourSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(context, bounds);
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGContextDrawImage(context, bounds, cgimage);
CGContextRelease(context);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
[img release];
free(image);
return texture;
}
I then take the generated texture from GL and assign it to position 0 of the Renderer's texture array. I also did this in my old project and it worked fine too. So far so good, I feel.
The application then tells it to startAnimation, which calls setFramebuffer, which in turn calls createFramebuffer since the framebuffer is undefined. It then notifies the Renderer (by the way, the Renderer is a C++ class) that the framebuffers have been created.
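Since setFramebuffer and createFramebuffer themselves aren't shown, here is a hedged sketch of what that step usually looks like in the stock Apple template (ivar and method names are assumptions); the bufferHasBeenCreated callback below then configures GL state once the buffers exist:
- (void)setFramebuffer
{
[EAGLContext setCurrentContext:context];
if (!framebuffer)
[self createFramebuffer]; // lazily create the FBO and colour renderbuffer
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glViewport(0, 0, backingWidth, backingHeight);
}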
void bufferHasBeenCreated() const {
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(-160.0f, 160.0f, -240.0f, 240.0f, -5.0f, 1.0f);
glViewport(0, 0, 320, 480);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
}
It then calls render on the Renderer.
void render() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.325f, 0.0f, 0.325f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
static float rot = 0.0f;
glRotatef(rot, 0.0f, 0.0f, 1.0f);
//glColor4f(0.0f, 0.0f, 1.0f, 1.0f);
glBindTexture(GL_TEXTURE_2D, texture_[0]);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
printf("Error. glError: 0x%04X\n", err);
glVertexPointer(2, GL_FLOAT, 0, pos[0]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[1]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[2]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
rot += 0.5f;
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
Finally, it calls presentFramebuffer, which binds the renderbuffer and presents it on the context.
Edit: I have done some more work on this, and it turns out it is something to do with the context and the buffers. Whenever I set up just the context and enable GL_TEXTURE_2D and GL_BLEND, as above, the textures don't load. Yet when I do it after the buffers are set up, everything works.
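In other words, the ordering that works appears to be roughly (a sketch; names are assumptions):
[EAGLContext setCurrentContext:context]; // 1. make the context current
[self createFramebuffer]; // 2. create and bind the framebuffer and renderbuffers
texture_[0] = [self textureFromPath:path]; // 3. only then create GL textures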
I have got my texture to draw. I pulled all my code out and put it in its own file. I will then start pulling it apart again and hopefully get everything working in the structure that I already have.
(Objective-C) ES1Renderer.h
#import <QuartzCore/QuartzCore.h>
#import "OpenGLES.h"
@interface ES1Renderer : UIView {
@private
GLint backingWidth;
GLint backingHeight;
EAGLContext *context;
GLuint viewFramebuffer, viewRenderbuffer;
GLuint texture[1];
BOOL animating;
BOOL displayLinkSupported;
NSInteger animationFrameInterval;
// Use of the CADisplayLink class is the preferred method for controlling your animation timing.
// CADisplayLink will link to the main display and fire every vsync when added to a given run-loop.
// The NSTimer class is used only as fallback when running on a pre 3.1 device where CADisplayLink
// isn't available.
id displayLink;
NSTimer *animationTimer;
}
@property (readonly, nonatomic, getter=isAnimating) BOOL animating;
@property (nonatomic) NSInteger animationFrameInterval;
- (void) startAnimation;
- (void) stopAnimation;
- (void)render;
@end
Next ES1Renderer.m
#import "ES1Renderer.h"
@implementation ES1Renderer
@synthesize animating;
@dynamic animationFrameInterval;
+ (Class)layerClass
{
return [CAEAGLLayer class];
}
- (id)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame]) {
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context])
{
[self release];
return nil;
}
// Generate buffers
glGenFramebuffersOES(1, &viewFramebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// Disable Depth
glDisable(GL_DEPTH_TEST);
// Load textures
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
glGenTextures(1, texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"colour" ofType:@"png"]];
if (!img) {
NSLog(#"Image \"colour.png\" could not be loaded and was not bound");
[self release];
return nil;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef imgContext = CGBitmapContextCreate(image,
width, height,
8, 4 * width, colourSpace,
kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(imgContext, bounds);
CGContextTranslateCTM (imgContext, 0, height);
CGContextScaleCTM (imgContext, 1.0, -1.0);
CGContextDrawImage(imgContext, bounds, cgimage);
CGContextRelease(imgContext);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error. glError: 0x%04X\n", err);
free(image);
[img release];
animating = FALSE;
displayLinkSupported = FALSE;
animationFrameInterval = 1;
displayLink = nil;
animationTimer = nil;
// A system version of 3.1 or greater is required to use CADisplayLink. The NSTimer
// class is used as fallback when it isn't available.
NSString *reqSysVer = @"3.1";
NSString *currSysVer = [[UIDevice currentDevice] systemVersion];
if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending)
displayLinkSupported = TRUE;
}
return self;
}
- (void)drawView:(id)sender
{
[self render];
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error. glError: 0x%04X\n", err);
}
- (void) render
{
//glDisable(GL_TEXTURE_2D);
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
static const float textureVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
static const float textureCoords[] = {
0.0f, 0.0f,
0.0f, 0.515625f,
0.12890625f, 0.0f,
0.12890625f, 0.515625f,
};
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glBindTexture(GL_TEXTURE_2D, texture[0]);
//glColor4f(0.0f, 0.0f, 0.0f, 1.0f);
glVertexPointer(2, GL_FLOAT, 0, textureVertices);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (void)layoutSubviews
{
[EAGLContext setCurrentContext:context];
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer *)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glViewport(0, 0, backingWidth, backingHeight);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
NSLog(#"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
}
- (void) dealloc
{
// Tear down GL
if (viewFramebuffer)
{
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
}
if (viewRenderbuffer)
{
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
}
// Tear down context
if ([EAGLContext currentContext] == context)
[EAGLContext setCurrentContext:nil];
[context release];
context = nil;
displayLink = nil;
animationTimer = nil;
[super dealloc];
}
- (NSInteger) animationFrameInterval
{
return animationFrameInterval;
}
- (void) setAnimationFrameInterval:(NSInteger)frameInterval
{
// Frame interval defines how many display frames must pass between each time the
// display link fires. The display link will only fire 30 times a second when the
// frame internal is two on a display that refreshes 60 times a second. The default
// frame interval setting of one will fire 60 times a second when the display refreshes
// at 60 times a second. A frame interval setting of less than one results in undefined
// behavior.
if (frameInterval >= 1)
{
animationFrameInterval = frameInterval;
if (animating)
{
[self stopAnimation];
[self startAnimation];
}
}
}
- (void) startAnimation
{
if (!animating)
{
if (displayLinkSupported)
{
// CADisplayLink is API new to iPhone SDK 3.1. Compiling against earlier versions will result in a warning, but can be dismissed
// if the system version runtime check for CADisplayLink exists in -initWithCoder:. The runtime check ensures this code will
// not be called in system versions earlier than 3.1.
displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawView:)];
[displayLink setFrameInterval:animationFrameInterval];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
else
animationTimer = [NSTimer scheduledTimerWithTimeInterval:(NSTimeInterval)((1.0 / 60.0) * animationFrameInterval) target:self selector:@selector(drawView:) userInfo:nil repeats:TRUE];
animating = TRUE;
}
}
- (void)stopAnimation
{
if (animating)
{
if (displayLinkSupported)
{
[displayLink invalidate];
displayLink = nil;
}
else
{
[animationTimer invalidate];
animationTimer = nil;
}
animating = FALSE;
}
}
@end
There is only one problem with this code: it's out of date. Apple has since released a new way of doing things. But hell, it works.
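(For what it's worth, on iOS 5 and later GLKit removes most of this boilerplate: a GLKView manages the framebuffers for you, and its snapshot property returns a UIImage directly.)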
Update:
It turns out I had set the context up before loading the textures.