Problems with OpenGL when using Image Processing to filter video - iPhone

I am trying to use Brad Larson's framework as a reference to run some basic image processing tests. So far I have implemented the sepia filter and it kind of works; the problem is that I get a processed image and a non-processed image on alternating render cycles (e.g. if I set the fps to one per second: first second rendered, second not, third yes, etc.). I have spent hours on this and I can't seem to find out why.
I have two views: the video preview layer and a GLKView where I am rendering my buffer.
This is my code, which is basically just an attempt to synthesize the previously mentioned framework:
The viewDidLoad method:
- (void)viewDidLoad
{
[super viewDidLoad];
session = [[AVCaptureSession alloc] init];
// Add inputs and outputs.
if ([session canSetSessionPreset:AVCaptureSessionPreset640x480]) {
session.sessionPreset = AVCaptureSessionPreset640x480;
}
else {
// Handle the failure.
NSLog(#"Cannot set session preset to 640x480");
}
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"Could create input: %#", error);
}
if ([session canAddInput:input]) {
[session addInput:input];
}
else {
// Handle the failure.
NSLog(#"Could not add input");
}
captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
[captureVideoPreviewLayer setBounds:videoLayer.layer.bounds];
[captureVideoPreviewLayer setPosition:videoLayer.layer.position];
[videoLayer.layer addSublayer:captureVideoPreviewLayer];
[session startRunning];
// OPENGL stuff
self.imageView.context = [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context];
if ([self createFilterProgram]) {
NSLog(#"Filter Program Created");
}
if ([self createOutputProgram]) {
NSLog(#"Output Program Created");
}
if (CVOpenGLESTextureCacheCreate != NULL)
{
[GPUImageOpenGLESContext useImageProcessingContext];
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context], NULL, &coreVideoTextureCache);
if (err)
{
NSAssert(NO, #"Error at CVOpenGLESTextureCacheCreate %d");
}
else
NSLog(#"Initial CVOpenGLESTextureCache created");
// Need to remove the initially created texture
if (outputTexture)
{
glDeleteTextures(1, &outputTexture);
outputTexture = 0;
}
}
// DATA OUTPUT
dataOutput = [[AVCaptureVideoDataOutput alloc] init];
if ([session canAddOutput:dataOutput]) {
[session addOutput:dataOutput];
dataOutput.videoSettings =
[NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey: (id)kCVPixelBufferPixelFormatTypeKey];
AVCaptureConnection *connection = [dataOutput connectionWithMediaType:AVMediaTypeVideo];
[connection setVideoMaxFrameDuration:CMTimeMake(1, 1)];
[connection setVideoMinFrameDuration:CMTimeMake(1, 1)];
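// min and max frame duration are both set to 1 second, i.e. capture is throttled
// to 1 fps so the processed/unprocessed alternation can be watched frame by frame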
}
else {
// Handle the failure.
NSLog(#"Could not add output");
}
// DATA OUTPUT END
//dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
//dispatch_release(queue);
}
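One thing I also tried (left commented out above) is delivering the sample buffers on a dedicated serial queue instead of the main queue; AVCaptureVideoDataOutput discards frames that arrive while the delegate is still busy, so doing all the GL work on the main queue can change which frames actually get processed. That variant would look roughly like this (the queue name is just an example; the GL context then has to be made current on that queue, which useImageProcessingContext already does):
dispatch_queue_t videoQueue = dispatch_queue_create("com.example.videoqueue", NULL);
[dataOutput setSampleBufferDelegate:self queue:videoQueue];
dispatch_release(videoQueue); // the output retains the queue (pre-ARC style, matching the code above)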
And here is the delegate method for the video capture:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSLog(#"Render");
//CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
int bufferWidth = CVPixelBufferGetWidth(cameraFrame);
int bufferHeight = CVPixelBufferGetHeight(cameraFrame);
if (CVOpenGLESTextureCacheCreate != NULL)
{
NSLog(#"Not Null");
CVPixelBufferLockBaseAddress(cameraFrame, 0);
[GPUImageOpenGLESContext useImageProcessingContext];
CVOpenGLESTextureRef texture = NULL;
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
if (!texture || err) {
NSLog(#"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
return;
}
outputTexture = CVOpenGLESTextureGetName(texture);
glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// "setInputSize" method
CGSize sizeOfFBO = CGSizeMake(bufferWidth, bufferHeight);
// "newFrameReady" method
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat squareTextureCoordinates[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
[GPUImageOpenGLESContext useImageProcessingContext];
// "setFilterFBO" method
if (!filterFramebuffer)
{
NSLog(#"New Filter Frame buffer");
glActiveTexture(GL_TEXTURE1);
glGenFramebuffers(1, &filterFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);
NSLog(#"Filter size: %f, %f", sizeOfFBO.width, sizeOfFBO.height);
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)sizeOfFBO.width, (int)sizeOfFBO.height);
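// NOTE: no renderbuffer is bound at this point, so the glRenderbufferStorage call
// above has nothing to act on (it should raise GL_INVALID_OPERATION); the FBO is
// completed by the texture attachment below, so that call looks like a leftover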
glBindTexture(GL_TEXTURE_2D, outputTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)sizeOfFBO.width, (int)sizeOfFBO.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}
glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);
glViewport(0, 0, (int)sizeOfFBO.width, (int)sizeOfFBO.height);
// END "setFilterFBO" method
glUseProgram(filterProgram); // This Program MUST be the filter one
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, outputTexture);
glUniform1i(uniforms[filterInputTextureUniform], 2);
glVertexAttribPointer(position, 2, GL_FLOAT, 0, 0, squareVertices);
glVertexAttribPointer(inputTextureCoordinate, 2, GL_FLOAT, 0, 0, squareTextureCoordinates);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
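// NOTE: nothing in this method calls glEnableVertexAttribArray for "position" or
// "inputTextureCoordinate" (GPUImage enables its attributes once, right after
// linking the program); if createFilterProgram doesn't already do that, they need
// to be enabled before the draw above:
// glEnableVertexAttribArray(position);
// glEnableVertexAttribArray(inputTextureCoordinate);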
static const GLfloat squareVertices2[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat textureCoordinates2[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
[GPUImageOpenGLESContext useImageProcessingContext];
//[self setDisplayFramebuffer];
if (!displayFramebuffer)
{
NSLog(#"New Display Frame buffer");
glGenFramebuffers(1, &displayFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);
glGenRenderbuffers(1, &displayRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);
[[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)imageView.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
NSLog(#"Backing width: %d, height: %d", backingWidth, backingHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer);
GLuint framebufferCreationStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @"Failure with display framebuffer generation");
}
glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
glUseProgram(outputProgram);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, outputTexture);
glUniform1i(uniformsOutput[outputTextureUniform], 4);
glVertexAttribPointer(outputPosition, 2, GL_FLOAT, 0, 0, squareVertices2);
glVertexAttribPointer(outputTextureCoordinate, 2, GL_FLOAT, 0, 0, textureCoordinates2);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);
[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] presentBufferForDisplay];
//glBindRenderbuffer(GL_RENDERBUFFER, 0);
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
glBindTexture(GL_TEXTURE_2D, 0); // the first argument must be a texture target, not the texture name
// Flush the CVOpenGLESTexture cache and release the texture
CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
CFRelease(texture);
outputTexture = 0;
}
else
NSLog(#"No support for fast CVOpenGLESTextureCacheCreate");
}
This is Brad's original framework (which is pretty cool): http://www.sunsetlakesoftware.com/2012/02/12/introducing-gpuimage-framework
If anyone has any idea why this might be happening, I would appreciate the help.
So far the reasons I think could cause this are:
- Rendering twice during the cycle (which I discarded, because it happens every second, and even if the double render happened super fast, the last image to remain would be the processed one)
- Enabling or disabling an OpenGL state (which I think might be the cause, but I can't find where)
- Not using the buffers correctly (I am using the same buffer variable for both steps, which might cause some trouble)
- Something to do with the context (I believe the context is where everything is drawn, so if I am working on the GLKView's context I might be overwriting the image)
Please correct me if any of these ideas are wrong; this is my first attempt at image processing with OpenGL.
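To try to rule these out one by one, I have started sprinkling a small error-checking helper after each block of GL calls (my own macro, not part of the framework):
#define CHECK_GL_ERROR() do { \
    GLenum _glErr = glGetError(); \
    if (_glErr != GL_NO_ERROR) NSLog(@"GL error 0x%04X at %s:%d", _glErr, __FILE__, __LINE__); \
} while (0)
Calling CHECK_GL_ERROR() after each stage should at least show where the state first goes wrong.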
Thank you!
EDIT:
As suggested by Brad, I ran the app through the OpenGL ES profiler and got some results (screenshot omitted here).
This tool does seem to be amazing for optimizing OpenGL code.

Related

iOS OpenGL ES 2.0: Offscreen render and save the result to a UIImage

I'm using OpenGL ES to render some special effects. I don't want to show this to the user; I just want to save the result as a UIImage. Can anybody please help me?
This is the code I'm using. I can get an image which contains the red clear color I use, but no geometry drawing shows up.
#import "RendererGL.h"
#import <GLKit/GLKit.h>
#import <UIKit/UIKit.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/EAGLDrawable.h>
#import <OpenGLES/ES2/glext.h>
#import <QuartzCore/QuartzCore.h>
static NSInteger WIDTH_IN_PIXEL = 400;
static NSInteger HEIGHT_IN_PIXEL = 300;
typedef struct {
GLKVector3 positionCoords;
}
SceneVertex;
static const SceneVertex vertices[] =
{
{{-0.5f, -0.5f, 0.0}}, // lower left corner
{{ 0.5f, -0.5f, 0.0}}, // lower right corner
{{-0.5f, 0.5f, 0.0}} // upper left corner
};
@implementation RendererGL
{
EAGLContext* _myContext;
GLuint _framebuffer;
GLuint _colorRenderbuffer;
GLuint _depthRenderbuffer;
GLuint _vertexBufferID;
GLKBaseEffect *_baseEffect;
}
- (id) init
{
self = [super init];
if (self)
{
_myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:_myContext];
[self setupOffscreenBuffer];
[self setUpEffect];
[self renderImage];
[self saveImage]; // this does work, since I get an image, but the image only contains the red color I used to clear
}
return self;
}
-(void)setUpEffect
{
_baseEffect = [[GLKBaseEffect alloc] init];
_baseEffect.useConstantColor = GL_TRUE;
_baseEffect.constantColor = GLKVector4Make(0.0f, 0.0f, 1.0f, 1.0f);
}
// this code is from Apple's documentation
-(void)setupOffscreenBuffer
{
glGenFramebuffers(1, &_framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
glGenRenderbuffers(1, &_colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorRenderbuffer);
glGenRenderbuffers(1, &_depthRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthRenderbuffer);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER) ;
if(status != GL_FRAMEBUFFER_COMPLETE) {
NSLog(#"failed to make complete framebuffer object %x", status);
}
}
- (void) renderImage
{
GLenum error = GL_NO_ERROR;
glClearColor(1, 0, 0, 1); //red clear color, this can be seen
glClear(GL_COLOR_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
[_baseEffect prepareToDraw];
glGenBuffers(1, &_vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition,3,GL_FLOAT, GL_FALSE, sizeof(SceneVertex), NULL);
glDrawArrays(GL_TRIANGLES,0,3);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
glFinish();
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
}
-(void)saveImage
{
GLenum error = GL_NO_ERROR;
NSInteger x = 0, y = 0;
NSInteger dataLength = WIDTH_IN_PIXEL * HEIGHT_IN_PIXEL * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, GL_RGBA, GL_UNSIGNED_BYTE, data);
NSData *pixelsRead = [NSData dataWithBytes:data length:dataLength];
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, 8, 32, WIDTH_IN_PIXEL * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
NSData *d = UIImageJPEGRepresentation(image, 1);
NSString *documentDirPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
static NSInteger imageNO = 1;
imageNO++;
NSString *savingPath = [documentDirPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.jpg", imageNO]];
BOOL succ = [d writeToFile:savingPath atomically:NO]; // this succeeds
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
}
@end
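One detail I am suspicious about myself: renderImage enables GL_DEPTH_TEST but only clears GL_COLOR_BUFFER_BIT, and the attached depth renderbuffer starts out with undefined contents, so the triangle's fragments may simply be failing the depth test. Clearing depth as well should rule that out:
glClearColor(1, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);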
I had a very similar problem - to render some lines and get a UIImage. I used OpenGL ES 1.1 and multisampling. I removed some additional code that does not refer to rendering, and some OpenGL error checks. You can find the full code here: OSPRendererGL. Also, sorry for my one-for-all method.
@interface OSPRendererGL
{
EAGLContext* myContext;
GLuint framebuffer;
GLuint colorRenderbuffer;
GLuint depthRenderbuffer;
GLuint _vertexArray;
GLuint _vertexBuffer;
GLuint resolveFramebuffer;
GLuint msaaFramebuffer, msaaRenderbuffer, msaaDepthbuffer;
int width;
int height;
}
@implementation OSPRendererGL
- (id) init
{
self = [super init];
if (self)
{
myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
[EAGLContext setCurrentContext:myContext];
[self setupOpenGL];
[EAGLContext setCurrentContext:nil];
width = 256;
height = 256;
}
return self;
}
-(void) setupOpenGL
{
glGenFramebuffersOES(1, &framebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glGenRenderbuffersOES(1, &colorRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, colorRenderbuffer);
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
glGenFramebuffersOES(1, &msaaFramebuffer);
glGenRenderbuffersOES(1, &msaaRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, msaaRenderbuffer);
glGenRenderbuffersOES(1, &msaaDepthbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaDepthbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, msaaDepthbuffer);
}
-(UIImage *) renderImageAtZoom:(int)zoom
{
CGRect b = CGRectMake(0, 0, width, height);
OSPCoordinateRect r = OSPRectForMapAreaInRect([self mapArea], b);
double _scale = b.size.width / r.size.x;
double scale = 1.0/_scale;
[EAGLContext setCurrentContext:myContext];
glBindFramebuffer(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(0.0f, 256.0f, 256.0f, 0.0f, 1.0f, -1.0f);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glScalef(_scale, _scale, 1);
glTranslatef(-r.origin.x, -r.origin.y, 0);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable(GL_LINE_SMOOTH);
glEnable(GL_POINT_SMOOTH);
glEnable(GL_BLEND);
glClearColor(1, 1, 1, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// rendering here
glPopMatrix();
// msaa
glBindFramebufferOES(GL_READ_FRAMEBUFFER_APPLE, msaaFramebuffer);
glBindFramebufferOES(GL_DRAW_FRAMEBUFFER_APPLE, framebuffer);
glResolveMultisampleFramebufferAPPLE();
glBindFramebuffer(GL_FRAMEBUFFER_OES, framebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER, colorRenderbuffer);
// grabbing image from FBO
GLint backingWidth, backingHeight;
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSInteger x = 0, y = 0;
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(width, height));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, width, height), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
[EAGLContext setCurrentContext:nil];
return image;
}
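For completeness, usage then looks something like this (a hypothetical sketch; it assumes the map area has been configured, which is part of the code I removed):
OSPRendererGL *renderer = [[OSPRendererGL alloc] init];
UIImage *tile = [renderer renderImageAtZoom:15]; // zoom level chosen arbitrarily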
I don't see a call to eglSwapBuffers(). That is required to start rendering the frame on PowerVR, even when rendering to a Renderbuffer. See the example code at:
http://processors.wiki.ti.com/index.php/Render_to_Texture_with_OpenGL_ES
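Note that on iOS, EGL isn't exposed directly; EAGL takes its place. The closest analogues are [context presentRenderbuffer:GL_RENDERBUFFER_OES] for onscreen content, or forcing completion before reading back from an offscreen FBO:
glFinish(); // ensure all submitted rendering has completed before glReadPixels()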

OpenGL ES on iPhone doesn't draw anything

I'm an absolute beginner at OpenGL ES programming on iOS.
This is my first attempt to draw some simple 2D primitives with OpenGL ES onto a view.
Here is the class declaration:
@interface OGLGameCanvas : UIView <GameCanvas> {
EAGLContext* context;
Game* game;
GLuint framebuffer, renderbuffer, depthbuffer;
}
Here is my initialization code:
- (void)initialize {
// Get the layer and set properties
CAEAGLLayer* layer = (CAEAGLLayer*)self.layer;
layer.opaque = NO;
layer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
// Set the context
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context])
DLog(#"Cannot create EAGLContext!");
// Create the color buffer and the render buffer
glGenFramebuffers(1, &framebuffer);
glGenRenderbuffers(1, &renderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glBindRenderbuffer(GL_RENDERBUFFER, renderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)layer];
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderbuffer);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE)
NSLog(#"Failed to make complete frame buffer: %x", status);
// Get width and height of the render buffer
GLint width, height;
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &width);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &height);
// Create and start animation loop
CADisplayLink* displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(drawFrame:)];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
and my drawing code:
- (void)drawFrame:(CADisplayLink*)sender {
glLoadIdentity();
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glEnableClientState(GL_VERTEX_ARRAY);
GLfloat vertices[] = { -20.0f, 2.5f, 0.0f, 0.0f, 1.5f, 7.5f };
glVertexPointer(2, GL_FLOAT, 0, vertices);
glColor4f(1.0, 0.0, 0.0, 1.0);
glPointSize(5.0);
glDrawArrays(GL_POINTS, 0, 3);
glDisableClientState(GL_VERTEX_ARRAY);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
The canvas gets cleared (in fact, it becomes black, or red, or whatever I set in glClearColor), but no points are drawn.
I'm pretty sure I'm forgetting something basic and essential.
Thanks for your help.
The problem is here:
GLfloat vertices[] = { -20.0f, 2.5f, 0.0f, 0.0f, 1.5f, 7.5f };
Normalized device coordinates lie in the range [-1, 1], so you're drawing your points outside the visible area.
Also, you have to set up your matrices before drawing:
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
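For example, with both matrices reset to the identity, points like these would land inside the visible area (coordinates picked arbitrarily for illustration):
GLfloat vertices[] = { -0.5f, 0.25f, 0.0f, 0.0f, 0.15f, 0.75f };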

OpenGL ES iPhone glCopyTexImage2D

I am new to OpenGL ES on iPhone and have a memory issue with glCopyTexImage2D. As far as I understand it, this function should copy the current framebuffer into the bound texture. But for some reason it always allocates new memory, which I can see in Instruments when checking the allocations.
My goal is to load texture images and draw on them; after drawing I want to save the new texture so I can scroll through the painting. So here is my code:
1) init opengl and framebuffer:
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
[self release];
return nil;
}
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
// Setup OpenGL states
glMatrixMode(GL_PROJECTION);
CGRect frame = self.bounds;
CGFloat scale = self.contentScaleFactor;
// Setup the view port in Pixels
glOrthof(0, frame.size.width * scale, 0, frame.size.height * scale, -1, 1);
glViewport(0, 0, frame.size.width * scale, frame.size.height * scale); // note: the width needs the scale factor too, to match the ortho projection above
glDisable(GL_DEPTH_TEST);
glDisable(GL_DITHER);
glMatrixMode(GL_MODELVIEW);
glEnableClientState(GL_VERTEX_ARRAY);
// Set a blending function appropriate for premultiplied alpha pixel data
glEnable(GL_POINT_SPRITE_OES);
glTexEnvf(GL_POINT_SPRITE_OES, GL_COORD_REPLACE_OES, GL_TRUE);
glPointSize(64 / kBrushScale);
2) Now I load the saved images into textures:
if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
// load texture
NSData* data = [[NSData alloc] initWithContentsOfFile:path];
glGenTextures(1, &drawBoardTextures[i]);
glBindTexture(GL_TEXTURE_2D, drawBoardTextures[i]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1024, 1024, 0, GL_RGBA, GL_UNSIGNED_BYTE, [data bytes]);
// free memory
[data release];
}
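(As an aside: each 1024x1024 RGBA texture is 4 MB (1024 * 1024 * 4 bytes), which matches the ~4 MB per copied texture I see in Instruments later on.)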
3) and finally render the texture:
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
int width = 1024;
GLfloat quad[] = {0.0,1024.0,1024.0,1024.0,0.0,0.0,1024.0,0.0};
GLfloat quadTex[] = {0.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0};
for (int i=0; i<10; i++) {
quad[0] = width * i;
quad[2] = quad[0] + width;
quad[4] = quad[0];
quad[6] = quad[2];
glBindTexture(GL_TEXTURE_2D, drawBoardTextures[i]);
glVertexPointer(2, GL_FLOAT, 0, quad);
glTexCoordPointer(2, GL_FLOAT, 0, quadTex);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glBindTexture(GL_TEXTURE_2D, 0);
}
4) For now everything works fine; with glTranslatef I can scroll through the textures, and there is also no allocation observed yet in Instruments. So now I draw on the current window and want to save the result as follows:
int texIndex = offset.x/1024;
float diff = offset.x - (1024*texIndex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, drawBoardTextures[texIndex]);
glCopyTexSubImage2D(GL_TEXTURE_2D, 0, diff, 0, 0, 0, 1024-diff, 1024);
glBindTexture(GL_TEXTURE_2D, drawBoardTextures[texIndex + 1]);
glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 1024-diff, 0, diff, 1024);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glFlush();
Now the problems start. Instead of writing directly into the generated textures, it writes into client memory. Every copied texture uses ~4 MB of RAM, but recopying the same texture doesn't need any additional memory. I really don't know what I did wrong.
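One workaround I'm considering (my own idea, not something from the docs) is to skip the copy entirely and render straight into the tile texture through a framebuffer object:
GLuint textureFBO;
glGenFramebuffersOES(1, &textureFBO);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, textureFBO);
// attach the tile texture as the color attachment and draw into it directly,
// so no glCopyTexSubImage2D (and no extra ~4 MB copy) is needed
glFramebufferTexture2DOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_TEXTURE_2D, drawBoardTextures[texIndex], 0);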
Does anyone know what the problem is? Thanks a lot for your help.
cheers
chris

Problem with OpenGL ES showing an image

I used the Xcode OpenGL App template to create a sample.
I am new to OpenGL ES, and am trying to rewrite the 'render' method in ES1Renderer.m.
I tried to create a texture and show it on the screen, but nothing showed.
Can someone help me? I have no idea how to fix it:
- (void)render
{
int imageW = 16;
int imageH = 16;
GLubyte *textureData = (GLubyte *) malloc(imageW * imageH << 2);
for (int i = 0; i < imageW * imageH << 2; i++) {
textureData[i]= 0xff & i;
}
GLuint textureId;
glGenTextures(1, &textureId);
glBindTexture(GL_TEXTURE_2D, textureId);
// when texture area is small, bilinear filter the closest mipmap
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
GL_LINEAR );
// when texture area is large, bilinear filter the original
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
// the texture wraps over at the edges (repeat)
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT );
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageW, imageH, 0, GL_RGBA, GL_UNSIGNED_BYTE, textureData);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error uploading texture. glError: 0x%04X", err);
free(textureData);
float x = 10.0f;
float y = 10.0f;
float z = 0.0f;
float scaleX = 1.0f;
float scaleY = 1.0f;
float scaleZ = 1.0f;
int w = imageW /2;
int h = imageH /2;
const GLfloat squareVertices[] = {
-w, -h,
w, -h,
-w, h,
w, h,
};
const GLfloat textureCoords[] = {
0, 0,
1, 0,
0, 1,
1, 1,
};
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnableClientState(GL_VERTEX_ARRAY);
glBindTexture(GL_TEXTURE_2D, textureId);
glVertexPointer(2, GL_FLOAT, 0, squareVertices);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
glPushMatrix();
glTranslatef(x, y, z);
glScalef(scaleX, scaleY, scaleZ);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glPopMatrix();
glDisable(GL_TEXTURE_2D);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
NSLog(#"-->");
glDeleteTextures(1, &textureId);
// This application only creates a single color renderbuffer which is already bound at this point.
// This call is redundant, but needed if dealing with multiple renderbuffers.
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
Sadly, the OpenGL template provided by Xcode has changed at some point; the current code (as of Xcode 3.2.5, creating an iOS Application with the 'OpenGL ES Application' template) no longer supplies a separate ES1Renderer.m and ES2Renderer.m, preferring to provide a single, simplified EAGLView.m and to perform runtime tests within GLTestViewController.m. With that in mind, I modified GLTestViewController.m's awakeFromNib so that it no longer attempts to get an ES 2 context:
- (void)awakeFromNib
{
EAGLContext *aContext = nil; // [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!aContext)
{
aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
}
if (!aContext)
NSLog(#"Failed to create ES context");
else if (![EAGLContext setCurrentContext:aContext])
NSLog(#"Failed to set ES context current");
self.context = aContext;
[aContext release];
[(EAGLView *)self.view setContext:context];
[(EAGLView *)self.view setFramebuffer];
if ([context API] == kEAGLRenderingAPIOpenGLES2)
[self loadShaders];
animating = FALSE;
animationFrameInterval = 1;
self.displayLink = nil;
}
And copied and pasted relevant portions of your code into drawFrame:
- (void)drawFrame
{
[(EAGLView *)self.view setFramebuffer];
// Replace the implementation of this method to do your own custom drawing.
static const GLfloat squareVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
const GLfloat textureCoords[] = {
0, 0,
1, 0,
0, 1,
1, 1,
};
static float transY = 0.0f;
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
int imageW = 16;
int imageH = 16;
GLubyte *textureData = (GLubyte *) malloc(imageW * imageH << 2);
for (int i = 0; i < imageW * imageH << 2; i++) {
textureData[i]= 0xff & i;
}
GLuint textureId;
glGenTextures(1, &textureId);
glBindTexture(GL_TEXTURE_2D, textureId);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT );
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageW, imageH, 0, GL_RGBA, GL_UNSIGNED_BYTE, textureData);
free(textureData);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0.0f, (GLfloat)(sinf(transY)/2.0f), 0.0f);
transY += 0.075f;
glEnable(GL_TEXTURE_2D);
glVertexPointer(2, GL_FLOAT, 0, squareVertices);
glEnableClientState(GL_VERTEX_ARRAY);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDeleteTextures(1, &textureId);
[(EAGLView *)self.view presentFramebuffer];
}
The result works entirely as you seem to intend. At a guess, is it possible either that:
- you're setting something other than the identity as your projection matrix, causing your geometry to be clipped because it is placed at z = 0?
- you've neglected to properly abandon the attempt at ES 2 rendering, causing unexpected results because tasks like textured rendering aren't hardwired in with ES 2 in the same way that they are with ES 1?
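For the second guess, a quick sanity check (just a diagnostic suggestion on my part) is to log which API the context actually came up with:
if ([context API] == kEAGLRenderingAPIOpenGLES2)
    NSLog(@"Unexpectedly still running with an ES 2 context");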

iOS 4.1 OpenGL ES 1.1 not drawing Texture

For some reason my textures are not drawing, even though my code looks exactly the same as an old project that worked. So far, the vertices and texture coordinates look fine, as white squares are being drawn where the textures should be drawn instead.
The process so far: I load up a Controller, and in loadView I do:
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
Then my renderer is loaded up, which does nothing on construction. After that I load my texture into GL. This code is a direct copy from my old project and I know it works.
- (GLuint)textureFromPath:(NSString *)path
{
GLuint texture;
glGenTextures(1, &texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:path];
if (!img) {
NSLog(#"Image \"%#\" could not be loaded and was not bound", path);
return 0;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef context = CGBitmapContextCreate(image, width, height, 8, 4 * width, colourSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(context, bounds);
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGContextDrawImage(context, bounds, cgimage);
CGContextRelease(context);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
[img release];
free(image);
return texture;
}
I then take the generated texture from GL and assign it to position 0 in the Renderer's array. I also did this in my old project and it worked fine. So far so good, I feel.
The application then calls startAnimation, which calls setFramebuffer, which in turn calls createFramebuffer since the framebuffer is undefined. It then notifies the Renderer (btw, Renderer is a C++ class) that it has created the framebuffers.
void bufferHasBeenCreated() const {
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(-160.0f, 160.0f, -240.0f, 240.0f, -5.0f, 1.0f);
glViewport(0, 0, 320, 480);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
}
It then calls render on the Renderer.
void render() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.325f, 0.0f, 0.325f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
static float rot = 0.0f;
glRotatef(rot, 0.0f, 0.0f, 1.0f);
//glColor4f(0.0f, 0.0f, 1.0f, 1.0f);
glBindTexture(GL_TEXTURE_2D, texture_[0]);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
printf("Error. glError: 0x%04X\n", err);
glVertexPointer(2, GL_FLOAT, 0, pos[0]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[1]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[2]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
rot += 0.5f;
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
And then finally it calls presentFramebuffer, which binds the render buffer and presents it on the context.
Edit: I have done some more work on this, and it turns out it is something to do with the context and the buffers. Whenever I set up just the context while enabling GL_TEXTURE_2D and GL_BLEND, as above, the textures don't load. Yet do it after the buffers are set up, and everything works.
I have got my texture to draw. I pulled all my code out and put it in its own file. I will now start pulling it apart again and hopefully get everything working in the structure that I already have.
(Objective-C) ES1Renderer.h
#import <QuartzCore/QuartzCore.h>
#import "OpenGLES.h"
@interface ES1Renderer : UIView {
@private
GLint backingWidth;
GLint backingHeight;
EAGLContext *context;
GLuint viewFramebuffer, viewRenderbuffer;
GLuint texture[1];
BOOL animating;
BOOL displayLinkSupported;
NSInteger animationFrameInterval;
// Use of the CADisplayLink class is the preferred method for controlling your animation timing.
// CADisplayLink will link to the main display and fire every vsync when added to a given run-loop.
// The NSTimer class is used only as fallback when running on a pre 3.1 device where CADisplayLink
// isn't available.
id displayLink;
NSTimer *animationTimer;
}
@property (readonly, nonatomic, getter=isAnimating) BOOL animating;
@property (nonatomic) NSInteger animationFrameInterval;
- (void) startAnimation;
- (void) stopAnimation;
- (void)render;
@end
Next ES1Renderer.m
#import "ES1Renderer.h"
@implementation ES1Renderer
@synthesize animating;
@dynamic animationFrameInterval;
+ (Class)layerClass
{
return [CAEAGLLayer class];
}
- (id)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame]) {
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context])
{
[self release];
return nil;
}
// Generate buffers
glGenFramebuffersOES(1, &viewFramebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// Disable Depth
glDisable(GL_DEPTH_TEST);
// Load textures
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
glGenTextures(1, texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"colour" ofType:@"png"]];
if (!img) {
NSLog(#"Image \"colour.png\" could not be loaded and was not bound");
[self release];
return nil;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef imgContext = CGBitmapContextCreate(image,
width, height,
8, 4 * width, colourSpace,
kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(imgContext, bounds);
CGContextTranslateCTM (imgContext, 0, height);
CGContextScaleCTM (imgContext, 1.0, -1.0);
CGContextDrawImage(imgContext, bounds, cgimage);
CGContextRelease(imgContext);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error. glError: 0x%04X\n", err);
free(image);
[img release];
animating = FALSE;
displayLinkSupported = FALSE;
animationFrameInterval = 1;
displayLink = nil;
animationTimer = nil;
// A system version of 3.1 or greater is required to use CADisplayLink. The NSTimer
// class is used as fallback when it isn't available.
NSString *reqSysVer = @"3.1";
NSString *currSysVer = [[UIDevice currentDevice] systemVersion];
if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending)
displayLinkSupported = TRUE;
}
return self;
}
- (void)drawView:(id)sender
{
[self render];
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error. glError: 0x%04X\n", err);
}
- (void) render
{
//glDisable(GL_TEXTURE_2D);
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
static const float textureVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
static const float textureCoords[] = {
0.0f, 0.0f,
0.0f, 0.515625f,
0.12890625f, 0.0f,
0.12890625f, 0.515625f,
};
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glBindTexture(GL_TEXTURE_2D, texture[0]);
//glColor4f(0.0f, 0.0f, 0.0f, 1.0f);
glVertexPointer(2, GL_FLOAT, 0, textureVertices);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (void)layoutSubviews
{
[EAGLContext setCurrentContext:context];
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer *)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glViewport(0, 0, backingWidth, backingHeight);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
NSLog(#"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
}
- (void) dealloc
{
// Tear down GL
if (viewFramebuffer)
{
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
}
if (viewRenderbuffer)
{
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
}
// Tear down context
if ([EAGLContext currentContext] == context)
[EAGLContext setCurrentContext:nil];
[context release];
context = nil;
displayLink = nil;
animationTimer = nil;
[super dealloc];
}
- (NSInteger) animationFrameInterval
{
return animationFrameInterval;
}
- (void) setAnimationFrameInterval:(NSInteger)frameInterval
{
// Frame interval defines how many display frames must pass between each time the
// display link fires. The display link will only fire 30 times a second when the
// frame interval is two on a display that refreshes 60 times a second. The default
// frame interval setting of one will fire 60 times a second when the display refreshes
// at 60 times a second. A frame interval setting of less than one results in undefined
// behavior.
if (frameInterval >= 1)
{
animationFrameInterval = frameInterval;
if (animating)
{
[self stopAnimation];
[self startAnimation];
}
}
}
- (void) startAnimation
{
if (!animating)
{
if (displayLinkSupported)
{
// CADisplayLink is API new to iPhone SDK 3.1. Compiling against earlier versions will result in a warning, but can be dismissed
// if the system version runtime check for CADisplayLink exists in -initWithCoder:. The runtime check ensures this code will
// not be called in system versions earlier than 3.1.
displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawView:)];
[displayLink setFrameInterval:animationFrameInterval];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
else
animationTimer = [NSTimer scheduledTimerWithTimeInterval:(NSTimeInterval)((1.0 / 60.0) * animationFrameInterval) target:self selector:@selector(drawView:) userInfo:nil repeats:TRUE];
animating = TRUE;
}
}
- (void)stopAnimation
{
if (animating)
{
if (displayLinkSupported)
{
[displayLink invalidate];
displayLink = nil;
}
else
{
[animationTimer invalidate];
animationTimer = nil;
}
animating = FALSE;
}
}
@end
There is only one problem with this code. It's out of date. Apple released a new way of doing things, but hell. It works.
Update:
It turns out I had set the context up before loading the textures.
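In other words, the rough order that works (sketched from the initWithFrame: above) is:
[EAGLContext setCurrentContext:context];
// 1. create and bind the framebuffer and renderbuffer first
glGenFramebuffersOES(1, &viewFramebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
// 2. only then create, bind and upload the textures
glGenTextures(1, texture);
glBindTexture(GL_TEXTURE_2D, texture[0]);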