Implement MSAA in a Unity iPhone build

I want to implement MSAA in the Unity build of a game. I am currently using this code, which I got from the Unity forums; it compiles, but I can clearly see that it has no effect in game. Any help would be appreciated.
bool msaaEnabled = false;
CGSize globalSize;
struct MyEAGLSurface
{
GLuint format;
GLuint depthFormat;
GLuint framebuffer;
GLuint renderbuffer;
GLuint msaaFrameBuffer;
GLuint msaaRenderBuffer;
GLuint msaaDepthBuffer;
GLuint depthBuffer;
CGSize size;
};
typedef EAGLContext* MyEAGLContext;
@interface EAGLView : UIView {}
@end
MyEAGLContext _context;
MyEAGLSurface _surface;
UIWindow * _window;
NSTimer* _timer;
id _displayLink;
BOOL _accelerometerIsActive = NO;
extern "C" void MSAA_Enabled( bool enabled )
{
if( enabled && !msaaEnabled )
{
// Create MSAA buffers!
glGenFramebuffersOES(1, &_surface.msaaFrameBuffer );
glGenRenderbuffersOES(1, &_surface.msaaRenderBuffer );
glBindFramebufferOES(GL_FRAMEBUFFER_OES, _surface.msaaFrameBuffer );
glBindRenderbufferOES(GL_RENDERBUFFER_OES, _surface.msaaRenderBuffer );
// samples is the number of samples the MSAA buffer stores for each pixel of the resolved render
// buffer. Use a small number such as 2 for the iPhone 3G and older, and 4 or more for newer models.
int samples = 4;
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, samples, GL_RGB5_A1_OES, globalSize.width, globalSize.height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, _surface.msaaRenderBuffer);
// MSAA Depth buffer
glGenRenderbuffersOES(1, &_surface.msaaDepthBuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, _surface.msaaDepthBuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, samples, _surface.depthFormat, globalSize.width, globalSize.height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, _surface.msaaDepthBuffer);
}
else
{
}
msaaEnabled = enabled;
}
extern "C" void MSAA_BindTarget()
{
if( _surface.msaaFrameBuffer && msaaEnabled )
{
glBindFramebufferOES(GL_FRAMEBUFFER_OES, _surface.msaaFrameBuffer); //Bind MSAA
}
}
bool CreateWindowSurface(EAGLView *view, GLuint format, GLuint depthFormat, bool retained, MyEAGLSurface* surface)
{
CGSize newSize;
GLuint oldRenderbuffer;
GLuint oldFramebuffer;
CAEAGLLayer* eaglLayer = (CAEAGLLayer*)[view layer];
surface->format = format;
surface->depthFormat = depthFormat;
surface->msaaFrameBuffer = 0;
surface->msaaRenderBuffer = 0;
surface->msaaDepthBuffer = 0;
surface->depthBuffer = 0;
surface->renderbuffer = 0;
surface->framebuffer = 0;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
newSize = [eaglLayer bounds].size;
newSize.width = roundf(newSize.width);
newSize.height = roundf(newSize.height);
globalSize = newSize;
glGetIntegerv(GL_RENDERBUFFER_BINDING_OES, (GLint *) &oldRenderbuffer);
glGetIntegerv(GL_FRAMEBUFFER_BINDING_OES, (GLint *) &oldFramebuffer);
// Create the main frame and render buffers
glGenFramebuffersOES(1, &surface->framebuffer);
glGenRenderbuffersOES(1, &surface->renderbuffer);
// Bind the frame and render buffer
glBindFramebufferOES(GL_FRAMEBUFFER_OES, surface->framebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, surface->renderbuffer);
// Set storage for render buffer
if(![_context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:eaglLayer])
{
glDeleteRenderbuffersOES(1, &surface->renderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, oldRenderbuffer);
return false;
}
// Attach the renderbuffer
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, surface->renderbuffer);
if (depthFormat)
{
// Regular depth buffer
glGenRenderbuffersOES(1, &surface->depthBuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, surface->depthBuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, surface->depthFormat, newSize.width, newSize.height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, surface->depthBuffer);
}
surface->size = newSize;
glBindRenderbufferOES(GL_RENDERBUFFER_OES, oldRenderbuffer);
CHECK_GL_ERROR();
return true;
}
void DestroySurface(MyEAGLSurface* surface)
{
EAGLContext *oldContext = [EAGLContext currentContext];
if (oldContext != _context)
[EAGLContext setCurrentContext:_context];
if(surface->msaaDepthBuffer)
{
glDeleteRenderbuffersOES(1, &surface->msaaDepthBuffer);
surface->msaaDepthBuffer = 0;
}
if(surface->depthBuffer)
{
glDeleteRenderbuffersOES(1, &surface->depthBuffer);
surface->depthBuffer = 0;
}
glDeleteRenderbuffersOES(1, &surface->msaaRenderBuffer);
surface->msaaRenderBuffer = 0;
glDeleteFramebuffersOES(1, &surface->msaaFrameBuffer);
surface->msaaFrameBuffer = 0;
glDeleteRenderbuffersOES(1, &surface->renderbuffer);
surface->renderbuffer = 0;
glDeleteFramebuffersOES(1, &surface->framebuffer);
surface->framebuffer = 0;
if (oldContext != _context)
[EAGLContext setCurrentContext:oldContext];
}
void PresentSurface(MyEAGLSurface& surface)
{
EAGLContext *oldContext = [EAGLContext currentContext];
GLuint oldRenderbuffer;
if (oldContext != _context)
[EAGLContext setCurrentContext:_context];
CHECK_GL_ERROR();
glGetIntegerv(GL_RENDERBUFFER_BINDING_OES, (GLint *) &oldRenderbuffer);
if( msaaEnabled )
{
glBindFramebufferOES(GL_READ_FRAMEBUFFER_APPLE, surface.msaaFrameBuffer);
glBindFramebufferOES(GL_DRAW_FRAMEBUFFER_APPLE, surface.framebuffer);
// Call a resolve to combine buffers
glResolveMultisampleFramebufferAPPLE();
}
glBindRenderbufferOES(GL_RENDERBUFFER_OES, surface.renderbuffer);
if(![_context presentRenderbuffer:GL_RENDERBUFFER_OES])
EAGL_ERROR("swap renderbuffer");
if(oldContext != _context)
[EAGLContext setCurrentContext:oldContext];
}
void PresentContext_UnityCallback(struct UnityFrameStats const* unityFrameStats)
{
#if ENABLE_INTERNAL_PROFILER
_unityFrameStats = *unityFrameStats;
if (_frameId % BLOCK_ON_GPU_EACH_NTH_FRAME == (BLOCK_ON_GPU_EACH_NTH_FRAME-1))
{
Prof_Int64 gpuTime0 = mach_absolute_time();
#if ENABLE_BLOCK_ON_GPU_PROFILER
UnityFinishRendering();
#endif
Prof_Int64 gpuTime1 = mach_absolute_time();
_gpuDelta = gpuTime1 - gpuTime0;
}
else
_gpuDelta = 0;
#endif
#if ENABLE_INTERNAL_PROFILER
Prof_Int64 swapTime0 = mach_absolute_time();
#endif
PresentSurface(_surface);
#if ENABLE_INTERNAL_PROFILER
Prof_Int64 vblankTime = mach_absolute_time();
if (_lastVBlankTime < 0) _lastVBlankTime = vblankTime;
_frameDelta = vblankTime - _lastVBlankTime; _lastVBlankTime = vblankTime;
Prof_Int64 swapTime1 = vblankTime;
_swapDelta = swapTime1 - swapTime0;
#endif
}
int OpenEAGL_UnityCallback(UIWindow** window, int* screenWidth, int* screenHeight)
{
CGRect rect = [[UIScreen mainScreen] bounds];
// Create a full-screen window
_window = [[UIWindow alloc] initWithFrame:rect];
EAGLView* view = [[EAGLView alloc] initWithFrame:rect];
[_window addSubview:view];
//CAEAGLLayer* eaglLayer = (CAEAGLLayer*)[view layer];
_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
MSAA_Enabled(true);
MSAA_BindTarget();
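// Note: at this point the context has not been made current yet (that happens below)
// and globalSize is still zero (CreateWindowSurface is what fills it in), so the
// buffer-creation calls inside MSAA_Enabled() run without a current GL context.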
if (!_context)
return false;
if (![EAGLContext setCurrentContext:_context]) {
_context = 0;
return false;
}
if (!CreateWindowSurface(view, GL_RGB565_OES, GL_DEPTH_COMPONENT16_OES, NO, &_surface)) {
return false;
}
glViewport(0, 0, _surface.size.width, _surface.size.height);
[_window makeKeyAndVisible];
[view release];
*window = _window;
*screenWidth = _surface.size.width;
*screenHeight = _surface.size.height;
return true;
}

Got it working with some help from the Unity forums. If anyone wants to know how to implement it, take a look at the thread here: http://forum.unity3d.com/threads/60785-iPhone-4-MSAA-Test-Results
Note that the code provided at the bottom of that thread only seemed to compile against a Unity 3.0 build.
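For reference, the pattern that thread (and Apple's APPLE_framebuffer_multisample documentation) boils down to is: bind the MSAA framebuffer at the start of every frame, but only after the context and window surface exist, and resolve it into the plain framebuffer before every present. A minimal sketch reusing the buffer names from the code above (MSAA_PresentFrame is a hypothetical helper, not part of the Unity glue code):
extern "C" void MSAA_PresentFrame()
{
    if( msaaEnabled && _surface.msaaFrameBuffer )
    {
        // Resolve the multisampled color/depth into the plain color renderbuffer
        glBindFramebufferOES(GL_READ_FRAMEBUFFER_APPLE, _surface.msaaFrameBuffer);
        glBindFramebufferOES(GL_DRAW_FRAMEBUFFER_APPLE, _surface.framebuffer);
        glResolveMultisampleFramebufferAPPLE();
        // Hint that the MSAA contents can be thrown away after the resolve (saves bandwidth)
        const GLenum discards[] = { GL_COLOR_ATTACHMENT0_OES, GL_DEPTH_ATTACHMENT_OES };
        glDiscardFramebufferEXT(GL_READ_FRAMEBUFFER_APPLE, 2, discards);
    }
    // Present the resolved renderbuffer as usual
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, _surface.renderbuffer);
    [_context presentRenderbuffer:GL_RENDERBUFFER_OES];
}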

Related

iOS OpenGL ES 2.0: Offscreen render and save the result to an UIImage

I'm using OpenGL ES to render some special effects. I don't want to show this to the user; I just want to save the result as a UIImage. Can anybody please help me?
This is the code I'm using. I can get an image that contains the red clear color I use, but no geometry drawing is shown.
#import "RendererGL.h"
#import <GLKit/GLKit.h>
#import <UIKit/UIKit.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/EAGLDrawable.h>
#import <OpenGLES/ES2/glext.h>
#import <QuartzCore/QuartzCore.h>
static NSInteger WIDTH_IN_PIXEL = 400;
static NSInteger HEIGHT_IN_PIXEL = 300;
typedef struct {
GLKVector3 positionCoords;
}
SceneVertex;
static const SceneVertex vertices[] =
{
{{-0.5f, -0.5f, 0.0}}, // lower left corner
{{ 0.5f, -0.5f, 0.0}}, // lower right corner
{{-0.5f, 0.5f, 0.0}} // upper left corner
};
@implementation RendererGL
{
EAGLContext* _myContext;
GLuint _framebuffer;
GLuint _colorRenderbuffer;
GLuint _depthRenderbuffer;
GLuint _vertexBufferID;
GLKBaseEffect *_baseEffect;
}
- (id) init
{
self = [super init];
if (self)
{
_myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:_myContext];
[self setupOffscreenBuffer];
[self setUpEffect];
[self renderImage];
[self saveImage]; // this does work, since I get an image, but the image only contains the red color I used to clear
}
return self;
}
-(void)setUpEffect
{
_baseEffect = [[GLKBaseEffect alloc] init];
_baseEffect.useConstantColor = GL_TRUE;
_baseEffect.constantColor = GLKVector4Make(0.0f, 0.0f, 1.0f, 1.0f);
}
// this code is from Apple's documentation
-(void)setupOffscreenBuffer
{
glGenFramebuffers(1, &_framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
glGenRenderbuffers(1, &_colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorRenderbuffer);
glGenRenderbuffers(1, &_depthRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthRenderbuffer);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER) ;
if(status != GL_FRAMEBUFFER_COMPLETE) {
NSLog(#"failed to make complete framebuffer object %x", status);
}
}
- (void) renderImage
{
GLenum error = GL_NO_ERROR;
glClearColor(1, 0, 0, 1); //red clear color, this can be seen
glClear(GL_COLOR_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
[_baseEffect prepareToDraw];
glGenBuffers(1, &_vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition,3,GL_FLOAT, GL_FALSE, sizeof(SceneVertex), NULL);
glDrawArrays(GL_TRIANGLES,0,3);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
glFinish();
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
}
-(void)saveImage
{
GLenum error = GL_NO_ERROR;
NSInteger x = 0, y = 0;
NSInteger dataLength = WIDTH_IN_PIXEL * HEIGHT_IN_PIXEL * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, GL_RGBA, GL_UNSIGNED_BYTE, data);
NSData *pixelsRead = [NSData dataWithBytes:data length:dataLength];
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(#"error happend, error is %d, line %d",error,__LINE__);
}
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, 8, 32, WIDTH_IN_PIXEL * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
NSData *d = UIImageJPEGRepresentation(image, 1);
NSString *documentDirPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
static NSInteger imageNO = 1;
imageNO++;
NSString *savingPath = [documentDirPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.jpg",imageNO]];
BOOL succ = [d writeToFile:savingPath atomically:NO]; // this succeeds
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
}
@end
I had a very similar problem - to render some lines and get a UIImage. I used OpenGL ES 1.1 and multisampling. I removed some additional code that is not related to rendering, as well as some OpenGL error checks. You can find the full code here: OSPRendererGL. Also, sorry for my one-for-all method.
@interface OSPRendererGL
{
EAGLContext* myContext;
GLuint framebuffer;
GLuint colorRenderbuffer;
GLuint depthRenderbuffer;
GLuint _vertexArray;
GLuint _vertexBuffer;
GLuint resolveFramebuffer;
GLuint msaaFramebuffer, msaaRenderbuffer, msaaDepthbuffer;
int width;
int height;
}
@implementation OSPRendererGL
- (id) init
{
self = [super init];
if (self)
{
myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
[EAGLContext setCurrentContext:myContext];
[self setupOpenGL];
[EAGLContext setCurrentContext:nil];
width = 256;
height = 256;
}
return self;
}
-(void) setupOpenGL
{
glGenFramebuffersOES(1, &framebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glGenRenderbuffersOES(1, &colorRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, colorRenderbuffer);
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
glGenFramebuffersOES(1, &msaaFramebuffer);
glGenRenderbuffersOES(1, &msaaRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, msaaRenderbuffer);
glGenRenderbuffersOES(1, &msaaDepthbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaDepthbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, msaaDepthbuffer);
}
-(UIImage *) renderImageAtZoom:(int)zoom
{
CGRect b = CGRectMake(0, 0, width, height);
OSPCoordinateRect r = OSPRectForMapAreaInRect([self mapArea], b);
double _scale = b.size.width / r.size.x;
double scale = 1.0/_scale;
[EAGLContext setCurrentContext:myContext];
glBindFramebuffer(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(0.0f, 256.0f, 256.0f, 0.0f, 1.0f, -1.0f);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glScalef(_scale, _scale, 1);
glTranslatef(-r.origin.x, -r.origin.y, 0);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable(GL_LINE_SMOOTH);
glEnable(GL_POINT_SMOOTH);
glEnable(GL_BLEND);
glClearColor(1, 1, 1, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// rendering here
glPopMatrix();
// msaa
glBindFramebufferOES(GL_READ_FRAMEBUFFER_APPLE, msaaFramebuffer);
glBindFramebufferOES(GL_DRAW_FRAMEBUFFER_APPLE, framebuffer);
glResolveMultisampleFramebufferAPPLE();
glBindFramebuffer(GL_FRAMEBUFFER_OES, framebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER, colorRenderbuffer);
// grabbing image from FBO
GLint backingWidth, backingHeight;
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSInteger x = 0, y = 0;
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(width, height));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, width, height), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
[EAGLContext setCurrentContext:nil];
return image;
}
I don't see a call to eglSwapBuffers(). That is required to start rendering the frame on PowerVR, even when rendering to a Renderbuffer. See the example code at:
http://processors.wiki.ti.com/index.php/Render_to_Texture_with_OpenGL_ES
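On iOS there is no EGL, so there is no eglSwapBuffers to call from an EAGL context; the closest equivalent before a readback is to force the deferred renderer to finish the frame. A minimal sketch, reusing the names framebuffer, width, height, and data from the answer above:
// Bind the resolved (non-multisampled) framebuffer, finish the frame, then read back.
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glFinish();
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);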

Using OpenGL ES to do texture mapping on simple polygons?

Intro:
I'm brand new to OpenGL. I first got familiar with texture mapping and basic transformations/translations in OpenGL on my PC.
Now it seems I'm basically re-learning everything as I try to create a simple polygon (quad) with a PNG texture using GLKit (the GLKView project template helped a lot).
Here's the thing:
I was actually able to get up and running very quickly using the project template's out-of-the-box implementation based on GLKBaseEffect. However, I've read so much in the past 24 hours recommending that this route be ignored in favor of the "OpenGL ES 2" way.
My very general idea of this is basically: fixed-pipeline vs programmable-pipeline. Whatever.
Problem:
When going with the "ES2" approach, I can see my quad polygon, but I'm unable to apply a texture on it.
Question: Anybody know of a simple tutorial/example I can follow? Or
even better, can anybody figure out what im doing wrong?
* Update *
I discovered that I was getting a glError thrown due to two things in my OpenGL setup: I was calling glEnable(GL_TEXTURE_2D) and glEnableClientState(GL_TEXTURE_COORD_ARRAY). How am I supposed to enable texture mapping without these? Or maybe there is a bigger error somewhere? FYI, I am using OpenGL ES 2.
* End of update *
My ViewController file is below.
#define BUFFER_OFFSET(i) ((char *)NULL + (i))
// Uniform index.
enum
{
UNIFORM_MODELVIEWPROJECTION_MATRIX,
UNIFORM_NORMAL_MATRIX,
NUM_UNIFORMS
};
GLint uniforms[NUM_UNIFORMS];
// Attribute index.
enum
{
ATTRIB_VERTEX,
ATTRIB_NORMAL,
NUM_ATTRIBUTES
};
BOOL updateRotate = FALSE;
VertexData *p_meshVertexData = nil;
int g_numFaces = 0;
GLuint g_textures[2]; // 0: photo, 1: picture frame.
const int DataSize = 48;
GLfloat PortraitVertexData[DataSize] =
{
// CCW
-0.5f, 0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0,1,
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0,0,
0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 1,0,
-0.5f, 0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0,1,
0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 1,0,
0.5f, 0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 1,1,
};
@interface ViewController () {
GLuint _program;
GLKMatrix4 _modelViewProjectionMatrix;
GLKMatrix3 _normalMatrix;
float _rotation;
GLuint _vertexArray;
GLuint _vertexBuffer;
}
@property (strong, nonatomic) EAGLContext *context;
//@property (strong, nonatomic) GLKBaseEffect *effect;
@property (strong, nonatomic) GLKTextureInfo *texture;
@property (nonatomic, retain) CaptureEngine *engine;
- (void)setupGL;
- (void)tearDownGL;
- (BOOL)loadShaders;
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file;
- (BOOL)linkProgram:(GLuint)prog;
- (BOOL)validateProgram:(GLuint)prog;
@end
@implementation ViewController
@synthesize context = _context;
//@synthesize effect = _effect;
@synthesize texture = _texture;
@synthesize engine;
/// View did load.
- (void)viewDidLoad
{
[super viewDidLoad];
self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!self.context) {
NSLog(#"Failed to create ES context");
}
GLKView *view = (GLKView *)self.view;
view.context = self.context;
view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
[self initEngine];
[self setupGL];
}
/// Initialize engine object.
- (void) initEngine
{
self.engine = [[CaptureEngine alloc] init];
g_numFaces = DataSize / 8;
p_meshVertexData = (VertexData *)malloc(g_numFaces * sizeof(VertexData));
int numIndex = 0;
for (int i = 0; i < DataSize; i += 8)
{
float x = PortraitVertexData[i];
float y = PortraitVertexData[i + 1];
float z = PortraitVertexData[i + 2];
float nx = PortraitVertexData[i + 3];
float ny = PortraitVertexData[i + 4];
float nz = PortraitVertexData[i + 5];
float tx = PortraitVertexData[i + 6];
float ty = PortraitVertexData[i + 7];
VertexData data;
data.vertex.x = x;
data.vertex.y = y;
data.vertex.z = z;
data.normal.x = nx;
data.normal.y = ny;
data.normal.z = nz;
data.textureCoord.x = tx;
data.textureCoord.y = ty;
p_meshVertexData[numIndex++] = data;
}
// UIImage *testImage = [UIImage imageNamed:@"frame.png"];
// self.previewImage.image = [ImageLoader ConvertToGrayedImage:testImage];
}
// Dealloc.
- (void)dealloc
{
[self tearDownGL];
if ([EAGLContext currentContext] == self.context) {
[EAGLContext setCurrentContext:nil];
}
}
// Memory warning.
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
if ([self isViewLoaded] && ([[self view] window] == nil)) {
self.view = nil;
[self tearDownGL];
if ([EAGLContext currentContext] == self.context) {
[EAGLContext setCurrentContext:nil];
}
self.context = nil;
}
// Dispose of any resources that can be recreated.
}
// Setup OpenlGL.
- (void)setupGL
{
[EAGLContext setCurrentContext:self.context];
[self loadShaders];
/*
self.effect = [[GLKBaseEffect alloc] init];
self.effect.light0.enabled = GL_TRUE;
self.effect.light0.diffuseColor = GLKVector4Make(1.0f, 1.0f, 1.0f, 0.5f);
self.effect.lightingType = GLKLightingTypePerPixel;
*/
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glGenVertexArraysOES(1, &_vertexArray);
glBindVertexArrayOES(_vertexArray);
glGenBuffers(1, &_vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(VertexData) * g_numFaces, p_meshVertexData, GL_STATIC_DRAW);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), 0);
glEnableVertexAttribArray(GLKVertexAttribNormal);
glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), (char *)12);
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(VertexData), (char *)24);
//// texture sample
/*
glActiveTexture(GL_TEXTURE0);
NSString *path = [[NSBundle mainBundle] pathForResource:@"frame" ofType:@"png"];
NSError *error;
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
forKey:GLKTextureLoaderOriginBottomLeft];
self.texture = [GLKTextureLoader textureWithContentsOfFile:path
options:options error:&error];
if (self.texture == nil)
NSLog(#"Error loading texture: %#", [error localizedDescription]);
*/
/*
GLKEffectPropertyTexture *tex = [[GLKEffectPropertyTexture alloc] init];
tex.enabled = YES;
tex.envMode = GLKTextureEnvModeDecal;
tex.name = self.texture.name;
self.effect.texture2d0.name = tex.name;
*/
UIImage *textureImage = [UIImage imageNamed:@"frame.png"];
[self ApplyTexture: textureImage];
//// end of texture sample
glBindVertexArrayOES(0);
}
// Dealloc OpenlGL.
- (void)tearDownGL
{
[EAGLContext setCurrentContext:self.context];
glDeleteBuffers(1, &_vertexBuffer);
glDeleteVertexArraysOES(1, &_vertexArray);
//self.effect = nil;
if (_program)
{
glDeleteProgram(_program);
_program = 0;
}
}
#pragma mark - GLKView and GLKViewController delegate methods
// Update process.
- (void)update
{
/// Default OpenGL project template (2 cubes) ///
float aspect = fabsf(self.view.bounds.size.width / self.view.bounds.size.height);
GLKMatrix4 projectionMatrix = GLKMatrix4MakePerspective(GLKMathDegreesToRadians(60.0f), aspect, 0.1f, 100.0f);
//self.effect.transform.projectionMatrix = projectionMatrix;
GLKMatrix4 baseModelViewMatrix = GLKMatrix4MakeTranslation(0.0f, 0.0f, -2.0f);
baseModelViewMatrix = GLKMatrix4Rotate(baseModelViewMatrix, _rotation, 0.0f, 1.0f, 0.0f);
/*
// Compute the model view matrix for the object rendered with GLKit
GLKMatrix4 modelViewMatrix = GLKMatrix4MakeTranslation(0.0f, 0.0f, 0.0f);
modelViewMatrix = GLKMatrix4Rotate(modelViewMatrix, _rotation, 0, 1.0f, 0);
modelViewMatrix = GLKMatrix4Multiply(baseModelViewMatrix, modelViewMatrix);
self.effect.transform.modelviewMatrix = modelViewMatrix;
*/
// Compute the model view matrix for the object rendered with ES2
GLKMatrix4 modelViewMatrix = GLKMatrix4MakeTranslation(0, 0, 0);
modelViewMatrix = GLKMatrix4Rotate(modelViewMatrix, _rotation, 0, 1.0f, 0);
modelViewMatrix = GLKMatrix4Multiply(baseModelViewMatrix, modelViewMatrix);
_normalMatrix = GLKMatrix3InvertAndTranspose(GLKMatrix4GetMatrix3(modelViewMatrix), NULL);
_modelViewProjectionMatrix = GLKMatrix4Multiply(projectionMatrix, modelViewMatrix);
if (updateRotate)
{
_rotation += self.timeSinceLastUpdate * 0.5f;
}
}
// Render process.
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
glClearColor(1, 1, 1, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindVertexArrayOES(_vertexArray);
/*
// Render the object with GLKit
[self.effect prepareToDraw];
glDrawArrays(GL_TRIANGLES, 0, g_numFaces);
*/
// Render the object again with ES2
glUseProgram(_program);
glUniformMatrix4fv(uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX], 1, 0, _modelViewProjectionMatrix.m);
glUniformMatrix3fv(uniforms[UNIFORM_NORMAL_MATRIX], 1, 0, _normalMatrix.m);
glDrawArrays(GL_TRIANGLES, 0, g_numFaces);
}
- (int) ApplyTexture:(UIImage *)image
{
// 1
CGImageRef spriteImage = image.CGImage;
if (!spriteImage)
{
NSLog(#"Failed to apply texture.");
return -1;
}
// 2
size_t width = CGImageGetWidth(spriteImage);
size_t height = CGImageGetHeight(spriteImage);
GLubyte * spriteData = (GLubyte *) calloc(width * width * 4, sizeof(GLubyte));
CGContextRef spriteContext = CGBitmapContextCreate(spriteData,
width,
width,
8,
width * 4,
CGImageGetColorSpace(spriteImage),
kCGImageAlphaPremultipliedLast);
// 3
CGContextDrawImage(spriteContext, CGRectMake(0, 0, width, height), spriteImage);
CGContextRelease(spriteContext);
// 4
glGenTextures(1, &g_textures[0]);
glBindTexture(GL_TEXTURE_2D, g_textures[0]);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
// glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
// glEnableClientState(GL_TEXTURE_COORD_ARRAY);
CFDataRef data = CGDataProviderCopyData(CGImageGetDataProvider(spriteImage));
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, width, 0, GL_RGBA,
GL_UNSIGNED_BYTE, CFDataGetBytePtr(data));
free(spriteData);
return 0;
}
#pragma mark - OpenGL ES 2 shader compilation
- (BOOL)loadShaders
{
GLuint vertShader, fragShader;
NSString *vertShaderPathname, *fragShaderPathname;
// Create shader program.
_program = glCreateProgram();
// Create and compile vertex shader.
vertShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"vsh"];
if (![self compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderPathname]) {
NSLog(@"Failed to compile vertex shader");
return NO;
}
// Create and compile fragment shader.
fragShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"fsh"];
if (![self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderPathname]) {
NSLog(@"Failed to compile fragment shader");
return NO;
}
// Attach vertex shader to program.
glAttachShader(_program, vertShader);
// Attach fragment shader to program.
glAttachShader(_program, fragShader);
// Bind attribute locations.
// This needs to be done prior to linking.
glBindAttribLocation(_program, GLKVertexAttribPosition, "position");
glBindAttribLocation(_program, GLKVertexAttribNormal, "normal");
// Link program.
if (![self linkProgram:_program]) {
NSLog(#"Failed to link program: %d", _program);
if (vertShader) {
glDeleteShader(vertShader);
vertShader = 0;
}
if (fragShader) {
glDeleteShader(fragShader);
fragShader = 0;
}
if (_program) {
glDeleteProgram(_program);
_program = 0;
}
return NO;
}
// Get uniform locations.
uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX] = glGetUniformLocation(_program, "modelViewProjectionMatrix");
uniforms[UNIFORM_NORMAL_MATRIX] = glGetUniformLocation(_program, "normalMatrix");
// Release vertex and fragment shaders.
if (vertShader) {
glDetachShader(_program, vertShader);
glDeleteShader(vertShader);
}
if (fragShader) {
glDetachShader(_program, fragShader);
glDeleteShader(fragShader);
}
return YES;
}
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file
{
GLint status;
const GLchar *source;
source = (GLchar *)[[NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil] UTF8String];
if (!source) {
NSLog(#"Failed to load vertex shader");
return NO;
}
*shader = glCreateShader(type);
glShaderSource(*shader, 1, &source, NULL);
glCompileShader(*shader);
#if defined(DEBUG)
GLint logLength;
glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetShaderInfoLog(*shader, logLength, &logLength, log);
NSLog(#"Shader compile log:\n%s", log);
free(log);
}
#endif
glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
if (status == 0) {
glDeleteShader(*shader);
return NO;
}
return YES;
}
- (BOOL)linkProgram:(GLuint)prog
{
GLint status;
glLinkProgram(prog);
#if defined(DEBUG)
GLint logLength;
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
NSLog(#"Program link log:\n%s", log);
free(log);
}
#endif
glGetProgramiv(prog, GL_LINK_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
- (BOOL)validateProgram:(GLuint)prog
{
GLint logLength, status;
glValidateProgram(prog);
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
NSLog(#"Program validate log:\n%s", log);
free(log);
}
glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
Wow. It looks like glEnable for texture mapping is deprecated.
https://gamedev.stackexchange.com/questions/20656/is-glenable-obsolete-unneeded-in-opengl-es-2
Ugh. I guess I have no choice but to learn this pixel/fragment shader rocket surgery.
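For what it's worth, in ES 2 texturing lives entirely in the shaders plus a sampler uniform; glEnable(GL_TEXTURE_2D) and glEnableClientState() simply don't exist there. A minimal sketch of the missing pieces (the "texCoord" and "texture0" names are assumptions, not taken from the project above):
// Shader.vsh (sketch): pass the texture coordinate through to the fragment shader
attribute vec4 position;
attribute vec2 texCoord;
uniform mat4 modelViewProjectionMatrix;
varying vec2 v_texCoord;
void main()
{
    v_texCoord = texCoord;
    gl_Position = modelViewProjectionMatrix * position;
}
// Shader.fsh (sketch): sample the bound texture
precision mediump float;
varying vec2 v_texCoord;
uniform sampler2D texture0;
void main()
{
    gl_FragColor = texture2D(texture0, v_texCoord);
}
// Host side (sketch): bind the texcoord attribute before linking, and point the
// sampler uniform at texture unit 0 before drawing.
glBindAttribLocation(_program, GLKVertexAttribTexCoord0, "texCoord"); // alongside "position"/"normal" in loadShaders
glUseProgram(_program);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, g_textures[0]);
glUniform1i(glGetUniformLocation(_program, "texture0"), 0);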

OpenGL ES iPhone, strange line

I have a strange problem. Sometimes it happens, sometimes it doesn't.
I have an array of GLfloat with 8 items. When EAGLView draws it, it shows the two lines from the array, but also an additional line.
EAGLView.m
+ (Class)layerClass {
return [CAEAGLLayer class];
}
- (id)initWithCoder:(NSCoder*)coder {
if ((self = [super initWithCoder:coder])) {
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = NO;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
[self release];
return nil;
}
}
return self;
}
- (void)drawView {
int numero=malloc_size(puntosPintar)/sizeof(GLfloat);
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
glClear(GL_COLOR_BUFFER_BIT);
glVertexPointer(2, GL_FLOAT, 0, puntosPintar);
glEnableClientState(GL_VERTEX_ARRAY);
glColor4f(1,1,1,1);
glDrawArrays(GL_LINES, 0, numero);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
free(puntosPintar);
}
- (void)layoutSubviews {
[EAGLContext setCurrentContext:context];
[self destroyFramebuffer];
[self createFramebuffer];
[self drawView];
}
- (BOOL)createFramebuffer {
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
if (USE_DEPTH_BUFFER) {
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(#"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
- (void)destroyFramebuffer {
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
if(depthRenderbuffer) {
glDeleteRenderbuffersOES(1, &depthRenderbuffer);
depthRenderbuffer = 0;
}
}
- (void)dealloc {
if ([EAGLContext currentContext] == context) {
[EAGLContext setCurrentContext:nil];
}
[context release];
[super dealloc];
}
@end
puntosPintar
puntosPintar=(GLfloat*)malloc(sizeof(GLfloat)*8);
puntosPintar[0] = -0.25f;
puntosPintar[1] = -1.49f;
puntosPintar[2] = -0.1f;
puntosPintar[3] = 0.0f;
puntosPintar[4] = 0.25f;
puntosPintar[5] = -1.49f;
puntosPintar[6] = 0.1f;
puntosPintar[7] = 0.0f;
You should be passing '4' into the count argument of glDrawArrays, not 8: it is the count of vertices, not the count of floats.
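In other words, derive the count from the number of floats per vertex instead of from malloc_size (which can report more than was requested). A minimal sketch of the draw call:
const int floatsPerVertex = 2;            // matches glVertexPointer(2, GL_FLOAT, ...)
int numero = 8 / floatsPerVertex;         // 8 floats -> 4 vertices -> 2 lines
glVertexPointer(2, GL_FLOAT, 0, puntosPintar);
glEnableClientState(GL_VERTEX_ARRAY);
glDrawArrays(GL_LINES, 0, numero);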

(iOS) OpenGL ES (2.0) Application, how to move the object in Z?

I'm playing with the OpenGL ES (2.0) Application (iOS) source code in Xcode and was trying to make the colorful square move along the Z coordinate, so I tried to change the shader code from
gl_Position.y += sin(translate) / 2.0;
to
gl_Position.z += sin(translate) / 2.0;
with no success.
The square doesn't move at all.
It moves well in X and Y though...
Is there some option I need to activate when initializing OpenGL? Thanks!
UPDATE:
I've uploaded an example. This is roughly the OpenGL ES template that Xcode generates; I just added the calls to create the depth buffer and changed gl_Position.x to gl_Position.z += sin(translate) / 2.0f in Shader.vsh.
I wish to see the square move sinusoidally along the Z coordinate, but it just won't. Either it keeps still or, if I multiply the sin(), it appears and disappears in a cycle.
If anyone can help me out I'll be most grateful, since I sincerely do not know what else to do, and believe me I have tried a lot...
The source is in a zip at: http://cl.ly/24240x2D1t2A3I0c1l1P
Thank you!
The example you are looking at has no depth buffer, and its projection matrix is intended for 2D GL. Look at the aurioTouch example instead. In the EAGLView class you will notice an option to implement the depth buffer. The two combined (since aurioTouch doesn't implement shaders) should give a better understanding.
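To make the Z motion visible, the square has to be rendered with an actual perspective projection; with the template's pass-through setup (w stays 1.0), changing gl_Position.z only changes the depth value, not where or how large the square appears. A minimal sketch of the vertex shader, assuming a projectionMatrix uniform is uploaded from the app (that uniform is an addition, not part of the template):
// Shader.vsh (sketch): offset the quad in view space, then apply a perspective projection
attribute vec4 position;
attribute vec4 color;
varying vec4 colorVarying;
uniform float translate;
uniform mat4 projectionMatrix; // e.g. from GLKMatrix4MakePerspective(...), uploaded with glUniformMatrix4fv
void main()
{
    vec4 p = position;
    p.z -= 2.0 + sin(translate) / 2.0;   // push the quad in front of the camera and oscillate it
    gl_Position = projectionMatrix * p;
    colorVarying = color;
}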
I think the order of operations in your method is causing the problem.
Here's the code I use in my app "Live Effects Cam", which places the live camera feed as a GL texture on shapes:
#define DEGREES_TO_RADIANS(__ANGLE__) ((__ANGLE__) / 180.0 * M_PI)
@interface GLView : UIView
{
@private
/* The pixel dimensions of the backbuffer */
GLint backingWidth;
GLint backingHeight;
EAGLContext *context;
/* OpenGL names for the renderbuffer and framebuffers used to render to this view */
GLuint viewRenderbuffer;
GLuint viewFramebuffer;
GLuint depthRenderbuffer;
/* OpenGL name for the sprite texture */
GLuint spriteTexture;
}
@property (readonly) GLint backingWidth;
@property (readonly) GLint backingHeight;
@property (readonly) EAGLContext *context;
- (void) drawView;
- (BOOL) createFramebuffer;
- (void) destroyFramebuffer;
+ (UIImage *) snapshot:(GLView *)eaglview;
@end
@implementation GLView
@synthesize backingWidth;
@synthesize backingHeight;
@synthesize context;
+ (Class) layerClass
{
return [CAEAGLLayer class];
}
- (id)init
{
self = [[super init] initWithFrame:CGRectMake(0.0, 0.0, 480.0, 640.0)]; // size of the camera image being captured
if ( self==nil )
return self;
// Set Content Scaling
//
if ( HIRESDEVICE )
{
self.contentScaleFactor = (CGFloat)2.0;
}
// Get our backing layer
//
CAEAGLLayer *eaglLayer = (CAEAGLLayer*) self.layer;
// Configure it so that it is opaque, does not retain the contents of the backbuffer when displayed, and uses RGBA8888 color.
//
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking,
kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
nil];
// Create our EAGLContext, and if successful make it current and create our framebuffer.
//
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if(!context || ![EAGLContext setCurrentContext:context] || ![self createFramebuffer])
{
[self release];
return nil;
}
// Final View Settings
//
[self setOpaque:YES];
self.multipleTouchEnabled = YES;
self.backgroundColor = [UIColor clearColor];
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
GLfloat zNear = 1.0;
GLfloat zFar = 1000.0;
GLfloat fieldOfView = 90; // Lens Angle of View
GLfloat size = zNear * tanf(DEGREES_TO_RADIANS(fieldOfView) / 2.0);
CGRect rect = CGRectMake( (CGFloat)0.0, (CGFloat)0.0, backingWidth, backingHeight);
glFrustumf(-size, size, -size / (rect.size.width / rect.size.height), size / (rect.size.width / rect.size.height), zNear, zFar);
glViewport(0, 0, backingWidth, backingHeight);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
glEnable(GL_MULTISAMPLE);
glEnable(GL_LINE_SMOOTH);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
glHint(GL_POINT_SMOOTH_HINT, GL_NICEST);
glDisable(GL_ALPHA_TEST);
// Turn Translucent Textures: OFF
//
glDisable(GL_BLEND);
// // Turn Translucent Textures: ON
// //
// glEnable(GL_BLEND);
// glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
return self;
}
- (void) drawView
{
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (BOOL)createFramebuffer
{
// Generate IDs for a framebuffer object and a color renderbuffer
//
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
// This call associates the storage for the current render buffer with the EAGLDrawable (our CAEAGLLayer)
// allowing us to draw into a buffer that will later be rendered to screen whereever the layer is (which corresponds with our view).
//
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
// If this app uses a depth buffer, we'll create and attach one via another renderbuffer.
//
if ( YES )
{
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
{
NSLog(#"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
- (void) destroyFramebuffer
{
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
if(depthRenderbuffer)
{
glDeleteRenderbuffersOES(1, &depthRenderbuffer);
depthRenderbuffer = 0;
}
}
+ (UIImage *) snapshot:(GLView *)eaglview
{
NSInteger x = 0;
NSInteger y = 0;
NSInteger width = [eaglview backingWidth];
NSInteger height = [eaglview backingHeight];
NSInteger dataLength = width * height * 4;
// Need to do this to get it to flush before taking the snapshot
//
NSUInteger i;
for ( i=0; i<100; i++ )
{
glFlush();
CFRunLoopRunInMode(kCFRunLoopDefaultMode, (float)1.0/(float)60.0, FALSE);
}
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
// Read pixel data from the framebuffer
//
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
// Create a CGImage with the pixel data
// If your OpenGL ES content is opaque, use kCGImageAlphaNoneSkipLast to ignore the alpha channel
// otherwise, use kCGImageAlphaPremultipliedLast
//
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast, ref, NULL, true, kCGRenderingIntentDefault);
// OpenGL ES measures data in PIXELS
// Create a graphics context with the target size measured in POINTS
//
NSInteger widthInPoints;
NSInteger heightInPoints;
if (NULL != UIGraphicsBeginImageContextWithOptions)
{
// On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
// Set the scale parameter to your OpenGL ES view's contentScaleFactor
// so that you get a high-resolution snapshot when its value is greater than 1.0
//
CGFloat scale = eaglview.contentScaleFactor;
widthInPoints = width / scale;
heightInPoints = height / scale;
UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
}
else
{
// On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
//
widthInPoints = width;
heightInPoints = height;
UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
}
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
// UIKit coordinate system is upside down to GL/Quartz coordinate system
// Flip the CGImage by rendering it to the flipped bitmap context
// The size of the destination area is measured in POINTS
//
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);
// Retrieve the UIImage from the current context
UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); // autoreleased image
UIGraphicsEndImageContext();
// Clean up
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
return image;
}
@end
// Create default framebuffer object.
glGenFramebuffers(1, &defaultFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
// Create color render buffer and allocate backing store.
glGenRenderbuffers(1, &depthRenderbuffer); <----
glGenRenderbuffers(1, &colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer); <----
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &framebufferWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &framebufferHeight);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, framebufferWidth, framebufferHeight); <----
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer); <----
I've added that to my code (the lines marked with <----) and now I get a pinkish screen (lol). Any ideas? This is really frustrating. Shouldn't I do something in the setFrame.. presentFrame..?
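For comparison, here is a sketch of the usual ordering: each renderbuffer gets its storage while it is the one currently bound. In the snippet above, the GL_DEPTH_COMPONENT16 storage call runs while colorRenderbuffer is still bound, so it likely clobbers the color buffer's storage and leaves depthRenderbuffer with none:
// Color renderbuffer: bind it, then take its storage from the CAEAGLLayer
glGenRenderbuffers(1, &colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &framebufferWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &framebufferHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
// Depth renderbuffer: bind it before calling glRenderbufferStorage with the depth format
glGenRenderbuffers(1, &depthRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, framebufferWidth, framebufferHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer);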

My "drawView" function is being called only once?

I'm new to OpenGL ES and am having a simple kind of problem in my project. I've successfully drawn a square on my screen, but the problem is that the "drawView" function in my EAGLView is
only called once, even though I wrote code to call it again and again as in Xcode's OpenGL ES template.
Does anyone have a clue where the mistake is?
Following is the code of my EAGLView.m file:
#import <QuartzCore/QuartzCore.h>
#import <OpenGLES/EAGLDrawable.h>
#import "EAGLView.h"
#define USE_DEPTH_BUFFER 0
// A class extension to declare private methods
@interface EAGLView ()
@property (nonatomic, retain) EAGLContext *context;
- (BOOL) createFramebuffer;
- (void) destroyFramebuffer;
@end
@implementation EAGLView
@synthesize context;
@synthesize animating;
@dynamic animationFrameInterval;
// You must implement this method
+ (Class)layerClass {
return [CAEAGLLayer class];
}
//The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
- (id)initWithCoder:(NSCoder*)coder {
if ((self = [super initWithCoder:coder])) {
// Get the layer
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
[self release];
return nil;
}
animating = FALSE;
displayLinkSupported = FALSE;
animationFrameInterval = 1;
displayLink = nil;
animationTimer = nil;
// A system version of 3.1 or greater is required to use CADisplayLink. The NSTimer
// class is used as fallback when it isn't available.
NSString *reqSysVer = #"3.1";
NSString *currSysVer = [[UIDevice currentDevice] systemVersion];
if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending)
displayLinkSupported = TRUE;
}
return self;
}
- (void)drawView {
NSLog(#"In EAGLView's drawView func");
static const GLfloat squareVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
static const GLubyte squareColors[] = {
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
0, 0, 0, 0,
};
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glRotatef(10.0f, 0.0f, 0.0f, 1.0f);
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glVertexPointer(2, GL_FLOAT, 0, squareVertices);
glEnableClientState(GL_VERTEX_ARRAY);
glColorPointer(4, GL_UNSIGNED_BYTE, 0, squareColors);
glEnableClientState(GL_COLOR_ARRAY);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (void)layoutSubviews {
[EAGLContext setCurrentContext:context];
[self destroyFramebuffer];
[self createFramebuffer];
[self drawView];
}
- (BOOL)createFramebuffer {
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
if (USE_DEPTH_BUFFER) {
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(#"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
- (void)destroyFramebuffer {
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
if(depthRenderbuffer) {
glDeleteRenderbuffersOES(1, &depthRenderbuffer);
depthRenderbuffer = 0;
}
}
- (NSInteger)animationFrameInterval
{
return animationFrameInterval;
}
- (void)setAnimationFrameInterval:(NSInteger)frameInterval
{
// Frame interval defines how many display frames must pass between each time the
// display link fires. The display link will only fire 30 times a second when the
// frame internal is two on a display that refreshes 60 times a second. The default
// frame interval setting of one will fire 60 times a second when the display refreshes
// at 60 times a second. A frame interval setting of less than one results in undefined
// behavior.
if (frameInterval >= 1)
{
animationFrameInterval = frameInterval;
if (animating)
{
[self stopAnimation];
[self startAnimation];
}
}
}
- (void)startAnimation
{
if (!animating)
{
if (displayLinkSupported)
{
// CADisplayLink is API new to iPhone SDK 3.1. Compiling against earlier versions will result in a warning, but can be dismissed
// if the system version runtime check for CADisplayLink exists in -initWithCoder:. The runtime check ensures this code will
// not be called in system versions earlier than 3.1.
displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawView)];
[displayLink setFrameInterval:animationFrameInterval];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
else
animationTimer = [NSTimer scheduledTimerWithTimeInterval:(NSTimeInterval)((1.0 / 60.0) * animationFrameInterval) target:self selector:@selector(drawView) userInfo:nil repeats:TRUE];
animating = TRUE;
}
}
- (void)stopAnimation
{
if (animating)
{
if (displayLinkSupported)
{
[displayLink invalidate];
displayLink = nil;
}
else
{
[animationTimer invalidate];
animationTimer = nil;
}
animating = FALSE;
}
}
- (void)dealloc {
if ([EAGLContext currentContext] == context) {
[EAGLContext setCurrentContext:nil];
}
[context release];
[super dealloc];
}
@end
Check to see if your method:
- (void)startAnimation
is actually being called.
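In the stock OpenGL ES template that call comes from outside the view, usually from the app delegate when the app becomes active. A minimal sketch, assuming the delegate keeps a glView outlet as the template does:
- (void)applicationDidBecomeActive:(UIApplication *)application
{
    [glView startAnimation]; // without this, the single drawView from layoutSubviews is all you ever see
}
- (void)applicationWillResignActive:(UIApplication *)application
{
    [glView stopAnimation];
}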