Intro:
I'm brand new to OpenGL. I first got familiar with texture mapping and basic transformations/translations in OpenGL on my PC.
Now it feels like I'm basically re-learning everything as I try to create a simple polygon (quad) with a PNG texture using GLKit (the GLKView project template helped a lot).
Here's the thing:
I was actually able to get up and running very quickly using the project template's out-of-the-box GLKBaseEffect way of rendering. However, I've read a lot in the past 24 hours recommending that you skip that route and go with the "OpenGL ES 2" way.
My very general idea of this is basically: fixed pipeline vs. programmable pipeline. Whatever.
Problem:
When going with the "ES2" approach, I can see my quad polygon, but I'm unable to apply a texture on it.
Question: Anybody know of a simple tutorial/example I can follow? Or
even better, can anybody figure out what im doing wrong?
* Insert of Update comment *
I discovered that a glError was being thrown due to two things in
my OpenGL setup: I was calling glEnable(GL_TEXTURE_2D) and
glEnableClientState(GL_TEXTURE_COORD_ARRAY). How am I supposed to
enable texture mapping without these? Or maybe there is a bigger error
somewhere? FYI, I am using OpenGL ES 2.
* End of update insert *
My ViewController file is below.
#define BUFFER_OFFSET(i) ((char *)NULL + (i))
// Uniform index.
enum
{
UNIFORM_MODELVIEWPROJECTION_MATRIX,
UNIFORM_NORMAL_MATRIX,
NUM_UNIFORMS
};
GLint uniforms[NUM_UNIFORMS];
// Attribute index.
enum
{
ATTRIB_VERTEX,
ATTRIB_NORMAL,
NUM_ATTRIBUTES
};
BOOL updateRotate = FALSE;
VertexData *p_meshVertexData = nil;
int g_numFaces = 0;
GLuint g_textures[2]; // 0: photo, 1: picture frame.
const int DataSize = 48;
GLfloat PortraitVertexData[DataSize] =
{
// CCW
-0.5f, 0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0,1,
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0,0,
0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 1,0,
-0.5f, 0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0,1,
0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 1,0,
0.5f, 0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 1,1,
};
@interface ViewController () {
GLuint _program;
GLKMatrix4 _modelViewProjectionMatrix;
GLKMatrix3 _normalMatrix;
float _rotation;
GLuint _vertexArray;
GLuint _vertexBuffer;
}
@property (strong, nonatomic) EAGLContext *context;
//@property (strong, nonatomic) GLKBaseEffect *effect;
@property (strong, nonatomic) GLKTextureInfo *texture;
@property (nonatomic, retain) CaptureEngine *engine;
- (void)setupGL;
- (void)tearDownGL;
- (BOOL)loadShaders;
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file;
- (BOOL)linkProgram:(GLuint)prog;
- (BOOL)validateProgram:(GLuint)prog;
@end
@implementation ViewController
@synthesize context = _context;
//@synthesize effect = _effect;
@synthesize texture = _texture;
@synthesize engine;
/// View did load.
- (void)viewDidLoad
{
[super viewDidLoad];
self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!self.context) {
NSLog(#"Failed to create ES context");
}
GLKView *view = (GLKView *)self.view;
view.context = self.context;
view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
[self initEngine];
[self setupGL];
}
/// Initialize engine object.
- (void) initEngine
{
self.engine = [[CaptureEngine alloc] init];
g_numFaces = DataSize / 8;
p_meshVertexData = (VertexData *)malloc(g_numFaces * sizeof(VertexData));
int numIndex = 0;
for (int i = 0; i < DataSize; i += 8)
{
float x = PortraitVertexData[i];
float y = PortraitVertexData[i + 1];
float z = PortraitVertexData[i + 2];
float nx = PortraitVertexData[i + 3];
float ny = PortraitVertexData[i + 4];
float nz = PortraitVertexData[i + 5];
float tx = PortraitVertexData[i + 6];
float ty = PortraitVertexData[i + 7];
VertexData data;
data.vertex.x = x;
data.vertex.y = y;
data.vertex.z = z;
data.normal.x = nx;
data.normal.y = ny;
data.normal.z = nz;
data.textureCoord.x = tx;
data.textureCoord.y = ty;
p_meshVertexData[numIndex++] = data;
}
// UIImage *testImage = [UIImage imageNamed:@"frame.png"];
// self.previewImage.image = [ImageLoader ConvertToGrayedImage:testImage];
}
// Dealloc.
- (void)dealloc
{
[self tearDownGL];
if ([EAGLContext currentContext] == self.context) {
[EAGLContext setCurrentContext:nil];
}
}
// Memory warning.
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
if ([self isViewLoaded] && ([[self view] window] == nil)) {
self.view = nil;
[self tearDownGL];
if ([EAGLContext currentContext] == self.context) {
[EAGLContext setCurrentContext:nil];
}
self.context = nil;
}
// Dispose of any resources that can be recreated.
}
// Setup OpenlGL.
- (void)setupGL
{
[EAGLContext setCurrentContext:self.context];
[self loadShaders];
/*
self.effect = [[GLKBaseEffect alloc] init];
self.effect.light0.enabled = GL_TRUE;
self.effect.light0.diffuseColor = GLKVector4Make(1.0f, 1.0f, 1.0f, 0.5f);
self.effect.lightingType = GLKLightingTypePerPixel;
*/
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glGenVertexArraysOES(1, &_vertexArray);
glBindVertexArrayOES(_vertexArray);
glGenBuffers(1, &_vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(VertexData) * g_numFaces, p_meshVertexData, GL_STATIC_DRAW);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), 0);
glEnableVertexAttribArray(GLKVertexAttribNormal);
glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), (char *)12);
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(VertexData), (char *)24);
//// texture sample
/*
glActiveTexture(GL_TEXTURE0);
NSString *path = [[NSBundle mainBundle] pathForResource:@"frame" ofType:@"png"];
NSError *error;
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
forKey:GLKTextureLoaderOriginBottomLeft];
self.texture = [GLKTextureLoader textureWithContentsOfFile:path
options:options error:&error];
if (self.texture == nil)
NSLog(#"Error loading texture: %#", [error localizedDescription]);
*/
/*
GLKEffectPropertyTexture *tex = [[GLKEffectPropertyTexture alloc] init];
tex.enabled = YES;
tex.envMode = GLKTextureEnvModeDecal;
tex.name = self.texture.name;
self.effect.texture2d0.name = tex.name;
*/
UIImage *textureImage = [UIImage imageNamed:@"frame.png"];
[self ApplyTexture: textureImage];
//// end of texture sample
glBindVertexArrayOES(0);
}
// Dealloc OpenlGL.
- (void)tearDownGL
{
[EAGLContext setCurrentContext:self.context];
glDeleteBuffers(1, &_vertexBuffer);
glDeleteVertexArraysOES(1, &_vertexArray);
//self.effect = nil;
if (_program)
{
glDeleteProgram(_program);
_program = 0;
}
}
#pragma mark - GLKView and GLKViewController delegate methods
// Update process.
- (void)update
{
/// Default OpenGL project template (2 cubes) ///
float aspect = fabsf(self.view.bounds.size.width / self.view.bounds.size.height);
GLKMatrix4 projectionMatrix = GLKMatrix4MakePerspective(GLKMathDegreesToRadians(60.0f), aspect, 0.1f, 100.0f);
//self.effect.transform.projectionMatrix = projectionMatrix;
GLKMatrix4 baseModelViewMatrix = GLKMatrix4MakeTranslation(0.0f, 0.0f, -2.0f);
baseModelViewMatrix = GLKMatrix4Rotate(baseModelViewMatrix, _rotation, 0.0f, 1.0f, 0.0f);
/*
// Compute the model view matrix for the object rendered with GLKit
GLKMatrix4 modelViewMatrix = GLKMatrix4MakeTranslation(0.0f, 0.0f, 0.0f);
modelViewMatrix = GLKMatrix4Rotate(modelViewMatrix, _rotation, 0, 1.0f, 0);
modelViewMatrix = GLKMatrix4Multiply(baseModelViewMatrix, modelViewMatrix);
self.effect.transform.modelviewMatrix = modelViewMatrix;
*/
// Compute the model view matrix for the object rendered with ES2
GLKMatrix4 modelViewMatrix = GLKMatrix4MakeTranslation(0, 0, 0);
modelViewMatrix = GLKMatrix4Rotate(modelViewMatrix, _rotation, 0, 1.0f, 0);
modelViewMatrix = GLKMatrix4Multiply(baseModelViewMatrix, modelViewMatrix);
_normalMatrix = GLKMatrix3InvertAndTranspose(GLKMatrix4GetMatrix3(modelViewMatrix), NULL);
_modelViewProjectionMatrix = GLKMatrix4Multiply(projectionMatrix, modelViewMatrix);
if (updateRotate)
{
_rotation += self.timeSinceLastUpdate * 0.5f;
}
}
// Render process.
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
glClearColor(1, 1, 1, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindVertexArrayOES(_vertexArray);
/*
// Render the object with GLKit
[self.effect prepareToDraw];
glDrawArrays(GL_TRIANGLES, 0, g_numFaces);
*/
// Render the object again with ES2
glUseProgram(_program);
glUniformMatrix4fv(uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX], 1, 0, _modelViewProjectionMatrix.m);
glUniformMatrix3fv(uniforms[UNIFORM_NORMAL_MATRIX], 1, 0, _normalMatrix.m);
glDrawArrays(GL_TRIANGLES, 0, g_numFaces);
}
- (int) ApplyTexture:(UIImage *)image
{
// 1
CGImageRef spriteImage = image.CGImage;
if (!spriteImage)
{
NSLog(#"Failed to apply texture.");
return -1;
}
// 2
size_t width = CGImageGetWidth(spriteImage);
size_t height = CGImageGetHeight(spriteImage);
GLubyte * spriteData = (GLubyte *) calloc(width * width * 4, sizeof(GLubyte));
CGContextRef spriteContext = CGBitmapContextCreate(spriteData,
width,
width,
8,
width * 4,
CGImageGetColorSpace(spriteImage),
kCGImageAlphaPremultipliedLast);
// 3
CGContextDrawImage(spriteContext, CGRectMake(0, 0, width, height), spriteImage);
CGContextRelease(spriteContext);
// 4
glGenTextures(1, &g_textures[0]);
glBindTexture(GL_TEXTURE_2D, g_textures[0]);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
// glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
// glEnableClientState(GL_TEXTURE_COORD_ARRAY);
CFDataRef data = CGDataProviderCopyData(CGImageGetDataProvider(spriteImage));
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, width, 0, GL_RGBA,
GL_UNSIGNED_BYTE, CFDataGetBytePtr(data));
free(spriteData);
return 0;
}
#pragma mark - OpenGL ES 2 shader compilation
- (BOOL)loadShaders
{
GLuint vertShader, fragShader;
NSString *vertShaderPathname, *fragShaderPathname;
// Create shader program.
_program = glCreateProgram();
// Create and compile vertex shader.
vertShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"vsh"];
if (![self compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderPathname]) {
NSLog(@"Failed to compile vertex shader");
return NO;
}
// Create and compile fragment shader.
fragShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"fsh"];
if (![self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderPathname]) {
NSLog(@"Failed to compile fragment shader");
return NO;
}
// Attach vertex shader to program.
glAttachShader(_program, vertShader);
// Attach fragment shader to program.
glAttachShader(_program, fragShader);
// Bind attribute locations.
// This needs to be done prior to linking.
glBindAttribLocation(_program, GLKVertexAttribPosition, "position");
glBindAttribLocation(_program, GLKVertexAttribNormal, "normal");
// Link program.
if (![self linkProgram:_program]) {
NSLog(#"Failed to link program: %d", _program);
if (vertShader) {
glDeleteShader(vertShader);
vertShader = 0;
}
if (fragShader) {
glDeleteShader(fragShader);
fragShader = 0;
}
if (_program) {
glDeleteProgram(_program);
_program = 0;
}
return NO;
}
// Get uniform locations.
uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX] = glGetUniformLocation(_program, "modelViewProjectionMatrix");
uniforms[UNIFORM_NORMAL_MATRIX] = glGetUniformLocation(_program, "normalMatrix");
// Release vertex and fragment shaders.
if (vertShader) {
glDetachShader(_program, vertShader);
glDeleteShader(vertShader);
}
if (fragShader) {
glDetachShader(_program, fragShader);
glDeleteShader(fragShader);
}
return YES;
}
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file
{
GLint status;
const GLchar *source;
source = (GLchar *)[[NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil] UTF8String];
if (!source) {
NSLog(#"Failed to load vertex shader");
return NO;
}
*shader = glCreateShader(type);
glShaderSource(*shader, 1, &source, NULL);
glCompileShader(*shader);
#if defined(DEBUG)
GLint logLength;
glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetShaderInfoLog(*shader, logLength, &logLength, log);
NSLog(#"Shader compile log:\n%s", log);
free(log);
}
#endif
glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
if (status == 0) {
glDeleteShader(*shader);
return NO;
}
return YES;
}
- (BOOL)linkProgram:(GLuint)prog
{
GLint status;
glLinkProgram(prog);
#if defined(DEBUG)
GLint logLength;
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
NSLog(#"Program link log:\n%s", log);
free(log);
}
#endif
glGetProgramiv(prog, GL_LINK_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
- (BOOL)validateProgram:(GLuint)prog
{
GLint logLength, status;
glValidateProgram(prog);
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
NSLog(#"Program validate log:\n%s", log);
free(log);
}
glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
Wow. It looks like glEnable(GL_TEXTURE_2D) isn't part of OpenGL ES 2 at all.
https://gamedev.stackexchange.com/questions/20656/is-glenable-obsolete-unneeded-in-opengl-es-2
Ugh. I guess I have no choice but to learn this pixel/fragment shader rocket surgery.
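For what it's worth, here is a minimal sketch (not the exact fix for the project above) of how texturing works in OpenGL ES 2 once the fixed-function enables are gone: bind the texture to a texture unit, point a sampler2D uniform at that unit, and sample it in the fragment shader. The names "texCoord" and "texSampler" and the shader contents are assumptions on my part, since the question's Shader.vsh/Shader.fsh are not shown. Note also, as an observation rather than a confirmed fix, that ApplyTexture above passes width where height is expected (in the bitmap context and in glTexImage2D), so a non-square PNG would come through distorted.

// Bind the texture-coordinate attribute before linking, alongside position/normal:
glBindAttribLocation(_program, GLKVertexAttribTexCoord0, "texCoord");

// After linking, look up the sampler uniform declared in the fragment shader,
// e.g. "uniform sampler2D texSampler;" used as
// "gl_FragColor = texture2D(texSampler, v_texCoord);"
GLint samplerUniform = glGetUniformLocation(_program, "texSampler");

// When drawing, bind the texture to unit 0 and point the sampler at that unit:
glUseProgram(_program);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, g_textures[0]);
glUniform1i(samplerUniform, 0);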
Related
I am using Blender to create a 3D cow and obtain the correct vertices (optimized afterwards to remove duplicates). I have created a Cow Class to be able to draw multiple instances of this cow on screen.
Cow 1 is drawn at (-2,0,-10) and Cow 2 is drawn at (2,0,-10). When I render cow 1 only, I see Cow 1 at (-2,0,-10). When I render both cows, or only cow 2 (with cow 1's render call commented out), I only get cow 2 at (2,0,-10) and never see cow 1. I am making a game where I will have many enemies running around, so I need to be able to draw multiple instances of these objects and render them independently at different locations. Can anyone tell me what I am doing wrong? Thanks
//
// Cow.m
//
#import "Cow.h"
#import "cow_verts.h"
@implementation Cow
- (id)initWithContext:(EAGLContext *)aContext {
context = aContext;
effect = [[GLKBaseEffect alloc] init];
[EAGLContext setCurrentContext:context];
sharedResourceManager = [ResourceManager sharedResourceManager];
UIImage *textureImage = [UIImage imageNamed:@"cowUV.png"];
texture = [sharedResourceManager addTexture:textureImage];
position = GLKVector3Make(0,0,0);
rotation = GLKVector3Make(0,0,0);
scale = GLKVector3Make(1,1,1);
[self setupGL];
return self;
}
- (void)setupGL {
glGenVertexArraysOES(1, &_vao);
glBindVertexArrayOES(_vao);
glGenBuffers(1, &_dynamicVBO);
glBindBuffer(GL_ARRAY_BUFFER, _dynamicVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(CowDynamicVertexData), CowDynamicVertexData, GL_DYNAMIC_DRAW);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(dynamicVertexData), (void *)offsetof(dynamicVertexData, position));
glEnableVertexAttribArray(GLKVertexAttribPosition);
glGenBuffers(1, &_staticVBO);
glBindBuffer(GL_ARRAY_BUFFER, _staticVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(CowStaticVertexData), CowStaticVertexData, GL_STATIC_DRAW);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(staticVertexData), (void *)offsetof(staticVertexData, texCoord));
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glGenBuffers(1, &_indexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(CowIndices), CowIndices, GL_STATIC_DRAW);
glBindVertexArrayOES(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
- (void)updateWithDelta:(float)aDelta {
float aspect = fabsf([[UIScreen mainScreen] bounds].size.width/ [[UIScreen mainScreen] bounds].size.height);
GLKMatrix4 projectionMatrix = GLKMatrix4MakePerspective(GLKMathDegreesToRadians(65.0f), aspect, 4.0f, 20.0f);
effect.transform.projectionMatrix = projectionMatrix;
rotation.y += 90 * aDelta;
}
- (void)render {
[EAGLContext setCurrentContext:context];
glClearColor(0, 1, 1, 1);
glClear( GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
GLKMatrix4 xRotationMatrix = GLKMatrix4MakeXRotation(GLKMathDegreesToRadians(rotation.x));
GLKMatrix4 yRotationMatrix = GLKMatrix4MakeYRotation(GLKMathDegreesToRadians(rotation.y));
GLKMatrix4 zRotationMatrix = GLKMatrix4MakeZRotation(GLKMathDegreesToRadians(rotation.z));
GLKMatrix4 scaleMatrix = GLKMatrix4MakeScale(scale.x, scale.y, scale.z);
GLKMatrix4 translateMatrix = GLKMatrix4MakeTranslation(position.x, position.y, position.z);
GLKMatrix4 modelMatrix = GLKMatrix4Multiply(translateMatrix,
GLKMatrix4Multiply(scaleMatrix,
GLKMatrix4Multiply(zRotationMatrix,
GLKMatrix4Multiply(yRotationMatrix,
xRotationMatrix))));
effect.transform.modelviewMatrix = modelMatrix;
//GLKMatrix4 viewMatrix = GLKMatrix4MakeLookAt(0, 0, 3, 0, 0, 0, 0, 1, 0);
//effect.transform.modelviewMatrix = GLKMatrix4Multiply(viewMatrix, modelMatrix);
effect.texture2d0.name = texture.name;
effect.texture2d0.target = texture.target;
effect.texture2d0.envMode = GLKTextureEnvModeReplace;
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glCullFace(GL_FRONT);
glBindVertexArrayOES(_vao);
[effect prepareToDraw];
glDrawElements(GL_TRIANGLES, sizeof(CowIndices)/sizeof(CowIndices[0]), GL_UNSIGNED_INT, (void *)0);
glBindVertexArrayOES(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glDisable(GL_DEPTH_TEST);
glDisable(GL_CULL_FACE);
}
@end
//
// Cow.h
//
@interface Cow : NSObject {
EAGLContext *context;
GLuint _vao;
GLuint _staticVBO;
GLuint _dynamicVBO;
GLuint _indexBuffer;
GLKTextureInfo *texture;
GLKBaseEffect *effect;
GLKVector3 position;
GLKVector3 rotation;
GLKVector3 scale;
ResourceManager *sharedResourceManager;
}
- (id)initWithContext:(EAGLContext *)aContext;
- (void)updateWithDelta:(float)aDelta;
- (void)render;
@end
//
// Scene which creates these objects and calls the functions
//
#import "Cow.h"
@implementation GameScene {
NSMutableArray *cows;
}
- (id)init {
if(self = [super init]) {
sharedDirector = [Director sharedDirector];
[EAGLContext setCurrentContext:[sharedDirector context]];
cows = [[NSMutableArray alloc] init];
AbstractEnemy *cow1 = [[Cow alloc] initWithContext:[sharedDirector context]];
cow1.position = GLKVector3Make(-2, 0, -10);
[cows addObject:cow1];
AbstractEnemy *cow2 = [[Cow alloc] initWithContext:[sharedDirector context]];
cow2.position = GLKVector3Make(2, 0, -10);
[cows addObject:cow2];
AbstractEnemy *cow3 = [[Cow alloc] initWithContext:[sharedDirector context]];
cow3.position = GLKVector3Make(0, 0, -15);
[cows addObject:cow3];
}
return self;
}
- (void)updateWithDelta:(GLfloat)aDelta {
for (int i = 0; i < cows.count; i++)
{
[cows[i] updateWithDelta:aDelta];
}
}
- (void)render {
for (int i = 0; i < cows.count; i++)
{
[cows[i] render];
}
}
@end
From the code you gave, you are calling a glClear() every time before you draw the cow. With all those clear calls, only the last cow you draw will be visible.
Move this call into the scene's render function so you only clear the scene once at the start of each frame.
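A minimal sketch of that restructuring, reusing the cows array and render methods from the question (the clear color is the one Cow's render currently uses):

// Scene-level render: clear once per frame, then draw every cow.
- (void)render {
    glClearColor(0, 1, 1, 1);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    for (Cow *cow in cows) {
        [cow render];   // Cow's render no longer calls glClearColor/glClear
    }
}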
I am working on a simple iPhone game application using the OpenGL ES framework. I am facing an "EXC_BAD_ACCESS" error while drawing a line over the images. Here I will explain what I did and what I tried in my project.
Step 1: I draw a background image using GL_TRIANGLE_STRIP. Here is my sample code,
- (void)viewDidLoad
{
[super viewDidLoad];
self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
Test = NO;
Test1 = NO;
if (!self.context)
{
NSLog(#"Failed to create ES context");
}
GLKView *view = (GLKView *)self.view;
view.context = self.context;
[EAGLContext setCurrentContext:self.context];
self.effect = [[GLKBaseEffect alloc] init];
GLKMatrix4 projectionMatrix = GLKMatrix4MakeOrtho(0, 480, 0, 320, -1024, 1024);
self.effect.transform.projectionMatrix = projectionMatrix;
self.player = [[SGGSprite alloc] initWithFile:@"bg.png" effect:self.effect];
self.player.position = GLKVector2Make(self.player.contentSize.width, 460);
self.children = [NSMutableArray array];
[self.children addObject:self.player];
}
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
// glClearColor(1, 1, 1, 1);
// glClear(GL_COLOR_BUFFER_BIT);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
// Condition satisfied to draw a line code given below, otherwise the background and fish image draw in else part.
if(Test1 == YES)
{
GLKView *view = (GLKView *)self.view;
view.context = self.context;
view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
self.effect = [[GLKBaseEffect alloc] init];
[self.effect prepareToDraw];
const GLfloat line[] =
{
0.0f, 0.5f,
-3.0f,-1.0f,
};
GLuint bufferObjectNameArray;
glGenBuffers(1, &bufferObjectNameArray);
glBindBuffer(GL_ARRAY_BUFFER, bufferObjectNameArray);
glBufferData(
GL_ARRAY_BUFFER, // the target buffer
sizeof(line), // the number of bytes to put into the buffer
line, // a pointer to the data being copied
GL_STATIC_DRAW); // the usage pattern of the data
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(
GLKVertexAttribPosition,
2, // number of coordinates per vertex
GL_FLOAT, // the data type of each component
GL_FALSE, // can the data be scaled
2*4,
NULL);
glDrawArrays(GL_LINES, 0, 2);
}
else
{
//SGGSprite is another class and self.children is mutable array.
for (SGGSprite * sprite in self.children)
{
[sprite render];
}
}
}
//render method in SGGSprite class to draw a fish and background image
- (void)render
{
self.effect.texture2d0.name = self.textureInfo.name;
self.effect.texture2d0.enabled = YES;
self.effect.transform.modelviewMatrix = self.modelMatrix;
[self.effect prepareToDraw];
long offset = (long)&_quad;
glEnableVertexAttribArray(GLKVertexAttribPosition);
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
NSLog(#"TexturedVertex:%ld",offset);
glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, geometryVertex)));
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, textureVertex)));
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
Step 2: After drawing the background image for my view, I draw fish images on the screen, also using GL_TRIANGLE_STRIP. The fish images are always moving. Here is my sample code,
- (void)update
{
if(Test == NO)
{
self.timeSinceLastSpawn += self.timeSinceLastUpdate;
if (self.timeSinceLastSpawn > 2.0)
{
self.timeSinceLastSpawn = 0;
[self target]; // add fish image
}
for (SGGSprite * sprite in self.children)
{
[sprite update:self.timeSinceLastUpdate];
}
}
}
- (void)target
{
SGGSprite * target = [[SGGSprite alloc] initWithFile:@"img2.png" effect:self.effect];
[self.children addObject:target]; // Add fish image in NSMutable array
int minY = target.contentSize.height/2;
int maxY = 320 - target.contentSize.height/2;
int rangeY = maxY - minY;
int actualY = (arc4random() % rangeY) + minY;
if(actualY <= 100)
{
actualY = 215;
}
target.position = GLKVector2Make(480 + (target.contentSize.width), actualY);
int minVelocity = 480.0/12.0;
int maxVelocity = 480.0/6.0;
int rangeVelocity = maxVelocity - minVelocity;
int actualVelocity = (arc4random() % rangeVelocity) + minVelocity;
target.moveVelocity = GLKVector2Make(-actualVelocity, 0);
}
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
// glClearColor(1, 1, 1, 1);
// glClear(GL_COLOR_BUFFER_BIT);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
// Condition satisfied to draw a line code given below, otherwise the background and fish image draw in else part.
if(Test1 == YES)
{
GLKView *view = (GLKView *)self.view;
view.context = self.context;
view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
self.effect = [[GLKBaseEffect alloc] init];
[self.effect prepareToDraw];
const GLfloat line[] =
{
0.0f, 0.5f,
-3.0f,-1.0f,
};
GLuint bufferObjectNameArray;
glGenBuffers(1, &bufferObjectNameArray);
glBindBuffer(GL_ARRAY_BUFFER, bufferObjectNameArray);
glBufferData(
GL_ARRAY_BUFFER, // the target buffer
sizeof(line), // the number of bytes to put into the buffer
line, // a pointer to the data being copied
GL_STATIC_DRAW); // the usage pattern of the data
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(
GLKVertexAttribPosition,
2, // number of coordinates per vertex
GL_FLOAT, // the data type of each component
GL_FALSE, // can the data be scaled
2*4,
NULL);
glDrawArrays(GL_LINES, 0, 2);
}
else
{
//SGGSprite is another class and self.children is mutable array.
for (SGGSprite * sprite in self.children)
{
[sprite render];
}
}
}
Step 3: Now I draw the background image and fish images on the screen, and it works perfectly. Next I want to draw a white line (using GL_LINES) on the screen, above the fish images and the background image. I draw the line with the code below,
-(void) touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
Test = YES;
Test1 = YES;
}
- (void)update
{
if(Test == NO)
{
self.timeSinceLastSpawn += self.timeSinceLastUpdate;
NSLog(#"timeSinceLastUpdate:%f",self.timeSinceLastUpdate);
if (self.timeSinceLastSpawn > 2.0)
{
self.timeSinceLastSpawn = 0;
[self target];
}
NSLog(#"Children count:%d",[self.children count]);
for (SGGSprite * sprite in self.children)
{
[sprite update:self.timeSinceLastUpdate];
}
}
}
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
// glClearColor(1, 1, 1, 1);
// glClear(GL_COLOR_BUFFER_BIT);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
// Condition satisfied to draw a line code given below, otherwise the background and fish image draw in else part.
if(Test1 == YES)
{
GLKView *view = (GLKView *)self.view;
view.context = self.context;
view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
self.effect = [[GLKBaseEffect alloc] init];
[self.effect prepareToDraw];
const GLfloat line[] =
{
0.0f, 0.5f,
-3.0f,-1.0f,
};
GLuint bufferObjectNameArray;
glGenBuffers(1, &bufferObjectNameArray);
glBindBuffer(GL_ARRAY_BUFFER, bufferObjectNameArray);
glBufferData(
GL_ARRAY_BUFFER, // the target buffer
sizeof(line), // the number of bytes to put into the buffer
line, // a pointer to the data being copied
GL_STATIC_DRAW); // the usage pattern of the data
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(
GLKVertexAttribPosition,
2, // number of coordinates per vertex
GL_FLOAT, // the data type of each component
GL_FALSE, // can the data be scaled
2*4,
NULL);
glDrawArrays(GL_LINES, 0, 2);
}
else
{
//SGGSprite is another class and self.children is mutable array, Draw a fish and background image
for (SGGSprite * sprite in self.children)
{
[sprite render];
}
}
}
The app crashes at the line "glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);" below; the bad access happens there when I touch the screen:
- (void)render
{
self.effect.texture2d0.name = self.textureInfo.name;
self.effect.texture2d0.enabled = YES;
self.effect.transform.modelviewMatrix = self.modelMatrix;
[self.effect prepareToDraw];
long offset = (long)&_quad;
glEnableVertexAttribArray(GLKVertexAttribPosition);
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
NSLog(#"TexturedVertex:%ld",offset);
glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, geometryVertex)));
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, textureVertex)));
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); // ERROR comes from this line, when i touch the screen.
}
Can you please look over this code and tell me what is wrong with it? What should I do to fix this problem? Please help me. Thanks in advance.
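One thing worth checking, offered as a hedged observation rather than a confirmed diagnosis: the line-drawing branch generates a brand-new VBO every time it runs and leaves it bound to GL_ARRAY_BUFFER, while the SGGSprite render path hands client-side memory pointers to glVertexAttribPointer. While a buffer object is still bound, those pointers are interpreted as offsets into that tiny buffer, which is a classic source of EXC_BAD_ACCESS. A sketch of the cleanup after drawing the line:

glDrawArrays(GL_LINES, 0, 2);
// Delete the temporary buffer and restore the unbound state so any later
// client-side-array rendering is unaffected.
glBindBuffer(GL_ARRAY_BUFFER, 0);
glDeleteBuffers(1, &bufferObjectNameArray);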
I have downloaded the GLPaint sample code from the Apple developer website; it draws pictures on a canvas using OpenGL.
I have made many changes to the GLPaint application to meet my requirements. Now I would like to save the drawn item to the photo library as an image.
I know how to save an image to the photo library, so I tried to create the corresponding image after drawing a picture. Do you know a good way to do it? Any help on this is highly appreciated.
The code details are described below.
PaintingView.h
EAGLContext *context;
// OpenGL names for the renderbuffer and framebuffers used to render to this view
GLuint viewRenderbuffer, viewFramebuffer;
// OpenGL name for the depth buffer that is attached to viewFramebuffer, if it exists (0 if it does not exist)
GLuint depthRenderbuffer;
GLuint brushTexture;
CGPoint location;
CGPoint previousLocation;
PaintingView.m
// Handles the start of a touch
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
CGRect bounds = [self bounds];
UITouch* touch = [[event touchesForView:self] anyObject];
firstTouch = YES;
// Convert touch point from UIView referential to OpenGL one (upside-down flip)
location = [touch locationInView:self];
location.y = bounds.size.height - location.y;
}
// Handles the continuation of a touch.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
CGRect bounds = [self bounds];
UITouch* touch = [[event touchesForView:self] anyObject];
// Convert touch point from UIView referential to OpenGL one (upside-down flip)
if (firstTouch) {
firstTouch = NO;
previousLocation = [touch previousLocationInView:self];
previousLocation.y = bounds.size.height - previousLocation.y;
} else {
location = [touch locationInView:self];
location.y = bounds.size.height - location.y;
previousLocation = [touch previousLocationInView:self];
previousLocation.y = bounds.size.height - previousLocation.y;
}
// Render the stroke
[self renderLineFromPoint:previousLocation toPoint:location];
}
// Handles the end of a touch event when the touch is a tap.
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
CGRect bounds = [self bounds];
UITouch* touch = [[event touchesForView:self] anyObject];
if (firstTouch) {
firstTouch = NO;
previousLocation = [touch previousLocationInView:self];
previousLocation.y = bounds.size.height - previousLocation.y;
[self renderLineFromPoint:previousLocation toPoint:location];
}
}
// Drawings a line onscreen based on where the user touches
- (void) renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end
{
static GLfloat* vertexBuffer = NULL;
static NSUInteger vertexMax = 64;
NSUInteger vertexCount = 0,
count,
i;
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
// Convert locations from Points to Pixels
CGFloat scale = self.contentScaleFactor;
start.x *= scale;
start.y *= scale;
end.x *= scale;
end.y *= scale;
// Allocate vertex array buffer
if(vertexBuffer == NULL)
vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat));
// Add points to the buffer so there are drawing points every X pixels
count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) / kBrushPixelStep), 1);
for(i = 0; i < count; ++i) {
if(vertexCount == vertexMax) {
vertexMax = 2 * vertexMax;
vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat));
}
vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i / (GLfloat)count);
vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i / (GLfloat)count);
vertexCount += 1;
}
// Render the vertex array
glVertexPointer(2, GL_FLOAT, 0, vertexBuffer);
glDrawArrays(GL_POINTS, 0, vertexCount);
// Display the buffer
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
// Erases the screen
- (void) erase
{
[EAGLContext setCurrentContext:context];
// Clear the buffer
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
// Display the buffer
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
// The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
- (id)initWithCoder:(NSCoder*)coder {
CGImageRef brushImage;
CGContextRef brushContext;
GLubyte *brushData;
size_t width, height;
if ((self = [super initWithCoder:coder])) {
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
// In this application, we want to retain the EAGLDrawable contents after a call to presentRenderbuffer.
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
[self release];
return nil;
}
// Create a texture from an image
// First create a UIImage object from the data in a image file, and then extract the Core Graphics image
brushImage = [UIImage imageNamed:@"Particle.png"].CGImage;
// Get the width and height of the image
width = CGImageGetWidth(brushImage);
height = CGImageGetHeight(brushImage);
// Texture dimensions must be a power of 2. If you write an application that allows users to supply an image,
// you'll want to add code that checks the dimensions and takes appropriate action if they are not a power of 2.
// Make sure the image exists
if(brushImage) {
// Allocate memory needed for the bitmap context
brushData = (GLubyte *) calloc(width * height * 4, sizeof(GLubyte));
// Use the bitmap creation function provided by the Core Graphics framework.
brushContext = CGBitmapContextCreate(brushData, width, height, 8, width * 4, CGImageGetColorSpace(brushImage), kCGImageAlphaPremultipliedLast);
// After you create the context, you can draw the image to the context.
CGContextDrawImage(brushContext, CGRectMake(0.0, 0.0, (CGFloat)width, (CGFloat)height), brushImage);
// You don't need the context at this point, so you need to release it to avoid memory leaks.
CGContextRelease(brushContext);
// Use OpenGL ES to generate a name for the texture.
glGenTextures(1, &brushTexture);
// Bind the texture name.
glBindTexture(GL_TEXTURE_2D, brushTexture);
// Set the texture parameters to use a minifying filter and a linear filter (weighted average)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// Specify a 2D texture image, providing a pointer to the image data in memory
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, brushData);
// Release the image data; it's no longer needed
free(brushData);
}
// Set the view's scale factor
self.contentScaleFactor = 1.0;
// Setup OpenGL states
glMatrixMode(GL_PROJECTION);
CGRect frame = self.bounds;
CGFloat scale = self.contentScaleFactor;
// Setup the view port in Pixels
glOrthof(0, frame.size.width * scale, 0, frame.size.height * scale, -1, 1);
glViewport(0, 0, frame.size.width * scale, frame.size.height * scale);
glMatrixMode(GL_MODELVIEW);
glDisable(GL_DITHER);
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_VERTEX_ARRAY);
glEnable(GL_BLEND);
// Set a blending function appropriate for premultiplied alpha pixel data
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_POINT_SPRITE_OES);
glTexEnvf(GL_POINT_SPRITE_OES, GL_COORD_REPLACE_OES, GL_TRUE);
glPointSize(width / kBrushScale);
// Make sure to start with a cleared buffer
needsErase = YES;
}
return self;
}
AppDelegate.h
PaintingWindow *window; //its a class inherited from window.
PaintingView *drawingView;
@property (nonatomic, retain) IBOutlet PaintingWindow *window;
@property (nonatomic, retain) IBOutlet PaintingView *drawingView;
@synthesize window;
@synthesize drawingView;
AppDelegate.m
- (void) applicationDidFinishLaunching:(UIApplication*)application
{
CGRect rect = [[UIScreen mainScreen] applicationFrame];
CGFloat components[3];
// Create a segmented control so that the user can choose the brush color.
UISegmentedControl *segmentedControl = [[UISegmentedControl alloc] initWithItems:
[NSArray arrayWithObjects:
[UIImage imageNamed:#"Red.png"],
[UIImage imageNamed:#"Yellow.png"],
[UIImage imageNamed:#"Green.png"],
[UIImage imageNamed:#"Blue.png"],
[UIImage imageNamed:#"Purple.png"],
nil]];
// Compute a rectangle that is positioned correctly for the segmented control you'll use as a brush color palette
//CGRect frame = CGRectMake(rect.origin.x + kLeftMargin, rect.size.height - kPaletteHeight - kTopMargin, rect.size.width - (kLeftMargin + kRightMargin), kPaletteHeight);
CGRect frame = CGRectMake(50, 22, (rect.size.width - (kLeftMargin + kRightMargin)) - 20, kPaletteHeight);
segmentedControl.frame = frame;
// When the user chooses a color, the method changeBrushColor: is called.
[segmentedControl addTarget:self action:@selector(changeBrushColor:) forControlEvents:UIControlEventValueChanged];
segmentedControl.segmentedControlStyle = UISegmentedControlStyleBar;
// Make sure the color of the control complements the black background
segmentedControl.tintColor = [UIColor darkGrayColor];
// Set the third color (index values start at 0)
segmentedControl.selectedSegmentIndex = 2;
// Add the control to the window
[window addSubview:segmentedControl];
// Now that the control is added, you can release it
[segmentedControl release];
[self addBackgroundSegmentControll];
// Define a starting color
HSL2RGB((CGFloat) 2.0 / (CGFloat)kPaletteSize, kSaturation, kLuminosity, &components[0], &components[1], &components[2]);
// Defer to the OpenGL view to set the brush color
[drawingView setBrushColorWithRed:components[0] green:components[1] blue:components[2]];
// Look in the Info.plist file and you'll see the status bar is hidden
// Set the style to black so it matches the background of the application
[application setStatusBarStyle:UIStatusBarStyleBlackTranslucent animated:NO];
// Now show the status bar, but animate to the style.
[application setStatusBarHidden:NO withAnimation:YES];
// Load the sounds
NSBundle *mainBundle = [NSBundle mainBundle];
erasingSound = [[SoundEffect alloc] initWithContentsOfFile:[mainBundle pathForResource:@"Erase" ofType:@"caf"]];
selectSound = [[SoundEffect alloc] initWithContentsOfFile:[mainBundle pathForResource:@"Select" ofType:@"caf"]];
[window setFrame:CGRectMake(0, 0, 768, 1024)];
drawingView.frame = CGRectMake(0, 0, 768, 1024);
// Erase the view when receiving a notification named "shake" from the NSNotificationCenter object
// The "shake" notification is posted by the PaintingWindow object when the user shakes the device
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(eraseView) name:@"shake" object:nil];
}
An improved version of Ramshad's answer:
This one has no memory leaks and works with new versions of iOS and for different view sizes and displays (retina and non-retina).
CGFloat scale = [[UIScreen mainScreen] scale]; // use nativeScale on iOS 8.0+
CGSize imageSize = CGSizeMake((scale * view.frame.size.width), (scale * view.frame.size.height));
NSUInteger length = imageSize.width * imageSize.height * 4;
GLubyte * buffer = (GLubyte *)malloc(length * sizeof(GLubyte));
if(buffer == NULL)
return nil;
glReadPixels(0, 0, imageSize.width, imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer, length, NULL);
int bitsPerComponent = 8;
int bitsPerPixel = 32;
int bytesPerRow = 4 * imageSize.width;
CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
CGImageRef imageRef = CGImageCreate(imageSize.width, imageSize.height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
UIGraphicsBeginImageContext(imageSize);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0.0, 0.0, imageSize.width, imageSize.height), imageRef);
UIImage * image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
CGImageRelease(imageRef);
CGColorSpaceRelease(colorSpaceRef);
CGDataProviderRelease(provider);
free(buffer);
return image;
Please refer to the link below to save an OpenGL drawn item as an image in the photo library.
Save an OpenGL drawn item as an image
Code details:
Call [self captureToPhotoAlbum]; after adding the code below.
-(void)captureToPhotoAlbum {
UIImage *image = [self glToUIImage];
UIImageWriteToSavedPhotosAlbum(image, self, nil, nil);
}
- (UIImage *)glToUIImage {
NSInteger myDataLength = 320 * 480 * 4;
// allocate array and read pixels into it.
GLubyte *buffer = (GLubyte *) malloc(myDataLength);
glReadPixels(0, 0, 320, 480, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
// gl renders "upside down" so swap top to bottom into new array.
// there's gotta be a better way, but this works.
GLubyte *buffer2 = (GLubyte *) malloc(myDataLength);
for(int y = 0; y < 480; y++)
{
for(int x = 0; x < 320 * 4; x++)
{
buffer2[(479 - y) * 320 * 4 + x] = buffer[y * 4 * 320 + x];
}
}
// make data provider with data.
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, NULL);
// prep the ingredients
int bitsPerComponent = 8;
int bitsPerPixel = 32;
int bytesPerRow = 4 * 320;
CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
// make the cgimage
CGImageRef imageRef = CGImageCreate(320, 480, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
// then make the uiimage from that
UIImage *myImage = [UIImage imageWithCGImage:imageRef];
return myImage;
}
For iPad, or to fix the scaling issue, change every width from 320 to 640 and every height from 480 to 960. Adjust the
width and height values to match your scaling.
Manage the memory (free the buffers); see the sketch below.
Thanks.
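A minimal sketch of that cleanup, assuming the variable names from the glToUIImage snippet above. The data provider is given a release callback so the flipped pixel buffer is only freed once the CGImage no longer needs it:

// Hypothetical release callback for the data provider (not in the original snippet).
static void releasePixels(void *info, const void *data, size_t size) {
    free((void *)data);
}

// Inside glToUIImage, create the provider with the callback instead of NULL:
CGDataProviderRef provider =
    CGDataProviderCreateWithData(NULL, buffer2, myDataLength, releasePixels);

// ...and once the UIImage has been created from imageRef:
free(buffer);                    // the unflipped copy is no longer needed
CGColorSpaceRelease(colorSpaceRef);
CGDataProviderRelease(provider); // the CGImage keeps its own reference
CGImageRelease(imageRef);        // the UIImage retains what it needs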
If the iPhone supports it, you can read from an OpenGL context using glReadPixels. After that's done, you should be able to create something like a UIImage from the pixel data you have read and save it to the photo library like you would for any other image created by an application.
The code works perfectly on iOS 5.0 when built with Xcode 4.0.
However, with Xcode 4.5 and the developer preview of iOS 6.0, the code does not work as it should. The image that is saved is totally black! It is saved in "My photos" with the resolution we chose, but it is a black image!
I guess that something has changed on the Xcode side in
glReadPixels(0, 0, 320, 480, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
or in
CGImageRef imageRef = CGImageCreate(320, 480, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
UIImage *myImage = [UIImage imageWithCGImage:imageRef];
It is not a matter of 320, 480, or any other resolution variables. In Xcode 4.0 it worked perfectly, and fast, even at Retina resolutions.
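One possibility worth checking (an assumption on my part, not something confirmed in this thread): the drawable's contents are only guaranteed to be readable if glReadPixels runs before the renderbuffer is presented, or if the layer retains its backing. A sketch of the retained-backing setup on a CAEAGLLayer:

// Sketch: ask the layer to keep its contents after -presentRenderbuffer:
// so a later glReadPixels still sees the drawn frame.
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
    [NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking,
    kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];

Alternatively, call glToUIImage before presenting the renderbuffer for the frame you want to capture.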
I am using this source code as my base and am trying to change it to suit my requirements. I have added the following code to create a mesh over an image.
-(void)populateMesh{
verticalDivisions = kVerticalDivisions;
horizontalDivisions = kHorisontalDivisions;
unsigned int verticesArrsize = (kVerticalDivisions * ((2 + kHorisontalDivisions * 2) * 3));
unsigned int textureCoordsArraySize = kVerticalDivisions * ((2 + kHorisontalDivisions * 2) * 2);
verticesArr = (GLfloat *)malloc(verticesArrsize * sizeof(GLfloat));
textureCoordsArr = (GLfloat*)malloc(textureCoordsArraySize * sizeof(GLfloat));
if (verticesArr == NULL) {
NSLog(#"verticesArr = NULL!");
}
float height = kWindowHeight/verticalDivisions;
float width = kWindowWidth/horizontalDivisions;
int i,j, count;
count = 0;
for (j=0; j<verticalDivisions; j++) {
for (i=0; i<=horizontalDivisions; i++, count+=6) { //2 vertices each time...
float currX = i * width;
float currY = j * height;
verticesArr[count] = currX;
verticesArr[count+1] = currY + height;
verticesArr[count+2] = 0.0f;
verticesArr[count+3] = currX;
verticesArr[count+4] = currY;
verticesArr[count+5] = 0.0f;
}
}
float xIncrease = 1.0f/horizontalDivisions;
float yIncrease = 1.0f/verticalDivisions;
int x,y;
//int elements;
count = 0;
for (y=0; y<verticalDivisions; y++) {
for (x=0; x<horizontalDivisions+1; x++, count+=4) {
float currX = x *xIncrease;
float currY = y * yIncrease;
textureCoordsArr[count] = (float)currX;
textureCoordsArr[count+1] = (float)currY + yIncrease;
textureCoordsArr[count+2] = (float)currX;
textureCoordsArr[count+3] = (float)currY;
}
}
// int cnt;
// int cnt = 0;
NSLog(#"expected %i vertices, and %i vertices were done",(verticalDivisions * ((2 + horizontalDivisions*2 ) * 2) ) , count );
}
Following is the drawView code.
- (void)drawView:(GLView*)view;
{
static GLfloat rot = 0.0;
glBindTexture(GL_TEXTURE_2D, texture[0]);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glEnableClientState(GL_VERTEX_ARRAY);
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoordsArr);
glVertexPointer(3, GL_FLOAT, 0, verticesArr);
glPushMatrix();{
int i;
for (i=0; i<verticalDivisions; i++) {
glDrawArrays(GL_TRIANGLE_STRIP, i*(horizontalDivisions*2+2), horizontalDivisions*2+2);
}
}glPopMatrix();
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisable(GL_TEXTURE_2D);
}
In the setup view I call [self populateMesh]; at the end of the function.
My problem is that after changing the code, a blank (rather, black) view appears on the screen. Can anyone figure out where I am making a mistake? I am a newbie to OpenGL and am trying to manipulate images through a mesh. Please help ASAP. Thanks in advance.
Following is the setup view code.
-(void)setupView:(GLView*)view {
const GLfloat zNear = 0.01, zFar = 1000.0, fieldOfView = 45.0;
GLfloat size;
glEnable(GL_DEPTH_TEST);
glMatrixMode(GL_PROJECTION);
size = zNear * tanf(DEGREES_TO_RADIANS(fieldOfView) / 2.0);
CGRect rect = view.bounds;
glFrustumf(-size, size, -size / (rect.size.width / rect.size.height), size / (rect.size.width / rect.size.height), zNear, zFar);
glViewport(0, 0, rect.size.width, rect.size.height);
glMatrixMode(GL_MODELVIEW);
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
glGenTextures(1, &texture[0]);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
NSString *path = [[NSBundle mainBundle] pathForResource:@"texture" ofType:@"png"];
NSData *texData = [[NSData alloc] initWithContentsOfFile:path];
UIImage *image = [[UIImage alloc] initWithData:texData];
if (image == nil)
NSLog(#"Do real error checking here");
GLuint width = CGImageGetWidth(image.CGImage);
GLuint height = CGImageGetHeight(image.CGImage);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
void *imageData = malloc( height * width * 4 );
CGContextRef context = CGBitmapContextCreate( imageData, width, height, 8, 4 * width, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big );
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGColorSpaceRelease( colorSpace );
CGContextClearRect( context, CGRectMake( 0, 0, width, height ) );
CGContextDrawImage( context, CGRectMake( 0, 0, width, height ),image.CGImage );
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
CGContextRelease(context);
free(imageData);
[image release];
[texData release];
[self populateMesh];
}
EDIT: This is what I am getting as output, while what I expected is a regular grid...
My guess is that the mesh is clipped by the zNear value. Try changing the z value to -2:
verticesArr[count+2] = -2.0f;
verticesArr[count+5] = -2.0f;
By default, the camera is situated at the origin, points down the negative z-axis, and has an up-vector of (0, 1, 0).
Notice that your mesh is outside the view frustum (the pyramid). Check it out in the OpenGL red book:
http://glprogramming.com/red/chapter03.html
The grid is not regular because of the way the vertices are ordered. Also, are you sure GL_TRIANGLE_STRIP is the option you want? Maybe GL_TRIANGLES is what you need.
I propose a simpler solution using an index array. For example, in the initialization code, build the vertex and texture arrays for your grid in row-major order:
0 1 2
3 4 5
6 7 8
Update:
- (void) setup
{
vertices = (GLfloat*)malloc(rows*columns*3*sizeof(GLfloat));
texCoords = (GLfloat*)malloc(rows*columns*2*sizeof(GLfloat));
indices = (GLubyte*)malloc((rows-1)*(columns-1)*6*sizeof(GLubyte));
float xDelta = horizontalDivisions/columns;
float yDelta = verticalDivisions/rows;
for (int i=0;i<columns;i++) {
for(int j=0;j<rows; j++) {
int index = j*columns+i;
vertices[3*index+0] = i*xDelta; //x
vertices[3*index+1] = j*yDelta; //y
vertices[3*index+2] = -10; //z
texCoords[2*index+0] = i/(float)(columns-1); //x texture coordinate
texCoords[2*index+1] = j/(float)(rows-1); //y tex coordinate
}
}
for (int i=0;i<columns-1;i++) {
for(int j=0;j<rows-1; j++) {
indices[6*(j*columns+i)+0] = j*columns+i;
indices[6*(j*columns+i)+1] = j*columns+i+1;
indices[6*(j*columns+i)+2] = (j+1)*columns+i;
indices[6*(j*columns+i)+3] = j*columns+i+1;
indices[6*(j*columns+i)+4] = (j+1)*columns+i+1;
indices[6*(j*columns+i)+5] = (j+1)*columns+i;
}
}
}
- (void) dealloc {
free(vertices); free(texCoords); free(indices);
}
In practice this index order means that the triangles are rendered as follows:
(013)(143)(124)(254)(346)(476)... and so on.
In render method use the following lines:
glVertexPointer(3, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
glDrawElements(GL_TRIANGLES, 6*(columns-1)*(rows-1), GL_UNSIGNED_BYTE, indices);
Hope that will help.
This is a great tutorial on drawing a grid in 3D. It should have the code necessary to help you. I'm not sure if you are working in 3D or 2D, but even if it is 2D, it should be fairly easy to adapt to your needs. Hope that helps!
For some reason my textures are not drawing, even though my code looks exactly the same as an old project that worked. So far the vertices and TexCoords look fine, since white squares are drawn where the texture should appear.
The process so far goes:
I load up a Controller and in loadView, I
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
Then my renderer is loaded up, which does nothing on construction. After that I load up my Texture into gl. This code is a direct copy from my old project and I know it works.
- (GLuint)textureFromPath:(NSString *)path
{
GLuint texture;
glGenTextures(1, &texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:path];
if (!img) {
NSLog(#"Image \"%#\" could not be loaded and was not bound", path);
return 0;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef context = CGBitmapContextCreate(image, width, height, 8, 4 * width, colourSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(context, bounds);
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGContextDrawImage(context, bounds, cgimage);
CGContextRelease(context);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
[img release];
free(image);
return texture;
}
I then take the generated texture from GL and assign it to position 0 in the Renderer's texture array. I also did this in my old project and it worked fine there too. So far so good, I feel.
The application then calls startAnimation, which calls setFramebuffer, which in turn calls createFramebuffer since the framebuffer is undefined. It then notifies the Renderer (by the way, Renderer is a C++ class) that it has created the framebuffers.
void bufferHasBeenCreated() const {
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(-160.0f, 160.0f, -240.0f, 240.0f, -5.0f, 1.0f);
glViewport(0, 0, 320, 480);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
}
It then calls the render on Renderer.
void render() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.325f, 0.0f, 0.325f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
static float rot = 0.0f;
glRotatef(rot, 0.0f, 0.0f, 1.0f);
//glColor4f(0.0f, 0.0f, 1.0f, 1.0f);
glBindTexture(GL_TEXTURE_2D, texture_[0]);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
printf("Error. glError: 0x%04X\n", err);
glVertexPointer(2, GL_FLOAT, 0, pos[0]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[1]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[2]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
rot += 0.5f;
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
and then finally it calls presentFramebuffer, which binds the renderbuffer and presents it on the context.
Edit: I have done some more work on this, and it turns out it is something to do with the context and the buffers. Whenever I set up just the context while enabling GL_TEXTURE_2D and GL_BLEND, as above, the textures don't load. Yet when I do it after the buffers are set up, everything works.
I have got my texture to draw. I pulled all my code out and put it in its own file. I will then start pulling it apart again and hopefully get everything working in the structure that I already have.
(Objective-C) ES1Renderer.h
#import <QuartzCore/QuartzCore.h>
#import "OpenGLES.h"
@interface ES1Renderer : UIView {
@private
GLint backingWidth;
GLint backingHeight;
EAGLContext *context;
GLuint viewFramebuffer, viewRenderbuffer;
GLuint texture[1];
BOOL animating;
BOOL displayLinkSupported;
NSInteger animationFrameInterval;
// Use of the CADisplayLink class is the preferred method for controlling your animation timing.
// CADisplayLink will link to the main display and fire every vsync when added to a given run-loop.
// The NSTimer class is used only as fallback when running on a pre 3.1 device where CADisplayLink
// isn't available.
id displayLink;
NSTimer *animationTimer;
}
@property (readonly, nonatomic, getter=isAnimating) BOOL animating;
@property (nonatomic) NSInteger animationFrameInterval;
- (void) startAnimation;
- (void) stopAnimation;
- (void)render;
@end
Next ES1Renderer.m
#import "ES1Renderer.h"
@implementation ES1Renderer
@synthesize animating;
@dynamic animationFrameInterval;
+ (Class)layerClass
{
return [CAEAGLLayer class];
}
- (id)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame]) {
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context])
{
[self release];
return nil;
}
// Generate buffers
glGenFramebuffersOES(1, &viewFramebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// Disable Depth
glDisable(GL_DEPTH_TEST);
// Load textures
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
glGenTextures(1, texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"colour" ofType:@"png"]];
if (!img) {
NSLog(#"Image \"colour.png\" could not be loaded and was not bound");
[self release];
return nil;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef imgContext = CGBitmapContextCreate(image,
width, height,
8, 4 * width, colourSpace,
kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(imgContext, bounds);
CGContextTranslateCTM (imgContext, 0, height);
CGContextScaleCTM (imgContext, 1.0, -1.0);
CGContextDrawImage(imgContext, bounds, cgimage);
CGContextRelease(imgContext);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error. glError: 0x%04X\n", err);
free(image);
[img release];
animating = FALSE;
displayLinkSupported = FALSE;
animationFrameInterval = 1;
displayLink = nil;
animationTimer = nil;
// A system version of 3.1 or greater is required to use CADisplayLink. The NSTimer
// class is used as fallback when it isn't available.
NSString *reqSysVer = @"3.1";
NSString *currSysVer = [[UIDevice currentDevice] systemVersion];
if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending)
displayLinkSupported = TRUE;
}
return self;
}
- (void)drawView:(id)sender
{
[self render];
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error. glError: 0x%04X\n", err);
}
- (void) render
{
//glDisable(GL_TEXTURE_2D);
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
static const float textureVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
static const float textureCoords[] = {
0.0f, 0.0f,
0.0f, 0.515625f,
0.12890625f, 0.0f,
0.12890625f, 0.515625f,
};
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glBindTexture(GL_TEXTURE_2D, texture[0]);
//glColor4f(0.0f, 0.0f, 0.0f, 1.0f);
glVertexPointer(2, GL_FLOAT, 0, textureVertices);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (void)layoutSubviews
{
[EAGLContext setCurrentContext:context];
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer *)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glViewport(0, 0, backingWidth, backingHeight);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
NSLog(#"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
}
- (void) dealloc
{
// Tear down GL
if (viewFramebuffer)
{
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
}
if (viewRenderbuffer)
{
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
}
// Tear down context
if ([EAGLContext currentContext] == context)
[EAGLContext setCurrentContext:nil];
[context release];
context = nil;
displayLink = nil;
animationTimer = nil;
[super dealloc];
}
- (NSInteger) animationFrameInterval
{
return animationFrameInterval;
}
- (void) setAnimationFrameInterval:(NSInteger)frameInterval
{
// Frame interval defines how many display frames must pass between each time the
// display link fires. The display link will only fire 30 times a second when the
// frame internal is two on a display that refreshes 60 times a second. The default
// frame interval setting of one will fire 60 times a second when the display refreshes
// at 60 times a second. A frame interval setting of less than one results in undefined
// behavior.
if (frameInterval >= 1)
{
animationFrameInterval = frameInterval;
if (animating)
{
[self stopAnimation];
[self startAnimation];
}
}
}
- (void) startAnimation
{
if (!animating)
{
if (displayLinkSupported)
{
// CADisplayLink is API new to iPhone SDK 3.1. Compiling against earlier versions will result in a warning, but can be dismissed
// if the system version runtime check for CADisplayLink exists in -initWithCoder:. The runtime check ensures this code will
// not be called in system versions earlier than 3.1.
displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawView:)];
[displayLink setFrameInterval:animationFrameInterval];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
else
animationTimer = [NSTimer scheduledTimerWithTimeInterval:(NSTimeInterval)((1.0 / 60.0) * animationFrameInterval) target:self selector:@selector(drawView:) userInfo:nil repeats:TRUE];
animating = TRUE;
}
}
- (void)stopAnimation
{
if (animating)
{
if (displayLinkSupported)
{
[displayLink invalidate];
displayLink = nil;
}
else
{
[animationTimer invalidate];
animationTimer = nil;
}
animating = FALSE;
}
}
@end
There is only one problem with this code. It's out of date. Apple released a new way of doing things, but hell. It works.
Update:
It turns out I had set the context up before loading the textures.
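As a general note (my addition, not from the original thread): GL objects such as textures belong to the EAGLContext that is current at the moment they are created, so the context must exist and be current before any glGenTextures/glTexImage2D calls happen. A sketch of that ordering, reusing textureFromPath: from earlier:

// Create the context and make it current first...
EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
[EAGLContext setCurrentContext:context];

// ...then create framebuffers and load textures into that same context.
NSString *path = [[NSBundle mainBundle] pathForResource:@"colour" ofType:@"png"];
GLuint texture = [self textureFromPath:path];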