Drawing with blur effect on image using OpenGL - iPhone

I have the same requirement as this question.
I am currently at the point where I set a UIImage on my OpenGL view.
Below is the complete code I use to set the UIImage; it also includes the drawing code.
With the code below I am able to set the image and draw on it.
When I do not set a background image, the strokes come out blurred and smooth,
but if I set a background image, the strokes come out solid.
I want smooth drawing on top of the background image.
I have used some of Apple's GLPaint code for the drawing.
- (id)initWithCoder:(NSCoder*)coder
{
CGImageRef brushImage;
CGContextRef brushContext;
GLubyte *brushData;
size_t width, height;
if ((self = [super initWithCoder:coder]))
{
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = NO;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking,
kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
nil];
_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!_context || ![EAGLContext setCurrentContext:_context])
{
return nil;
}
{
brushImage = [UIImage imageNamed:@"Brush.png"].CGImage;
width = CGImageGetWidth(brushImage);
height = CGImageGetHeight(brushImage);
if(brushImage) {
brushData = (GLubyte *) calloc(width * height * 4, sizeof(GLubyte));
brushContext = CGBitmapContextCreate(brushData, width, width, 8, width * 4, CGImageGetColorSpace(brushImage), kCGImageAlphaPremultipliedLast);
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextDrawImage(brushContext, CGRectMake(0.0, 0.0, (CGFloat)width, (CGFloat)height), brushImage);
CGContextRelease(brushContext);
glGenTextures(1, &brushTexture);
glBindTexture(GL_TEXTURE_2D, brushTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, brushData);
free(brushData);
}
}
// Setup OpenGL states
glMatrixMode(GL_PROJECTION);
CGRect frame = self.bounds;
glOrthof(0, frame.size.width, 0, frame.size.height, -1, 1);
glViewport(0, 0, frame.size.width , frame.size.height);
glMatrixMode(GL_MODELVIEW);
glDisable(GL_DITHER);
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_VERTEX_ARRAY);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_POINT_SPRITE_OES);
glTexEnvf(GL_POINT_SPRITE_OES, GL_COORD_REPLACE_OES, GL_TRUE);
glPointSize(10);
}
return self;
}
- (void)layoutSubviews
{
[EAGLContext setCurrentContext:_context];
[self destroyFramebuffer];
[self createFramebuffer];
if (texture) {
[self _updateContent];
glDeleteTextures(1, &texture);
texture = 0;
}
}
- (void)_updateContent {
NSUInteger width = self.frame.size.width;
NSUInteger height = self.frame.size.height;
CGFloat texWidth = (1.0 * width)/TEX_SIZE;
CGFloat texHeight = (1.0 * height)/TEX_SIZE;
GLfloat verts[12] = {
0, height,
width, height,
width, 0,
0, height,
0, 0,
width, 0
};
GLfloat txcoord[12] = {
0, texHeight,
texWidth, texHeight,
texWidth, 0,
0, texHeight,
0, 0,
texWidth, 0
};
[EAGLContext setCurrentContext:_context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, _viewFramebuffer);
glVertexPointer(2, GL_FLOAT, 0, verts);
glTexCoordPointer(2, GL_FLOAT, 0, txcoord);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, _viewRenderbuffer);
[_context presentRenderbuffer:GL_RENDERBUFFER_OES];
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
}
- (void)setContent:(UIImage*)image
{
if (image) {
// self.isNotEmpty = YES;
CGImageRef contentImage = image.CGImage;
[EAGLContext setCurrentContext:_context];
CGFloat w = CGImageGetWidth(contentImage);
CGFloat h = CGImageGetHeight(contentImage);
GLubyte *data = (GLubyte *)calloc(TEX_SIZE * TEX_SIZE * 4, sizeof(GLubyte));
CGContextRef ctx = CGBitmapContextCreate(data, TEX_SIZE, TEX_SIZE, 8, TEX_SIZE * 4, CGImageGetColorSpace(contentImage), kCGImageAlphaPremultipliedLast);
CGContextScaleCTM(ctx, 1, -1);
CGContextTranslateCTM(ctx, 0, -TEX_SIZE);
CGContextDrawImage(ctx, CGRectMake(0, 0, w, h), contentImage);
CGContextRelease(ctx);
if (!texture) {
glGenTextures(1, &texture);
}
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, TEX_SIZE, TEX_SIZE, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
free(data);
}
[self setNeedsLayout];
glDisable(GL_VERTEX_ARRAY);
}
//Called on touchedMoved
- (void) renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end
{
glEnable(GL_BLEND);
if(1){
sharedDelegate = [AppDelegate appDelegate];
static GLfloat* vertexBuffer = NULL;
static NSUInteger vertexMax = 64;
NSUInteger vertexCount = 0,
count,
i;
GLenum err;
glColor4f(1.0, 0.0, 1.0, 1.0);
// Convert locations from Points to Pixels
CGFloat scale = self.contentScaleFactor;
start.x *= scale;
start.y *= scale;
end.x *= scale;
end.y *= scale;
// Allocate vertex array buffer
if(vertexBuffer == NULL)
vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat));
// Add points to the buffer so there are drawing points every X pixels
count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) / kBrushPixelStep), 1);
for(i = 0; i < count; ++i) {
if(vertexCount == vertexMax) {
vertexMax = 2 * vertexMax;
vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat));
}
vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i / (GLfloat)count);
vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i / (GLfloat)count);
vertexCount += 1;
}
if(sharedDelegate.boolIsEraser)
{
glColor4f(1.0, 1.0, 1.0, 0.5);
glBlendFunc( GL_ZERO, GL_ONE_MINUS_SRC_ALPHA);
}
// Render the vertex array
glVertexPointer(2, GL_FLOAT, 0, vertexBuffer);
glDrawArrays(GL_POINTS, 0, vertexCount);
if(sharedDelegate.boolIsEraser){
// at last restore the mixed-mode
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
}
glBindRenderbufferOES(GL_RENDERBUFFER_OES, _viewRenderbuffer);
[_context presentRenderbuffer:GL_RENDERBUFFER_OES];
err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error in frame. glError: 0x%04X", err);
}
}
Any workaround to accomplish this?
I am almost at my goal but badly stuck here. Any help will be appreciated.

You might try setting the shadow color to the same as the stroke color, and the stroke blur radius to however many points out from the edge of the stroke you want the blur to extend. You'll probably want to set the shadow offset to CGSizeZero.
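A rough sketch of that suggestion in Core Graphics (this applies to Quartz drawing rather than the OpenGL path above; the stroke color, line width, 8-point blur radius, and the start/end points are placeholders):
CGContextRef ctx = UIGraphicsGetCurrentContext();
CGColorRef strokeColor = [[UIColor redColor] CGColor];    // placeholder stroke color
CGContextSetStrokeColorWithColor(ctx, strokeColor);
CGContextSetLineCap(ctx, kCGLineCapRound);
CGContextSetLineWidth(ctx, 10.0);
// Shadow color matches the stroke color, zero offset, blur radius in points
CGContextSetShadowWithColor(ctx, CGSizeZero, 8.0, strokeColor);
CGContextMoveToPoint(ctx, start.x, start.y);              // start/end come from your touch handling
CGContextAddLineToPoint(ctx, end.x, end.y);
CGContextStrokePath(ctx);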

You can try the OpenGL code from This Link,
or try this:
CGSize offset;
float blur = 5.0f; // set this value as per your requirement
offset.width = 10;
offset.height = -10;
CGContextSetShadow(context, offset, blur);

I got the solution...
The remaining piece was that I needed to set up my OpenGL states after drawing the UIImage into the OpenGL view, the same way I did when I initialized the brush texture.
I just added the code below to the layoutSubviews method, after the call to _updateContent:
// Setup OpenGL states
glMatrixMode(GL_PROJECTION);
CGRect frame = self.bounds;
glOrthof(0, frame.size.width, 0, frame.size.height, -1, 1);
glViewport(0, 0, frame.size.width , frame.size.height);
glMatrixMode(GL_MODELVIEW);
glDisable(GL_DITHER);
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_VERTEX_ARRAY);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_POINT_SPRITE_OES);
glTexEnvf(GL_POINT_SPRITE_OES, GL_COORD_REPLACE_OES, GL_TRUE);
and now this code is working perfectly.

You can use
CGContextSetBlendMode(context, kCGBlendModeClear) for a real eraser effect
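For illustration, a minimal Core Graphics sketch of that eraser idea; it assumes you redraw into an image context that already contains the picture being edited, and canvasImage, lastPoint, and currentPoint are hypothetical names from your own touch handling:
UIGraphicsBeginImageContextWithOptions(canvasImage.size, NO, 0);
[canvasImage drawAtPoint:CGPointZero];                    // existing drawing
CGContextRef ctx = UIGraphicsGetCurrentContext();
CGContextSetLineCap(ctx, kCGLineCapRound);
CGContextSetLineWidth(ctx, 20.0);                         // eraser width: an assumption
CGContextSetBlendMode(ctx, kCGBlendModeClear);            // paint transparency instead of color
CGContextMoveToPoint(ctx, lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(ctx, currentPoint.x, currentPoint.y);
CGContextStrokePath(ctx);
UIImage *erased = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();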

Related

Mesh creation in OpenGL iPhone SDK

I am using this source code as my base and trying to change the code as per my requirements. I have included the following code to create a mesh on the image.
-(void)populateMesh{
verticalDivisions = kVerticalDivisions;
horizontalDivisions = kHorisontalDivisions;
unsigned int verticesArrsize = (kVerticalDivisions * ((2 + kHorisontalDivisions * 2) * 3));
unsigned int textureCoordsArraySize = kVerticalDivisions * ((2 + kHorisontalDivisions * 2) * 2);
verticesArr = (GLfloat *)malloc(verticesArrsize * sizeof(GLfloat));
textureCoordsArr = (GLfloat*)malloc(textureCoordsArraySize * sizeof(GLfloat));
if (verticesArr == NULL) {
NSLog(#"verticesArr = NULL!");
}
float height = kWindowHeight/verticalDivisions;
float width = kWindowWidth/horizontalDivisions;
int i,j, count;
count = 0;
for (j=0; j<verticalDivisions; j++) {
for (i=0; i<=horizontalDivisions; i++, count+=6) { //2 vertices each time...
float currX = i * width;
float currY = j * height;
verticesArr[count] = currX;
verticesArr[count+1] = currY + height;
verticesArr[count+2] = 0.0f;
verticesArr[count+3] = currX;
verticesArr[count+4] = currY;
verticesArr[count+5] = 0.0f;
}
}
float xIncrease = 1.0f/horizontalDivisions;
float yIncrease = 1.0f/verticalDivisions;
int x,y;
//int elements;
count = 0;
for (y=0; y<verticalDivisions; y++) {
for (x=0; x<horizontalDivisions+1; x++, count+=4) {
float currX = x *xIncrease;
float currY = y * yIncrease;
textureCoordsArr[count] = (float)currX;
textureCoordsArr[count+1] = (float)currY + yIncrease;
textureCoordsArr[count+2] = (float)currX;
textureCoordsArr[count+3] = (float)currY;
}
}
// int cnt;
// int cnt = 0;
NSLog(#"expected %i vertices, and %i vertices were done",(verticalDivisions * ((2 + horizontalDivisions*2 ) * 2) ) , count );
}
Following is the drawView code.
- (void)drawView:(GLView*)view;
{
static GLfloat rot = 0.0;
glBindTexture(GL_TEXTURE_2D, texture[0]);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glEnableClientState(GL_VERTEX_ARRAY);
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoordsArr);
glVertexPointer(3, GL_FLOAT, 0, verticesArr);
glPushMatrix();{
int i;
for (i=0; i<verticalDivisions; i++) {
glDrawArrays(GL_TRIANGLE_STRIP, i*(horizontalDivisions*2+2), horizontalDivisions*2+2);
}
}glPopMatrix();
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisable(GL_TEXTURE_2D);
}
In the setup view I have called [self populateMesh]; at the end of the function.
My problem is that after changing the code, a blank (or rather black) view appears on the screen. Can anyone figure out where I am making a mistake? I am a newbie at OpenGL and trying to manipulate images through a mesh. Please help; thanks in advance.
Following is the setup view code.
-(void)setupView:(GLView*)view {
const GLfloat zNear = 0.01, zFar = 1000.0, fieldOfView = 45.0;
GLfloat size;
glEnable(GL_DEPTH_TEST);
glMatrixMode(GL_PROJECTION);
size = zNear * tanf(DEGREES_TO_RADIANS(fieldOfView) / 2.0);
CGRect rect = view.bounds;
glFrustumf(-size, size, -size / (rect.size.width / rect.size.height), size / (rect.size.width / rect.size.height), zNear, zFar);
glViewport(0, 0, rect.size.width, rect.size.height);
glMatrixMode(GL_MODELVIEW);
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
glGenTextures(1, &texture[0]);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
NSString *path = [[NSBundle mainBundle] pathForResource:@"texture" ofType:@"png"];
NSData *texData = [[NSData alloc] initWithContentsOfFile:path];
UIImage *image = [[UIImage alloc] initWithData:texData];
if (image == nil)
NSLog(#"Do real error checking here");
GLuint width = CGImageGetWidth(image.CGImage);
GLuint height = CGImageGetHeight(image.CGImage);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
void *imageData = malloc( height * width * 4 );
CGContextRef context = CGBitmapContextCreate( imageData, width, height, 8, 4 * width, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big );
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGColorSpaceRelease( colorSpace );
CGContextClearRect( context, CGRectMake( 0, 0, width, height ) );
CGContextDrawImage( context, CGRectMake( 0, 0, width, height ),image.CGImage );
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
CGContextRelease(context);
free(imageData);
[image release];
[texData release];
[self populateMesh];
}
EDIT: This is what I am getting as output, while a regular grid is expected...
I am guessing the mesh is being clipped by the zNear value. Try changing the z value to -2:
verticesArr[count+2] = -2.0f;
verticesArr[count+5] = -2.0f;
By default, the camera is situated at the origin, points down the negative z-axis, and has an up-vector of (0, 1, 0).
Notice that your mesh is outside the view frustum (pyramid). Check it out in the OpenGL red book:
http://glprogramming.com/red/chapter03.html
The grid is not regular because of the way the vertices are ordered. Also, are you sure GL_TRIANGLE_STRIP is the option you want? Maybe GL_TRIANGLES is what you need.
I propose a simpler solution using an index array. For example, in the initialization code build the vertex and texture coordinate arrays for your grid in row order, such as:
0 1 2
3 4 5
6 7 8
Update:
- (void) setup
{
vertices = (GLfloat*)malloc(rows*columns*3*sizeof(GLfloat));
texCoords = (GLfloat*)malloc(rows*columns*2*sizeof(GLfloat));
indices = (GLubyte*)malloc((rows-1)*(columns-1)*6*sizeof(GLubyte));
float xDelta = horizontalDivisions/columns;
float yDelta = verticalDivisions/rows;
for (int i=0;i<columns;i++) {
for(int j=0;j<rows; j++) {
int index = j*columns+i;
vertices[3*index+0] = i*xDelta; //x
vertices[3*index+1] = j*yDelta; //y
vertices[3*index+2] = -10; //z
texCoords[2*index+0] = i/(float)(columns-1); //x texture coordinate
texCoords[2*index+1] = j/(float)(rows-1); //y tex coordinate
}
}
for (int i=0;i<columns-1;i++) {
for(int j=0;j<rows-1; j++) {
indices[6*(j*columns+i)+0] = j*columns+i;
indices[6*(j*columns+i)+1] = j*columns+i+1;
indices[6*(j*columns+i)+2] = (j+1)*columns+i;
indices[6*(j*columns+i)+3] = j*columns+i+1;
indices[6*(j*columns+i)+4] = (j+1)*columns+i+1;
indices[6*(j*columns+i)+5] = (j+1)*columns+i;
}
}
}
- (void) dealloc {
free(vertices); free(texCoords); free(indices);
}
In practice this index order means that the triangles are rendered as follows:
(013)(143)(124)(254)(346)(476)... and so on.
In render method use the following lines:
glVertexPointer(3, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
glDrawElements(GL_TRIANGLES, 6*(columns-1)*(rows-1), GL_UNSIGNED_BYTE, indices);
Hope that will help.
This is a great tutorial on drawing a grid in 3D. It should have the code necessary to help you. I'm not sure if you are working in 3D or 2D, but even if it is 2D, it should be fairly easy to adapt to your needs. Hope that helps!

iPhone OpenGL ES background texture problems

I am trying to set a .png file as my background in OpenGL, with the hope of drawing more objects on top of it. I have a 320x480 RGB image. I set up my vertices as follows:
- (void)drawView:(GLView*)view;
{
static GLfloat rot = 0.0;
glColor4f(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
// Vertices
static const Vertex3D vertices[] = {
{-1.0, 1.0, -0.0},
{ 1.0, 1.0, -0.0},
{-1.0, -1.0, -0.0},
{ 1.0, -1.0, -0.0}
};
// Normals
static const Vector3D normals[] = {
{0.0f, 0.0f, 1.0f},
{0.0f, 0.0f, 1.0f},
{0.0f, 0.0f, 1.0f},
{0.0f, 0.0f, 1.0f}
};
// Tex Coords
static const GLfloat texCoords[] = {
0.1875f, 0.03125f, // image is 320x480, but texture is 512x512. Image is offset by 96x32
0.1875f, 0.96875,
0.8125f, 0.03125f,
0.8125f, 0.96875
};
glLoadIdentity();
glTranslatef(0.0, 0.0, -3.0);
glScalef(320.0f / 512.0f, 480.0f / 512.0f, 1.0f);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glVertexPointer(3, GL_FLOAT, 0, vertices);
glNormalPointer(GL_FLOAT, 0, normals);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
static NSTimeInterval lastDrawTime;
if (lastDrawTime)
{
NSTimeInterval timeSinceLastDraw = [NSDate timeIntervalSinceReferenceDate] - lastDrawTime;
rot+= 60 * timeSinceLastDraw;
}
lastDrawTime = [NSDate timeIntervalSinceReferenceDate];
}
-(void)setupView:(GLView*)view
{
const GLfloat zNear = 0.01, zFar = 1000.0, fieldOfView = 45.0;
GLfloat size;
glEnable(GL_DEPTH_TEST);
glMatrixMode(GL_PROJECTION);
size = zNear * tanf(DEGREES_TO_RADIANS(fieldOfView) / 2.0);
CGRect rect = view.bounds;
glFrustumf(-size, size, -size / (rect.size.width / rect.size.height), size /
(rect.size.width / rect.size.height), zNear, zFar);
glViewport(0, 0, rect.size.width, rect.size.height);
glMatrixMode(GL_MODELVIEW);
// Turn necessary features on
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
//glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
// Bind the number of textures we need, in this case one.
glGenTextures(1, &texture[0]);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
NSString *path = [[NSBundle mainBundle] pathForResource:@"texture" ofType:@"png"];
NSData *texData = [[NSData alloc] initWithContentsOfFile:path];
UIImage *image = [[UIImage alloc] initWithData:texData];
if (image == nil)
NSLog(#"Do real error checking here");
GLuint width = CGImageGetWidth(image.CGImage);
GLuint height = CGImageGetHeight(image.CGImage);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
void *imageData = malloc( height * width * 4 );
CGContextRef context = CGBitmapContextCreate( imageData, width, height, 8, 4 * width, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big );
// Flip the Y-axis
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGColorSpaceRelease( colorSpace );
CGContextClearRect( context, CGRectMake( 0, 0, width, height ) );
CGContextDrawImage( context, CGRectMake( 0, 0, width, height ), image.CGImage );
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
CGContextRelease(context);
free(imageData);
[image release];
[texData release];
glEnable(GL_LIGHTING);
// Turn the first light on
glEnable(GL_LIGHT0);
// Define the ambient component of the first light
static const Color3D light0Ambient[] = {{0.4, 0.4, 0.4, 1.0}};
glLightfv(GL_LIGHT0, GL_AMBIENT, (const GLfloat *)light0Ambient);
// Define the diffuse component of the first light
static const Color3D light0Diffuse[] = {{0.8, 0.8, 0.8, 1.0}};
glLightfv(GL_LIGHT0, GL_DIFFUSE, (const GLfloat *)light0Diffuse);
// Define the position of the first light
// const GLfloat light0Position[] = {10.0, 10.0, 10.0};
static const Vertex3D light0Position[] = {{10.0, 10.0, 10.0}};
glLightfv(GL_LIGHT0, GL_POSITION, (const GLfloat *)light0Position);
}
When I run the code, it shows a less-than-full-screen grey box instead of my image. Any ideas what I am doing wrong?

iPhone OpenGLES 2.0 Text Texture w/ strange border (not stroke) issue

I'm using Core Graphics to create a text texture. Unfortunately the text renders like this (the text color is the same as the background to demonstrate the strange border).
I've tried playing with stroke colors and borders, so I think it is due to OpenGL ES 2.0 and not Core Graphics.
// Create default framebuffer object. The backing will be allocated for the current layer in -resizeFromLayer
glGenFramebuffers(1, &defaultFramebuffer);
glGenRenderbuffers(1, &colorRenderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
glActiveTexture(GL_TEXTURE0);
glUniform1i(uniforms[UNIFORM_SAMPLER], 0);
// Set up the texture state.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
texture = [[FW2Texture alloc] initWithString:@"Text"];
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, texture.width, texture.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, texture.imageData);
And the core graphics bit:
-(id)initWithString:(NSString*)str {
if((self = [super init])) {
UIFont *font = [UIFont systemFontOfSize:17];
CGSize size = [str sizeWithFont:font];
NSInteger i;
width = size.width;
if((width != 1) && (((int)width) & (((int)width) - 1))) {
i = 1;
while(i < width)
i *= 2;
width = i;
}
height = size.height;
if((height != 1) && (((int)height) & (((int)height) - 1))) {
i = 1;
while(i < height)
i *= 2;
height = i;
}
NSInteger BitsPerComponent = 8;
int bpp = BitsPerComponent / 2;
int byteCount = width * height * bpp;
uint8_t *data = (uint8_t*) calloc(byteCount, 1);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
CGContextRef context = CGBitmapContextCreate(data,
width,
height,
BitsPerComponent,
bpp * width,
colorSpace,
bitmapInfo);
CGColorSpaceRelease(colorSpace);
CGContextSetGrayFillColor(context, 0.5f, 1.0f);
CGContextTranslateCTM(context, 0.0f, height);
CGContextScaleCTM(context, 1.0f, -1.0f);
UIGraphicsPushContext(context);
[str drawInRect:CGRectMake(0,
0,
size.width,
size.height)
withFont:font
lineBreakMode:UILineBreakModeWordWrap
alignment:UITextAlignmentCenter];
UIGraphicsPopContext();
CGContextRelease(context);
imageData = (uint8_t*)[[NSData dataWithBytesNoCopy:data length:byteCount freeWhenDone:YES] bytes];
}
return self;
}
What's your glBlendFunc? You're taking premultiplied alpha from CoreGraphics, so e.g. instead of a border pixel being (r, g, b, 0.5) it'll be (0.5*r, 0.5*g, 0.5*b, 0.5). That means you should composite with blending enabled, using glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA) so that you get srcColour + (1 - alpha of srcColour)*dstColour.
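As a minimal sketch of that blend state, set before drawing the textured text quad (assuming blending is otherwise at its defaults):
glEnable(GL_BLEND);
// Source colors are premultiplied by alpha, so weight them by 1
// and the destination by (1 - source alpha).
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
// ... bind the text texture and issue the draw call here ...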

Problem with OpenGL ES showing an image

I used the Xcode OpenGL App template to create a sample.
I am new to OpenGL ES and am trying to rewrite the 'render' method in ES1Renderer.m.
I try to create a texture and show it on the screen, but nothing shows.
Can someone help me? I have no idea how to fix it:
- (void)render
{
int imageW = 16;
int imageH = 16;
GLubyte *textureData = (GLubyte *) malloc(imageW * imageH << 2);
for (int i = 0; i < imageW * imageH << 2; i++) {
textureData[i]= 0xff & i;
}
GLuint textureId;
glGenTextures(1, &textureId);
glBindTexture(GL_TEXTURE_2D, textureId);
// when texture area is small, bilinear filter the closest mipmap
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
GL_LINEAR );
// when texture area is large, bilinear filter the original
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
// the texture wraps over at the edges (repeat)
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT );
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageW, imageH, 0, GL_RGBA, GL_UNSIGNED_BYTE, textureData);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error uploading texture. glError: 0x%04X", err);
free(textureData);
float x = 10.0f;
float y = 10.0f;
float z = 0.0f;
float scaleX = 1.0f;
float scaleY = 1.0f;
float scaleZ = 1.0f;
int w = imageW /2;
int h = imageH /2;
const GLfloat squareVertices[] = {
-w, -h,
w, -h,
-w, h,
w, h,
};
const GLfloat textureCoords[] = {
0, 0,
1, 0,
0, 1,
1, 1,
};
glEnable(GL_TEXTURE_2D);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnableClientState(GL_VERTEX_ARRAY);
glBindTexture(GL_TEXTURE_2D, textureId);
glVertexPointer(2, GL_FLOAT, 0, squareVertices);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
glPushMatrix();
glTranslatef(x, y, z);
glScalef(scaleX, scaleY, scaleZ);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glPopMatrix();
glDisable(GL_TEXTURE_2D);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
NSLog(#"-->");
glDeleteTextures(1, &textureId);
// This application only creates a single color renderbuffer which is already bound at this point.
// This call is redundant, but needed if dealing with multiple renderbuffers.
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
Sadly the OpenGL template provided by Xcode has changed at some point — the current code (as of Xcode 3.2.5, creating an iOS Application with the 'OpenGL ES Application' template) no longer supplies a separate ES1Renderer.m and ES2Renderer.m, preferring to provide a single, simplified EAGLView.m and to perform runtime tests within GLTestViewController.m. With that in mind, I modified GLTestViewController.m's awakeFromNib no longer to attempt to get an ES 2 context:
- (void)awakeFromNib
{
EAGLContext *aContext = nil;//[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!aContext)
{
aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
}
if (!aContext)
NSLog(#"Failed to create ES context");
else if (![EAGLContext setCurrentContext:aContext])
NSLog(#"Failed to set ES context current");
self.context = aContext;
[aContext release];
[(EAGLView *)self.view setContext:context];
[(EAGLView *)self.view setFramebuffer];
if ([context API] == kEAGLRenderingAPIOpenGLES2)
[self loadShaders];
animating = FALSE;
animationFrameInterval = 1;
self.displayLink = nil;
}
And copied and pasted relevant portions of your code into drawFrame:
- (void)drawFrame
{
[(EAGLView *)self.view setFramebuffer];
// Replace the implementation of this method to do your own custom drawing.
static const GLfloat squareVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
const GLfloat textureCoords[] = {
0, 0,
1, 0,
0, 1,
1, 1,
};
static float transY = 0.0f;
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
int imageW = 16;
int imageH = 16;
GLubyte *textureData = (GLubyte *) malloc(imageW * imageH << 2);
for (int i = 0; i < imageW * imageH << 2; i++) {
textureData[i]= 0xff & i;
}
GLuint textureId;
glGenTextures(1, &textureId);
glBindTexture(GL_TEXTURE_2D, textureId);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT );
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageW, imageH, 0, GL_RGBA, GL_UNSIGNED_BYTE, textureData);
free(textureData);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0.0f, (GLfloat)(sinf(transY)/2.0f), 0.0f);
transY += 0.075f;
glEnable(GL_TEXTURE_2D);
glVertexPointer(2, GL_FLOAT, 0, squareVertices);
glEnableClientState(GL_VERTEX_ARRAY);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDeleteTextures(1, &textureId);
[(EAGLView *)self.view presentFramebuffer];
}
The result works entirely as you seem to intend. At a guess, is it possible either that:
you're setting something other than the identity as your projection matrix, causing your geometry to be clipped because it is placed at z = 0?
you've neglected properly to abandon an attempt at ES 2 rendering, causing unexpected results because tasks like textured rendering aren't hardwired in with ES 2 in the same way that they are with ES1?
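For the first of those guesses, a minimal ES 1.x sketch that resets both fixed-function matrices to the identity (call it once the context is current, before drawing the quad), so that geometry placed at z = 0 is not clipped:
glMatrixMode(GL_PROJECTION);
glLoadIdentity();            // identity projection: clip space spans -1..1 on every axis
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();            // no model-view transform, so z = 0 geometry stays inside the clip volume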

iOS 4.1 OpenGL ES 1.1 not drawing Texture

For some reason my textures are not drawing, even though my code looks exactly the same as an old project that did draw them. So far, the vertices and texture coordinates look fine, as white squares are being drawn where the textures should appear.
The process so far goes:
I load up a Controller and in loadView, I
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
Then my renderer is loaded up, which does nothing on construction. After that I load my texture into GL. This code is a direct copy from my old project and I know it works.
- (GLuint)textureFromPath:(NSString *)path
{
GLuint texture;
glGenTextures(1, &texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:path];
if (!img) {
NSLog(#"Image \"%#\" could not be loaded and was not bound", path);
return 0;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef context = CGBitmapContextCreate(image, width, height, 8, 4 * width, colourSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(context, bounds);
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGContextDrawImage(context, bounds, cgimage);
CGContextRelease(context);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
[img release];
free(image);
return texture;
}
I then take the generated texture from GL and assign it to position 0 of the Renderer's array. I also did this in my old project and it worked fine too. So far so good, I feel.
The application then tells it to startAnimation, which calls setFramebuffer, which in turn calls createFramebuffer since the framebuffer is undefined. It then notifies the Renderer (btw, Renderer is a C++ class) that it has created the framebuffers.
void bufferHasBeenCreated() const {
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(-160.0f, 160.0f, -240.0f, 240.0f, -5.0f, 1.0f);
glViewport(0, 0, 320, 480);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
}
It then calls render on the Renderer.
void render() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.325f, 0.0f, 0.325f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
static float rot = 0.0f;
glRotatef(rot, 0.0f, 0.0f, 1.0f);
//glColor4f(0.0f, 0.0f, 1.0f, 1.0f);
glBindTexture(GL_TEXTURE_2D, texture_[0]);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
printf("Error. glError: 0x%04X\n", err);
glVertexPointer(2, GL_FLOAT, 0, pos[0]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[1]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[2]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
rot += 0.5f;
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
And finally it calls presentFramebuffer, which binds the renderbuffer and presents it via the context.
Edit: I have done some more work on this, and it turns out it is something to do with the context and the buffers. Whenever I set up just the context and then enable GL_TEXTURE_2D and GL_BLEND, as above, the textures don't load; yet when the buffers are set up first, everything works.
I have got my texture to draw. I pulled all my code out and put it in its own file. I will now start pulling it apart again and hopefully get everything working in the structure that I already have.
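In other words, the initialization order that ended up working is roughly this (a condensed sketch of the full ES1Renderer listing below, using the same names):
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
[EAGLContext setCurrentContext:context];
// Create and bind the framebuffer/renderbuffer pair first...
glGenFramebuffersOES(1, &viewFramebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
// ...and only then enable texturing/blending and upload the texture.
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
glGenTextures(1, texture);
// (texture upload via CGBitmapContext as in the listing below)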
(Objective-C) ES1Renderer.h
#import <QuartzCore/QuartzCore.h>
#import "OpenGLES.h"
@interface ES1Renderer : UIView {
@private
GLint backingWidth;
GLint backingHeight;
EAGLContext *context;
GLuint viewFramebuffer, viewRenderbuffer;
GLuint texture[1];
BOOL animating;
BOOL displayLinkSupported;
NSInteger animationFrameInterval;
// Use of the CADisplayLink class is the preferred method for controlling your animation timing.
// CADisplayLink will link to the main display and fire every vsync when added to a given run-loop.
// The NSTimer class is used only as fallback when running on a pre 3.1 device where CADisplayLink
// isn't available.
id displayLink;
NSTimer *animationTimer;
}
@property (readonly, nonatomic, getter=isAnimating) BOOL animating;
@property (nonatomic) NSInteger animationFrameInterval;
- (void) startAnimation;
- (void) stopAnimation;
- (void)render;
@end
Next ES1Renderer.m
#import "ES1Renderer.h"
@implementation ES1Renderer
@synthesize animating;
@dynamic animationFrameInterval;
+ (Class)layerClass
{
return [CAEAGLLayer class];
}
- (id)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame]) {
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context])
{
[self release];
return nil;
}
// Generate buffers
glGenFramebuffersOES(1, &viewFramebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// Disable Depth
glDisable(GL_DEPTH_TEST);
// Load textures
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
glGenTextures(1, texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"colour" ofType:@"png"]];
if (!img) {
NSLog(#"Image \"colour.png\" could not be loaded and was not bound");
[self release];
return nil;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef imgContext = CGBitmapContextCreate(image,
width, height,
8, 4 * width, colourSpace,
kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(imgContext, bounds);
CGContextTranslateCTM (imgContext, 0, height);
CGContextScaleCTM (imgContext, 1.0, -1.0);
CGContextDrawImage(imgContext, bounds, cgimage);
CGContextRelease(imgContext);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error. glError: 0x%04X\n", err);
free(image);
[img release];
animating = FALSE;
displayLinkSupported = FALSE;
animationFrameInterval = 1;
displayLink = nil;
animationTimer = nil;
// A system version of 3.1 or greater is required to use CADisplayLink. The NSTimer
// class is used as fallback when it isn't available.
NSString *reqSysVer = #"3.1";
NSString *currSysVer = [[UIDevice currentDevice] systemVersion];
if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending)
displayLinkSupported = TRUE;
}
return self;
}
- (void)drawView:(id)sender
{
[self render];
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(#"Error. glError: 0x%04X\n", err);
}
- (void) render
{
//glDisable(GL_TEXTURE_2D);
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
static const float textureVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
static const float textureCoords[] = {
0.0f, 0.0f,
0.0f, 0.515625f,
0.12890625f, 0.0f,
0.12890625f, 0.515625f,
};
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glBindTexture(GL_TEXTURE_2D, texture[0]);
//glColor4f(0.0f, 0.0f, 0.0f, 1.0f);
glVertexPointer(2, GL_FLOAT, 0, textureVertices);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (void)layoutSubviews
{
[EAGLContext setCurrentContext:context];
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer *)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glViewport(0, 0, backingWidth, backingHeight);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
NSLog(#"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
}
- (void) dealloc
{
// Tear down GL
if (viewFramebuffer)
{
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
}
if (viewRenderbuffer)
{
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
}
// Tear down context
if ([EAGLContext currentContext] == context)
[EAGLContext setCurrentContext:nil];
[context release];
context = nil;
displayLink = nil;
animationTimer = nil;
[super dealloc];
}
- (NSInteger) animationFrameInterval
{
return animationFrameInterval;
}
- (void) setAnimationFrameInterval:(NSInteger)frameInterval
{
// Frame interval defines how many display frames must pass between each time the
// display link fires. The display link will only fire 30 times a second when the
// frame internal is two on a display that refreshes 60 times a second. The default
// frame interval setting of one will fire 60 times a second when the display refreshes
// at 60 times a second. A frame interval setting of less than one results in undefined
// behavior.
if (frameInterval >= 1)
{
animationFrameInterval = frameInterval;
if (animating)
{
[self stopAnimation];
[self startAnimation];
}
}
}
- (void) startAnimation
{
if (!animating)
{
if (displayLinkSupported)
{
// CADisplayLink is API new to iPhone SDK 3.1. Compiling against earlier versions will result in a warning, but can be dismissed
// if the system version runtime check for CADisplayLink exists in -initWithCoder:. The runtime check ensures this code will
// not be called in system versions earlier than 3.1.
displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawView:)];
[displayLink setFrameInterval:animationFrameInterval];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
else
animationTimer = [NSTimer scheduledTimerWithTimeInterval:(NSTimeInterval)((1.0 / 60.0) * animationFrameInterval) target:self selector:@selector(drawView:) userInfo:nil repeats:TRUE];
animating = TRUE;
}
}
- (void)stopAnimation
{
if (animating)
{
if (displayLinkSupported)
{
[displayLink invalidate];
displayLink = nil;
}
else
{
[animationTimer invalidate];
animationTimer = nil;
}
animating = FALSE;
}
}
@end
There is only one problem with this code. It's out of date. Apple released a new way of doing things, but hell. It works.
Update:
It turns out I had set the context up before loading the textures.