I have spent two days trying to show an OpenGL ES view over the camera preview on the iPhone.
The camera preview alone works.
The EAGLView (OpenGL ES) alone works.
The problem is when I try to place the EAGLView over the camera preview.
I am able to place both UIViews at the same time, but the camera preview always ends up over the EAGLView (wrong!). When I set the camera preview's alpha to 0.5, I can see both UIViews just as I want, but both are blurred (which is normal).
I have tried [self.view bringSubviewToFront:(EAGLView)], but nothing changes.
The EAGLView is set up in IB as a custom class.
The CameraView is added as a subview in code.
Here is some of the code; I can post more if you need it.
Thanks!
EAGLView
+ (Class)layerClass {
return [CAEAGLLayer class];
}
//The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
- (id)initWithCoder:(NSCoder*)coder {
puntosPintar=(GLfloat*)malloc(sizeof(GLfloat)*8);
puntosPintar[0] = -0.25f;
puntosPintar[1] = -1.22f;
puntosPintar[2] = -0.41f;
puntosPintar[3] = 0.0f;
puntosPintar[4] = 0.35f;
puntosPintar[5] = -1.69f;
puntosPintar[6] = 0.15f;
puntosPintar[7] = 0.0f;
if ((self = [super initWithCoder:coder])) {
// Get the layer
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = NO;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
[self release];
return nil;
}
}
return self;
}
- (void)drawView {
const GLubyte squareColors[] = {
255, 255, 0, 255,
0, 255, 255, 255,
0, 0, 0, 0,
255, 0, 255, 255,
};
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(-1.0f, 1.0f, -1.5f, 1.5f, -1.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClear(GL_COLOR_BUFFER_BIT);
glVertexPointer(2, GL_FLOAT, 0, puntosPintar);
glEnableClientState(GL_VERTEX_ARRAY);
glColorPointer(4, GL_UNSIGNED_BYTE, 0, squareColors);
glEnableClientState(GL_COLOR_ARRAY);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 8);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (void)layoutSubviews {
[EAGLContext setCurrentContext:context];
[self destroyFramebuffer];
[self createFramebuffer];
[self drawView];
}
- (BOOL)createFramebuffer {
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
if (USE_DEPTH_BUFFER) {
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
The UIViewController where I want to show both
Loading the camera view
[CameraImageHelper startRunning];
UIView *fafa;
fafa= [[UIView alloc]initWithFrame:self.view.bounds]; //returns a UIView with the cameraview as a layer of that view. It works well (checked)
fafa = [CameraImageHelper previewWithBounds:self.view.bounds];
fafa.alpha=0.5; //Only way to show both
[self.view addSubview:fafa];
[self.view bringSubviewToFront:fafa];
Loading the EAGLView
In the .h I have declared:
IBOutlet EAGLView *openGLVista;
In viewDidLoad:
openGLVista = [[EAGLView alloc] init];
CameraImageHelper.h
@interface CameraImageHelper : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
AVCaptureSession *session;
}
@property (retain) AVCaptureSession *session;
+ (void) startRunning;
+ (void) stopRunning;
+ (UIView *) previewWithBounds: (CGRect) bounds;
@end
CameraImageHelper.m
- (void) initialize
{
NSError *error;
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] error:&error];
if (!captureInput)
{
NSLog(@"Error: %@", error);
return;
}
self.session = [[[AVCaptureSession alloc] init] autorelease];
[self.session addInput:captureInput];
}
- (id) init
{
if (self = [super init]) [self initialize];
return self;
}
- (UIView *) previewWithBounds: (CGRect) bounds
{
UIView *view = [[[UIView alloc] initWithFrame:bounds] autorelease];
AVCaptureVideoPreviewLayer *preview = [AVCaptureVideoPreviewLayer layerWithSession: self.session];
preview.frame = bounds;
preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
[view.layer addSublayer: preview];
return view;
}
- (void) dealloc
{
self.session = nil;
[super dealloc];
}
#pragma mark Class Interface
+ (id) sharedInstance // private
{
if(!sharedInstance) sharedInstance = [[self alloc] init];
return sharedInstance;
}
+ (void) startRunning
{
[[[self sharedInstance] session] startRunning];
}
+ (void) stopRunning
{
[[[self sharedInstance] session] stopRunning];
}
+ (UIView *) previewWithBounds: (CGRect) bounds
{
return [[self sharedInstance] previewWithBounds: bounds];
}
@end
I see that in IB you are using the EAGLView as the view of the view controller, and in the code snippet you add the preview view as a subview of that view. In other words, your view hierarchy looks something like this:
*- EAGLView
   +- Preview view
Thus, Preview view is always on top of the EAGLView because it is a subview of the EAGLView. If you want to be able to display either one on top of the other, you will instead have to lay things out like this:
*- some generic UIView
   +- EAGLView
   +- Preview view
In other words, in IB you should have a generic UIView bound to the view property, and then drag the EAGLView so it is inside that generic UIView. Then your code for adding the preview view should work right.
BTW, this does not do what you seem to think:
fafa= [[UIView alloc]initWithFrame:self.view.bounds]; //returns a UIView with the cameraview as a layer of that view. It works well (checked)
fafa = [CameraImageHelper previewWithBounds:self.view.bounds];
The first line creates a generic UIView. Then the second throws it away (leaking the memory!), replacing it with the preview view. You should just delete the first line.
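For illustration, here is a minimal sketch of what the view controller code could look like with that layout, assuming self.view is the generic container view from IB and openGLVista is the EAGLView nested inside it:

- (void)viewDidLoad {
    [super viewDidLoad];

    [CameraImageHelper startRunning];

    // Put the camera preview at the bottom of the container view...
    UIView *preview = [CameraImageHelper previewWithBounds:self.view.bounds];
    [self.view insertSubview:preview atIndex:0];

    // ...so the EAGLView (a sibling, wired up in IB) stays on top of it.
    [self.view bringSubviewToFront:openGLVista];
}

Since the question's EAGLView already sets eaglLayer.opaque = NO and clears with an alpha of 0.0, the camera preview should show through wherever the GL view doesn't draw, without touching the preview's alpha.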
Related
Hey guys, I am writing a custom view for doing OpenGL ES rendering. It worked when I had a view in Interface Builder and set its class to my rendering view, but now I have switched to creating the view with initWithFrame. (Note that I can set the background color of the view and see it; I just can't render anything in OpenGL, not even the clear color.) Everything seems to be getting called and the layer class looks fine too; for some reason it just does not work outside of creating it directly in Interface Builder. Any ideas?
I have this setup code:
+ (Class) layerClass
{
return [CAEAGLLayer class];
}
- (id)initWithCoder:(NSCoder*)coder
{
if (self = [super initWithCoder:coder])
{
[self setupView];
}
return self;
}
- (id)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame])
{
[self setupView];
}
return self;
}
- (void)setupView
{
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!_context || ![EAGLContext setCurrentContext:_context])
{
NSLog(@"Error could not set context");
[self release];
}
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(renderView:)];
_displayLink.paused = YES;
_displayLink.frameInterval = FPS;
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
[EAGLContext setCurrentContext:_context];
[self setup];
}
And then some layout code.
- (void)layoutSubviews
{
[EAGLContext setCurrentContext:_context];
[self destroyBuffers];
if (![self createBuffers]){
NSLog(#"Failed to create framebuffer!");
}
[self resumeRendering];
}
And some rendering code:
- (void)renderView:(CADisplayLink*)sender
{
[EAGLContext setCurrentContext:_context];
glBindFramebuffer(GL_FRAMEBUFFER, _viewFrameBuffer);
glClearColor(0.5, 0.5, 0.5, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
GLenum attachments[] = {GL_DEPTH_ATTACHMENT, GL_COLOR_ATTACHMENT0};
glDiscardFramebufferEXT(GL_READ_FRAMEBUFFER_APPLE, 2, attachments);
glBindRenderbuffer(GL_RENDERBUFFER, _viewRenderBuffer);
[_context presentRenderbuffer:GL_RENDERBUFFER];
}
I can't figure out why nothing is rendering, since all the same calls are being made. If anyone can help me out, that would be awesome!
I may be speculating too much here, but switching from IB to initWithFrame omits a step: loadView in your view controller, which is where you would instantiate your custom view class instead of a standard view class.
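If that is the case, a rough sketch of the missing loadView override (GLRenderView is a placeholder for your custom view class, and the memory management follows the pre-ARC style of the rest of this code):

- (void)loadView
{
    // Instantiate the custom rendering view in code and make it the
    // controller's root view instead of a plain UIView.
    GLRenderView *glView = [[GLRenderView alloc] initWithFrame:[[UIScreen mainScreen] applicationFrame]];
    self.view = glView;
    [glView release];
}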
I have a strange problem. Sometimes it happens, sometimes it doesn't.
I have an array of 8 GLfloat values. When EAGLView draws it, it shows the two lines defined by the array, but sometimes also an additional line.
EAGLView.m
+ (Class)layerClass {
return [CAEAGLLayer class];
}
- (id)initWithCoder:(NSCoder*)coder {
if ((self = [super initWithCoder:coder])) {
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = NO;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
[self release];
return nil;
}
}
return self;
}
- (void)drawView {
int numero=malloc_size(puntosPintar)/sizeof(GLfloat);
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
glClear(GL_COLOR_BUFFER_BIT);
glVertexPointer(2, GL_FLOAT, 0, puntosPintar);
glEnableClientState(GL_VERTEX_ARRAY);
glColor4f(1,1,1,1);
glDrawArrays(GL_LINES, 0, numero);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
free(puntosPintar);
}
- (void)layoutSubviews {
[EAGLContext setCurrentContext:context];
[self destroyFramebuffer];
[self createFramebuffer];
[self drawView];
}
- (BOOL)createFramebuffer {
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
if (USE_DEPTH_BUFFER) {
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
- (void)destroyFramebuffer {
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
if(depthRenderbuffer) {
glDeleteRenderbuffersOES(1, &depthRenderbuffer);
depthRenderbuffer = 0;
}
}
- (void)dealloc {
if ([EAGLContext currentContext] == context) {
[EAGLContext setCurrentContext:nil];
}
[context release];
[super dealloc];
}
@end
puntosPintar
puntosPintar=(GLfloat*)malloc(sizeof(GLfloat)*8);
puntosPintar[0] = -0.25f;
puntosPintar[1] = -1.49f;
puntosPintar[2] = -0.1f;
puntosPintar[3] = 0.0f;
puntosPintar[4] = 0.25f;
puntosPintar[5] = -1.49f;
puntosPintar[6] = 0.1f;
puntosPintar[7] = 0.0f;
You should be passing 4 into the count argument of glDrawArrays, not 8: the count is the number of vertices, not the number of floats. As written, the call reads past the end of puntosPintar (its 8 floats only describe 4 two-component vertices), so the extra vertices come from whatever happens to follow the array in memory, which is why the extra line only shows up sometimes.
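In other words, something like this (mirroring the drawView code above, with the count expressed in vertices):

// puntosPintar holds 8 GLfloats = 4 vertices of 2 components each,
// which GL_LINES draws as two separate line segments.
glVertexPointer(2, GL_FLOAT, 0, puntosPintar);
glEnableClientState(GL_VERTEX_ARRAY);
glDrawArrays(GL_LINES, 0, 4); // count = vertices, not floats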
For some reason my textures are not drawing, even though my code looks exactly the same as in an old project where it worked. So far the vertices and TexCoords look fine, since white squares are being drawn where the textures should appear instead.
The process so far goes:
I load up a Controller and in loadView I call:
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
Then my renderer is loaded, which does nothing on construction. After that I load my texture into GL. This code is a direct copy from my old project and I know it works.
- (GLuint)textureFromPath:(NSString *)path
{
GLuint texture;
glGenTextures(1, &texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:path];
if (!img) {
NSLog(@"Image \"%@\" could not be loaded and was not bound", path);
return 0;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef context = CGBitmapContextCreate(image, width, height, 8, 4 * width, colourSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(context, bounds);
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGContextDrawImage(context, bounds, cgimage);
CGContextRelease(context);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
[img release];
free(image);
return texture;
}
I then take the texture generated by GL and assign it to position 0 of the Renderer's texture array. I did this in my old project too and it worked fine. So far so good, I feel.
The application then tells it to startAnimation, which calls setFramebuffer, which in turn calls createFramebuffer because the framebuffer is undefined. It then notifies the Renderer (by the way, Renderer is a C++ class) that it has created the framebuffers.
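As an aside, setFramebuffer and createFramebuffer themselves are not shown; in this kind of view they are usually something along these lines (a sketch only, and the ivar names context, viewFramebuffer, backingWidth and backingHeight are assumptions):

- (void)setFramebuffer
{
    [EAGLContext setCurrentContext:context];
    // Create the framebuffer lazily the first time we are asked to draw into it.
    if (!viewFramebuffer)
        [self createFramebuffer];
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
    glViewport(0, 0, backingWidth, backingHeight);
}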
void bufferHasBeenCreated() const {
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(-160.0f, 160.0f, -240.0f, 240.0f, -5.0f, 1.0f);
glViewport(0, 0, 320, 480);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
}
It then calls render on the Renderer.
void render() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.325f, 0.0f, 0.325f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
static float rot = 0.0f;
glRotatef(rot, 0.0f, 0.0f, 1.0f);
//glColor4f(0.0f, 0.0f, 1.0f, 1.0f);
glBindTexture(GL_TEXTURE_2D, texture_[0]);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
printf("Error. glError: 0x%04X\n", err);
glVertexPointer(2, GL_FLOAT, 0, pos[0]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[1]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glVertexPointer(2, GL_FLOAT, 0, pos[2]);
glTexCoordPointer(2, GL_FLOAT, 0, black);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
rot += 0.5f;
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
and then finally it calls presentFramebuffer, which binds the renderbuffer and presents it on the context.
Edit: I have done some more work on this, and it turns out it is something to do with the context and the buffers. When I set up just the context and enable GL_TEXTURE_2D and GL_BLEND, as above, the textures don't load; when I do it after the buffers are set up, everything works.
I have got my texture to draw. I pulled all my code out and put it in its own file. I will now start pulling it apart again and hopefully get everything working in the structure that I already have.
(Objective-C) ES1Renderer.h
#import <QuartzCore/QuartzCore.h>
#import "OpenGLES.h"
@interface ES1Renderer : UIView {
@private
GLint backingWidth;
GLint backingHeight;
EAGLContext *context;
GLuint viewFramebuffer, viewRenderbuffer;
GLuint texture[1];
BOOL animating;
BOOL displayLinkSupported;
NSInteger animationFrameInterval;
// Use of the CADisplayLink class is the preferred method for controlling your animation timing.
// CADisplayLink will link to the main display and fire every vsync when added to a given run-loop.
// The NSTimer class is used only as fallback when running on a pre 3.1 device where CADisplayLink
// isn't available.
id displayLink;
NSTimer *animationTimer;
}
@property (readonly, nonatomic, getter=isAnimating) BOOL animating;
@property (nonatomic) NSInteger animationFrameInterval;
- (void) startAnimation;
- (void) stopAnimation;
- (void)render;
@end
Next ES1Renderer.m
#import "ES1Renderer.h"
@implementation ES1Renderer
@synthesize animating;
@dynamic animationFrameInterval;
+ (Class)layerClass
{
return [CAEAGLLayer class];
}
- (id)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame]) {
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context])
{
[self release];
return nil;
}
// Generate buffers
glGenFramebuffersOES(1, &viewFramebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// Disable Depth
glDisable(GL_DEPTH_TEST);
// Load textures
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
glGenTextures(1, texture);
UIImage *img = [[UIImage alloc] initWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"colour" ofType:@"png"]];
if (!img) {
NSLog(@"Image \"colour.png\" could not be loaded and was not bound");
[self release];
return nil;
}
CGImageRef cgimage = img.CGImage;
float width = CGImageGetWidth(cgimage);
float height = CGImageGetHeight(cgimage);
CGRect bounds = CGRectMake(0, 0, width, height);
CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
void *image = malloc(width * height * 4);
CGContextRef imgContext = CGBitmapContextCreate(image,
width, height,
8, 4 * width, colourSpace,
kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colourSpace);
CGContextClearRect(imgContext, bounds);
CGContextTranslateCTM (imgContext, 0, height);
CGContextScaleCTM (imgContext, 1.0, -1.0);
CGContextDrawImage(imgContext, bounds, cgimage);
CGContextRelease(imgContext);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(@"Error. glError: 0x%04X\n", err);
free(image);
[img release];
animating = FALSE;
displayLinkSupported = FALSE;
animationFrameInterval = 1;
displayLink = nil;
animationTimer = nil;
// A system version of 3.1 or greater is required to use CADisplayLink. The NSTimer
// class is used as fallback when it isn't available.
NSString *reqSysVer = @"3.1";
NSString *currSysVer = [[UIDevice currentDevice] systemVersion];
if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending)
displayLinkSupported = TRUE;
}
return self;
}
- (void)drawView:(id)sender
{
[self render];
GLenum err = glGetError();
if (err != GL_NO_ERROR)
NSLog(@"Error. glError: 0x%04X\n", err);
}
- (void) render
{
//glDisable(GL_TEXTURE_2D);
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
static const float textureVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
static const float textureCoords[] = {
0.0f, 0.0f,
0.0f, 0.515625f,
0.12890625f, 0.0f,
0.12890625f, 0.515625f,
};
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glBindTexture(GL_TEXTURE_2D, texture[0]);
//glColor4f(0.0f, 0.0f, 0.0f, 1.0f);
glVertexPointer(2, GL_FLOAT, 0, textureVertices);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (void)layoutSubviews
{
[EAGLContext setCurrentContext:context];
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer *)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glViewport(0, 0, backingWidth, backingHeight);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
}
- (void) dealloc
{
// Tear down GL
if (viewFramebuffer)
{
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
}
if (viewRenderbuffer)
{
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
}
// Tear down context
if ([EAGLContext currentContext] == context)
[EAGLContext setCurrentContext:nil];
[context release];
context = nil;
displayLink = nil;
animationTimer = nil;
[super dealloc];
}
- (NSInteger) animationFrameInterval
{
return animationFrameInterval;
}
- (void) setAnimationFrameInterval:(NSInteger)frameInterval
{
// Frame interval defines how many display frames must pass between each time the
// display link fires. The display link will only fire 30 times a second when the
// frame internal is two on a display that refreshes 60 times a second. The default
// frame interval setting of one will fire 60 times a second when the display refreshes
// at 60 times a second. A frame interval setting of less than one results in undefined
// behavior.
if (frameInterval >= 1)
{
animationFrameInterval = frameInterval;
if (animating)
{
[self stopAnimation];
[self startAnimation];
}
}
}
- (void) startAnimation
{
if (!animating)
{
if (displayLinkSupported)
{
// CADisplayLink is API new to iPhone SDK 3.1. Compiling against earlier versions will result in a warning, but can be dismissed
// if the system version runtime check for CADisplayLink exists in -initWithCoder:. The runtime check ensures this code will
// not be called in system versions earlier than 3.1.
displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawView:)];
[displayLink setFrameInterval:animationFrameInterval];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
else
animationTimer = [NSTimer scheduledTimerWithTimeInterval:(NSTimeInterval)((1.0 / 60.0) * animationFrameInterval) target:self selector:@selector(drawView:) userInfo:nil repeats:TRUE];
animating = TRUE;
}
}
- (void)stopAnimation
{
if (animating)
{
if (displayLinkSupported)
{
[displayLink invalidate];
displayLink = nil;
}
else
{
[animationTimer invalidate];
animationTimer = nil;
}
animating = FALSE;
}
}
@end
There is only one problem with this code: it's out of date. Apple has released a new way of doing things, but hell, it works.
Update:
It turns out I had set the context up before loading the textures.
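For reference, the order of operations in the working initWithFrame: above, condensed (context first, then the framebuffer and renderbuffer, and only then texture state and upload):

context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
[EAGLContext setCurrentContext:context];          // 1. create the context and make it current

glGenFramebuffersOES(1, &viewFramebuffer);        // 2. generate and bind the FBO/renderbuffer
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES,
                             GL_RENDERBUFFER_OES, viewRenderbuffer);

glEnable(GL_TEXTURE_2D);                          // 3. only now enable texturing and load textures
glEnable(GL_BLEND);
glGenTextures(1, texture);
// ... create the bitmap context, glTexImage2D, etc., as in initWithFrame: above ...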
I'm new to OpenGL ES and I'm having a simple problem in my project. I've successfully drawn a square on the screen, but my drawView function in EAGLView is only called once, even though I wrote the code to call it again and again as in Xcode's OpenGL ES template.
Does anyone have a clue where the mistake is?
Following is the code of my EAGLView.m file:
#import <QuartzCore/QuartzCore.h>
#import <OpenGLES/EAGLDrawable.h>
#import "EAGLView.h"
#define USE_DEPTH_BUFFER 0
// A class extension to declare private methods
@interface EAGLView ()
@property (nonatomic, retain) EAGLContext *context;
- (BOOL) createFramebuffer;
- (void) destroyFramebuffer;
@end
@implementation EAGLView
@synthesize context;
@synthesize animating;
@dynamic animationFrameInterval;
// You must implement this method
+ (Class)layerClass {
return [CAEAGLLayer class];
}
//The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
- (id)initWithCoder:(NSCoder*)coder {
if ((self = [super initWithCoder:coder])) {
// Get the layer
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
[self release];
return nil;
}
animating = FALSE;
displayLinkSupported = FALSE;
animationFrameInterval = 1;
displayLink = nil;
animationTimer = nil;
// A system version of 3.1 or greater is required to use CADisplayLink. The NSTimer
// class is used as fallback when it isn't available.
NSString *reqSysVer = @"3.1";
NSString *currSysVer = [[UIDevice currentDevice] systemVersion];
if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending)
displayLinkSupported = TRUE;
}
return self;
}
- (void)drawView {
NSLog(@"In EAGLView's drawView func");
static const GLfloat squareVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
static const GLubyte squareColors[] = {
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
0, 0, 0, 0,
};
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glRotatef(10.0f, 0.0f, 0.0f, 1.0f);
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glVertexPointer(2, GL_FLOAT, 0, squareVertices);
glEnableClientState(GL_VERTEX_ARRAY);
glColorPointer(4, GL_UNSIGNED_BYTE, 0, squareColors);
glEnableClientState(GL_COLOR_ARRAY);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (void)layoutSubviews {
[EAGLContext setCurrentContext:context];
[self destroyFramebuffer];
[self createFramebuffer];
[self drawView];
}
- (BOOL)createFramebuffer {
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
if (USE_DEPTH_BUFFER) {
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
- (void)destroyFramebuffer {
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
if(depthRenderbuffer) {
glDeleteRenderbuffersOES(1, &depthRenderbuffer);
depthRenderbuffer = 0;
}
}
- (NSInteger)animationFrameInterval
{
return animationFrameInterval;
}
- (void)setAnimationFrameInterval:(NSInteger)frameInterval
{
// Frame interval defines how many display frames must pass between each time the
// display link fires. The display link will only fire 30 times a second when the
// frame internal is two on a display that refreshes 60 times a second. The default
// frame interval setting of one will fire 60 times a second when the display refreshes
// at 60 times a second. A frame interval setting of less than one results in undefined
// behavior.
if (frameInterval >= 1)
{
animationFrameInterval = frameInterval;
if (animating)
{
[self stopAnimation];
[self startAnimation];
}
}
}
- (void)startAnimation
{
if (!animating)
{
if (displayLinkSupported)
{
// CADisplayLink is API new to iPhone SDK 3.1. Compiling against earlier versions will result in a warning, but can be dismissed
// if the system version runtime check for CADisplayLink exists in -initWithCoder:. The runtime check ensures this code will
// not be called in system versions earlier than 3.1.
displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawView)];
[displayLink setFrameInterval:animationFrameInterval];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
else
animationTimer = [NSTimer scheduledTimerWithTimeInterval:(NSTimeInterval)((1.0 / 60.0) * animationFrameInterval) target:self selector:@selector(drawView) userInfo:nil repeats:TRUE];
animating = TRUE;
}
}
- (void)stopAnimation
{
if (animating)
{
if (displayLinkSupported)
{
[displayLink invalidate];
displayLink = nil;
}
else
{
[animationTimer invalidate];
animationTimer = nil;
}
animating = FALSE;
}
}
- (void)dealloc {
if ([EAGLContext currentContext] == context) {
[EAGLContext setCurrentContext:nil];
}
[context release];
[super dealloc];
}
@end
Check to see if your method:
- (void)startAnimation
is actually being called.
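For example, make sure something like this exists in the view controller that owns the EAGLView (glView here is a hypothetical outlet/ivar pointing at your EAGLView; the question doesn't show that part):

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    [glView startAnimation];   // without this, drawView only runs from layoutSubviews
}

- (void)viewWillDisappear:(BOOL)animated
{
    [glView stopAnimation];
    [super viewWillDisappear:animated];
}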
I'm using OpenGL ES in my iPhone application, and sometimes during startup the screen goes purple or black, in roughly one case out of twenty. This happens only during initialization, and once the screen has gone black or purple it stays that way; only restarting the application helps.
I also found that when this bug happens the application keeps running (I can see the game loop working through the console), but the iPhone stops responding to touches (touchesBegan is not being invoked).
Here's my code:
#define USE_DEPTH_BUFFER FALSE
@implementation EAGLView
@synthesize context;
+ (Class)layerClass {
return [CAEAGLLayer class];
}
- (id)initWithFrame: (CGRect)frame {
NSLog(@"init EAGLView");
if ( (self = [super initWithFrame: frame]) ) {
NSLog(@"initializing CAEAGLLayer and EAGLContext");
// Get the layer
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
NSLog(@"!context || ![EAGLContext setCurrentContext:context]");
[self release];
return nil;
}
}
return self;
}
- (void)layoutSubviews {
[EAGLContext setCurrentContext: context];
[self destroyFramebuffer];
[self createFramebuffer];
}
- (BOOL)createFramebuffer {
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
if (USE_DEPTH_BUFFER) {
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
- (void)destroyFramebuffer {
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
if(depthRenderbuffer) {
glDeleteRenderbuffersOES(1, &depthRenderbuffer);
depthRenderbuffer = 0;
}
}
-(void) startDrawing: (GLfloat) viewWidth andHeight: (GLfloat) viewHeight
{
[EAGLContext setCurrentContext: context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, backingWidth, backingHeight);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glRotatef(-90.0f, 0.0f , 0.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
}
-(void) endDrawing
{
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer: GL_RENDERBUFFER_OES];
}
- (void)dealloc {
if ([EAGLContext currentContext] == context) {
[EAGLContext setCurrentContext:nil];
}
[context release];
[super dealloc];
}
...touches processing...
@end
What may be the problem and how can I fix it?
Thank you in advance,
Ilya.
That sounds similar to a problem I had on another phone.
What happened there was that sometimes we would get "out of memory" when trying to allocate a texture and from then on, OpenGL would simply not render, with the non-visual part of the application still running normally.
Make sure to call glGetError often, and log a message or break every time it reports something other than GL_NO_ERROR. (We have a macro for that, so that while debugging we call glGetError after every OpenGL call.)
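Something along these lines (a sketch of such a macro, not the exact one we use):

// Debug-only GL error check; compiles away to nothing in release builds.
#ifdef DEBUG
#define CHECK_GL_ERROR() \
    do { \
        GLenum err = glGetError(); \
        if (err != GL_NO_ERROR) \
            NSLog(@"GL error 0x%04X at %s:%d", err, __FILE__, __LINE__); \
    } while (0)
#else
#define CHECK_GL_ERROR() do {} while (0)
#endif

// Usage, after any GL call you want to check:
//   glTexImage2D(...);
//   CHECK_GL_ERROR();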