I am using the following code to modify the pixels of an image. The issue I have is that the colors of the image come out incorrect. Do you have any idea what the problem is?
CGImageRef inImage = img.CGImage;
CGContextRef ctx;
CFDataRef m_DataRef;
m_DataRef = CGDataProviderCopyData(CGImageGetDataProvider(inImage));
UInt8 *m_PixelBuf = (UInt8 *)CFDataGetBytePtr(m_DataRef);
int length = CFDataGetLength(m_DataRef);
for (int index = 0; index < length; index += 4)
{
    // DO Stuff
}
ctx = CGBitmapContextCreate(m_PixelBuf,
                            CGImageGetWidth(inImage),
                            CGImageGetHeight(inImage),
                            8,
                            CGImageGetBytesPerRow(inImage),
                            CGImageGetColorSpace(inImage),
                            kCGImageAlphaPremultipliedFirst);
CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
UIImage *rawImage = [UIImage imageWithCGImage:imageRef];
CGContextRelease(ctx);
CGImageRelease(imageRef); // release the CGImage we created
CFRelease(m_DataRef);     // release the copied pixel data
Try using kCGImageAlphaPremultipliedLast instead of kCGImageAlphaPremultipliedFirst.
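If that alone doesn't fix it, another thing worth trying is to reuse the source image's own bitmap info instead of hard-coding the alpha setting, so the new context interprets the copied bytes with the same channel order and alpha placement the image actually uses. A minimal sketch, based on the variables in your code (note that CGBitmapContextCreate only supports certain pixel formats, so check for a NULL context and fall back if needed):

// Recreate the context with the same layout the source CGImage uses,
// so the bytes copied from its data provider are interpreted correctly.
CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(inImage); // alpha placement + byte order of the source
ctx = CGBitmapContextCreate(m_PixelBuf,
                            CGImageGetWidth(inImage),
                            CGImageGetHeight(inImage),
                            CGImageGetBitsPerComponent(inImage),
                            CGImageGetBytesPerRow(inImage),
                            CGImageGetColorSpace(inImage),
                            bitmapInfo);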
After using this function to crop an image:
CGImageRef imageRef = CGImageCreateWithImageInRect([self.imageInput CGImage], rect);
UIImage *result = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
I bring "result" to use for process with pixel like that:
......
CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
CGContextRelease(context);
// Now your rawData contains the image data in the RGBA8888 pixel format.
int byteIndex = 0;
for (NSInteger i = 0; i < width * height; i++) {
    int R = rawData[byteIndex];
    int G = rawData[byteIndex + 1];
    int B = rawData[byteIndex + 2];
    int test = [self getValueof:0.3 * R + 0.59 * G + 0.11 * B inRange:0 to:255];
    rawData[byteIndex]     = (char)test;
    rawData[byteIndex + 1] = (char)test;
    rawData[byteIndex + 2] = (char)test;
    byteIndex += 4;
}
ctx = CGBitmapContextCreate(rawData,
                            CGImageGetWidth(imageRef),
                            CGImageGetHeight(imageRef),
                            8,
                            CGImageGetBytesPerRow(imageRef),
                            CGImageGetColorSpace(imageRef),
                            kCGImageAlphaPremultipliedLast);
imageRef = CGBitmapContextCreateImage(ctx);
UIImage *rawImage = [UIImage imageWithCGImage:imageRef];
CGContextRelease(ctx);
CGImageRelease(imageRef);
free(rawData);
It crashes. Does anyone know why? Please help me.
Update, crash report: copy_read_only: vm_copy failed: status 1.
After a long time I found a solution for this crash, and learned something from it:
- When processing bitmaps on iOS, if you don't pay attention to the image's orientation, you can get strange crashes.
For my crash above the fix was very simple: change
UIImage *result = [UIImage imageWithCGImage:imageRef];
to
UIImage *result = [UIImage imageWithCGImage:imageRef scale:self.imageInput.scale orientation:self.imageInput.imageOrientation];
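Along the same lines, if you need the pixel buffer itself to be upright (not just displayed correctly), you can normalize the orientation before doing any pixel work by redrawing the UIImage first. A rough sketch (normalizeImage: is just an illustrative name, not part of the code above):

- (UIImage *)normalizeImage:(UIImage *)image
{
    if (image.imageOrientation == UIImageOrientationUp)
        return image; // already upright, nothing to do
    UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
    // drawInRect: applies the orientation, so the resulting bitmap is always "up"
    [image drawInRect:CGRectMake(0, 0, image.size.width, image.size.height)];
    UIImage *normalized = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return normalized;
}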
I'm using the UIImagePickerController class. I have a button, and when I tap it I want to brighten the image. I have searched online, but the code I found isn't working.
Here is the code I found and used:
- (UIImage *)brightness:(UIImage *)image
{
    int level = 200; // put an int value here that goes from -255 to 255
    CGImageRef inImage = image.CGImage; // input CGImage
    CGContextRef ctx;
    CFDataRef m_DataRef;
    m_DataRef = CGDataProviderCopyData(CGImageGetDataProvider(inImage));
    UInt8 *m_PixelBuf = (UInt8 *)CFDataGetBytePtr(m_DataRef);
    int length = CFDataGetLength(m_DataRef);
    CGImageGetBitsPerComponent(inImage), CGImageGetBitsPerPixel(inImage), CGImageGetBytesPerRow(inImage); // (this statement has no effect)
    for (int index = 0; index < length; index += 4)
    {
        Byte tempR = m_PixelBuf[index + 1];
        Byte tempG = m_PixelBuf[index + 2];
        Byte tempB = m_PixelBuf[index + 3];
        int outputRed = level + tempR;
        int outputGreen = level + tempG;
        int outputBlue = level + tempB;
        if (outputRed > 255) outputRed = 255;
        if (outputGreen > 255) outputGreen = 255;
        if (outputBlue > 255) outputBlue = 255;
        if (outputRed < 0) outputRed = 0;
        if (outputGreen < 0) outputGreen = 0;
        if (outputBlue < 0) outputBlue = 0;
        m_PixelBuf[index + 1] = outputRed;
        m_PixelBuf[index + 2] = outputGreen;
        m_PixelBuf[index + 3] = outputBlue;
    }
    ctx = CGBitmapContextCreate(m_PixelBuf,
                                CGImageGetWidth(inImage),
                                CGImageGetHeight(inImage),
                                8,
                                CGImageGetBytesPerRow(inImage),
                                CGImageGetColorSpace(inImage),
                                kCGImageAlphaPremultipliedFirst);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *rawImage = [UIImage imageWithCGImage:imageRef];
    CGContextRelease(ctx);
    CGImageRelease(imageRef);
    CFRelease(m_DataRef);
    return rawImage;
}
The image stays at its original brightness; it doesn't change.
In general the idea looks good; most likely there is an error in how the pixel data is updated.
But to change brightness I suggest using Core Graphics instead of processing the pixels by hand:
CGFloat brightness = 0.5;
UIGraphicsBeginImageContext(image.size);
CGRect imageRect = CGRectMake(0, 0, image.size.width, image.size.height);
CGContextRef context = UIGraphicsGetCurrentContext();
// Original image
[image drawInRect:imageRect];
// Brightness overlay
CGContextSetFillColorWithColor(context, [UIColor colorWithRed:1.0 green:1.0 blue:1.0 alpha:brightness].CGColor);
CGContextAddRect(context, imageRect);
CGContextFillPath(context);
UIImage* resultImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
You can change the brightness overlay color to get the results you want.
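For example, you could wrap the same idea in a small helper that overlays white for positive values and black for negative ones. A sketch only; the method name and the -1.0 to 1.0 range are my own choices, not part of the code above:

// level in -1.0 ... 1.0: negative darkens, positive brightens
- (UIImage *)image:(UIImage *)image withBrightness:(CGFloat)level
{
    UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
    CGRect imageRect = CGRectMake(0, 0, image.size.width, image.size.height);
    CGContextRef context = UIGraphicsGetCurrentContext();
    [image drawInRect:imageRect];
    // White overlay brightens, black overlay darkens; the alpha controls the strength.
    UIColor *overlay = (level >= 0)
        ? [UIColor colorWithWhite:1.0 alpha:level]
        : [UIColor colorWithWhite:0.0 alpha:-level];
    CGContextSetFillColorWithColor(context, overlay.CGColor);
    CGContextFillRect(context, imageRect);
    UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return result;
}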
I have code that changes the pixels of a UIImage. I need to make the pixels at the touched locations transparent, and later, when I touch there again, make them non-transparent. How can I do it?
Code
// Creates a new image with the changed pixels
- (UIImage *)fromImage:(UIImage *)source
           colorBuffer:(NSArray *)colors
                 erase:(BOOL)eraser
{
    CGContextRef ctx;
    CGImageRef imageRef = [source CGImage];
    NSUInteger width = CGImageGetWidth(imageRef);
    NSUInteger height = CGImageGetHeight(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    unsigned char *rawData = malloc(height * width * 4);
    NSUInteger bytesPerPixel = 4;
    NSUInteger bytesPerRow = bytesPerPixel * width;
    NSUInteger bitsPerComponent = 8;
    CGContextRef context = CGBitmapContextCreate(rawData, width, height,
                                                 bitsPerComponent, bytesPerRow, colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
    CGContextRelease(context);
    int byteIndex = 0;
    for (int ii = 0; ii < width * height; ++ii)
    {
        if (eraser)
        {
            // SET PIXEL TRANSPARENT
        }
        else
        {
            // SET PIXEL NON-TRANSPARENT
        }
    }
    ctx = CGBitmapContextCreate(rawData,
                                CGImageGetWidth(imageRef),
                                CGImageGetHeight(imageRef),
                                8,
                                bytesPerRow,
                                colorSpace,
                                kCGImageAlphaPremultipliedLast);
    CGColorSpaceRelease(colorSpace);
    imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *rawImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGContextRelease(ctx);
    free(rawData);
    return rawImage;
}
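One way to fill in those two branches, assuming the RGBA premultiplied layout from the context above and assuming you keep a copy of the original pixels (originalData below is a buffer you would have to save yourself before erasing; it is not in the code above):

int byteIndex = 0;
for (int ii = 0; ii < width * height; ++ii)
{
    if (eraser)
    {
        // Premultiplied alpha: a fully transparent pixel has all four bytes at 0.
        rawData[byteIndex]     = 0; // R (premultiplied)
        rawData[byteIndex + 1] = 0; // G (premultiplied)
        rawData[byteIndex + 2] = 0; // B (premultiplied)
        rawData[byteIndex + 3] = 0; // A -> fully transparent
    }
    else
    {
        // Restore the pixel from the saved copy of the untouched image.
        memcpy(rawData + byteIndex, originalData + byteIndex, 4);
    }
    byteIndex += 4;
}

As written this touches every pixel; to restrict it to the touched area you would also need to compare each pixel's position against the touch location (or a mask) before changing it.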
I have a problem with cropping an image using the OpenCV library on a real iPhone.
I have an image with a selected area, and I'd like to apply a perspective transform to the image using this area. This works fine on the simulator, but on the iPhone the new image isn't mapped to the rectangle correctly and it also has a blue tint.
Here is my code:
+ (IplImage *)CreateIplImageFromUIImage:(UIImage *)image {
// Getting CGImage from UIImage
CGImageRef imageRef = image.CGImage;
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Creating temporal IplImage for drawing
IplImage *iplimage = cvCreateImage(
cvSize(image.size.width,image.size.height), IPL_DEPTH_8U, 4
);
// Creating CGContext for temporal IplImage
CGContextRef contextRef = CGBitmapContextCreate(
iplimage->imageData, iplimage->width, iplimage->height,
iplimage->depth, iplimage->widthStep,
colorSpace, kCGImageAlphaPremultipliedLast|kCGBitmapByteOrderDefault
);
// Drawing CGImage to CGContext
CGContextDrawImage(
contextRef,
CGRectMake(0, 0, image.size.width, image.size.height),
imageRef
);
CGContextRelease(contextRef);
CGColorSpaceRelease(colorSpace);
// Creating result IplImage
IplImage *ret = cvCreateImage(cvGetSize(iplimage), IPL_DEPTH_8U, 3);
cvCvtColor(iplimage, ret, CV_RGBA2BGR);
cvReleaseImage(&iplimage);
return ret;
}
+ (UIImage *)UIImageFromIplImage:(IplImage *)img {
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Creating result IplImage
IplImage *image = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 4);
cvCvtColor(img, image, CV_BGR2RGBA);
// Allocating the buffer for CGImage
NSData *data =
[NSData dataWithBytes:image->imageData length:image->imageSize];
CGDataProviderRef provider =
CGDataProviderCreateWithCFData((CFDataRef)data);
// Creating CGImage from chunk of IplImage
CGImageRef imageRef = CGImageCreate(
image->width, image->height,
image->depth, image->depth * image->nChannels, image->widthStep,
colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault,
provider, NULL, false, kCGRenderingIntentDefault
);
// Getting UIImage from CGImage
UIImage *ret = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);
return ret;
}
+ (UIImage *)perspectiveTransform: (UIImage*) originalImage :(CGFloat) h :(CGFloat) w :(CGPoint) point1 :(CGPoint) point2 :(CGPoint) point3 :(CGPoint) point4
{
CvPoint2D32f srcQuad[4];
srcQuad[0].x = point1.x;
srcQuad[0].y = point1.y;
srcQuad[1].x = point2.x;
srcQuad[1].y = point2.y;
srcQuad[2].x = point3.x;
srcQuad[2].y = point3.y;
srcQuad[3].x = point4.x;
srcQuad[3].y = point4.y;
IplImage *src = [self CreateIplImageFromUIImage:originalImage];
IplImage *dst = cvCreateImage(cvGetSize(src),
src->depth,
src->nChannels);
cvZero(dst);
CGFloat width = src->width;
CGFloat height = src->height;
CvMat* mmat = cvCreateMat(3, 3, CV_32FC1);
CvPoint2D32f *c1 = (CvPoint2D32f *)malloc(4 * sizeof(CvPoint2D32f));
CvPoint2D32f *c2 = (CvPoint2D32f *)malloc(4 * sizeof(CvPoint2D32f));
c1[0].x = round((width/w)*srcQuad[0].x); c1[0].y = round((height/h)*srcQuad[0].y);
c1[1].x = round((width/w)*srcQuad[1].x); c1[1].y = round((height/h)*srcQuad[1].y);
c1[2].x = round((width/w)*srcQuad[2].x); c1[2].y = round((height/h)*srcQuad[2].y);
c1[3].x = round((width/w)*srcQuad[3].x); c1[3].y = round((height/h)*srcQuad[3].y);
c2[0].x = 0; c2[0].y = 0;
c2[1].x = width - 1; c2[1].y = 0;
c2[2].x = 0; c2[2].y = height - 1;
c2[3].x = width - 1; c2[3].y = height - 1;
mmat = cvGetPerspectiveTransform(c1, c2, mmat);
free(c1);
free(c2);
cvWarpPerspective(src, dst, mmat, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
cvReleaseImage(&src);
cvReleaseMat(&mmat);
UIImage *newImage = [self UIImageFromIplImage:dst];
cvReleaseImage(&dst);
return newImage;
}
Thanks for your help!
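Not a confirmed fix, but two things stand out. A blue tint usually means the red and blue channels are being swapped somewhere between the RGBA bitmap context and the BGR IplImage, or that CGImageCreate is misreading the buffer because kCGImageAlphaNone doesn't really describe 32-bit-per-pixel data. One thing worth trying (a guess sketched against the code above, not tested on your setup) is to keep the OpenCV buffers in RGB order and tell Core Graphics to treat the fourth byte as padding:

// In CreateIplImageFromUIImage: stay in RGB order instead of swapping to BGR
cvCvtColor(iplimage, ret, CV_RGBA2RGB);

// In UIImageFromIplImage: convert back from RGB and mark the 4th byte as unused
cvCvtColor(img, image, CV_RGB2RGBA);
CGImageRef imageRef = CGImageCreate(
    image->width, image->height,
    image->depth, image->depth * image->nChannels, image->widthStep,
    colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault,
    provider, NULL, false, kCGRenderingIntentDefault
);

As for the result not mapping to a rectangle: c2 is filled in the order top-left, top-right, bottom-left, bottom-right, so point1 through point4 have to arrive in that same order or the warp will twist the quad.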