Flutter image_picker choose video - flutter

I am successfully using the Flutter plugin image_picker to choose images so that I can use them for upload, display, etc. I wondered if anyone had any guidance on how to modify this plugin to also list videos and allow them to be chosen and used for upload, etc.
I am looking for both iOS and Android modifications, if anyone has guidance on how to proceed or example code. I have made some progress but still need to get the camera to save video and to present it back. I will post the code changes so far: I have it selecting a video, but it will not present back to the app.
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@import UIKit;
#import <MobileCoreServices/MobileCoreServices.h>
#import "ImagePickerPlugin.h"
@interface ImagePickerPlugin () <UINavigationControllerDelegate, UIImagePickerControllerDelegate>
@end
static const int SOURCE_ASK_USER = 0;
static const int SOURCE_CAMERA = 1;
static const int SOURCE_GALLERY = 2;
@implementation ImagePickerPlugin {
FlutterResult _result;
NSDictionary *_arguments;
UIImagePickerController *_imagePickerController;
UIViewController *_viewController;
}
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
FlutterMethodChannel *channel =
[FlutterMethodChannel methodChannelWithName:@"image_picker"
binaryMessenger:[registrar messenger]];
UIViewController *viewController =
[UIApplication sharedApplication].delegate.window.rootViewController;
ImagePickerPlugin *instance = [[ImagePickerPlugin alloc] initWithViewController:viewController];
[registrar addMethodCallDelegate:instance channel:channel];
}
- (instancetype)initWithViewController:(UIViewController *)viewController {
self = [super init];
if (self) {
_viewController = viewController;
_imagePickerController = [[UIImagePickerController alloc] init];
}
return self;
}
- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
if (_result) {
_result([FlutterError errorWithCode:@"multiple_request"
message:@"Cancelled by a second request"
details:nil]);
_result = nil;
}
if ([@"pickImage" isEqualToString:call.method]) {
_imagePickerController.modalPresentationStyle = UIModalPresentationCurrentContext;
_imagePickerController.delegate = self;
_result = result;
_arguments = call.arguments;
int imageSource = [[_arguments objectForKey:@"source"] intValue];
switch (imageSource) {
case SOURCE_ASK_USER:
[self showImageSourceSelector];
break;
case SOURCE_CAMERA:
[self showCamera];
break;
case SOURCE_GALLERY:
[self showPhotoLibrary];
break;
default:
result([FlutterError errorWithCode:@"invalid_source"
message:@"Invalid image source."
details:nil]);
break;
}
} else {
result(FlutterMethodNotImplemented);
}
}
- (void)showImageSourceSelector {
UIAlertControllerStyle style = UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad
? UIAlertControllerStyleAlert
: UIAlertControllerStyleActionSheet;
UIAlertController *alert =
[UIAlertController alertControllerWithTitle:nil message:nil preferredStyle:style];
UIAlertAction *camera = [UIAlertAction actionWithTitle:@"Take Photo"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action) {
[self showCamera];
}];
UIAlertAction *library = [UIAlertAction actionWithTitle:@"Choose Photo"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action) {
[self showPhotoLibrary];
}];
UIAlertAction *cancel =
[UIAlertAction actionWithTitle:@"Cancel" style:UIAlertActionStyleCancel handler:nil];
[alert addAction:camera];
[alert addAction:library];
[alert addAction:cancel];
[_viewController presentViewController:alert animated:YES completion:nil];
}
- (void)showCamera {
// Camera is not available on simulators
if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
_imagePickerController.sourceType = UIImagePickerControllerCameraCaptureModeVideo;
[_viewController presentViewController:_imagePickerController animated:YES completion:nil];
} else {
[[[UIAlertView alloc] initWithTitle:@"Error"
message:@"Camera not available."
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil] show];
}
}
- (void)showPhotoLibrary {
// No need to check if SourceType is available. It always is.
//_imagePickerController.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
_imagePickerController.mediaTypes =[UIImagePickerController availableMediaTypesForSourceType:_imagePickerController.sourceType];
[_viewController presentViewController:_imagePickerController animated:YES completion:nil];
}
- (void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingMediaWithInfo:(NSDictionary<NSString *, id> *)info {
[_imagePickerController dismissViewControllerAnimated:YES completion:nil];
UIImage *image = [info objectForKey:UIImagePickerControllerEditedImage];
NSURL *videoURL = [info objectForKey:UIImagePickerControllerMediaURL];
if (image == nil) {
image = [info objectForKey:UIImagePickerControllerOriginalImage];
} else {
image = [self normalizedImage:image];
}
if (videoURL == nil) {
} else {
//image = videoURL;
}
NSNumber *maxWidth = [_arguments objectForKey:@"maxWidth"];
NSNumber *maxHeight = [_arguments objectForKey:@"maxHeight"];
if (maxWidth != (id)[NSNull null] || maxHeight != (id)[NSNull null]) {
image = [self scaledImage:image maxWidth:maxWidth maxHeight:maxHeight];
}
NSData *data = UIImageJPEGRepresentation(image, 1.0);
NSString *tmpDirectory = NSTemporaryDirectory();
NSString *guid = [[NSProcessInfo processInfo] globallyUniqueString];
// TODO(jackson): Using the cache directory might be better than temporary
// directory.
NSString *tmpFile = [NSString stringWithFormat:@"image_picker_%@.jpg", guid];
NSString *tmpPath = [tmpDirectory stringByAppendingPathComponent:tmpFile];
if ([[NSFileManager defaultManager] createFileAtPath:tmpPath contents:data attributes:nil]) {
_result(tmpPath);
} else {
_result([FlutterError errorWithCode:@"create_error"
message:@"Temporary file could not be created"
details:nil]);
}
_result = nil;
_arguments = nil;
}
// The way we save images to the tmp dir currently throws away all EXIF data
// (including the orientation of the image). That means, pics taken in portrait
// will not be orientated correctly as is. To avoid that, we rotate the actual
// image data.
// TODO(goderbauer): investigate how to preserve EXIF data.
- (UIImage *)normalizedImage:(UIImage *)image {
if (image.imageOrientation == UIImageOrientationUp) return image;
UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
[image drawInRect:(CGRect){0, 0, image.size}];
UIImage *normalizedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return normalizedImage;
}
- (UIImage *)scaledImage:(UIImage *)image
maxWidth:(NSNumber *)maxWidth
maxHeight:(NSNumber *)maxHeight {
double originalWidth = image.size.width;
double originalHeight = image.size.height;
bool hasMaxWidth = maxWidth != (id)[NSNull null];
bool hasMaxHeight = maxHeight != (id)[NSNull null];
double width = hasMaxWidth ? MIN([maxWidth doubleValue], originalWidth) : originalWidth;
double height = hasMaxHeight ? MIN([maxHeight doubleValue], originalHeight) : originalHeight;
bool shouldDownscaleWidth = hasMaxWidth && [maxWidth doubleValue] < originalWidth;
bool shouldDownscaleHeight = hasMaxHeight && [maxHeight doubleValue] < originalHeight;
bool shouldDownscale = shouldDownscaleWidth || shouldDownscaleHeight;
if (shouldDownscale) {
double downscaledWidth = (height / originalHeight) * originalWidth;
double downscaledHeight = (width / originalWidth) * originalHeight;
if (width < height) {
if (!hasMaxWidth) {
width = downscaledWidth;
} else {
height = downscaledHeight;
}
} else if (height < width) {
if (!hasMaxHeight) {
height = downscaledHeight;
} else {
width = downscaledWidth;
}
} else {
if (originalWidth < originalHeight) {
width = downscaledWidth;
} else if (originalHeight < originalWidth) {
height = downscaledHeight;
}
}
}
UIGraphicsBeginImageContextWithOptions(CGSizeMake(width, height), NO, 1.0);
[image drawInRect:CGRectMake(0, 0, width, height)];
UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return scaledImage;
}
@end
Thanks

Here is the iOS code that I have completed; I am still working on the Android side, so if anyone wants to assist, I will post where I am so far. This code replaces what is in the .m file of the iOS folder; no other changes are needed for it to pick and capture video along with images. You must figure out how to display the selected video/image in your app, but that's however you want to handle it. Again, let me know if you want to assist with finishing the Android side.
@import UIKit;
#import <MobileCoreServices/MobileCoreServices.h>
#import "MediaPickerPlugin.h"
@interface MediaPickerPlugin () <UINavigationControllerDelegate, UIImagePickerControllerDelegate>
@end
static const int SOURCE_ASK_USER = 0;
//static const int SOURCE_CAMERA = 0;
//static const int SOURCE_GALLERY = 0;
@implementation MediaPickerPlugin {
FlutterResult _result;
NSDictionary *_arguments;
UIImagePickerController *_imagePickerController;
UIViewController *_viewController;
}
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
FlutterMethodChannel *channel =
[FlutterMethodChannel methodChannelWithName:@"media_picker"
binaryMessenger:[registrar messenger]];
UIViewController *viewController =
[UIApplication sharedApplication].delegate.window.rootViewController;
MediaPickerPlugin *instance =
[[MediaPickerPlugin alloc] initWithViewController:viewController];
[registrar addMethodCallDelegate:instance channel:channel];
}
- (instancetype)initWithViewController:(UIViewController *)viewController {
self = [super init];
if (self) {
_viewController = viewController;
_imagePickerController = [[UIImagePickerController alloc] init];
}
return self;
}
- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
if (_result) {
_result([FlutterError errorWithCode:@"multiple_request"
message:@"Cancelled by a second request"
details:nil]);
_result = nil;
_arguments = nil;
}
if ([@"pickImage" isEqualToString:call.method]) {
_imagePickerController.modalPresentationStyle = UIModalPresentationCurrentContext;
_imagePickerController.delegate = self;
_result = result;
_arguments = call.arguments;
int imageSource = [[_arguments objectForKey:@"source"] intValue];
switch (imageSource) {
case SOURCE_ASK_USER:
[self showImageSourceSelector];
break;
default:
result([FlutterError errorWithCode:@"invalid_source"
message:@"Invalid image source."
details:nil]);
break;
}
} else {
result(FlutterMethodNotImplemented);
}
}
- (void)showImageSourceSelector {
UIAlertControllerStyle style = UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad
? UIAlertControllerStyleAlert
: UIAlertControllerStyleActionSheet;
UIAlertController *alert =
[UIAlertController alertControllerWithTitle:nil message:nil preferredStyle:style];
UIAlertAction *camera = [UIAlertAction actionWithTitle:@"Camera"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action) {
[self showCamera];
}];
UIAlertAction *library = [UIAlertAction actionWithTitle:@"Gallery"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action) {
[self showPhotoLibrary];
}];
UIAlertAction *cancel =
[UIAlertAction actionWithTitle:@"Cancel" style:UIAlertActionStyleCancel handler:nil];
[alert addAction:camera];
[alert addAction:library];
[alert addAction:cancel];
[_viewController presentViewController:alert animated:YES completion:nil];
}
- (void)showCamera {
// Camera is not available on simulators
if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
_imagePickerController.sourceType = UIImagePickerControllerSourceTypeCamera;
_imagePickerController.mediaTypes = [NSArray arrayWithObjects:@"public.movie", @"public.image", nil];
_imagePickerController.delegate = self;
_imagePickerController.restoresFocusAfterTransition = false;
_imagePickerController.allowsEditing = NO;
_imagePickerController.videoQuality = UIImagePickerControllerQualityTypeLow;
_imagePickerController.videoMaximumDuration = 30.0f; // 30 seconds
[_viewController presentViewController:_imagePickerController animated:YES completion:nil];
} else {
[[[UIAlertView alloc] initWithTitle:@"Error"
message:@"Camera not available."
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil] show];
}
}
- (void)showPhotoLibrary {
// No need to check if SourceType is available. It always is.
_imagePickerController.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
_imagePickerController.mediaTypes = [NSArray arrayWithObjects:@"public.movie", @"public.image", nil];
//_imagePickerController.mediaTypes =[UIImagePickerController availableMediaTypesForSourceType:_imagePickerController.sourceType];
[_viewController presentViewController:_imagePickerController animated:YES completion:nil];
}
- (void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingMediaWithInfo:(NSDictionary<NSString *, id> *)info {
[_imagePickerController dismissViewControllerAnimated:YES completion:nil];
NSString *mediaType = [info objectForKey:UIImagePickerControllerMediaType];
if ([mediaType isEqualToString:@"public.movie"]) {
NSURL *videoURL = [info objectForKey:UIImagePickerControllerMediaURL];
NSString *videoString = [videoURL absoluteString];
NSLog(@"Video File:%@", videoString);
_result(videoString);
} else {
UIImage *image = [info objectForKey:UIImagePickerControllerEditedImage];
if (image == nil) {
image = [info objectForKey:UIImagePickerControllerOriginalImage];
}
image = [self normalizedImage:image];
NSNumber *maxWidth = [_arguments objectForKey:@"maxWidth"];
NSNumber *maxHeight = [_arguments objectForKey:@"maxHeight"];
if (maxWidth != (id)[NSNull null] || maxHeight != (id)[NSNull null]) {
image = [self scaledImage:image maxWidth:maxWidth maxHeight:maxHeight];
}
NSData *data = UIImageJPEGRepresentation(image, 1.0);
NSString *tmpDirectory = NSTemporaryDirectory();
NSString *guid = [[NSProcessInfo processInfo] globallyUniqueString];
// TODO(jackson): Using the cache directory might be better than temporary
// directory.
NSString *tmpFile = [NSString stringWithFormat:@"image_picker_%@.jpg", guid];
NSString *tmpPath = [tmpDirectory stringByAppendingPathComponent:tmpFile];
NSLog(@"Image File:%@", tmpPath);
if ([[NSFileManager defaultManager] createFileAtPath:tmpPath contents:data attributes:nil]) {
_result(tmpPath);
} else {
_result([FlutterError errorWithCode:@"create_error"
message:@"Temporary file could not be created"
details:nil]);
}
_result = nil;
_arguments = nil;
}
_result = nil;
_arguments = nil;
}
// The way we save images to the tmp dir currently throws away all EXIF data
// (including the orientation of the image). That means, pics taken in portrait
// will not be orientated correctly as is. To avoid that, we rotate the actual
// image data.
// TODO(goderbauer): investigate how to preserve EXIF data.
- (UIImage *)normalizedImage:(UIImage *)image {
if (image.imageOrientation == UIImageOrientationUp) return image;
UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
[image drawInRect:(CGRect){0, 0, image.size}];
UIImage *normalizedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return normalizedImage;
}
//- (NSString *)normalVideo:(NSURL *)videoURL {
// NSString *normalVideo = UIImagePickerControllerMediaURL;
// return normalVideo;
//}
- (UIImage *)scaledImage:(UIImage *)image
maxWidth:(NSNumber *)maxWidth
maxHeight:(NSNumber *)maxHeight {
double originalWidth = image.size.width;
double originalHeight = image.size.height;
bool hasMaxWidth = maxWidth != (id)[NSNull null];
bool hasMaxHeight = maxHeight != (id)[NSNull null];
double width = hasMaxWidth ? MIN([maxWidth doubleValue], originalWidth) : originalWidth;
double height = hasMaxHeight ? MIN([maxHeight doubleValue], originalHeight) : originalHeight;
bool shouldDownscaleWidth = hasMaxWidth && [maxWidth doubleValue] < originalWidth;
bool shouldDownscaleHeight = hasMaxHeight && [maxHeight doubleValue] < originalHeight;
bool shouldDownscale = shouldDownscaleWidth || shouldDownscaleHeight;
if (shouldDownscale) {
double downscaledWidth = (height / originalHeight) * originalWidth;
double downscaledHeight = (width / originalWidth) * originalHeight;
if (width < height) {
if (!hasMaxWidth) {
width = downscaledWidth;
} else {
height = downscaledHeight;
}
} else if (height < width) {
if (!hasMaxHeight) {
height = downscaledHeight;
} else {
width = downscaledWidth;
}
} else {
if (originalWidth < originalHeight) {
width = downscaledWidth;
} else if (originalHeight < originalWidth) {
height = downscaledHeight;
}
}
}
UIGraphicsBeginImageContextWithOptions(CGSizeMake(width, height), NO, 1.0);
[image drawInRect:CGRectMake(0, 0, width, height)];
UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return scaledImage;
}
@end

I would probably add another method pickVideo to image_picker.dart, and then add the corresponding Android and iOS implementations of that in ImagePickerPlugin.m and ImagePickerPlugin.java.
Inside the latter two, I would use the iOS and Android APIs for videos, e.g. something like this on iOS:
Objective c - ios : How to pick video from Camera Roll?
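For reference, a minimal sketch of that approach on the iOS side - an illustration only, not the plugin's actual implementation. It assumes a hypothetical handler wired to a pickVideo method call, reusing the _result and _viewController ivars from the plugin code above:
// Hypothetical sketch: present a picker limited to videos.
- (void)showVideoLibrary {
    UIImagePickerController *picker = [[UIImagePickerController alloc] init];
    picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
    picker.mediaTypes = @[ (NSString *)kUTTypeMovie ]; // movies only
    picker.delegate = self;
    [_viewController presentViewController:picker animated:YES completion:nil];
}
- (void)imagePickerController:(UIImagePickerController *)picker
    didFinishPickingMediaWithInfo:(NSDictionary<NSString *, id> *)info {
    [picker dismissViewControllerAnimated:YES completion:nil];
    // For movies the picker returns a file URL rather than a UIImage.
    NSURL *videoURL = info[UIImagePickerControllerMediaURL];
    _result([videoURL path]);
    _result = nil;
}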

As of version 0.4.2, the plugin allows videos to be selected. From the changelog:
Added support for picking videos. Updated example app to show video
preview.

You can do that now using pickVideo, available in image_picker:
final _picker = ImagePicker();
PickedFile video = await _picker.getVideo(...)
...
Ref - https://pub.dev/packages/image_picker

You can use the image picker for recording video, and the chewie library to show the video via its video controller.
For more reference, see this video:
https://www.youtube.com/watch?time_continue=17&v=XSn5EwWBG-4&feature=emb_logo

Related

Printing on iOS

Hi there, can anyone help me with this? I am new to iOS development. I am trying to implement a print feature, but I am getting errors. I see nothing but a .xib file with some labels. The text view is there, and I just want to print that view. When I press the print button, the program crashes.
Here is my code:
- (NSData *)generatePDFDataForPrinting {
NSMutableData *myPdfData = [NSMutableData data];
UIGraphicsBeginPDFContextToData(myPdfData, kPDFPageBounds, nil);
UIGraphicsBeginPDFPage();
CGContextRef ctx = UIGraphicsGetCurrentContext();
[self drawStuffInContext:ctx]; // Method also usable from drawRect:.
UIGraphicsEndPDFContext();
return myPdfData;
}
- (void)drawStuffInContext:(CGContextRef)ctx {
UIFont *font = [UIFont fontWithName:@"Zapfino" size:48];
CGRect textRect = CGRectInset(kPDFPageBounds, 36, 36);
[@"hello world!" drawInRect:textRect withFont:font];
}
- (IBAction)printFromIphone:(id)sender {
float systemVersion = [[[UIDevice currentDevice] systemVersion] floatValue];
if (systemVersion>4.1) {
NSData *myPdfData = [NSData dataWithContentsOfFile:myPdfData]; //check the value inside |myPdfData| and |pdfPath| is the path of your pdf.
UIPrintInteractionController *controller = [UIPrintInteractionController sharedPrintController];
if (controller && [UIPrintInteractionController canPrintData:myPdfData]){
//controller.delegate = delegate; //if necessary else nil
UIPrintInfo *printInfo = [UIPrintInfo printInfo];
printInfo.outputType = UIPrintInfoOutputGeneral;
printInfo.jobName = [myPdfData lastPathComponent];
//printInfo.duplex = UIPrintInfoDuplexLongEdge;
controller.printInfo = printInfo;
controller.showsPageRange = YES;
controller.printingItem = myPdfData;
// We need a completion handler block for printing.
UIPrintInteractionCompletionHandler completionHandler = ^(UIPrintInteractionController *printController, BOOL completed, NSError *error) {
if(completed && error){
NSLog(@"FAILED! due to error in domain %@ with error code %u", error.domain, error.code);
}
};
// [controller presentFromRect:CGRectMake(200, 300, 100, 100) inView:senderView animated:YES completionHandler:completionHandler];
}else {
NSLog(@"Couldn't get shared UIPrintInteractionController!");
}
}
}
Not sure if it's a typo or not, but you've commented out your actual PDF data, which is why it's undeclared.
This line needs to be uncommented because you need myPdfData:
//NSData *myPdfData = [NSData dataWithContentsOfFile:pdfData]; //check the value inside |myPdfData| and |pdfPath| is the path of your pdf.
You can replace it with this line to use YOUR pdf instead of a file:
NSData *myPdfData = [self generatePDFDataForPrinting];
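Putting it together, a minimal sketch of the corrected print action under that assumption (the PDF is generated in memory rather than read from a file):
- (IBAction)printFromIphone:(id)sender {
    // Generate the PDF data in memory instead of reading a nonexistent file.
    NSData *myPdfData = [self generatePDFDataForPrinting];
    UIPrintInteractionController *controller = [UIPrintInteractionController sharedPrintController];
    if (controller && [UIPrintInteractionController canPrintData:myPdfData]) {
        UIPrintInfo *printInfo = [UIPrintInfo printInfo];
        printInfo.outputType = UIPrintInfoOutputGeneral;
        printInfo.jobName = @"My PDF"; // NSData has no lastPathComponent, so name the job directly
        controller.printInfo = printInfo;
        controller.printingItem = myPdfData;
        [controller presentAnimated:YES completionHandler:^(UIPrintInteractionController *pic, BOOL completed, NSError *error) {
            if (!completed && error) {
                NSLog(@"Printing failed: %@", error);
            }
        }];
    } else {
        NSLog(@"Couldn't get shared UIPrintInteractionController!");
    }
}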

Sending an email message using mailcore in Xcode: text of the body of the mail missing

This is the implementation file. The mail is sending, but the problem is that the text in the body of the mail is missing for the recipient. Can anyone please help me rectify this issue?
@implementation MyController
- (id)init
{
self = [super init];
if(self)
{
myMessage = [[CTCoreMessage alloc] init];
}
return self;
}
- (void)dealloc
{
[myMessage release];
[super dealloc];
}
- (IBAction)sendMessage:(id)sender
{
CTCoreMessage *msg = [[CTCoreMessage alloc] init];
[msg setTo:[myMessage to]];
[msg setFrom:[myMessage from]];
[msg setBody:[myMessage body]];
[msg setSubject:[myMessage subject]];
BOOL auth = ([useAuth state] == NSOnState);
BOOL tls = ([useTLS state] == NSOnState);
[CTSMTPConnection sendMessage:msg server:[server stringValue] username:[username stringValue]
password:[password stringValue] port:[port intValue] useTLS:tls useAuth:auth];
[msg release];
}
- (NSString *)to
{
return [[[myMessage to] anyObject] email];
}
- (void)setTo:(NSString *)aValue
{
CTCoreAddress *addr = [CTCoreAddress address];
[addr setEmail:aValue];
[myMessage setTo:[NSSet setWithObject:addr]];
}
- (NSString *)from
{
return [[[myMessage from] anyObject] email];
}
- (void)setFrom:(NSString *)aValue
{
CTCoreAddress *addr = [CTCoreAddress address];
[addr setEmail:aValue];
[addr setName:@""];
[myMessage setFrom:[NSSet setWithObject:addr]];
}
- (NSString *)subject
{
return [myMessage subject];
}
- (void)setSubject:(NSString *)aValue
{
[myMessage setSubject:aValue];
}
- (NSString *)body
{
return [myMessage body];
}
- (void)setBody:(NSString *)aValue
{
[myMessage setBody:aValue];
}
@end
You have cyclical references in there, Rehana.
Look at your setBody method: you're trying to set the body of myMessage to the current body of myMessage, both of which are NULL.
You need to set the values on the myMessage object first; there is no need to copy them into a second message.
Here is how you would do it:
- (IBAction)sendMessage:(id)sender
{
//Assuming textfields with the names I have passed as parameter.
//This is only for demonstration
//Set body to myMessage object from textview
[self setBody:bodytextview.text];
[self setTo:txtfTo.text];
[self setFrom:txtfFrom.text];
[self setSubject:txtfSubject.text];
BOOL auth = ([useAuth state] == NSOnState);
BOOL tls = ([useTLS state] == NSOnState);
//Note I'm passing myMessage Object
[CTSMTPConnection sendMessage:myMessage server:[server stringValue] username:[username stringValue]
password:[password stringValue] port:[port intValue] useTLS:tls useAuth:auth];
}

ALAssetLibrary and iOS 5.0

I have coded a simple method to fetch data from the assets library, which works fine on iOS 4.3 but delays fetching images on iOS 5. What should I do to speed up the fetching process on iOS 5?
-(void)setImages
{
int count =0;
int photoNumber = [[templateDictionary objectForKey:@"ElementsOnPage"] intValue];
for (int i=currentCount; count<photoNumber; i++) {
[self data:count+1 count:i];
count++;
}
}
-(void)data:(int)photoNumber count:(int)currentCount
{
NSURL *url;
UIImageView *firstImageView = [[UIImageView alloc]init];
CGFloat x,y,wid,h;
float ang;
if (currentCount>=[ImageURLArray count]) {
[firstImageView release];
return;
}
else
{
url = [NSURL URLWithString:[ImageURLArray objectAtIndex:currentCount]];
switch (photoNumber) {
case 1:
{
x = [[templateDictionary objectForKey:@"FirstElement_X"] floatValue];
y = [[templateDictionary objectForKey:@"FirstElement_Y"] floatValue];
wid = [[templateDictionary objectForKey:@"FirstElement_Width"] floatValue];
h = [[templateDictionary objectForKey:@"FirstElement_Height"] floatValue];
ang = [[templateDictionary objectForKey:@"FirstElement_Angle"] floatValue];
firstImageView.tag = 1+10;
//FirstImage
}
break;
case 2:
{
x = [[templateDictionary objectForKey:@"SecondElement_X"] floatValue];
y = [[templateDictionary objectForKey:@"SecondElement_Y"] floatValue];
wid = [[templateDictionary objectForKey:@"SecondElement_Width"] floatValue];
h = [[templateDictionary objectForKey:@"SecondElement_Height"] floatValue];
ang = [[templateDictionary objectForKey:@"SecondElement_Angle"] floatValue];
firstImageView.tag = 2+10;
//SecondImage
}
break;
case 3:
{
x = [[templateDictionary objectForKey:@"ThirdElement_X"] floatValue];
y = [[templateDictionary objectForKey:@"ThirdElement_Y"] floatValue];
wid = [[templateDictionary objectForKey:@"ThirdElement_Width"] floatValue];
h = [[templateDictionary objectForKey:@"ThirdElement_Height"] floatValue];
ang = [[templateDictionary objectForKey:@"ThirdElement_Angle"] floatValue];
firstImageView.tag = 3+10;
//ThirdImage
}
break;
default:
break;
}
[firstImageView setFrame:CGRectMake(x, y, wid, h)];
dispatch_async(dispatch_get_main_queue(), ^
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
// Enumerate just the photos and videos group by using ALAssetsGroupSavedPhotos.
[library assetForURL:url
resultBlock:^(ALAsset* asset)
{
UIImage* img = [UIImage imageWithCGImage:[asset.defaultRepresentation fullResolutionImage]];
[firstImageView setImage:img];
}
failureBlock:^(NSError* error)
{
NSLog(@"error requesting asset");
}
];
[library release];
// Group enumerator Block
[pool release];
});
if ([[contentType objectAtIndex:currentCount] isEqualToString:@"1"]) {
UIButton *videoImage = [[UIButton alloc]initWithFrame:CGRectMake((firstImageView.frame.size.width/2)-25,(firstImageView.frame.size.height/2)-25,50,50)];
videoImage.transform = CGAffineTransformMakeRotation(ang*(3.14/180));
[videoImage setBackgroundImage:[UIImage imageNamed:@"videothumb.png"] forState:UIControlStateNormal];
[videoImage addTarget:self action:@selector(PlayMusicOnClickofButton:) forControlEvents:UIControlEventTouchUpInside];
[firstImageView addSubview:videoImage];
videoImage.tag = currentCount+1000;
[videoImage release];
}
}
firstImageView.transform = CGAffineTransformMakeRotation(ang*(3.14/180));
firstImageView.userInteractionEnabled = YES;
[coverImageView addSubview:firstImageView];
[coverImageView setImage:[UIImage imageNamed:innerBackground]];
[firstImageView release];
}
I already reported a bug to Apple (following a request of an Apple employee in the dev forums) on the performance degradation of assetForURL in iOS 5.
Background: The asset library was refactored and is now based on Core Data; on each assetForURL call, the SDK actually opens a new SQLite connection (BAH...), causing a significant performance hit.
Temp solution: In my app I need to load 200 pics using assetForURL. In iOS 4 it took 100 ms; in iOS 5, around 5+ seconds. I found out that enumerating the entire library (around 1500 pics) and caching it in a URL --> ASSET dictionary takes around 3 seconds. I'm using this technique for now. Watch out for stale assets if you hold on to them and changes to the library occur.
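A rough sketch of that caching technique (illustrative only; the enumeration is asynchronous, so build the cache before you need it):
// Illustrative sketch: enumerate the library once and cache URL -> ALAsset.
NSMutableDictionary *assetCache = [NSMutableDictionary dictionary];
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:^(ALAssetsGroup *group, BOOL *stop) {
    [group enumerateAssetsUsingBlock:^(ALAsset *asset, NSUInteger index, BOOL *innerStop) {
        if (asset) {
            NSURL *url = [asset valueForProperty:ALAssetPropertyAssetURL];
            if (url) [assetCache setObject:asset forKey:url];
        }
    }];
} failureBlock:^(NSError *error) {
    NSLog(@"library enumeration failed: %@", error);
}];
// Later, replace each assetForURL:resultBlock:failureBlock: call with a lookup:
// ALAsset *asset = [assetCache objectForKey:url];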

UIImage problem

I am loading images of size 450KB into a UIImageView and then adding it to a UIScrollView. While scrolling through the 30 images continuously, it crashes. What may be the reason? Is this a memory leak issue, or is the image size the problem? Thanks in advance.
Here is my code:
@try {
NSAutoreleasePool *pool;
pool = [[NSAutoreleasePool alloc] init];
//NSArray *array = [global_ContentString componentsSeparatedByString:@"###"];
NSArray *array1 = [catalogURL componentsSeparatedByString:@"&"];
//**NSLog(@"array1****** = %@",array1);
NSLog(@"loading catalog image(method: loadCatalogImage).......%@%@",baseURL, [[[array1 objectAtIndex:0] componentsSeparatedByString:@"##"] objectAtIndex:0]);
//NSLog(@"baseURL = %@",baseURL);
NSLog(@"loading catalog image.......%@%@",baseURL, [[[array1 objectAtIndex:0] componentsSeparatedByString:@"##"] objectAtIndex:0]);
zoomedImageURL = [NSString stringWithFormat:@"%@%@", baseURL, [[[array1 objectAtIndex:0] componentsSeparatedByString:@"##"] objectAtIndex:1]];
[zoomedImageURL retain];
NSLog(@"aaaaaaaaaaaaaa = %@",zoomedImageURL);
//UIImage *img = [UIImage imageWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:[NSString stringWithFormat:@"%@%@",baseURL, [[[array1 objectAtIndex:0] componentsSeparatedByString:@"##"] objectAtIndex:0]]]]];
UIImage *img = [UIImage imageWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:[NSString stringWithFormat:@"%@",zoomedImageURL]]]];
imgView.contentMode = UIViewContentModeScaleAspectFit;
imgView.image = img;//[GPSTripTracking generatePhotoThumbnail:img:109];
[pool release];
[global_imgProgress stopAnimating];
}
@catch (NSException *e) {
[global_imgProgress stopAnimating];
NSLog(@"Exception....");
}
@finally {
}
I am releasing my imgView in the dealloc method.
I implemented the following code in scrollViewDidScroll:
- (void)scrollViewDidScroll:(UIScrollView *)sender {
// We don't want a "feedback loop" between the UIPageControl and the scroll delegate in
// which a scroll event generated from the user hitting the page control triggers updates from
// the delegate method. We use a boolean to disable the delegate logic when the page control is used.
if (pageControlUsed) {
// do nothing - the scroll was initiated from the page control, not the user dragging
//pageText.text = [NSString stringWithFormat:@"%d/%d", (pageControl.currentPage +1), pageControl.numberOfPages];
pageText.text = [NSString stringWithFormat:@"%d/%d", (pageControl.currentPage ), pageControl.numberOfPages];
//NSLog(#"not scrollling page....");
return;
}
// Switch the indicator when more than 50% of the previous/next page is visible
CGFloat pageWidth = scrollView.frame.size.width;
int page = floor((scrollView.contentOffset.x - pageWidth / 2) / pageWidth) + 1;
pageControl.currentPage = page;
// load the visible page and the page on either side of it (to avoid flashes when the user starts scrolling)
[self loadScrollViewWithPage:page - 1];
[self loadScrollViewWithPage:page];
[self loadScrollViewWithPage:page + 1];
//NSLog(#"scrolling page....%d", page);
// A possible optimization would be to unload the views+controllers which are no longer visible
}
and my code for loadScrollViewWithPage is:
- (void)loadScrollViewWithPage:(int)page
{
//page--;
if (page < 0) return;
if (page >= numberOfPages) return;
if(!isViewCatalog && searchId == 1)
{
//NSLog(#"curre page = %d",pageControl.currentPage);
NSArray *array1 = [global_ContentString componentsSeparatedByString:@"###"];
if(searchInCatalogFlag == 1)
{
pageControl.currentPage=0;
NSArray *urlArray = [[array1 objectAtIndex:pageControl.currentPage] componentsSeparatedByString:@"##"];
//NSLog(@"url array** = %@",urlArray);
headerText.text = [NSString stringWithString:[urlArray objectAtIndex:0]];
pageText.text = [NSString stringWithFormat:@"%d/%d", pageControl.currentPage, (pageControl.numberOfPages - 1)];
}
else
{
NSArray *urlArray = [[array1 objectAtIndex:pageControl.currentPage] componentsSeparatedByString:@"##"];
//NSLog(@"url array** = %@",urlArray);
headerText.text = [NSString stringWithString:[urlArray objectAtIndex:0]];
pageText.text = [NSString stringWithFormat:@"%d/%d", pageControl.currentPage, (pageControl.numberOfPages - 1)];
}
if(page == selectedPage && ![global_imgProgress isAnimating])
[global_imgProgress startAnimating];
}
else
{
headerText.text = [NSString stringWithString:global_SelectedCatalogName];
pageText.text = [NSString stringWithFormat:@"%d/%d", (pageControl.currentPage + 1), (pageControl.numberOfPages - 1)];
if(page == selectedPage + 1 && ![global_imgProgress isAnimating] )
[global_imgProgress startAnimating];
// NSLog(#"header text = %#", headerText.text);
//headerText.text = [NSString stringWithString:[urlArray objectAtIndex:0]];
}
FullPageView *controller = [viewControllers objectAtIndex:page];
if ((NSNull *)controller == [NSNull null] ) {
//NSLog(@"Loading page =========== %d, %d", page, selectedPage);
//voucherPageNo = page;
//[voucherImage retain];
if(universalApp==2)
{
controller = [[FullPageView alloc] initWithNibName:@"FullPageView_iphone" bundle:nil];//:page];
[controller.view setFrame:CGRectMake(0, 0, 320,332)];
}
else
{
controller = [[FullPageView alloc] initWithNibName:@"FullPageView" bundle:nil];//:page];
[controller.view setFrame:CGRectMake(0, 192, 768, 691)];
}
//[controller.view setFrame:CGRectMake(0, 0, 320,480)];
//[controller.view setFrame:CGRectMake(0, 192, 768, 691)];
if((!isViewCatalog && searchId < 2 && searchInCatalogFlag == 0))// || searchInCatalogFlag == 1)
{
// NSLog(@">>>>>>>>>>>>>>>>>> LOADING IMAGE >>>>>>>>>>>>>>>>>>>>");
[controller setPageNo:page];
// if(page >= selectedPage - 1)
[NSThread detachNewThreadSelector:@selector(loadImage) toTarget:controller withObject:nil];
}
else //if((page >= (selectedPage - 1) && page <= (selectedPage + 1)) || !isFirstTimeLoading)
{
NSLog(@"Loading CATALOG IMAGE = %d, %d, %@", page, selectedPage, (isFirstTimeLoading ? @"YES" : @"NO"));
[controller setCatalogURL:[NSString stringWithFormat:@"%@", [catalogArray objectAtIndex:page+(searchId< 2 && !isViewCatalog && searchInCatalogFlag == 0?0:1)]]];
NSLog(@"loading image ipad= %@", [catalogArray objectAtIndex:page+(searchId< 2 && !isViewCatalog && searchInCatalogFlag == 0?0:1)]);
// if(page >= selectedPage - 1)
[NSThread detachNewThreadSelector:@selector(loadCatalogImage) toTarget:controller withObject:nil];
// if(page == (selectedPage + 1))
//isFirstTimeLoading = NO;
}
[viewControllers replaceObjectAtIndex:page withObject:controller];
[controller release];
}
// add the controller's view to the scroll view
if (nil == controller.view.superview)
{
// NSLog(@"Voucher view added at page..... %d", page);
CGRect frame = scrollView.frame;
frame.origin.x = frame.size.width * page;
frame.origin.y = 0;
controller.view.frame = frame;
// NSLog(@">>>>>>>>> %f, %f", frame.size.width, frame.origin.x);
[scrollView addSubview:controller.view];
}
//if(page == pageControl.currentPage)
//[imgProgress startAnimating];
//else
//pageControlUsed = YES;
}
Where might the problem be?
450KB is the compressed image size. When an image is loaded into memory, it is uncompressed.
A rule of thumb for working out how much memory an uncompressed image will occupy is:
width * height * 4
With 30 images it is very likely that you're running out of memory.
You should write your code to only keep images in memory if they are visible on screen.
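For illustration (an assumed decode size, not from the question): if each 450 KB JPEG decodes to 1600 x 1200 pixels, that is 1600 * 1200 * 4 ≈ 7.3 MB uncompressed, so 30 such images need roughly 220 MB - far more memory than iOS will grant a single app.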
It is very likely that your program gets terminated by iOS because it consumes too much memory. Start it from Xcode and look at the console - it will probably print that it receives memory warnings.
You will have to load the images on demand, i.e. only when the user gets close to seeing them, and you will have to release the ones that move out of view again. To that end, implement the
- (void)scrollViewDidScroll:(UIScrollView *)scrollView
delegate method, look at the scroll view's content offset and load/release the appropriate images. Alternatively, you can choose to not release them there but instead wait for a memory warning to do so. For that, implement the
- (void)didReceiveMemoryWarning
method in your view controller.
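A minimal sketch of that on-demand approach, assuming (hypothetically) one image view per page, tagged with its page index as in the question's code:
// Illustrative sketch: keep only the current page and its neighbours decoded.
- (void)scrollViewDidScroll:(UIScrollView *)scrollView {
    CGFloat pageWidth = scrollView.frame.size.width;
    int page = floor((scrollView.contentOffset.x - pageWidth / 2) / pageWidth) + 1;
    for (int i = 0; i < numberOfPages; i++) {
        UIImageView *pageView = (UIImageView *)[scrollView viewWithTag:i + 10];
        if (abs(i - page) > 1) {
            pageView.image = nil; // release the decoded bitmap for far-away pages
        } else if (pageView.image == nil) {
            [self loadScrollViewWithPage:i]; // reload when it comes back into range
        }
    }
}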
This might be a memory leak issue.
Try to replace this line:
UIImage *img = [UIImage imageWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:[NSString stringWithFormat:@"%@",zoomedImageURL]]]];
and use this instead, releasing the image once you are done with it:
UIImage *img = [[UIImage alloc] initWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:[NSString stringWithFormat:@"%@",zoomedImageURL]]]];
<<<<<YOUR CODE>>>>
[img release];
I hope this helps.

Drawing waveform with AVAssetReader

I am reading a song from the iPod library using its asset URL (named audioUrl in the code).
I can play it many ways, I can cut it, and I can do some processing with it, but...
I really don't understand what to do with this CMSampleBufferRef to get data for drawing the waveform! I need info about peak values; how can I get it this (or maybe another) way?
AVAssetTrack * songTrack = [audioUrl.tracks objectAtIndex:0];
AVAssetReaderTrackOutput * output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:nil];
[reader addOutput:output];
[output release];
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];
while (reader.status == AVAssetReaderStatusReading){
AVAssetReaderTrackOutput * trackOutput =
(AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (sampleBufferRef) { /* what am I going to do with this? */ }
Please help me!
I was searching for a similar thing and decided to "roll my own."
I realize this is an old post, but in case anyone else is in search of this, here is my solution. It is relatively quick and dirty and normalizes the image to "full scale".
The images it creates are "wide", i.e. you need to put them in a UIScrollView or otherwise manage the display.
This is based on some answers given to this question.
Sample Output
EDIT: I have added a logarithmic version of the averaging and render methods; see the end of this message for the alternate version and comparison outputs. I personally prefer the original linear version, but have decided to post it in case someone can improve on the algorithm used.
You'll need these imports:
#import <MediaPlayer/MediaPlayer.h>
#import <AVFoundation/AVFoundation.h>
First, a generic rendering method that takes a pointer to averaged sample data and returns a UIImage. Note that these samples are not playable audio samples.
-(UIImage *) audioImageGraph:(SInt16 *) samples
normalizeMax:(SInt16) normalizeMax
sampleCount:(NSInteger) sampleCount
channelCount:(NSInteger) channelCount
imageHeight:(float) imageHeight {
CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
UIGraphicsBeginImageContext(imageSize);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
CGContextSetAlpha(context,1.0);
CGRect rect;
rect.size = imageSize;
rect.origin.x = 0;
rect.origin.y = 0;
CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
CGColorRef rightcolor = [[UIColor redColor] CGColor];
CGContextFillRect(context, rect);
CGContextSetLineWidth(context, 1.0);
float halfGraphHeight = (imageHeight / 2) / (float) channelCount ;
float centerLeft = halfGraphHeight;
float centerRight = (halfGraphHeight*3) ;
float sampleAdjustmentFactor = (imageHeight/ (float) channelCount) / (float) normalizeMax;
for (NSInteger intSample = 0 ; intSample < sampleCount ; intSample ++ ) {
SInt16 left = *samples++;
float pixels = (float) left;
pixels *= sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerLeft-pixels);
CGContextAddLineToPoint(context, intSample, centerLeft+pixels);
CGContextSetStrokeColorWithColor(context, leftcolor);
CGContextStrokePath(context);
if (channelCount==2) {
SInt16 right = *samples++;
float pixels = (float) right;
pixels *= sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerRight - pixels);
CGContextAddLineToPoint(context, intSample, centerRight + pixels);
CGContextSetStrokeColorWithColor(context, rightcolor);
CGContextStrokePath(context);
}
}
// Create new image
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
// Tidy up
UIGraphicsEndImageContext();
return newImage;
}
Next, a method that takes an AVURLAsset and returns PNG image data.
- (NSData *) renderPNGAudioPictogramForAsset:(AVURLAsset *)songAsset {
NSError * error = nil;
AVAssetReader * reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];
AVAssetTrack * songTrack = [songAsset.tracks objectAtIndex:0];
NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
// [NSNumber numberWithInt:44100.0],AVSampleRateKey, /*Not Supported*/
// [NSNumber numberWithInt: 2],AVNumberOfChannelsKey, /*Not Supported*/
[NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
nil];
AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];
[reader addOutput:output];
[output release];
UInt32 sampleRate,channelCount;
NSArray* formatDesc = songTrack.formatDescriptions;
for(unsigned int i = 0; i < [formatDesc count]; ++i) {
CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
if(fmtDesc ) {
sampleRate = fmtDesc->mSampleRate;
channelCount = fmtDesc->mChannelsPerFrame;
// NSLog(@"channels:%u, bytes/packet: %u, sampleRate %f",fmtDesc->mChannelsPerFrame, fmtDesc->mBytesPerPacket,fmtDesc->mSampleRate);
}
}
UInt32 bytesPerSample = 2 * channelCount;
SInt16 normalizeMax = 0;
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];
UInt64 totalBytes = 0;
SInt64 totalLeft = 0;
SInt64 totalRight = 0;
NSInteger sampleTally = 0;
NSInteger samplesPerPixel = sampleRate / 50;
while (reader.status == AVAssetReaderStatusReading){
AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (sampleBufferRef){
CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);
size_t length = CMBlockBufferGetDataLength(blockBufferRef);
totalBytes += length;
NSAutoreleasePool *wader = [[NSAutoreleasePool alloc] init];
NSMutableData * data = [NSMutableData dataWithLength:length];
CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);
SInt16 * samples = (SInt16 *) data.mutableBytes;
int sampleCount = length / bytesPerSample;
for (int i = 0; i < sampleCount ; i ++) {
SInt16 left = *samples++;
totalLeft += left;
SInt16 right;
if (channelCount==2) {
right = *samples++;
totalRight += right;
}
sampleTally++;
if (sampleTally > samplesPerPixel) {
left = totalLeft / sampleTally;
SInt16 fix = abs(left);
if (fix > normalizeMax) {
normalizeMax = fix;
}
[fullSongData appendBytes:&left length:sizeof(left)];
if (channelCount==2) {
right = totalRight / sampleTally;
SInt16 fix = abs(right);
if (fix > normalizeMax) {
normalizeMax = fix;
}
[fullSongData appendBytes:&right length:sizeof(right)];
}
totalLeft = 0;
totalRight = 0;
sampleTally = 0;
}
}
[wader drain];
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
}
}
NSData * finalData = nil;
if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown){
// Something went wrong. return nil
return nil;
}
if (reader.status == AVAssetReaderStatusCompleted){
NSLog(@"rendering output graphics using normalizeMax %d",normalizeMax);
UIImage *test = [self audioImageGraph:(SInt16 *)
fullSongData.bytes
normalizeMax:normalizeMax
sampleCount:fullSongData.length / 4
channelCount:2
imageHeight:100];
finalData = imageToData(test);
}
[fullSongData release];
[reader release];
return finalData;
}
Advanced Option:
Finally, if you want to be able to play the audio using AVAudioPlayer, you'll need to cache it to your app's cache folder. Since I was doing that, I decided to cache the image data also, and wrapped the whole thing into a UIImage category. You need to include this open source offering to extract the audio, and some code from here to handle some background threading features.
First, some defines, and a few generic class methods for handling path names etc.
//#define imgExt @"jpg"
//#define imageToData(x) UIImageJPEGRepresentation(x,4)
#define imgExt @"png"
#define imageToData(x) UIImagePNGRepresentation(x)
+ (NSString *) assetCacheFolder {
NSArray *assetFolderRoot = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
return [NSString stringWithFormat:@"%@/audio", [assetFolderRoot objectAtIndex:0]];
}
+ (NSString *) cachedAudioPictogramPathForMPMediaItem:(MPMediaItem*) item {
NSString *assetFolder = [[self class] assetCacheFolder];
NSNumber * libraryId = [item valueForProperty:MPMediaItemPropertyPersistentID];
NSString *assetPictogramFilename = [NSString stringWithFormat:@"asset_%@.%@",libraryId,imgExt];
return [NSString stringWithFormat:@"%@/%@", assetFolder, assetPictogramFilename];
}
+ (NSString *) cachedAudioFilepathForMPMediaItem:(MPMediaItem*) item {
NSString *assetFolder = [[self class] assetCacheFolder];
NSURL * assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
NSNumber * libraryId = [item valueForProperty:MPMediaItemPropertyPersistentID];
NSString *assetFileExt = [[[assetURL path] lastPathComponent] pathExtension];
NSString *assetFilename = [NSString stringWithFormat:@"asset_%@.%@",libraryId,assetFileExt];
return [NSString stringWithFormat:@"%@/%@", assetFolder, assetFilename];
}
+ (NSURL *) cachedAudioURLForMPMediaItem:(MPMediaItem*) item {
NSString *assetFilepath = [[self class] cachedAudioFilepathForMPMediaItem:item];
return [NSURL fileURLWithPath:assetFilepath];
}
Now the init method that does "the business"
- (id) initWithMPMediaItem:(MPMediaItem*) item
completionBlock:(void (^)(UIImage* delayedImagePreparation))completionBlock {
NSFileManager *fman = [NSFileManager defaultManager];
NSString *assetPictogramFilepath = [[self class] cachedAudioPictogramPathForMPMediaItem:item];
if ([fman fileExistsAtPath:assetPictogramFilepath]) {
NSLog(@"Returning cached waveform pictogram: %@",[assetPictogramFilepath lastPathComponent]);
self = [self initWithContentsOfFile:assetPictogramFilepath];
return self;
}
NSString *assetFilepath = [[self class] cachedAudioFilepathForMPMediaItem:item];
NSURL *assetFileURL = [NSURL fileURLWithPath:assetFilepath];
if ([fman fileExistsAtPath:assetFilepath]) {
NSLog(@"scanning cached audio data to create UIImage file: %@",[assetFilepath lastPathComponent]);
[assetFileURL retain];
[assetPictogramFilepath retain];
[NSThread MCSM_performBlockInBackground: ^{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
NSData *waveFormData = [self renderPNGAudioPictogramForAsset:asset];
[waveFormData writeToFile:assetPictogramFilepath atomically:YES];
[assetFileURL release];
[assetPictogramFilepath release];
if (completionBlock) {
[waveFormData retain];
[NSThread MCSM_performBlockOnMainThread:^{
UIImage *result = [UIImage imageWithData:waveFormData];
NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data in UIImage %0.0f x %0.0f pixels)",waveFormData.length,[imgExt uppercaseString],result.size.width,result.size.height);
completionBlock(result);
[waveFormData release];
}];
}
}];
return nil;
} else {
NSString *assetFolder = [[self class] assetCacheFolder];
[fman createDirectoryAtPath:assetFolder withIntermediateDirectories:YES attributes:nil error:nil];
NSLog(@"Preparing to import audio asset data %@",[assetFilepath lastPathComponent]);
[assetPictogramFilepath retain];
[assetFileURL retain];
TSLibraryImport* import = [[TSLibraryImport alloc] init];
NSURL * assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
[import importAsset:assetURL toURL:assetFileURL completionBlock:^(TSLibraryImport* import) {
//check the status and error properties of
//TSLibraryImport
if (import.error) {
NSLog (@"audio data import failed:%@",import.error);
} else {
NSLog (@"Creating waveform pictogram file: %@", [assetPictogramFilepath lastPathComponent]);
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
NSData *waveFormData = [self renderPNGAudioPictogramForAsset:asset];
[waveFormData writeToFile:assetPictogramFilepath atomically:YES];
if (completionBlock) {
[waveFormData retain];
[NSThread MCSM_performBlockOnMainThread:^{
UIImage *result = [UIImage imageWithData:waveFormData];
NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data in UIImage %0.0f x %0.0f pixels)",waveFormData.length,[imgExt uppercaseString],result.size.width,result.size.height);
completionBlock(result);
[waveFormData release];
}];
}
}
[assetPictogramFilepath release];
[assetFileURL release];
} ];
return nil;
}
}
An example of invoking this :
-(void) importMediaItem {
MPMediaItem* item = [self mediaItem];
// since we will be needing this for playback, save the url to the cached audio.
[url release];
url = [[UIImage cachedAudioURLForMPMediaItem:item] retain];
[waveFormImage release];
waveFormImage = [[UIImage alloc ] initWithMPMediaItem:item completionBlock:^(UIImage* delayedImagePreparation){
waveFormImage = [delayedImagePreparation retain];
[self displayWaveFormImage];
}];
if (waveFormImage) {
[waveFormImage retain];
[self displayWaveFormImage];
}
}
Logarithmic version of averaging and render methods
#define absX(x) (x<0?0-x:x)
#define minMaxX(x,mn,mx) (x<=mn?mn:(x>=mx?mx:x))
#define noiseFloor (-90.0)
#define decibel(amplitude) (20.0 * log10(absX(amplitude)/32767.0))
-(UIImage *) audioImageLogGraph:(Float32 *) samples
normalizeMax:(Float32) normalizeMax
sampleCount:(NSInteger) sampleCount
channelCount:(NSInteger) channelCount
imageHeight:(float) imageHeight {
CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
UIGraphicsBeginImageContext(imageSize);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
CGContextSetAlpha(context,1.0);
CGRect rect;
rect.size = imageSize;
rect.origin.x = 0;
rect.origin.y = 0;
CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
CGColorRef rightcolor = [[UIColor redColor] CGColor];
CGContextFillRect(context, rect);
CGContextSetLineWidth(context, 1.0);
float halfGraphHeight = (imageHeight / 2) / (float) channelCount ;
float centerLeft = halfGraphHeight;
float centerRight = (halfGraphHeight*3) ;
float sampleAdjustmentFactor = (imageHeight/ (float) channelCount) / (normalizeMax - noiseFloor) / 2;
for (NSInteger intSample = 0 ; intSample < sampleCount ; intSample ++ ) {
Float32 left = *samples++;
float pixels = (left - noiseFloor) * sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerLeft-pixels);
CGContextAddLineToPoint(context, intSample, centerLeft+pixels);
CGContextSetStrokeColorWithColor(context, leftcolor);
CGContextStrokePath(context);
if (channelCount==2) {
Float32 right = *samples++;
float pixels = (right - noiseFloor) * sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerRight - pixels);
CGContextAddLineToPoint(context, intSample, centerRight + pixels);
CGContextSetStrokeColorWithColor(context, rightcolor);
CGContextStrokePath(context);
}
}
// Create new image
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
// Tidy up
UIGraphicsEndImageContext();
return newImage;
}
- (NSData *) renderPNGAudioPictogramLogForAsset:(AVURLAsset *)songAsset {
NSError * error = nil;
AVAssetReader * reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];
AVAssetTrack * songTrack = [songAsset.tracks objectAtIndex:0];
NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
// [NSNumber numberWithInt:44100.0],AVSampleRateKey, /*Not Supported*/
// [NSNumber numberWithInt: 2],AVNumberOfChannelsKey, /*Not Supported*/
[NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
nil];
AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];
[reader addOutput:output];
[output release];
UInt32 sampleRate,channelCount;
NSArray* formatDesc = songTrack.formatDescriptions;
for(unsigned int i = 0; i < [formatDesc count]; ++i) {
CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
if(fmtDesc ) {
sampleRate = fmtDesc->mSampleRate;
channelCount = fmtDesc->mChannelsPerFrame;
// NSLog(#"channels:%u, bytes/packet: %u, sampleRate %f",fmtDesc->mChannelsPerFrame, fmtDesc->mBytesPerPacket,fmtDesc->mSampleRate);
}
}
UInt32 bytesPerSample = 2 * channelCount;
Float32 normalizeMax = noiseFloor;
NSLog(@"normalizeMax = %f",normalizeMax);
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];
UInt64 totalBytes = 0;
Float64 totalLeft = 0;
Float64 totalRight = 0;
Float32 sampleTally = 0;
NSInteger samplesPerPixel = sampleRate / 50;
while (reader.status == AVAssetReaderStatusReading){
AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (sampleBufferRef){
CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);
size_t length = CMBlockBufferGetDataLength(blockBufferRef);
totalBytes += length;
NSAutoreleasePool *wader = [[NSAutoreleasePool alloc] init];
NSMutableData * data = [NSMutableData dataWithLength:length];
CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);
SInt16 * samples = (SInt16 *) data.mutableBytes;
int sampleCount = length / bytesPerSample;
for (int i = 0; i < sampleCount ; i ++) {
Float32 left = (Float32) *samples++;
left = decibel(left);
left = minMaxX(left,noiseFloor,0);
totalLeft += left;
Float32 right;
if (channelCount==2) {
right = (Float32) *samples++;
right = decibel(right);
right = minMaxX(right,noiseFloor,0);
totalRight += right;
}
sampleTally++;
if (sampleTally > samplesPerPixel) {
left = totalLeft / sampleTally;
if (left > normalizeMax) {
normalizeMax = left;
}
// NSLog(@"left average = %f, normalizeMax = %f",left,normalizeMax);
[fullSongData appendBytes:&left length:sizeof(left)];
if (channelCount==2) {
right = totalRight / sampleTally;
if (right > normalizeMax) {
normalizeMax = right;
}
[fullSongData appendBytes:&right length:sizeof(right)];
}
totalLeft = 0;
totalRight = 0;
sampleTally = 0;
}
}
[wader drain];
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
}
}
NSData * finalData = nil;
if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown){
// Something went wrong. Handle it.
}
if (reader.status == AVAssetReaderStatusCompleted){
// You're done. It worked.
NSLog(@"rendering output graphics using normalizeMax %f",normalizeMax);
UIImage *test = [self audioImageLogGraph:(Float32 *) fullSongData.bytes
normalizeMax:normalizeMax
sampleCount:fullSongData.length / (sizeof(Float32) * 2)
channelCount:2
imageHeight:100];
finalData = imageToData(test);
}
[fullSongData release];
[reader release];
return finalData;
}
comparison outputs
Linear plot for start of "Warm It Up" by Acme Swing Company
Logarithmic plot for start of "Warm It Up" by Acme Swing Company
You should be able to get a buffer of audio from your sampleBufferRef and then iterate through those values to build your waveform:
CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer( sampleBufferRef );
CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(sampleBufferRef);
AudioBufferList audioBufferList;
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
sampleBufferRef,
NULL,
&audioBufferList,
sizeof(audioBufferList),
NULL,
NULL,
kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
&buffer
);
// this copies your audio out to a temp buffer but you should be able to iterate through this buffer instead
SInt32* readBuffer = (SInt32 *)malloc(numSamplesInBuffer * sizeof(SInt32));
memcpy( readBuffer, audioBufferList.mBuffers[0].mData, numSamplesInBuffer*sizeof(SInt32));
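From there, a minimal sketch of the iteration step, assuming 16-bit interleaved PCM (adjust the sample type to whatever your reader's output settings actually produce):
// Illustrative sketch: reduce the raw samples to one peak value per waveform column.
SInt16 *samples = (SInt16 *)audioBufferList.mBuffers[0].mData;
int samplesPerColumn = 441; // e.g. 44100 Hz / 100 columns per second
SInt16 peak = 0;
for (CMItemCount i = 0; i < numSamplesInBuffer; i++) {
    SInt16 amplitude = abs(samples[i]);
    if (amplitude > peak) peak = amplitude;
    if ((i + 1) % samplesPerColumn == 0) {
        // draw or store one column whose height is proportional to peak
        peak = 0;
    }
}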
Another approach, using Swift 5 and AVAudioFile:
///Gets the audio file from a URL, downsamples and draws into the sound layer.
func drawSoundWave(fromURL url:URL, fromPosition:Int64, totalSeconds:UInt32, samplesSecond:CGFloat) throws{
print("\(logClassName) Drawing sound from \(url)")
do{
waveViewInfo.samplesSeconds = samplesSecond
//Get audio file and format from URL
let audioFile = try AVAudioFile(forReading: url)
waveViewInfo.format = audioFile.processingFormat
audioFile.framePosition = fromPosition * Int64(waveViewInfo.format.sampleRate)
//Getting the buffer
let frameCapacity:UInt32 = totalSeconds * UInt32(waveViewInfo.format.sampleRate)
guard let audioPCMBuffer = AVAudioPCMBuffer(pcmFormat: waveViewInfo.format, frameCapacity: frameCapacity) else{ throw AppError("Unable to get the AVAudioPCMBuffer") }
try audioFile.read(into: audioPCMBuffer, frameCount: frameCapacity)
let audioPCMBufferFloatValues:[Float] = Array(UnsafeBufferPointer(start: audioPCMBuffer.floatChannelData?.pointee,
count: Int(audioPCMBuffer.frameLength)))
waveViewInfo.points = []
waveViewInfo.maxValue = 0
for index in stride(from: 0, to: audioPCMBufferFloatValues.count, by: Int(audioFile.fileFormat.sampleRate) / Int(waveViewInfo.samplesSeconds)){
let aSample = CGFloat(audioPCMBufferFloatValues[index])
waveViewInfo.points.append(aSample)
let fix = abs(aSample)
if fix > waveViewInfo.maxValue{
waveViewInfo.maxValue = fix
}
}
print("\(logClassName) Finished the points - Count = \(waveViewInfo.points.count) / Max = \(waveViewInfo.maxValue)")
populateSoundImageView(with: waveViewInfo)
}
catch{
throw error
}
}
///Converts the sound wave in to a UIImage
func populateSoundImageView(with waveViewInfo:WaveViewInfo){
let imageSize:CGSize = CGSize(width: CGFloat(waveViewInfo.points.count),//CGFloat(waveViewInfo.points.count) * waveViewInfo.sampleSpace,
height: frame.height)
let drawingRect = CGRect(origin: .zero, size: imageSize)
UIGraphicsBeginImageContextWithOptions(imageSize, false, 0)
defer {
UIGraphicsEndImageContext()
}
print("\(logClassName) Converting sound view in rect \(drawingRect)")
guard let context:CGContext = UIGraphicsGetCurrentContext() else{ return }
context.setFillColor(waveViewInfo.backgroundColor.cgColor)
context.setAlpha(1.0)
context.fill(drawingRect)
context.setLineWidth(1.0)
// context.setLineWidth(waveViewInfo.lineWidth)
let sampleAdjustFactor = imageSize.height / waveViewInfo.maxValue
for pointIndex in waveViewInfo.points.indices{
let pixel = waveViewInfo.points[pointIndex] * sampleAdjustFactor
context.move(to: CGPoint(x: CGFloat(pointIndex), y: middleY - pixel))
context.addLine(to: CGPoint(x: CGFloat(pointIndex), y: middleY + pixel))
context.setStrokeColor(waveViewInfo.strokeColor.cgColor)
context.strokePath()
}
// for pointIndex in waveViewInfo.points.indices{
//
// let pixel = waveViewInfo.points[pointIndex] * sampleAdjustFactor
//
// context.move(to: CGPoint(x: CGFloat(pointIndex) * waveViewInfo.sampleSpace, y: middleY - pixel))
// context.addLine(to: CGPoint(x: CGFloat(pointIndex) * waveViewInfo.sampleSpace, y: middleY + pixel))
//
// context.setStrokeColor(waveViewInfo.strokeColor.cgColor)
// context.strokePath()
//
// }
// var xIncrement:CGFloat = 0
// for point in waveViewInfo.points{
//
// let normalizedPoint = point * sampleAdjustFactor
//
// context.move(to: CGPoint(x: xIncrement, y: middleY - normalizedPoint))
// context.addLine(to: CGPoint(x: xIncrement, y: middleX + normalizedPoint))
// context.setStrokeColor(waveViewInfo.strokeColor.cgColor)
// context.strokePath()
//
// xIncrement += waveViewInfo.sampleSpace
//
// }
guard let soundWaveImage = UIGraphicsGetImageFromCurrentImageContext() else{ return }
soundWaveImageView.image = soundWaveImage
// //In case of handling sample space in for
// updateWidthConstraintValue(soundWaveImage.size.width)
updateWidthConstraintValue(soundWaveImage.size.width * waveViewInfo.sampleSpace)
}
WHERE
class WaveViewInfo {
var format:AVAudioFormat!
var samplesSeconds:CGFloat = 50
var lineWidth:CGFloat = 0.20
var sampleSpace:CGFloat = 0.20
var strokeColor:UIColor = .red
var backgroundColor:UIColor = .clear
var maxValue:CGFloat = 0
var points:[CGFloat] = [CGFloat]()
}
At the moment this only prints one sound wave, but it can be extended. The good part is that you can render an audio track in parts.
A little bit of refactoring of the above answers (using AVAudioFile):
import AVFoundation
import CoreGraphics
import Foundation
import UIKit
class WaveGenerator {
private func readBuffer(_ audioUrl: URL) -> UnsafeBufferPointer<Float> {
let file = try! AVAudioFile(forReading: audioUrl)
let audioFormat = file.processingFormat
let audioFrameCount = UInt32(file.length)
guard let buffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount)
else { return UnsafeBufferPointer<Float>(start: nil, count: 0) }
do {
try file.read(into: buffer)
} catch {
print(error)
}
// let floatArray = Array(UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength)))
let floatArray = UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength))
return floatArray
}
private func generateWaveImage(
_ samples: UnsafeBufferPointer<Float>,
_ imageSize: CGSize,
_ strokeColor: UIColor,
_ backgroundColor: UIColor
) -> UIImage? {
let drawingRect = CGRect(origin: .zero, size: imageSize)
UIGraphicsBeginImageContextWithOptions(imageSize, false, 0)
let middleY = imageSize.height / 2
guard let context: CGContext = UIGraphicsGetCurrentContext() else { return nil }
context.setFillColor(backgroundColor.cgColor)
context.setAlpha(1.0)
context.fill(drawingRect)
context.setLineWidth(0.25)
let max: CGFloat = CGFloat(samples.max() ?? 0)
let heightNormalizationFactor = imageSize.height / max / 2
let widthNormalizationFactor = imageSize.width / CGFloat(samples.count)
for index in 0 ..< samples.count {
let pixel = CGFloat(samples[index]) * heightNormalizationFactor
let x = CGFloat(index) * widthNormalizationFactor
context.move(to: CGPoint(x: x, y: middleY - pixel))
context.addLine(to: CGPoint(x: x, y: middleY + pixel))
context.setStrokeColor(strokeColor.cgColor)
context.strokePath()
}
guard let soundWaveImage = UIGraphicsGetImageFromCurrentImageContext() else { return nil }
UIGraphicsEndImageContext()
return soundWaveImage
}
func generateWaveImage(from audioUrl: URL, in imageSize: CGSize) -> UIImage? {
let samples = readBuffer(audioUrl)
let img = generateWaveImage(samples, imageSize, UIColor.blue, UIColor.white)
return img
}
}
Usage
let url = Bundle.main.url(forResource: "TEST1.mp3", withExtension: "")!
let img = waveGenerator.generateWaveImage(from: url, in: CGSize(width: 600, height: 200))