I am successfully using the Flutter plugin image_picker to choose images so that I can use them for upload, display, etc. I wondered if anyone had any guidance on how to modify this plugin to also show videos and allow them to be chosen and used for upload, etc.
Looking for iOS and Android modifications if anyone has guidance on how to proceed, or example code. I have made some progress, but I still need to get the camera to save video and be able to present it. I will post the code changes so far. I have it selecting a video, but it will not present back to the app.
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@import UIKit;
#import <MobileCoreServices/MobileCoreServices.h>
#import "ImagePickerPlugin.h"
@interface ImagePickerPlugin ()<UINavigationControllerDelegate, UIImagePickerControllerDelegate>
@end
static const int SOURCE_ASK_USER = 0;
static const int SOURCE_CAMERA = 1;
static const int SOURCE_GALLERY = 2;
@implementation ImagePickerPlugin {
FlutterResult _result;
NSDictionary *_arguments;
UIImagePickerController *_imagePickerController;
UIViewController *_viewController;
}
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
FlutterMethodChannel *channel =
[FlutterMethodChannel methodChannelWithName:@"image_picker"
binaryMessenger:[registrar messenger]];
UIViewController *viewController =
[UIApplication sharedApplication].delegate.window.rootViewController;
ImagePickerPlugin *instance = [[ImagePickerPlugin alloc] initWithViewController:viewController];
[registrar addMethodCallDelegate:instance channel:channel];
}
- (instancetype)initWithViewController:(UIViewController *)viewController {
self = [super init];
if (self) {
_viewController = viewController;
_imagePickerController = [[UIImagePickerController alloc] init];
}
return self;
}
- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
if (_result) {
_result([FlutterError errorWithCode:@"multiple_request"
message:@"Cancelled by a second request"
details:nil]);
_result = nil;
}
if ([@"pickImage" isEqualToString:call.method]) {
_imagePickerController.modalPresentationStyle = UIModalPresentationCurrentContext;
_imagePickerController.delegate = self;
_result = result;
_arguments = call.arguments;
int imageSource = [[_arguments objectForKey:@"source"] intValue];
switch (imageSource) {
case SOURCE_ASK_USER:
[self showImageSourceSelector];
break;
case SOURCE_CAMERA:
[self showCamera];
break;
case SOURCE_GALLERY:
[self showPhotoLibrary];
break;
default:
result([FlutterError errorWithCode:@"invalid_source"
message:@"Invalid image source."
details:nil]);
break;
}
} else {
result(FlutterMethodNotImplemented);
}
}
- (void)showImageSourceSelector {
UIAlertControllerStyle style = UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad
? UIAlertControllerStyleAlert
: UIAlertControllerStyleActionSheet;
UIAlertController *alert =
[UIAlertController alertControllerWithTitle:nil message:nil preferredStyle:style];
UIAlertAction *camera = [UIAlertAction actionWithTitle:@"Take Photo"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action) {
[self showCamera];
}];
UIAlertAction *library = [UIAlertAction actionWithTitle:@"Choose Photo"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action) {
[self showPhotoLibrary];
}];
UIAlertAction *cancel =
[UIAlertAction actionWithTitle:@"Cancel" style:UIAlertActionStyleCancel handler:nil];
[alert addAction:camera];
[alert addAction:library];
[alert addAction:cancel];
[_viewController presentViewController:alert animated:YES completion:nil];
}
- (void)showCamera {
// Camera is not available on simulators
if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
_imagePickerController.sourceType = UIImagePickerControllerCameraCaptureModeVideo;
[_viewController presentViewController:_imagePickerController animated:YES completion:nil];
} else {
[[[UIAlertView alloc] initWithTitle:@"Error"
message:@"Camera not available."
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil] show];
}
}
- (void)showPhotoLibrary {
// No need to check if SourceType is available. It always is.
//_imagePickerController.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
_imagePickerController.mediaTypes =[UIImagePickerController availableMediaTypesForSourceType:_imagePickerController.sourceType];
[_viewController presentViewController:_imagePickerController animated:YES completion:nil];
}
- (void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingMediaWithInfo:(NSDictionary<NSString *, id> *)info {
[_imagePickerController dismissViewControllerAnimated:YES completion:nil];
UIImage *image = [info objectForKey:UIImagePickerControllerEditedImage];
NSURL *videoURL = [info objectForKey:UIImagePickerControllerMediaURL];
if (image == nil) {
image = [info objectForKey:UIImagePickerControllerOriginalImage];
} else {
image = [self normalizedImage:image];
}
if (videoURL == nil) {
} else {
//image = videoURL;
}
NSNumber *maxWidth = [_arguments objectForKey:@"maxWidth"];
NSNumber *maxHeight = [_arguments objectForKey:@"maxHeight"];
if (maxWidth != (id)[NSNull null] || maxHeight != (id)[NSNull null]) {
image = [self scaledImage:image maxWidth:maxWidth maxHeight:maxHeight];
}
NSData *data = UIImageJPEGRepresentation(image, 1.0);
NSString *tmpDirectory = NSTemporaryDirectory();
NSString *guid = [[NSProcessInfo processInfo] globallyUniqueString];
// TODO(jackson): Using the cache directory might be better than temporary
// directory.
NSString *tmpFile = [NSString stringWithFormat:@"image_picker_%@.jpg", guid];
NSString *tmpPath = [tmpDirectory stringByAppendingPathComponent:tmpFile];
if ([[NSFileManager defaultManager] createFileAtPath:tmpPath contents:data attributes:nil]) {
_result(tmpPath);
} else {
_result([FlutterError errorWithCode:@"create_error"
message:@"Temporary file could not be created"
details:nil]);
}
_result = nil;
_arguments = nil;
}
// The way we save images to the tmp dir currently throws away all EXIF data
// (including the orientation of the image). That means, pics taken in portrait
// will not be orientated correctly as is. To avoid that, we rotate the actual
// image data.
// TODO(goderbauer): investigate how to preserve EXIF data.
- (UIImage *)normalizedImage:(UIImage *)image {
if (image.imageOrientation == UIImageOrientationUp) return image;
UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
[image drawInRect:(CGRect){0, 0, image.size}];
UIImage *normalizedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return normalizedImage;
}
- (UIImage *)scaledImage:(UIImage *)image
maxWidth:(NSNumber *)maxWidth
maxHeight:(NSNumber *)maxHeight {
double originalWidth = image.size.width;
double originalHeight = image.size.height;
bool hasMaxWidth = maxWidth != (id)[NSNull null];
bool hasMaxHeight = maxHeight != (id)[NSNull null];
double width = hasMaxWidth ? MIN([maxWidth doubleValue], originalWidth) : originalWidth;
double height = hasMaxHeight ? MIN([maxHeight doubleValue], originalHeight) : originalHeight;
bool shouldDownscaleWidth = hasMaxWidth && [maxWidth doubleValue] < originalWidth;
bool shouldDownscaleHeight = hasMaxHeight && [maxHeight doubleValue] < originalHeight;
bool shouldDownscale = shouldDownscaleWidth || shouldDownscaleHeight;
if (shouldDownscale) {
double downscaledWidth = (height / originalHeight) * originalWidth;
double downscaledHeight = (width / originalWidth) * originalHeight;
if (width < height) {
if (!hasMaxWidth) {
width = downscaledWidth;
} else {
height = downscaledHeight;
}
} else if (height < width) {
if (!hasMaxHeight) {
height = downscaledHeight;
} else {
width = downscaledWidth;
}
} else {
if (originalWidth < originalHeight) {
width = downscaledWidth;
} else if (originalHeight < originalWidth) {
height = downscaledHeight;
}
}
}
UIGraphicsBeginImageContextWithOptions(CGSizeMake(width, height), NO, 1.0);
[image drawInRect:CGRectMake(0, 0, width, height)];
UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return scaledImage;
}
@end
Thanks
Here is the iOS code that I have completed; I am still working on the Android side, so if anyone wants to assist, I will post where I am so far. This code replaces what is in the .m file of the iOS folder; no other changes are needed for this to work to pick and capture video along with images. You must figure out how to display the selected video/image in your app, but that's however you want to handle it. Again, let me know if you want to assist with finishing the Android side.
@import UIKit;
#import <MobileCoreServices/MobileCoreServices.h>
#import "MediaPickerPlugin.h"
@interface MediaPickerPlugin ()<UINavigationControllerDelegate, UIImagePickerControllerDelegate>
@end
static const int SOURCE_ASK_USER = 0;
//static const int SOURCE_CAMERA = 0;
//static const int SOURCE_GALLERY = 0;
@implementation MediaPickerPlugin {
FlutterResult _result;
NSDictionary *_arguments;
UIImagePickerController *_imagePickerController;
UIViewController *_viewController;
}
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
FlutterMethodChannel *channel =
[FlutterMethodChannel methodChannelWithName:@"media_picker"
binaryMessenger:[registrar messenger]];
UIViewController *viewController =
[UIApplication sharedApplication].delegate.window.rootViewController;
MediaPickerPlugin *instance =
[[MediaPickerPlugin alloc] initWithViewController:viewController];
[registrar addMethodCallDelegate:instance channel:channel];
}
- (instancetype)initWithViewController:(UIViewController *)viewController {
self = [super init];
if (self) {
_viewController = viewController;
_imagePickerController = [[UIImagePickerController alloc] init];
}
return self;
}
- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
if (_result) {
_result([FlutterError errorWithCode:@"multiple_request"
message:@"Cancelled by a second request"
details:nil]);
_result = nil;
_arguments = nil;
}
if ([@"pickImage" isEqualToString:call.method]) {
_imagePickerController.modalPresentationStyle = UIModalPresentationCurrentContext;
_imagePickerController.delegate = self;
_result = result;
_arguments = call.arguments;
int imageSource = [[_arguments objectForKey:@"source"] intValue];
switch (imageSource) {
case SOURCE_ASK_USER:
[self showImageSourceSelector];
break;
default:
result([FlutterError errorWithCode:@"invalid_source"
message:@"Invalid image source."
details:nil]);
break;
}
} else {
result(FlutterMethodNotImplemented);
}
}
- (void)showImageSourceSelector {
UIAlertControllerStyle style = UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad
? UIAlertControllerStyleAlert
: UIAlertControllerStyleActionSheet;
UIAlertController *alert =
[UIAlertController alertControllerWithTitle:nil message:nil preferredStyle:style];
UIAlertAction *camera = [UIAlertAction actionWithTitle:@"Camera"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action) {
[self showCamera];
}];
UIAlertAction *library = [UIAlertAction actionWithTitle:@"Gallery"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action) {
[self showPhotoLibrary];
}];
UIAlertAction *cancel =
[UIAlertAction actionWithTitle:@"Cancel" style:UIAlertActionStyleCancel handler:nil];
[alert addAction:camera];
[alert addAction:library];
[alert addAction:cancel];
[_viewController presentViewController:alert animated:YES completion:nil];
}
- (void)showCamera {
// Camera is not available on simulators
if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
_imagePickerController.sourceType = UIImagePickerControllerSourceTypeCamera;
_imagePickerController.mediaTypes = [NSArray arrayWithObjects:@"public.movie", @"public.image", nil];
_imagePickerController.delegate = self;
_imagePickerController.restoresFocusAfterTransition = false;
_imagePickerController.allowsEditing = NO;
_imagePickerController.videoQuality = UIImagePickerControllerQualityTypeLow;
_imagePickerController.videoMaximumDuration = 30.0f; // 30 seconds
[_viewController presentViewController:_imagePickerController animated:YES completion:nil];
} else {
[[[UIAlertView alloc] initWithTitle:@"Error"
message:@"Camera not available."
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil] show];
}
}
- (void)showPhotoLibrary {
// No need to check if SourceType is available. It always is.
_imagePickerController.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
_imagePickerController.mediaTypes = [NSArray arrayWithObjects:@"public.movie", @"public.image", nil];
//_imagePickerController.mediaTypes =[UIImagePickerController availableMediaTypesForSourceType:_imagePickerController.sourceType];
[_viewController presentViewController:_imagePickerController animated:YES completion:nil];
}
- (void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingMediaWithInfo:(NSDictionary<NSString *, id> *)info {
[_imagePickerController dismissViewControllerAnimated:YES completion:nil];
NSString *mediaType = [info objectForKey:UIImagePickerControllerMediaType];
if ([mediaType isEqualToString:@"public.movie"]) {
NSURL *videoURL = [info objectForKey:UIImagePickerControllerMediaURL];
NSString *videoString = [videoURL absoluteString];
NSLog(@"Video File:%@", videoString);
_result(videoString);
} else {
UIImage *image = [info objectForKey:UIImagePickerControllerEditedImage];
if (image == nil) {
image = [info objectForKey:UIImagePickerControllerOriginalImage];
}
image = [self normalizedImage:image];
NSNumber *maxWidth = [_arguments objectForKey:@"maxWidth"];
NSNumber *maxHeight = [_arguments objectForKey:@"maxHeight"];
if (maxWidth != (id)[NSNull null] || maxHeight != (id)[NSNull null]) {
image = [self scaledImage:image maxWidth:maxWidth maxHeight:maxHeight];
}
NSData *data = UIImageJPEGRepresentation(image, 1.0);
NSString *tmpDirectory = NSTemporaryDirectory();
NSString *guid = [[NSProcessInfo processInfo] globallyUniqueString];
// TODO(jackson): Using the cache directory might be better than temporary
// directory.
NSString *tmpFile = [NSString stringWithFormat:@"image_picker_%@.jpg", guid];
NSString *tmpPath = [tmpDirectory stringByAppendingPathComponent:tmpFile];
NSLog(@"Image File:%@", tmpPath);
if ([[NSFileManager defaultManager] createFileAtPath:tmpPath contents:data attributes:nil]) {
_result(tmpPath);
} else {
_result([FlutterError errorWithCode:@"create_error"
message:@"Temporary file could not be created"
details:nil]);
}
_result = nil;
_arguments = nil;
}
_result = nil;
_arguments = nil;
}
// The way we save images to the tmp dir currently throws away all EXIF data
// (including the orientation of the image). That means, pics taken in portrait
// will not be orientated correctly as is. To avoid that, we rotate the actual
// image data.
// TODO(goderbauer): investigate how to preserve EXIF data.
- (UIImage *)normalizedImage:(UIImage *)image {
if (image.imageOrientation == UIImageOrientationUp) return image;
UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
[image drawInRect:(CGRect){0, 0, image.size}];
UIImage *normalizedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return normalizedImage;
}
//- (NSString *)normalVideo:(NSURL *)videoURL {
// NSString *normalVideo = UIImagePickerControllerMediaURL;
// return normalVideo;
//}
- (UIImage *)scaledImage:(UIImage *)image
maxWidth:(NSNumber *)maxWidth
maxHeight:(NSNumber *)maxHeight {
double originalWidth = image.size.width;
double originalHeight = image.size.height;
bool hasMaxWidth = maxWidth != (id)[NSNull null];
bool hasMaxHeight = maxHeight != (id)[NSNull null];
double width = hasMaxWidth ? MIN([maxWidth doubleValue], originalWidth) : originalWidth;
double height = hasMaxHeight ? MIN([maxHeight doubleValue], originalHeight) : originalHeight;
bool shouldDownscaleWidth = hasMaxWidth && [maxWidth doubleValue] < originalWidth;
bool shouldDownscaleHeight = hasMaxHeight && [maxHeight doubleValue] < originalHeight;
bool shouldDownscale = shouldDownscaleWidth || shouldDownscaleHeight;
if (shouldDownscale) {
double downscaledWidth = (height / originalHeight) * originalWidth;
double downscaledHeight = (width / originalWidth) * originalHeight;
if (width < height) {
if (!hasMaxWidth) {
width = downscaledWidth;
} else {
height = downscaledHeight;
}
} else if (height < width) {
if (!hasMaxHeight) {
height = downscaledHeight;
} else {
width = downscaledWidth;
}
} else {
if (originalWidth < originalHeight) {
width = downscaledWidth;
} else if (originalHeight < originalWidth) {
height = downscaledHeight;
}
}
}
UIGraphicsBeginImageContextWithOptions(CGSizeMake(width, height), NO, 1.0);
[image drawInRect:CGRectMake(0, 0, width, height)];
UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return scaledImage;
}
@end
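One thing to watch in the code above: the video branch returns [videoURL absoluteString], which is a file:// URL string, while the image branch returns a plain filesystem path. If your Dart side expects a path in both cases (an assumption about how you consume the result, not part of the original answer), you could return the URL's path instead:
NSURL *videoURL = [info objectForKey:UIImagePickerControllerMediaURL];
// -[NSURL path] yields a plain path such as /private/var/...;
// absoluteString would prepend the file:// scheme.
_result([videoURL path]);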
I would probably add another method pickVideo to image_picker.dart, and then add the corresponding Android and iOS implementations of that in imagePickerPlugin.m and ImagePickerPlugin.java.
Inside the latter two, I would use the iOS and Android APIs for videos, e.g. something like this on iOS:
Objective c - ios : How to pick video from Camera Roll?
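For illustration, here is a minimal sketch of what the iOS side of such a pickVideo call might look like, reusing the _imagePickerController / _viewController / _result plumbing from the plugin code above (this is an assumed wiring, not the plugin's actual implementation):
// Hypothetical handler for a "pickVideo" method call.
- (void)showVideoLibrary {
  _imagePickerController.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
  // Restrict the picker to movies only; kUTTypeMovie comes from <MobileCoreServices/MobileCoreServices.h>.
  _imagePickerController.mediaTypes = @[ (NSString *)kUTTypeMovie ];
  _imagePickerController.delegate = self;
  [_viewController presentViewController:_imagePickerController animated:YES completion:nil];
}
// ...and in the UIImagePickerControllerDelegate callback, hand the movie's file path back to Dart:
NSURL *videoURL = [info objectForKey:UIImagePickerControllerMediaURL];
_result([videoURL path]);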
As of version 0.4.2, the plugin allows videos to be selected:
Added support for picking videos. Updated example app to show video
preview.
You can do that now using pickVideo available in image_picker
final _picker = ImagePicker();
PickedFile video = await _picker.getVideo(...)
...
Ref - https://pub.dev/packages/image_picker
You can use image_picker to record video, and the chewie library to show the video via its video controller.
For more reference, see this video:
https://www.youtube.com/watch?time_continue=17&v=XSn5EwWBG-4&feature=emb_logo
I'm running a thread to fetch drivers' locations every 10 seconds and want to remove the previously added markers from the map, but it doesn't work.
My code:
-(void)APiResponse:(id)returnJson
{
[googleMapsDriverPin setMap:nil];
googleMapsDriverPin = nil;
NSMutableArray *driverPins = [[NSMutableArray alloc]init];
for (int x = 0; x < [[returnJson valueForKey:@"drivers"] count]; x++) {
CLLocation *driverLocations = [[CLLocation alloc]initWithLatitude:[[[[returnJson valueForKey:@"drivers"] objectAtIndex:x] valueForKey:@"driver_latitude"] doubleValue] longitude:[[[[detail valueForKey:@"drivers"] objectAtIndex:x] valueForKey:@"driver_longitude"] doubleValue]];
[driverPins addObject:driverLocations];
}
for (CLLocation *newLocation in driverPins) {
googleMapsDriverPin = [[GMSMarker alloc] init];
[googleMapsDriverPin setPosition:newLocation.coordinate];
[googleMapsDriverPin setAnimated:YES];
[googleMapsDriverPin setTitle:@"title"];
[googleMapsDriverPin setSnippet:@"snippet"];
[googleMapsDriverPin setIcon:[GMSMarker markerImageWithColor:[UIColor blackColor]]];
[googleMapsDriverPin setMap:googleMaps];
}
}
It just keeps adding markers every 10 seconds and never removes the old ones. Please help!
Thanks!
It's a kind of quick-and-dirty option, but if you wanted to go that way, GMSMarker has a userData property which you could use to tag the driver pins:
- (void)apiResponse:(id)returnJson
{
for (GMSMarker *pin in self.googleMaps.markers) {
if ([@"Driver Pin" isEqual:pin.userData]) {
pin.map = nil;
}
}
...
for (CLLocation *newLocation in driverPins) {
googleMapsDriverPin = [[GMSMarker alloc] init];
...
[googleMapsDriverPin setUserData:@"Driver Pin"];
}
}
Update:
[self.googleMapsView clear];
Note that clear removes every marker and overlay from the map, not just the driver pins.
You can also delete a pin based on its id.
Here the deletePinId integer is the id of the selected pin:
for(GMSMarker *pin in self.mapView_.markers) {
NSLog(@"pin.userData : %@",pin.userData);
int pinId1 = [[pin.userData valueForKey:#"pin_id"] integerValue];
if (deletePinId == pinId1) {
pin.map = nil;
}
}
you currently only store ONE marker, but you want to add N markers -- so (as saxon said) you need an array to hold all the pins :)
@interface YourClass
...
@property(nonatomic, retain) NSMutableArray *googleMapsDriverPins;
@end
@implementation YourClass
...
-(void)APiResponse:(id)returnJson
{
for(GMSMarker *pin in self.googleMapsDriverPins) {
pin.map = nil;
}
self.googleMapsDriverPins = nil;
NSMutableArray *driverPins = [[NSMutableArray alloc]init];
for (int x = 0; x < [[returnJson valueForKey:@"drivers"] count]; x++) {
CLLocation *driverLocations = [[CLLocation alloc]initWithLatitude:[[[[returnJson valueForKey:@"drivers"] objectAtIndex:x] valueForKey:@"driver_latitude"] doubleValue] longitude:[[[[detail valueForKey:@"drivers"] objectAtIndex:x] valueForKey:@"driver_longitude"] doubleValue]];
[driverPins addObject:driverLocations];
}
self.googleMapsDriverPins = [NSMutableArray arrayWithCapacity:driverPins.count];
for (CLLocation *newLocation in driverPins) {
GMSMarker *googleMapsDriverPin = [[GMSMarker alloc] init];
[googleMapsDriverPin setPosition:newLocation.coordinate];
[googleMapsDriverPin setAnimated:YES];
[googleMapsDriverPin setTitle:@"title"];
[googleMapsDriverPin setSnippet:@"snippet"];
[googleMapsDriverPin setIcon:[GMSMarker markerImageWithColor:[UIColor blackColor]]];
[googleMapsDriverPin setMap:googleMaps];
[self.googleMapsDriverPins addObject:googleMapsDriverPin];
}
}
@end
It looks like you have a loop adding multiple drivers, each of which assigns to the member variable googleMapsDriverPin. Then next time it removes the googleMapsDriverPin - but that will only be the last pin you added, not all of them.
For this to work you would need to add each marker inside your loop to an array, and then remove all of them from the map on your next update.
In Swift 2:
Create an outlet for your map:
@IBOutlet weak var mapView: GMSMapView!
Create an array to store all markers
var markers = [GMSMarker]()
Create markers like this:
func funcName() {
let position = CLLocationCoordinate2DMake(lat, lon)
let marker = GMSMarker(position: position)
for pin: GMSMarker in self.markers {
if pin.userData as! String == "from" {
pin.map = nil
}
}
marker.icon = UIImage(named: "navigation-red")
marker.userData = "from"
marker.map = self.mapView
self.markers.append(marker)
}
You can set the userData property to anything you want and later on use that string to delete that marker. When the funcName function is executed, all markers with userData set to "from" are removed from the map. Let me know if you have any queries.
Assume that I have two sequential methods, methodA and methodB (methodB is called at the end of methodA), and I need the work in methodA to finish completely before methodB starts:
-(void)methodA {
//do some very heavy computation here
[self methodB];
}
-(void)methodB {
//this method performs some animation
}
methodA requires a lot of processing power and takes time to finish, while methodB performs animations. The animations start out laggy, and I'm guessing this has to do with the overhead of methodA. So how can I make the second method start only after the first has finished completely?
This is the actual code:
The code is very messy and unorganized. There is a lot of code that has been commented out, so please ignore that. I sometimes test things and then comment them out in case I need them later.
Here is what is happening: it starts with the -(void)checkMoves method. This method calls one of two similar methods:
-(void)getMovesForJourney:(int)journey withRolls:(NSArray *)rolls or
-(void) getEntryMovesForJourney:(int)journey withRolls:(NSArray *)rolls
These methods are wrappers for a long recursive process (which is most probably where the multithreading is happening).
At the end of -(void)checkMoves another method, -(void)analyseMoves, is called. This is also a heavy process.
Finally, at the end of -(void)analyseMoves the animation method -(void)move: is called. This last method starts with a lag.
At the end of -(void)move: the process starts again until no more 'moves' are available.
The methods below are not in order, so please refer to the description above to know what's going on.
-(void) checkMoves {
GameScene *game = [GameScene sharedGameScene];
CCLOG(@"start");
for (Stone *stone in stones) {
if (!stone.onBoard) {
if ([game EntryAvailable]) {
[self getEntryMovesForJourney:stone.journey withRolls:[game rollsAsIntegers]];
}
}
else {
[self getMovesForJourney:stone.journey withRolls:[game rollsAsIntegers]];
}
}
CCLOG(@"finish");
[self analyseMoves];
}
-(NSMutableArray *) otherPlayerLocations {
GameScene *game = [GameScene sharedGameScene];
Board *board = [game board];
Player *otherPlayer = [game playerOne];
NSMutableArray *locations = [[[NSMutableArray alloc] init] autorelease];
for (Stone *stone in otherPlayer.stones) {
if (stone.journey < 77) {
int location;
if (stone.onBoard)
location = [[board tileForATag:[self convertJourneyToTileTag:stone.journey]] bTag];
else
location = 0;
[locations addObject:[NSNumber numberWithInt:location]];
}
}
return locations;
}
-(void) analyseMoves {
if (moves.count > 0) {
NSMutableArray *killMoves = [[[NSMutableArray alloc] init] autorelease];
NSMutableArray *safeMoves = [[[NSMutableArray alloc] init] autorelease];
int mostThreatened;
float highestThreat = -1.0;
float lowestThreat = 100.0;
AIMove *safestMove;
//get kill and safe moves in seperate arrays
for (AIMove *move in moves) {
if (move.moveType == killMoveType)
[killMoves addObject:move];
else if (move.moveType == safeMoveType)
[safeMoves addObject:move];
// get highest threatened stone
//if (move.potThreat > highestThreat) {
// mostThreatened = move.journey;
// highestThreat = move.potThreat;
if (move.initThreat < lowestThreat) {
lowestThreat = move.initThreat;
safestMove = [move copy];
CCLOG(@"safest move assigned");
}
//}
}
//MOVE SELECTION:
//___________________________________
//choose best kill move
int killJourney;
if (killMoves.count > 0) {
//leave one move in killMoves with highest journey
killJourney = [[killMoves objectAtIndex:0] tileTag];
for (AIMove *killMove in killMoves) {
if (killMove.tileTag < killJourney)
[killMoves removeObject:killMove];
else
killJourney = killMove.tileTag;
}
}
//select correct move
if (killMoves.count > 0) {
[self move:[killMoves objectAtIndex:0]];
CCLOG(@"Kill move chosen");
}
else {
CCLOG(@"this is called!!!!");
CCLOG(@"safest move with initThreat: %i, potThreat: %i",safestMove.journey, safestMove.tileTag);
[self move:safestMove];
CCLOG(@"Success");
}
/*
else if (safeMoves.count > 0) {
[self move:[safeMoves objectAtIndex:0]];
CCLOG(@"safe move chosen");
}
else {
//temp random move generation
randomMove = CCRANDOM_0_1()*[moves count];
AIMove *move = [moves objectAtIndex:randomMove];
[self move:move];
CCLOG(@"Random move chosen");
}
*/
}
}
-(void) assignThreatLevel:(AIMove *)move {
NSArray *otherPlayerStones = [self otherPlayerLocations];
NSMutableArray *initThreats = [[NSMutableArray alloc] init];
NSMutableArray *potThreats = [[NSMutableArray alloc] initWithCapacity:4];
for (NSNumber *location in otherPlayerStones) {
//get threat levels for potential moves
if (move.tileTag > [location intValue]) {
int dif = move.tileTag - [location intValue];
CCLOG(@"dif: %i",dif);
//threat level conditions:
// 1 >> 5 = 70% of journey
// 6 >> 9 = 40% of journey
// 10 & 11 = 50% of journey
// 12 >> 24 = 20% of journey
// 25 && 26 = 50% of journey
// 27+ = 20% of journey
if (move.tileTag < 9) {
[initThreats addObject:[NSNumber numberWithFloat:0.0f]];
}
else if (dif >= 1 && dif <= 5) {
[initThreats addObject:[NSNumber numberWithFloat:k1to5]];
}
else if (dif >= 6 && dif <= 9) {
[initThreats addObject:[NSNumber numberWithFloat:k6to9]];
}
else if (dif == 10 || dif == 11) {
[initThreats addObject:[NSNumber numberWithFloat:k10to11]];
}
else if (dif >= 12 && dif <= 24) {
[initThreats addObject:[NSNumber numberWithFloat:k12to24]];
}
else if (dif == 25 || dif == 26) {
[initThreats addObject:[NSNumber numberWithFloat:k25to26]];
}
else if (dif > 26) {
[initThreats addObject:[NSNumber numberWithFloat:k27plus]];
}
//-------------------------------------
}
//get Threat levels for current positions
if (move.tileTag > [location intValue]) {
int dif = move.tileTag - [location intValue];
//threat level conditions:
// 1 >> 5 = 70% of journey
// 6 >> 9 = 40% of journey
// 10 & 11 = 50% of journey
// 12 >> 24 = 20% of journey
// 25 && 26 = 50% of journey
// 27+ = 20% of journey
if (move.journey < 8 || move.journey > 75)
[potThreats addObject:[NSNumber numberWithFloat:0.0f]];
else if (dif >= 1 && dif <= 5)
[potThreats addObject:[NSNumber numberWithFloat:k1to5]];
else if (dif >= 6 && dif <= 9)
[potThreats addObject:[NSNumber numberWithFloat:k6to9]];
else if (dif == 10 || dif == 11)
[potThreats addObject:[NSNumber numberWithFloat:k10to11]];
else if (dif >= 12 && dif <= 24)
[potThreats addObject:[NSNumber numberWithFloat:k12to24]];
else if (dif == 25 || dif == 26)
[potThreats addObject:[NSNumber numberWithFloat:k25to26]];
else if (dif > 26)
[potThreats addObject:[NSNumber numberWithFloat:k27plus]];
//-------------------------------------
}
}
float initThreat = 0.0f;
float potThreat = 0.0f;
for (NSNumber *number in initThreats) {
if ([number floatValue] > initThreat)
initThreat = [number floatValue];
}
for (NSNumber *number in potThreats) {
if ([number floatValue] > potThreat)
potThreat = [number floatValue];
}
move.initThreat = initThreat;
move.potThreat = potThreat;
[initThreats release];
}
-(void) move:(AIMove *)move {
CCLOG(@"Moves count: %i",[moves count]);
if ([moves count] > 0) {
BOOL isOtherStoneOnPreviousTile = NO;
int total;
if (move.tileTag > 8)
total = move.tileTag - move.journey;
else if (move.tileTag < 9 && move.journey > 8)
total = ((9-move.tileTag)+75) - move.journey;
else if (move.tileTag < 9)
total = move.tileTag - move.journey;
CCLOG(@"Total: %i",total);
GameScene *game = [GameScene sharedGameScene];
Board *board = [game board];
BoardTile *tile = [[game board] tileForBTag:move.tileTag];
CCSequence *sequence;
NSMutableArray *actions = [[[NSMutableArray alloc] init] autorelease];
id delay = [CCDelayTime actionWithDuration:0.5];
[actions addObject:delay];
if (move.journey > 0) {
for (int i = 1; i < total+1; i++) {
if (move.journey+i < 76)
tile = [board tileForBTag:move.journey+i];
else
tile = [board tileForBTag:9-((move.journey+i)-75)];
id moveAction = [CCMoveTo actionWithDuration:0.2 position:tile.position];
[actions addObject:moveAction];
}
}
else {
id moveAction = [CCMoveTo actionWithDuration:0.2 position:tile.position];
[actions addObject:moveAction];
}
// id moveFunc = [CCCallFunc actionWithTarget:self selector:@selector(moveMotion)];
//id moveAction = [CCMoveTo actionWithDuration:0.3 position:tile.position];
id killFunc = [CCCallFuncND actionWithTarget:self selector:@selector(killStonesForTile:data:) data:tile];
//id callfunc = [CCCallFunc actionWithTarget:self selector:@selector(move)];
[actions addObject:killFunc];
//[actions addObject:callfunc];
sequence = [CCSequence actionMutableArray:actions];
[actions removeAllObjects];
CGPoint exitPos = ccp(exitPosition.x-(completeStones*30),exitPosition.y-(completeStones*30));
id move2 = [CCMoveTo actionWithDuration:0.2f position:exitPos];
id sequence2 = [CCSequence actions:move2, nil];
Stone *stone = [self getStoneForJourney:move.journey];
//-
//------------------------------------------
//clean tracks
for (Stone *stone in stones) {
if (stone.journey == (move.tileTag - move.journey))
isOtherStoneOnPreviousTile = YES;
}
if (!isOtherStoneOnPreviousTile) {
BoardTile *prevTile = [board tileForBTag:[self convertJourneyToTileTag:move.journey]];
prevTile.occupiedBy = 0;
}
//===========================================
//-------------------------------------------
//set stone status
if (move.tileTag < 9 && stone.crossedEntry)
stone.journey = (9 - move.tileTag) + 75;
else
stone.journey = move.tileTag;
stone.onBoard = YES;
tile.occupiedBy = player2;
if (stone.journey > 8 && !stone.crossedEntry)
stone.crossedEntry = YES;
//============================================
if (stone.journey < 84)
[stone runAction:sequence];
else {
[stone runAction:sequence2];
completeStones++;
stone.isJourneyComplete = YES;
}
CCLOG(@"Stone Journey:%i",stone.journey);
NSArray *rollTypesArray = [move rollTypes];
[self removeRollTypes:rollTypesArray];
[moves removeAllObjects];
[game updateRollResults];
[self updateMoveAvailability];
}
else {
GameScene *game = [GameScene sharedGameScene];
[moves removeAllObjects];
[game nextTurn];
[game updateRollResults];
}
}
-(Stone *)getStoneForJourney:(int)journey {
Stone *theStone;
for (Stone *stone in stones) {
if (stone.journey == journey)
theStone = stone;
}
return theStone;
}
-(void)dealloc {
[moves release];
[rollTypes release];
[results release];
[super dealloc];
}
-(void)killStonesForTile:(id)sender data:(BoardTile *)tile {
GameScene *game = [GameScene sharedGameScene];
int tileTag;
Player *otherPlayer;
if (playerNumber == player1) {
tileTag = tile.aTag;
otherPlayer = [game playerTwo];
}
else {
tileTag = tile.bTag;
otherPlayer = [game playerOne];
}
CCArray *currentStones = [otherPlayer stones];
for (Stone *stone in currentStones) {
if (!stone.isJourneyComplete) {
int journey = stone.journey;
if (tileTag == tile.aTag) {
if (journey > 0 && [self convertJourneyToTileTag:journey] == tile.bTag) {
CCLOG(@"blue stone killed");
[self returnStoneToOrigin:stone];
}
}
if (tileTag == tile.bTag) {
if (journey > 0 && [self convertJourneyToTileTag:journey] == tile.aTag) {
CCLOG(@"gold stone killed");
[self returnStoneToOrigin:stone];
}
}
}
}
}
-(void)removeRollTypes:(NSArray *)theRollTypes {
GameScene *game = [GameScene sharedGameScene];
NSMutableArray *rolls = [game rolls];
for (NSNumber *roll in theRollTypes) {
NSUInteger index = [rolls indexOfObject:[game convertIntToRoll:roll]];
CCLOG(@"rolltypes count: %i",[theRollTypes count]);
CCLOG(@"roll integer: %i",[roll intValue]);
[rolls removeObjectAtIndex:index];
}
}
#pragma mark -
#pragma mark enumerations
- (NSArray*)getSums:(NSArray*)numbers {
NSMutableArray *result = [self getSumsHelper:numbers startingFrom:0];
[result removeObjectAtIndex:0];
return result;
}
- (NSMutableArray*)getSumsHelper:(NSArray*)numbers startingFrom:(NSUInteger)index {
/* (1) */
if (index >= [numbers count])
return [NSMutableArray arrayWithObject:[NSNumber numberWithFloat:0]];
/* (2) Generate all the subsets where the `index`th element is not included */
NSMutableArray* result = [self getSumsHelper:numbers startingFrom:index+1];
// NSSortDescriptor *mySorter = [[NSSortDescriptor alloc] initWithKey:@"floatValue" ascending:YES];
// [result sortUsingDescriptors:[NSArray arrayWithObject:mySorter]];
/* (3) Add all the cases where the `index`th element is included */
NSUInteger i, n = [result count];
float element = [[numbers objectAtIndex:index] floatValue];
for (i = 0; i < n; i++) {
float element2 = [[result objectAtIndex:i] floatValue];
float sum = element+element2;
BOOL numberPresent = NO;
for (NSNumber *number in result) {
if ([number floatValue] == sum)
numberPresent = YES;
}
if (!numberPresent)
[result addObject:[NSNumber numberWithFloat:sum]];
}
return result;
}
-(NSArray *) getCombsforNumbers:(NSArray *)numbers withTarget:(int)target{
NSMutableArray *partial = [[NSMutableArray alloc] init];
[partial addObject:[NSNumber numberWithInt:0]];
[results removeAllObjects];
NSArray *result = [self getCombsHelper:numbers target:target partial:partial];
NSUInteger minCount = [[result objectAtIndex:0] count];
NSUInteger index = 0;
NSMutableArray *combArray = [result objectAtIndex:0];
for (NSMutableArray *array in result) {
if ([array count] < minCount) {
minCount = [array count];
index = [result indexOfObject:array];
combArray = array;
}
}
//remove number 0 from array
[combArray removeObject:[NSNumber numberWithInt:0]];
return combArray;
}
-(NSMutableArray *) getCombsHelper:(NSArray *)numbers target:(int)target partial:(NSMutableArray *)partial {
int s = 0;
for (NSNumber *number in partial) {
s += [number intValue];
}
if (s == target) {
[results addObject:partial];
}
if (s >= target) {
return results;
}
for (int i = 0; i < [numbers count]; i++) {
NSMutableArray *remaining = [[[NSMutableArray alloc] init] autorelease];
int n = [[numbers objectAtIndex:i] intValue];
for (int j = i+1; j<[numbers count]; j++) {
[remaining addObject:[numbers objectAtIndex:j]];
}
NSMutableArray *partialRec = [[[NSMutableArray alloc] init] autorelease];
[partialRec addObjectsFromArray:partial];
[partialRec addObject:[NSNumber numberWithInt:n]];
[self getCombsHelper:remaining target:target partial:partialRec];
}
return results;
}
-(void) getMovesForJourney:(int)journey withRolls:(NSArray *)rolls {
GameScene *game = [GameScene sharedGameScene];
Board *board = [game board];
NSArray *sums = [self getSums:rolls];
for (NSNumber *number in sums) {
if ([number intValue]+journey <= 84) {
BoardTile *tile = [board tileForBTag:[self convertJourneyToTileTag:journey+[number intValue]]];
if (tile.isSafeTile) {
if (tile.occupiedBy != player1) {
AIMove *move = [AIMove moveWithJourney:journey tileTag:tile.bTag];
NSArray *theRollTypes = [[self getCombsforNumbers:rolls withTarget:[number intValue]] copy];
//Checking rolltypes, remove later
NSLog(@"%i = ",[number intValue]);
for (NSNumber *comb in theRollTypes) {
NSLog(@"%i",[comb intValue]);
}
NSLog(@"-----------");
//----------------------------------
move.moveType = safeMoveType;
move.initThreat = 0.0;
CCLOG(@"move initThreat: %f",move.initThreat);
[move setRollTypes:theRollTypes];
[moves addObject:move];
}
}
else {
AIMove *move = [AIMove moveWithJourney:journey tileTag:tile.bTag];
NSArray *theRollTypes = [[self getCombsforNumbers:rolls withTarget:[number intValue]] copy];
//Checking rolltypes, remove later
NSLog(@"%i = ",[number intValue]);
for (NSNumber *comb in theRollTypes) {
NSLog(@"%i",[comb intValue]);
}
NSLog(@"-----------");
//-----------------------------------
[move setRollTypes:theRollTypes];
//assign threat level
[self assignThreatLevel:move];
CCLOG(@"move initThreat: %f",move.initThreat);
//check for kill move
NSArray *otherPlayerPositions = (NSArray *)[self otherPlayerLocations];
for (NSNumber *location in otherPlayerPositions) {
if (move.tileTag == [location intValue])
move.moveType = killMoveType;
}
[moves addObject:move];
}
}
//int i = [number intValue];
//NSArray *combs = [self getCombsforNumbers:numbers withTarget:i];
}
}
-(void) getEntryMovesForJourney:(int)journey withRolls:(NSArray *)rolls {
GameScene *game = [GameScene sharedGameScene];
Board *board = [game board];
NSArray *sums = [self getSums:rolls];
for (NSNumber *number in sums) {
if ([number intValue]+journey <= 84) {
BoardTile *tile = [board tileForBTag:[self convertJourneyToTileTag:journey+[number intValue]]];
if (tile.isSafeTile) {
if (tile.occupiedBy != player1) {
NSArray *theRollTypes = [[self getCombsforNumbers:rolls withTarget:[number intValue]] copy];
BOOL containsEntry = NO;
for (NSNumber *rollType in theRollTypes) {
if ([rollType intValue] == 1) {
containsEntry = YES;
}
}
if (containsEntry) {
AIMove *move = [AIMove moveWithJourney:journey tileTag:tile.bTag];
//Checking rolltypes, remove later
NSLog(@"%i = ",[number intValue]);
for (NSNumber *comb in theRollTypes) {
NSLog(@"%i",[comb intValue]);
}
NSLog(@"-----------");
move.moveType = safeMoveType;
move.initThreat = 0.0;
CCLOG(@"move initThreat: %f",move.initThreat);
[move setRollTypes:theRollTypes];
[moves addObject:move];
//----------------------------------
}
}
}
else {
NSArray *theRollTypes = [[self getCombsforNumbers:rolls withTarget:[number intValue]] copy];
BOOL containsEntry = NO;
for (NSNumber *rollType in theRollTypes) {
if ([rollType intValue] == 1) {
containsEntry = YES;
}
}
if (containsEntry) {
AIMove *move = [AIMove moveWithJourney:journey tileTag:tile.bTag];
[move setRollTypes:theRollTypes];
//check for kill move
NSArray *otherPlayerPositions = (NSArray *)[self otherPlayerLocations];
for (NSNumber *location in otherPlayerPositions) {
if (move.tileTag == [location intValue])
move.moveType = killMoveType;
}
//assign threat level
[self assignThreatLevel:move];
[moves addObject:move];
//Checking rolltypes, remove later
NSLog(@"%i = ",[number intValue]);
for (NSNumber *comb in theRollTypes) {
NSLog(@"%i",[comb intValue]);
}
NSLog(@"-----------");
CCLOG(@"move initThreat: %f",move.initThreat);
[move setRollTypes:theRollTypes];
[moves addObject:move];
//----------------------------------
}
}
}
}
}
Technically, methodB is called after methodA is finished. In methodA you are apparently doing things that are backgrounded, meaning the method returns while some tasks are still being performed on another thread (or scheduled on the run loop). There's nothing more we can say about this unless you share the inner workings of methodA.
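If the heavy computation can be moved off the main thread explicitly, one common pattern is to run it on a background queue and only start the animation on the main queue once it has completely finished. A rough sketch (assuming the computation does not touch UIKit; doHeavyComputation is a hypothetical stand-in for the existing work):
-(void)methodA {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        // heavy computation runs off the main thread
        [self doHeavyComputation]; // hypothetical helper wrapping the existing work
        dispatch_async(dispatch_get_main_queue(), ^{
            // the animation only starts after the computation has completely finished
            [self methodB];
        });
    });
}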
I am reading a song from the iPod library using its asset URL (in the code it is named audioUrl).
I can play it many ways, I can cut it, I can do some processing with it, but...
I really don't understand what to do with this CMSampleBufferRef to get data for drawing a waveform! I need info about peak values; how can I get it this (or maybe another) way?
AVAssetTrack * songTrack = [audioUrl.tracks objectAtIndex:0];
AVAssetReaderTrackOutput * output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:nil];
[reader addOutput:output];
[output release];
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];
while (reader.status == AVAssetReaderStatusReading){
AVAssetReaderTrackOutput * trackOutput =
(AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (sampleBufferRef){/* what I gonna do with this? */}
Please help me!
I was searching for a similar thing and decided to "roll my own."
I realize this is an old post, but in case anyone else is in search of this, here is my solution. It is relatively quick and dirty and normalizes the image to "full scale".
The images it creates are "wide", i.e. you need to put them in a UIScrollView or otherwise manage the display.
This is based on some answers given to this question.
Sample Output
EDIT: I have added a logarithmic version of the averaging and render methods, see the end of this message for the alternate version & comparison outputs. I personally prefer the original linear version, but have decided to post it, in case someone can improve on the algorithm used.
You'll need these imports:
#import <MediaPlayer/MediaPlayer.h>
#import <AVFoundation/AVFoundation.h>
First, a generic rendering method that takes a pointer to averaged sample data,
and returns a UIImage. Note these samples are not playable audio samples.
-(UIImage *) audioImageGraph:(SInt16 *) samples
normalizeMax:(SInt16) normalizeMax
sampleCount:(NSInteger) sampleCount
channelCount:(NSInteger) channelCount
imageHeight:(float) imageHeight {
CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
UIGraphicsBeginImageContext(imageSize);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
CGContextSetAlpha(context,1.0);
CGRect rect;
rect.size = imageSize;
rect.origin.x = 0;
rect.origin.y = 0;
CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
CGColorRef rightcolor = [[UIColor redColor] CGColor];
CGContextFillRect(context, rect);
CGContextSetLineWidth(context, 1.0);
float halfGraphHeight = (imageHeight / 2) / (float) channelCount ;
float centerLeft = halfGraphHeight;
float centerRight = (halfGraphHeight*3) ;
float sampleAdjustmentFactor = (imageHeight/ (float) channelCount) / (float) normalizeMax;
for (NSInteger intSample = 0 ; intSample < sampleCount ; intSample ++ ) {
SInt16 left = *samples++;
float pixels = (float) left;
pixels *= sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerLeft-pixels);
CGContextAddLineToPoint(context, intSample, centerLeft+pixels);
CGContextSetStrokeColorWithColor(context, leftcolor);
CGContextStrokePath(context);
if (channelCount==2) {
SInt16 right = *samples++;
float pixels = (float) right;
pixels *= sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerRight - pixels);
CGContextAddLineToPoint(context, intSample, centerRight + pixels);
CGContextSetStrokeColorWithColor(context, rightcolor);
CGContextStrokePath(context);
}
}
// Create new image
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
// Tidy up
UIGraphicsEndImageContext();
return newImage;
}
Next, a method that takes a AVURLAsset, and returns PNG image data
- (NSData *) renderPNGAudioPictogramForAsset:(AVURLAsset *)songAsset {
NSError * error = nil;
AVAssetReader * reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];
AVAssetTrack * songTrack = [songAsset.tracks objectAtIndex:0];
NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
// [NSNumber numberWithInt:44100.0],AVSampleRateKey, /*Not Supported*/
// [NSNumber numberWithInt: 2],AVNumberOfChannelsKey, /*Not Supported*/
[NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
nil];
AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];
[reader addOutput:output];
[output release];
UInt32 sampleRate,channelCount;
NSArray* formatDesc = songTrack.formatDescriptions;
for(unsigned int i = 0; i < [formatDesc count]; ++i) {
CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
if(fmtDesc ) {
sampleRate = fmtDesc->mSampleRate;
channelCount = fmtDesc->mChannelsPerFrame;
// NSLog(@"channels:%u, bytes/packet: %u, sampleRate %f",fmtDesc->mChannelsPerFrame, fmtDesc->mBytesPerPacket,fmtDesc->mSampleRate);
}
}
UInt32 bytesPerSample = 2 * channelCount;
SInt16 normalizeMax = 0;
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];
UInt64 totalBytes = 0;
SInt64 totalLeft = 0;
SInt64 totalRight = 0;
NSInteger sampleTally = 0;
NSInteger samplesPerPixel = sampleRate / 50;
while (reader.status == AVAssetReaderStatusReading){
AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (sampleBufferRef){
CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);
size_t length = CMBlockBufferGetDataLength(blockBufferRef);
totalBytes += length;
NSAutoreleasePool *wader = [[NSAutoreleasePool alloc] init];
NSMutableData * data = [NSMutableData dataWithLength:length];
CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);
SInt16 * samples = (SInt16 *) data.mutableBytes;
int sampleCount = length / bytesPerSample;
for (int i = 0; i < sampleCount ; i ++) {
SInt16 left = *samples++;
totalLeft += left;
SInt16 right;
if (channelCount==2) {
right = *samples++;
totalRight += right;
}
sampleTally++;
if (sampleTally > samplesPerPixel) {
left = totalLeft / sampleTally;
SInt16 fix = abs(left);
if (fix > normalizeMax) {
normalizeMax = fix;
}
[fullSongData appendBytes:&left length:sizeof(left)];
if (channelCount==2) {
right = totalRight / sampleTally;
SInt16 fix = abs(right);
if (fix > normalizeMax) {
normalizeMax = fix;
}
[fullSongData appendBytes:&right length:sizeof(right)];
}
totalLeft = 0;
totalRight = 0;
sampleTally = 0;
}
}
[wader drain];
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
}
}
NSData * finalData = nil;
if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown){
// Something went wrong. return nil
return nil;
}
if (reader.status == AVAssetReaderStatusCompleted){
NSLog(@"rendering output graphics using normalizeMax %d",normalizeMax);
UIImage *test = [self audioImageGraph:(SInt16 *)
fullSongData.bytes
normalizeMax:normalizeMax
sampleCount:fullSongData.length / 4
channelCount:2
imageHeight:100];
finalData = imageToData(test);
}
[fullSongData release];
[reader release];
return finalData;
}
Advanced Option:
Finally, if you want to be able to play the audio using AVAudioPlayer, you'll need to cache
it to your app's bundle cache folder. Since I was doing that, I decided to cache the image data
as well, and wrapped the whole thing into a UIImage category. You need to include this open source offering to extract the audio, and some code from here to handle some background threading features.
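As an aside, if you would rather not pull in the NSThread block category, plain GCD gives you the same background/main-thread hand-off. A rough sketch (assuming the same asset and completionBlock variables used in the init method below):
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    // render the pictogram data off the main thread
    NSData *waveFormData = [self renderPNGAudioPictogramForAsset:asset];
    dispatch_async(dispatch_get_main_queue(), ^{
        // hand the finished image back to the caller on the main thread
        completionBlock([UIImage imageWithData:waveFormData]);
    });
});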
First, some defines, and a few generic class methods for handling path names etc.
//#define imgExt @"jpg"
//#define imageToData(x) UIImageJPEGRepresentation(x,4)
#define imgExt @"png"
#define imageToData(x) UIImagePNGRepresentation(x)
+ (NSString *) assetCacheFolder {
NSArray *assetFolderRoot = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
return [NSString stringWithFormat:@"%@/audio", [assetFolderRoot objectAtIndex:0]];
}
+ (NSString *) cachedAudioPictogramPathForMPMediaItem:(MPMediaItem*) item {
NSString *assetFolder = [[self class] assetCacheFolder];
NSNumber * libraryId = [item valueForProperty:MPMediaItemPropertyPersistentID];
NSString *assetPictogramFilename = [NSString stringWithFormat:@"asset_%@.%@",libraryId,imgExt];
return [NSString stringWithFormat:@"%@/%@", assetFolder, assetPictogramFilename];
}
+ (NSString *) cachedAudioFilepathForMPMediaItem:(MPMediaItem*) item {
NSString *assetFolder = [[self class] assetCacheFolder];
NSURL * assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
NSNumber * libraryId = [item valueForProperty:MPMediaItemPropertyPersistentID];
NSString *assetFileExt = [[[assetURL path] lastPathComponent] pathExtension];
NSString *assetFilename = [NSString stringWithFormat:@"asset_%@.%@",libraryId,assetFileExt];
return [NSString stringWithFormat:@"%@/%@", assetFolder, assetFilename];
}
+ (NSURL *) cachedAudioURLForMPMediaItem:(MPMediaItem*) item {
NSString *assetFilepath = [[self class] cachedAudioFilepathForMPMediaItem:item];
return [NSURL fileURLWithPath:assetFilepath];
}
Now the init method that does "the business"
- (id) initWithMPMediaItem:(MPMediaItem*) item
completionBlock:(void (^)(UIImage* delayedImagePreparation))completionBlock {
NSFileManager *fman = [NSFileManager defaultManager];
NSString *assetPictogramFilepath = [[self class] cachedAudioPictogramPathForMPMediaItem:item];
if ([fman fileExistsAtPath:assetPictogramFilepath]) {
NSLog(@"Returning cached waveform pictogram: %@",[assetPictogramFilepath lastPathComponent]);
self = [self initWithContentsOfFile:assetPictogramFilepath];
return self;
}
NSString *assetFilepath = [[self class] cachedAudioFilepathForMPMediaItem:item];
NSURL *assetFileURL = [NSURL fileURLWithPath:assetFilepath];
if ([fman fileExistsAtPath:assetFilepath]) {
NSLog(@"scanning cached audio data to create UIImage file: %@",[assetFilepath lastPathComponent]);
[assetFileURL retain];
[assetPictogramFilepath retain];
[NSThread MCSM_performBlockInBackground: ^{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
NSData *waveFormData = [self renderPNGAudioPictogramForAsset:asset];
[waveFormData writeToFile:assetPictogramFilepath atomically:YES];
[assetFileURL release];
[assetPictogramFilepath release];
if (completionBlock) {
[waveFormData retain];
[NSThread MCSM_performBlockOnMainThread:^{
UIImage *result = [UIImage imageWithData:waveFormData];
NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data in UIImage %0.0f x %0.0f pixels)",waveFormData.length,[imgExt uppercaseString],result.size.width,result.size.height);
completionBlock(result);
[waveFormData release];
}];
}
}];
return nil;
} else {
NSString *assetFolder = [[self class] assetCacheFolder];
[fman createDirectoryAtPath:assetFolder withIntermediateDirectories:YES attributes:nil error:nil];
NSLog(@"Preparing to import audio asset data %@",[assetFilepath lastPathComponent]);
[assetPictogramFilepath retain];
[assetFileURL retain];
TSLibraryImport* import = [[TSLibraryImport alloc] init];
NSURL * assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
[import importAsset:assetURL toURL:assetFileURL completionBlock:^(TSLibraryImport* import) {
//check the status and error properties of
//TSLibraryImport
if (import.error) {
NSLog (@"audio data import failed:%@",import.error);
} else{
NSLog (@"Creating waveform pictogram file: %@", [assetPictogramFilepath lastPathComponent]);
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
NSData *waveFormData = [self renderPNGAudioPictogramForAsset:asset];
[waveFormData writeToFile:assetPictogramFilepath atomically:YES];
if (completionBlock) {
[waveFormData retain];
[NSThread MCSM_performBlockOnMainThread:^{
UIImage *result = [UIImage imageWithData:waveFormData];
NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data in UIImage %0.0f x %0.0f pixels)",waveFormData.length,[imgExt uppercaseString],result.size.width,result.size.height);
completionBlock(result);
[waveFormData release];
}];
}
}
[assetPictogramFilepath release];
[assetFileURL release];
} ];
return nil;
}
}
An example of invoking this:
-(void) importMediaItem {
MPMediaItem* item = [self mediaItem];
// since we will be needing this for playback, save the url to the cached audio.
[url release];
url = [[UIImage cachedAudioURLForMPMediaItem:item] retain];
[waveFormImage release];
waveFormImage = [[UIImage alloc ] initWithMPMediaItem:item completionBlock:^(UIImage* delayedImagePreparation){
waveFormImage = [delayedImagePreparation retain];
[self displayWaveFormImage];
}];
if (waveFormImage) {
[waveFormImage retain];
[self displayWaveFormImage];
}
}
Logarithmic version of averaging and render methods
#define absX(x) (x<0?0-x:x)
#define minMaxX(x,mn,mx) (x<=mn?mn:(x>=mx?mx:x))
#define noiseFloor (-90.0)
#define decibel(amplitude) (20.0 * log10(absX(amplitude)/32767.0))
-(UIImage *) audioImageLogGraph:(Float32 *) samples
normalizeMax:(Float32) normalizeMax
sampleCount:(NSInteger) sampleCount
channelCount:(NSInteger) channelCount
imageHeight:(float) imageHeight {
CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
UIGraphicsBeginImageContext(imageSize);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
CGContextSetAlpha(context,1.0);
CGRect rect;
rect.size = imageSize;
rect.origin.x = 0;
rect.origin.y = 0;
CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
CGColorRef rightcolor = [[UIColor redColor] CGColor];
CGContextFillRect(context, rect);
CGContextSetLineWidth(context, 1.0);
float halfGraphHeight = (imageHeight / 2) / (float) channelCount ;
float centerLeft = halfGraphHeight;
float centerRight = (halfGraphHeight*3) ;
float sampleAdjustmentFactor = (imageHeight/ (float) channelCount) / (normalizeMax - noiseFloor) / 2;
for (NSInteger intSample = 0 ; intSample < sampleCount ; intSample ++ ) {
Float32 left = *samples++;
float pixels = (left - noiseFloor) * sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerLeft-pixels);
CGContextAddLineToPoint(context, intSample, centerLeft+pixels);
CGContextSetStrokeColorWithColor(context, leftcolor);
CGContextStrokePath(context);
if (channelCount==2) {
Float32 right = *samples++;
float pixels = (right - noiseFloor) * sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerRight - pixels);
CGContextAddLineToPoint(context, intSample, centerRight + pixels);
CGContextSetStrokeColorWithColor(context, rightcolor);
CGContextStrokePath(context);
}
}
// Create new image
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
// Tidy up
UIGraphicsEndImageContext();
return newImage;
}
- (NSData *) renderPNGAudioPictogramLogForAsset:(AVURLAsset *)songAsset {
NSError * error = nil;
AVAssetReader * reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];
AVAssetTrack * songTrack = [songAsset.tracks objectAtIndex:0];
NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
// [NSNumber numberWithInt:44100.0],AVSampleRateKey, /*Not Supported*/
// [NSNumber numberWithInt: 2],AVNumberOfChannelsKey, /*Not Supported*/
[NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
nil];
AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];
[reader addOutput:output];
[output release];
UInt32 sampleRate,channelCount;
NSArray* formatDesc = songTrack.formatDescriptions;
for(unsigned int i = 0; i < [formatDesc count]; ++i) {
CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
if(fmtDesc ) {
sampleRate = fmtDesc->mSampleRate;
channelCount = fmtDesc->mChannelsPerFrame;
// NSLog(@"channels:%u, bytes/packet: %u, sampleRate %f",fmtDesc->mChannelsPerFrame, fmtDesc->mBytesPerPacket,fmtDesc->mSampleRate);
}
}
UInt32 bytesPerSample = 2 * channelCount;
Float32 normalizeMax = noiseFloor;
NSLog(@"normalizeMax = %f",normalizeMax);
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];
UInt64 totalBytes = 0;
Float64 totalLeft = 0;
Float64 totalRight = 0;
Float32 sampleTally = 0;
NSInteger samplesPerPixel = sampleRate / 50;
while (reader.status == AVAssetReaderStatusReading){
AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (sampleBufferRef){
CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);
size_t length = CMBlockBufferGetDataLength(blockBufferRef);
totalBytes += length;
NSAutoreleasePool *wader = [[NSAutoreleasePool alloc] init];
NSMutableData * data = [NSMutableData dataWithLength:length];
CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);
SInt16 * samples = (SInt16 *) data.mutableBytes;
int sampleCount = length / bytesPerSample;
for (int i = 0; i < sampleCount ; i ++) {
Float32 left = (Float32) *samples++;
left = decibel(left);
left = minMaxX(left,noiseFloor,0);
totalLeft += left;
Float32 right;
if (channelCount==2) {
right = (Float32) *samples++;
right = decibel(right);
right = minMaxX(right,noiseFloor,0);
totalRight += right;
}
sampleTally++;
if (sampleTally > samplesPerPixel) {
left = totalLeft / sampleTally;
if (left > normalizeMax) {
normalizeMax = left;
}
// NSLog(#"left average = %f, normalizeMax = %f",left,normalizeMax);
[fullSongData appendBytes:&left length:sizeof(left)];
if (channelCount==2) {
right = totalRight / sampleTally;
if (right > normalizeMax) {
normalizeMax = right;
}
[fullSongData appendBytes:&right length:sizeof(right)];
}
totalLeft = 0;
totalRight = 0;
sampleTally = 0;
}
}
[wader drain];
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
}
}
NSData * finalData = nil;
if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown){
// Something went wrong. Handle it.
}
if (reader.status == AVAssetReaderStatusCompleted){
// You're done. It worked.
NSLog(@"rendering output graphics using normalizeMax %f",normalizeMax);
UIImage *test = [self audioImageLogGraph:(Float32 *) fullSongData.bytes
normalizeMax:normalizeMax
sampleCount:fullSongData.length / (sizeof(Float32) * 2)
channelCount:2
imageHeight:100];
finalData = imageToData(test);
}
[fullSongData release];
[reader release];
return finalData;
}
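For reference, a rough usage sketch of the method above (not part of the original answer): the file name "track.m4a" and the waveformImageView outlet are placeholders, renderPNGAudioPictogramLogForAsset: is assumed to live on the calling class, and imageToData() is the helper referenced earlier in this answer.
// Hypothetical caller; file name and image view are placeholders.
NSURL *trackURL = [[NSBundle mainBundle] URLForResource:@"track" withExtension:@"m4a"];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:trackURL options:nil];
NSData *pngData = [self renderPNGAudioPictogramLogForAsset:asset];
if (pngData) {
    self.waveformImageView.image = [UIImage imageWithData:pngData];
}
Since the asset reader walks the whole track synchronously, in practice you would probably run this off the main thread and hop back to the main queue to set the image.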
Comparison outputs (images omitted): a linear plot and a logarithmic plot of the start of "Warm It Up" by Acme Swing Company.
You should be able to get a buffer of audio from your sampleBufferRef and then iterate through those values to build your waveform:
CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer( sampleBufferRef );
CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(sampleBufferRef);
AudioBufferList audioBufferList;
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
sampleBufferRef,
NULL,
&audioBufferList,
sizeof(audioBufferList),
NULL,
NULL,
kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
&buffer
);
// this copies your audio out to a temp buffer but you should be able to iterate through this buffer instead
SInt32* readBuffer = (SInt32 *)malloc(numSamplesInBuffer * sizeof(SInt32));
memcpy( readBuffer, audioBufferList.mBuffers[0].mData, numSamplesInBuffer*sizeof(SInt32));
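From there, a rough sketch of how you might walk that buffer and keep one peak value per bucket of samples; bucketSize and the peaks array are illustrative choices, and the SInt32 sample type simply matches the snippet above.
// Reduce the copied samples to one peak per pixel column (illustrative values).
CMItemCount bucketSize = 512; // samples per rendered pixel, arbitrary example
NSMutableArray<NSNumber *> *peaks = [NSMutableArray array];
SInt32 peak = 0;
for (CMItemCount i = 0; i < numSamplesInBuffer; i++) {
    SInt32 sample = readBuffer[i];
    SInt32 magnitude = sample < 0 ? -sample : sample;
    if (magnitude > peak) peak = magnitude;
    if ((i + 1) % bucketSize == 0) {
        [peaks addObject:@(peak)]; // one value per waveform column
        peak = 0;
    }
}
free(readBuffer); // the temp buffer was malloc'd above
Note that the block buffer returned through &buffer is retained on your behalf, so it needs a CFRelease once you are done with the audio data.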
Another approach using Swift 5 and using AVAudioFile:
///Gets the audio file from a URL, downsamples it, and draws it into the sound layer.
func drawSoundWave(fromURL url:URL, fromPosition:Int64, totalSeconds:UInt32, samplesSecond:CGFloat) throws{
print("\(logClassName) Drawing sound from \(url)")
do{
waveViewInfo.samplesSeconds = samplesSecond
//Get audio file and format from URL
let audioFile = try AVAudioFile(forReading: url)
waveViewInfo.format = audioFile.processingFormat
audioFile.framePosition = fromPosition * Int64(waveViewInfo.format.sampleRate)
//Getting the buffer
let frameCapacity:UInt32 = totalSeconds * UInt32(waveViewInfo.format.sampleRate)
guard let audioPCMBuffer = AVAudioPCMBuffer(pcmFormat: waveViewInfo.format, frameCapacity: frameCapacity) else{ throw AppError("Unable to get the AVAudioPCMBuffer") }
try audioFile.read(into: audioPCMBuffer, frameCount: frameCapacity)
let audioPCMBufferFloatValues:[Float] = Array(UnsafeBufferPointer(start: audioPCMBuffer.floatChannelData?.pointee,
count: Int(audioPCMBuffer.frameLength)))
waveViewInfo.points = []
waveViewInfo.maxValue = 0
for index in stride(from: 0, to: audioPCMBufferFloatValues.count, by: Int(audioFile.fileFormat.sampleRate) / Int(waveViewInfo.samplesSeconds)){
let aSample = CGFloat(audioPCMBufferFloatValues[index])
waveViewInfo.points.append(aSample)
let fix = abs(aSample)
if fix > waveViewInfo.maxValue{
waveViewInfo.maxValue = fix
}
}
print("\(logClassName) Finished the points - Count = \(waveViewInfo.points.count) / Max = \(waveViewInfo.maxValue)")
populateSoundImageView(with: waveViewInfo)
}
catch{
throw error
}
}
///Converts the sound wave in to a UIImage
func populateSoundImageView(with waveViewInfo:WaveViewInfo){
let imageSize:CGSize = CGSize(width: CGFloat(waveViewInfo.points.count),//CGFloat(waveViewInfo.points.count) * waveViewInfo.sampleSpace,
height: frame.height)
let drawingRect = CGRect(origin: .zero, size: imageSize)
UIGraphicsBeginImageContextWithOptions(imageSize, false, 0)
defer {
UIGraphicsEndImageContext()
}
print("\(logClassName) Converting sound view in rect \(drawingRect)")
guard let context:CGContext = UIGraphicsGetCurrentContext() else{ return }
context.setFillColor(waveViewInfo.backgroundColor.cgColor)
context.setAlpha(1.0)
context.fill(drawingRect)
context.setLineWidth(1.0)
// context.setLineWidth(waveViewInfo.lineWidth)
let sampleAdjustFactor = imageSize.height / waveViewInfo.maxValue
for pointIndex in waveViewInfo.points.indices{
let pixel = waveViewInfo.points[pointIndex] * sampleAdjustFactor
context.move(to: CGPoint(x: CGFloat(pointIndex), y: middleY - pixel))
context.addLine(to: CGPoint(x: CGFloat(pointIndex), y: middleY + pixel))
context.setStrokeColor(waveViewInfo.strokeColor.cgColor)
context.strokePath()
}
// for pointIndex in waveViewInfo.points.indices{
//
// let pixel = waveViewInfo.points[pointIndex] * sampleAdjustFactor
//
// context.move(to: CGPoint(x: CGFloat(pointIndex) * waveViewInfo.sampleSpace, y: middleY - pixel))
// context.addLine(to: CGPoint(x: CGFloat(pointIndex) * waveViewInfo.sampleSpace, y: middleY + pixel))
//
// context.setStrokeColor(waveViewInfo.strokeColor.cgColor)
// context.strokePath()
//
// }
// var xIncrement:CGFloat = 0
// for point in waveViewInfo.points{
//
// let normalizedPoint = point * sampleAdjustFactor
//
// context.move(to: CGPoint(x: xIncrement, y: middleY - normalizedPoint))
// context.addLine(to: CGPoint(x: xIncrement, y: middleX + normalizedPoint))
// context.setStrokeColor(waveViewInfo.strokeColor.cgColor)
// context.strokePath()
//
// xIncrement += waveViewInfo.sampleSpace
//
// }
guard let soundWaveImage = UIGraphicsGetImageFromCurrentImageContext() else{ return }
soundWaveImageView.image = soundWaveImage
// //In case of handling sample space in for
// updateWidthConstraintValue(soundWaveImage.size.width)
updateWidthConstraintValue(soundWaveImage.size.width * waveViewInfo.sampleSpace)
}
WHERE
class WaveViewInfo {
var format:AVAudioFormat!
var samplesSeconds:CGFloat = 50
var lineWidth:CGFloat = 0.20
var sampleSpace:CGFloat = 0.20
var strokeColor:UIColor = .red
var backgroundColor:UIColor = .clear
var maxValue:CGFloat = 0
var points:[CGFloat] = [CGFloat]()
}
At the moment this only draws a single sound wave, but it can be extended. The nice part is that you can draw an audio track in parts.
A little refactoring of the above answers (using AVAudioFile):
import AVFoundation
import CoreGraphics
import Foundation
import UIKit
class WaveGenerator {
private func readBuffer(_ audioUrl: URL) -> UnsafeBufferPointer<Float> {
let file = try! AVAudioFile(forReading: audioUrl)
let audioFormat = file.processingFormat
let audioFrameCount = UInt32(file.length)
guard let buffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount)
else { return UnsafeBufferPointer<Float>(start: nil, count: 0) }
do {
try file.read(into: buffer)
} catch {
print(error)
}
// let floatArray = Array(UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength)))
let floatArray = UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength))
return floatArray
}
private func generateWaveImage(
_ samples: UnsafeBufferPointer<Float>,
_ imageSize: CGSize,
_ strokeColor: UIColor,
_ backgroundColor: UIColor
) -> UIImage? {
let drawingRect = CGRect(origin: .zero, size: imageSize)
UIGraphicsBeginImageContextWithOptions(imageSize, false, 0)
let middleY = imageSize.height / 2
guard let context: CGContext = UIGraphicsGetCurrentContext() else { return nil }
context.setFillColor(backgroundColor.cgColor)
context.setAlpha(1.0)
context.fill(drawingRect)
context.setLineWidth(0.25)
let max: CGFloat = CGFloat(samples.max() ?? 0)
let heightNormalizationFactor = imageSize.height / max / 2
let widthNormalizationFactor = imageSize.width / CGFloat(samples.count)
for index in 0 ..< samples.count {
let pixel = CGFloat(samples[index]) * heightNormalizationFactor
let x = CGFloat(index) * widthNormalizationFactor
context.move(to: CGPoint(x: x, y: middleY - pixel))
context.addLine(to: CGPoint(x: x, y: middleY + pixel))
context.setStrokeColor(strokeColor.cgColor)
context.strokePath()
}
guard let soundWaveImage = UIGraphicsGetImageFromCurrentImageContext() else { return nil }
UIGraphicsEndImageContext()
return soundWaveImage
}
func generateWaveImage(from audioUrl: URL, in imageSize: CGSize) -> UIImage? {
let samples = readBuffer(audioUrl)
let img = generateWaveImage(samples, imageSize, UIColor.blue, UIColor.white)
return img
}
}
Usage
let waveGenerator = WaveGenerator()
let url = Bundle.main.url(forResource: "TEST1", withExtension: "mp3")!
let img = waveGenerator.generateWaveImage(from: url, in: CGSize(width: 600, height: 200))
Is there a way to get the visible part of the text in a word-wrapped UILabel? I mean, exactly the last visible character?
I'd like to place two labels around an image and continue the text that didn't fit into the first label's rect on the second one.
I know about [NSString sizeWithFont...], but is there something like the reverse, e.g. [NSString stringVisibleInRect:withFont:...]? :-)
Thank you in advance.
You could use a category to extend NSString and create the method you mention
@interface NSString (visibleText)
- (NSString*)stringVisibleInRect:(CGRect)rect withFont:(UIFont*)font;
@end
@implementation NSString (visibleText)
- (NSString*)stringVisibleInRect:(CGRect)rect withFont:(UIFont*)font
{
NSString *visibleString = @"";
for (int i = 1; i <= self.length; i++)
{
NSString *testString = [self substringToIndex:i];
CGSize stringSize = [testString sizeWithFont:font];
if (stringSize.height > rect.size.height || stringSize.width > rect.size.width)
break;
visibleString = testString;
}
return visibleString;
}
@end
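As a usage sketch (firstLabel, secondLabel, and articleText are placeholder properties, not part of the answer above), the category lets you split one string across two labels roughly like this:
NSString *fullText = self.articleText; // placeholder source string
NSString *firstPart = [fullText stringVisibleInRect:self.firstLabel.bounds
                                           withFont:self.firstLabel.font];
self.firstLabel.text = firstPart;
self.secondLabel.text = [fullText substringFromIndex:firstPart.length];
Keep in mind that sizeWithFont: has been deprecated since iOS 7; the boundingRectWithSize: approach in the next answer is the modern replacement.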
Here's an O(log n) method using iOS 7 APIs. It has only been superficially tested; please comment if you find any bugs.
- (NSRange)hp_visibleRange
{
NSString *text = self.text;
NSRange visibleRange = NSMakeRange(NSNotFound, 0);
const NSInteger max = text.length - 1;
if (max >= 0)
{
NSInteger next = max;
const CGSize labelSize = self.bounds.size;
const CGSize maxSize = CGSizeMake(labelSize.width, CGFLOAT_MAX);
NSMutableParagraphStyle *paragraphStyle = [[NSParagraphStyle defaultParagraphStyle] mutableCopy];
paragraphStyle.lineBreakMode = self.lineBreakMode;
NSDictionary * attributes = @{NSFontAttributeName:self.font, NSParagraphStyleAttributeName:paragraphStyle};
NSInteger right;
NSInteger best = 0;
do
{
right = next;
NSRange range = NSMakeRange(0, right + 1);
NSString *substring = [text substringWithRange:range];
CGSize textSize = [substring boundingRectWithSize:maxSize
options:NSStringDrawingUsesLineFragmentOrigin
attributes:attributes
context:nil].size;
if (textSize.width <= labelSize.width && textSize.height <= labelSize.height)
{
visibleRange = range;
best = right;
next = right + (max - right) / 2;
} else if (right > 0)
{
next = right - (right - best) / 2;
}
} while (next != right);
}
return visibleRange;
}
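Assuming hp_visibleRange is added as a UILabel category, the overflow can be handed to a second label along these lines (label1 and label2 are placeholder outlets):
NSRange visible = [self.label1 hp_visibleRange];
if (visible.location != NSNotFound) {
    NSString *fullText = self.label1.text;
    self.label1.text = [fullText substringWithRange:visible];
    self.label2.text = [fullText substringFromIndex:NSMaxRange(visible)];
}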