How to decode the Google Directions API polylines field into lat/long points in Objective-C for iPhone?

I want to draw routes on a map corresponding to directions JSON which I am getting through the Google Directions API: https://developers.google.com/maps/documentation/directions/start
I have figured out how to extract the latitude and longitude from the steps field, but that doesn't follow curvy roads very well. I think what I need is to decode the polyline information; I found Google's instructions on how polylines are encoded: https://developers.google.com/maps/documentation/utilities/polylinealgorithm
I did find some code for Android and also JavaScript on decoding the polylines, for example:
Map View draw directions using google Directions API - decoding polylines
android get and parse Google Directions
But I can't find the same for Objective-C on iPhone. Can anybody help me with this? I'm sure I can do it myself if I have to, but it sure would save me some time if it's already available somewhere.
EDIT: the key here is being able to decode the base64-style encoding on a character-by-character basis. To be more specific, I get something like this in the JSON from Google, which is encoded using (among other things) a base64-style scheme:
... "overview_polyline" : {
"points" : "ydelDz~vpN_#NO#QEKWIYIIO?YCS#WFGBEBICCAE?G#y#RKBEBEBAD?HTpB#LALALCNEJEFSP_#LyDv#aB\\GBMB"
},
...
Note: I should mention that this question refers to Google Maps API v1; it is much easier to do this in v2 using GMSPolyline's polylineWithPath:, as many answers below will tell you (thanks to @cdescours).

I hope it's not against the rules to link to my own blog post if it's relevant to the question, but I've solved this problem in the past. Stand-alone answer from linked post:
@implementation MKPolyline (MKPolyline_EncodedString)

+ (MKPolyline *)polylineWithEncodedString:(NSString *)encodedString {
    const char *bytes = [encodedString UTF8String];
    NSUInteger length = [encodedString lengthOfBytesUsingEncoding:NSUTF8StringEncoding];
    NSUInteger idx = 0;

    NSUInteger count = length / 4;
    CLLocationCoordinate2D *coords = calloc(count, sizeof(CLLocationCoordinate2D));
    NSUInteger coordIdx = 0;

    float latitude = 0;
    float longitude = 0;
    while (idx < length) {
        // Decode one variable-length chunk for the latitude delta
        char byte = 0;
        int res = 0;
        char shift = 0;
        do {
            byte = bytes[idx++] - 63;
            res |= (byte & 0x1F) << shift;
            shift += 5;
        } while (byte >= 0x20);
        float deltaLat = ((res & 1) ? ~(res >> 1) : (res >> 1));
        latitude += deltaLat;

        // Then one for the longitude delta
        shift = 0;
        res = 0;
        do {
            byte = bytes[idx++] - 0x3F;
            res |= (byte & 0x1F) << shift;
            shift += 5;
        } while (byte >= 0x20);
        float deltaLon = ((res & 1) ? ~(res >> 1) : (res >> 1));
        longitude += deltaLon;

        float finalLat = latitude * 1E-5;
        float finalLon = longitude * 1E-5;

        CLLocationCoordinate2D coord = CLLocationCoordinate2DMake(finalLat, finalLon);
        coords[coordIdx++] = coord;

        // Grow the buffer if the length/4 estimate turned out to be too small
        if (coordIdx == count) {
            NSUInteger newCount = count + 10;
            coords = realloc(coords, newCount * sizeof(CLLocationCoordinate2D));
            count = newCount;
        }
    }

    MKPolyline *polyline = [MKPolyline polylineWithCoordinates:coords count:coordIdx];
    free(coords);

    return polyline;
}

@end
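For context, a minimal usage sketch of the category (the routeDict parsing and the self.mapView property are assumptions on my part, not part of the original answer):
NSString *encodedPoints = routeDict[@"overview_polyline"][@"points"];
MKPolyline *line = [MKPolyline polylineWithEncodedString:encodedPoints];
[self.mapView addOverlay:line];
// You still need to return an MKPolylineView (or MKPolylineRenderer on newer SDKs) for the overlay from the map view delegate, as shown in answers further down.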

The best and lightest answer is to use the method provided by Google in the framework:
[GMSPolyline polylineWithPath:[GMSPath pathFromEncodedPath:encodedPath]]
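For example, a minimal sketch of wiring that one-liner to a map (the encodedPath string and the mapView_ variable are assumed to come from your own Directions response handling):
GMSPolyline *routeLine = [GMSPolyline polylineWithPath:[GMSPath pathFromEncodedPath:encodedPath]];
routeLine.strokeWidth = 3.0f;
routeLine.map = mapView_;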

If you are working with Google Maps on iOS and want to draw a route including the polylines, Google itself provides an easier way to get a GMSPath from an encoded polyline:
GMSPath *pathFromPolyline = [GMSPath pathFromEncodedPath:polyLinePoints];
Here is the complete code:
+ (void)callGoogleServiceToGetRouteDataFromSource:(CLLocation *)sourceLocation toDestination:(CLLocation *)destinationLocation onMap:(GMSMapView *)mapView_ {
    NSString *baseUrl = [NSString stringWithFormat:@"http://maps.googleapis.com/maps/api/directions/json?origin=%f,%f&destination=%f,%f&sensor=false", sourceLocation.coordinate.latitude, sourceLocation.coordinate.longitude, destinationLocation.coordinate.latitude, destinationLocation.coordinate.longitude];
    NSURL *url = [NSURL URLWithString:[baseUrl stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]];
    NSLog(@"Url: %@", url);
    NSURLRequest *request = [NSURLRequest requestWithURL:url];

    [NSURLConnection sendAsynchronousRequest:request queue:[NSOperationQueue mainQueue] completionHandler:^(NSURLResponse *response, NSData *data, NSError *connectionError) {
        GMSMutablePath *path = [GMSMutablePath path];

        NSError *error = nil;
        NSDictionary *result = [NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
        NSArray *routes = [result objectForKey:@"routes"];
        NSDictionary *firstRoute = [routes objectAtIndex:0];
        NSDictionary *leg = [[firstRoute objectForKey:@"legs"] objectAtIndex:0];
        NSArray *steps = [leg objectForKey:@"steps"];

        int stepIndex = 0;
        CLLocationCoordinate2D stepCoordinates[1 + [steps count] + 1];

        for (NSDictionary *step in steps) {
            NSDictionary *start_location = [step objectForKey:@"start_location"];
            stepCoordinates[++stepIndex] = [self coordinateWithLocation:start_location];
            [path addCoordinate:[self coordinateWithLocation:start_location]];

            NSString *polyLinePoints = [[step objectForKey:@"polyline"] objectForKey:@"points"];
            GMSPath *polyLinePath = [GMSPath pathFromEncodedPath:polyLinePoints];
            for (int p = 0; p < polyLinePath.count; p++) {
                [path addCoordinate:[polyLinePath coordinateAtIndex:p]];
            }

            if ([steps count] == stepIndex) {
                NSDictionary *end_location = [step objectForKey:@"end_location"];
                stepCoordinates[++stepIndex] = [self coordinateWithLocation:end_location];
                [path addCoordinate:[self coordinateWithLocation:end_location]];
            }
        }

        GMSPolyline *polyline = nil;
        polyline = [GMSPolyline polylineWithPath:path];
        polyline.strokeColor = [UIColor grayColor];
        polyline.strokeWidth = 3.f;
        polyline.map = mapView_;
    }];
}

+ (CLLocationCoordinate2D)coordinateWithLocation:(NSDictionary *)location
{
    double latitude = [[location objectForKey:@"lat"] doubleValue];
    double longitude = [[location objectForKey:@"lng"] doubleValue];

    return CLLocationCoordinate2DMake(latitude, longitude);
}
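A hedged usage sketch of the method above; the class name RouteDrawer is an assumption (use whatever class you put the method on), and mapView_ is your GMSMapView:
CLLocation *source = [[CLLocation alloc] initWithLatitude:23.0225 longitude:72.5714];
CLLocation *destination = [[CLLocation alloc] initWithLatitude:23.0472 longitude:72.5275];
[RouteDrawer callGoogleServiceToGetRouteDataFromSource:source toDestination:destination onMap:mapView_];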

Swift 3.0
let polyline = GMSPolyline(path: GMSPath.init(fromEncodedPath: encodedPolyline))

Python Implementation
This isn't in Objective-C, but this thread is where Google drops you if you're looking to decode polyline strings from Google Maps. In case anyone else needs it (much like I did), here's a Python implementation for decoding polyline strings. It is ported from the Mapbox JavaScript version; more information can be found on my repo page.
def decode_polyline(polyline_str):
    index, lat, lng = 0, 0, 0
    coordinates = []
    changes = {'latitude': 0, 'longitude': 0}

    # Coordinates have variable length when encoded, so just keep
    # track of whether we've hit the end of the string. In each
    # while loop iteration, a single coordinate is decoded.
    while index < len(polyline_str):
        # Gather lat/lon changes, store them in a dictionary to apply them later
        for unit in ['latitude', 'longitude']:
            shift, result = 0, 0

            while True:
                byte = ord(polyline_str[index]) - 63
                index += 1
                result |= (byte & 0x1f) << shift
                shift += 5
                if not byte >= 0x20:
                    break

            if (result & 1):
                changes[unit] = ~(result >> 1)
            else:
                changes[unit] = (result >> 1)

        lat += changes['latitude']
        lng += changes['longitude']

        coordinates.append((lat / 100000.0, lng / 100000.0))

    return coordinates

- (MKPolyline *)polylineWithEncodedString:(NSString *)encodedString {
    const char *bytes = [encodedString UTF8String];
    NSUInteger length = [encodedString lengthOfBytesUsingEncoding:NSUTF8StringEncoding];
    NSUInteger idx = 0;

    NSUInteger count = length / 4;
    CLLocationCoordinate2D *coords = calloc(count, sizeof(CLLocationCoordinate2D));
    NSUInteger coordIdx = 0;

    float latitude = 0;
    float longitude = 0;
    while (idx < length) {
        char byte = 0;
        int res = 0;
        char shift = 0;
        do {
            byte = bytes[idx++] - 63;
            res |= (byte & 0x1F) << shift;
            shift += 5;
        } while (byte >= 0x20);
        float deltaLat = ((res & 1) ? ~(res >> 1) : (res >> 1));
        latitude += deltaLat;

        shift = 0;
        res = 0;
        do {
            byte = bytes[idx++] - 0x3F;
            res |= (byte & 0x1F) << shift;
            shift += 5;
        } while (byte >= 0x20);
        float deltaLon = ((res & 1) ? ~(res >> 1) : (res >> 1));
        longitude += deltaLon;

        float finalLat = latitude * 1E-5;
        float finalLon = longitude * 1E-5;

        CLLocationCoordinate2D coord = CLLocationCoordinate2DMake(finalLat, finalLon);
        coords[coordIdx++] = coord;

        if (coordIdx == count) {
            NSUInteger newCount = count + 10;
            coords = realloc(coords, newCount * sizeof(CLLocationCoordinate2D));
            count = newCount;
        }
    }

    MKPolyline *polyline = [MKPolyline polylineWithCoordinates:coords count:coordIdx];
    free(coords);

    return polyline;
}
- (MKOverlayView *)mapView:(MKMapView *)mapView viewForOverlay:(id <MKOverlay>)overlay {
    // MKPolygonRenderer *polylineView = [[MKPolygonRenderer alloc] initWithOverlay:overlay];
    MKPolylineView *polylineView = [[MKPolylineView alloc] initWithPolyline:(MKPolyline *)overlay];
    polylineView.strokeColor = [UIColor redColor];
    polylineView.lineWidth = 4.0;
    [self zoomToPolyLine:mapview polyline:(MKPolyline *)overlay animated:YES];
    return polylineView;
}

- (void)zoomToPolyLine:(MKMapView *)map polyline:(MKPolyline *)polyline animated:(BOOL)animated
{
    [map setVisibleMapRect:[polyline boundingMapRect] edgePadding:UIEdgeInsetsMake(25.0, 25.0, 25.0, 25.0) animated:animated];
}
- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation
{
    // NSLog(@"didUpdateToLocation: %@", newLocation);
    CLLocation *currentLocation = newLocation;
    if (currentLocation != nil) {
        currlong = [NSString stringWithFormat:@"%.8f", currentLocation.coordinate.longitude];
        currlt = [NSString stringWithFormat:@"%.8f", currentLocation.coordinate.latitude];
    }

    NSString *origin = [NSString stringWithFormat:@"%@%@%@", currlt, @",", currlong];
    // A static destination location is used here just as an example
    NSString *drivein = @"23.0472963,72.52757040000006";
    NSString *apikey = [NSString stringWithFormat:@"https://maps.googleapis.com/maps/api/directions/json?origin=%@&destination=%@", origin, drivein];
    NSURL *url = [NSURL URLWithString:apikey];
    NSURLRequest *request = [NSURLRequest requestWithURL:url];

    NSURLResponse *response;
    NSError *error;
    NSData *responseData = [NSURLConnection sendSynchronousRequest:request returningResponse:&response error:&error];
    NSString *responseString = [[NSString alloc] initWithData:responseData encoding:NSUTF8StringEncoding];

    if (!error)
    {
        NSData *data = [responseString dataUsingEncoding:NSUTF8StringEncoding];
        NSDictionary *jsonResponse = [NSJSONSerialization JSONObjectWithData:data
                                                                     options:kNilOptions
                                                                       error:&error];
        NSArray *routesArray = [jsonResponse objectForKey:@"routes"];
        NSLog(@"route array %@", routesArray);
        if ([routesArray count] > 0)
        {
            NSDictionary *routeDict = [routesArray objectAtIndex:0];
            NSDictionary *routeOverviewPolyline = [routeDict objectForKey:@"overview_polyline"];
            NSString *points = [routeOverviewPolyline objectForKey:@"points"];
            MKPolyline *line = [self polylineWithEncodedString:points];
            [mapview addOverlay:line];
        }
    }

    MKCoordinateRegion viewRegion = MKCoordinateRegionMakeWithDistance(currentLocation.coordinate, 500, 500);
    MKCoordinateRegion adjustedRegion = [mapview regionThatFits:viewRegion];
    [mapview setRegion:adjustedRegion animated:YES];
    mapview.showsUserLocation = YES;

    MKPointAnnotation *point = [[MKPointAnnotation alloc] init];
    point.coordinate = currentLocation.coordinate;
    point.title = @"Your current location";
    point.subtitle = @"You are here!";
    [mapview addAnnotation:point];

    [locationmanger stopUpdatingLocation];
}
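As a side note, sendSynchronousRequest: blocks whatever thread the location callback runs on and was later deprecated; a minimal sketch of doing the same fetch asynchronously with NSURLSession (the JSON handling stays the same as above) might look like this:
NSURLSession *session = [NSURLSession sharedSession];
[[session dataTaskWithURL:url completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
    if (error || !data) return;
    NSDictionary *jsonResponse = [NSJSONSerialization JSONObjectWithData:data options:kNilOptions error:NULL];
    dispatch_async(dispatch_get_main_queue(), ^{
        // pull out routes / overview_polyline / points here and add the overlay, exactly as above
    });
}] resume];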

Here's how I do it in my directions app (keyPlace is your destination object):
- (void)getDirections {
    CLLocation *newLocation; // = currentUserLocation;
    MKPointAnnotation *annotation = [[[MKPointAnnotation alloc] init] autorelease];
    annotation.coordinate = CLLocationCoordinate2DMake(newLocation.coordinate.latitude, newLocation.coordinate.longitude);
    annotation.title = @"You";
    [mapView addAnnotation:annotation];

    CLLocationCoordinate2D endCoordinate;
    NSURL *url = [NSURL URLWithString:[NSString stringWithFormat:@"https://maps.googleapis.com/maps/api/directions/json?origin=%f,%f&destination=%f,%f&sensor=false&mode=walking", newLocation.coordinate.latitude, newLocation.coordinate.longitude, keyPlace.lat, keyPlace.lon]];
    ASIHTTPRequest *request = [ASIHTTPRequest requestWithURL:url];
    [request startSynchronous];

    if ([[request.responseString.JSONValue valueForKey:@"status"] isEqualToString:@"ZERO_RESULTS"]) {
        [[[[UIAlertView alloc] initWithTitle:@"Error"
                                     message:@"Could not route path from your current location"
                                    delegate:nil
                           cancelButtonTitle:@"Close"
                           otherButtonTitles:nil, nil] autorelease] show];
        self.navigationController.navigationBar.userInteractionEnabled = YES;
        return;
    }

    int points_count = 0;
    if ([[request.responseString.JSONValue objectForKey:@"routes"] count])
        points_count = [[[[[[request.responseString.JSONValue objectForKey:@"routes"] objectAtIndex:0] objectForKey:@"legs"] objectAtIndex:0] objectForKey:@"steps"] count];

    if (!points_count) {
        [[[[UIAlertView alloc] initWithTitle:@"Error"
                                     message:@"Could not route path from your current location"
                                    delegate:nil
                           cancelButtonTitle:@"Close"
                           otherButtonTitles:nil, nil] autorelease] show];
        self.navigationController.navigationBar.userInteractionEnabled = YES;
        return;
    }

    CLLocationCoordinate2D points[points_count * 2];

    int j = 0;
    NSArray *steps = nil;
    if (points_count && [[[[request.responseString.JSONValue objectForKey:@"routes"] objectAtIndex:0] objectForKey:@"legs"] count])
        steps = [[[[[request.responseString.JSONValue objectForKey:@"routes"] objectAtIndex:0] objectForKey:@"legs"] objectAtIndex:0] objectForKey:@"steps"];
    for (int i = 0; i < points_count; i++) {
        double st_lat = [[[[steps objectAtIndex:i] objectForKey:@"start_location"] valueForKey:@"lat"] doubleValue];
        double st_lon = [[[[steps objectAtIndex:i] objectForKey:@"start_location"] valueForKey:@"lng"] doubleValue];
        //NSLog(@"lat lon: %f %f", st_lat, st_lon);
        if (st_lat > 0.0f && st_lon > 0.0f) {
            points[j] = CLLocationCoordinate2DMake(st_lat, st_lon);
            j++;
        }
        double end_lat = [[[[steps objectAtIndex:i] objectForKey:@"end_location"] valueForKey:@"lat"] doubleValue];
        double end_lon = [[[[steps objectAtIndex:i] objectForKey:@"end_location"] valueForKey:@"lng"] doubleValue];
        if (end_lat > 0.0f && end_lon > 0.0f) {
            points[j] = CLLocationCoordinate2DMake(end_lat, end_lon);
            endCoordinate = CLLocationCoordinate2DMake(end_lat, end_lon);
            j++;
        }
    }

    // Use j (the number of coordinates actually stored), not points_count * 2
    MKPolyline *polyline = [MKPolyline polylineWithCoordinates:points count:j];
    [mapView addOverlay:polyline];
}
#pragma mark - MapKit

- (MKAnnotationView *)mapView:(MKMapView *)mapView viewForAnnotation:(id <MKAnnotation>)annotation {
    MKPinAnnotationView *annView = [[[MKPinAnnotationView alloc] initWithAnnotation:annotation reuseIdentifier:@"currentloc"] autorelease];
    annView.canShowCallout = YES;
    annView.animatesDrop = YES;
    return annView;
}

- (MKOverlayView *)mapView:(MKMapView *)mapView
            viewForOverlay:(id<MKOverlay>)overlay {
    MKPolylineView *overlayView = [[[MKPolylineView alloc] initWithOverlay:overlay] autorelease];
    overlayView.lineWidth = 5;
    overlayView.strokeColor = [UIColor purpleColor];
    overlayView.fillColor = [[UIColor purpleColor] colorWithAlphaComponent:0.5f];
    return overlayView;
}
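Note that this approach only connects the start/end coordinate of each step, which is exactly what the question describes as cutting corners on curvy roads. A hedged sketch of following the road instead is to decode each step's polyline points field inside the same loop (for example with the MKPolyline category from the accepted answer) and add those as overlays:
NSString *stepPoints = [[[steps objectAtIndex:i] objectForKey:@"polyline"] objectForKey:@"points"];
MKPolyline *stepLine = [MKPolyline polylineWithEncodedString:stepPoints];
[mapView addOverlay:stepLine];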

In case anyone needs the decoding code in VBA, here is a (working) port:
Function decodeGeopoints(encoded)
decodeGeopoints = ""
' This code is a port to VBA from code published here:
' http://blog.synyx.de/2010/06/routing-driving-directions-on-android-part-1-get-the-route/
'//decoding
'List poly = new ArrayList();
'// replace two backslashes by one (some error from the transmission)
'encoded = encoded.replace("\\", "\");
encoded = Replace(encoded, "\\", "\")
'int index = 0, len = encoded.length();
Dim index As Long
index = 0
Dim leng As Long
leng = Len(encoded)
'int lat = 0, lng = 0;
Dim lat As Long
lat = 0
Dim lng As Long
lng = 0
'while (index < len) {
While (index < leng)
'int b, shift = 0, result = 0;
Dim b, shift, result As Long
b = 0
shift = 0
result = 0
'do {
Do
'b = encoded.charAt(index++) - 63;
index = index + 1
b = Asc(Mid(encoded, index, 1)) - 63
'result |= (b & 0x1f) << shift;
result = result Or ((b And 31) * (2 ^ shift))
'shift += 5;
shift = shift + 5
'} while (b >= 0x20);
Loop While (b >= 32)
'int dlat = ((result & 1) != 0 ? ~(result >> 1) : (result >> 1));
Dim dlat As Long
If (result And 1) <> 0 Then
dlat = Not Int(result / 2)
Else
dlat = Int(result / 2)
End If
'lat += dlat;
lat = lat + dlat
'shift = 0;
shift = 0
'result = 0;
result = 0
'do {
Do
'b = encoded.charAt(index++) - 63;
index = index + 1
b = Asc(Mid(encoded, index, 1)) - 63
'result |= (b & 0x1f) << shift;
result = result Or ((b And 31) * (2 ^ shift))
'shift += 5;
shift = shift + 5
'} while (b >= 0x20);
Loop While (b >= 32)
'int dlng = ((result & 1) != 0 ? ~(result >> 1) : (result >> 1));
Dim dlng As Long
If (result And 1) <> 0 Then
dlng = Not Int(result / 2)
Else
dlng = Int(result / 2)
End If
'lng += dlng;
lng = lng + dlng
'GeoPoint p = new GeoPoint((int) (((double) lat / 1E5) * 1E6), (int) (((double) lng / 1E5) * 1E6));
Dim myLat, myLng As Double
myLat = (lat / 100000)
'myLat = myLat * 1000000
myLng = (lng / 100000)
'myLng = myLng * 1000000
'poly.add(p);
decodeGeopoints = decodeGeopoints & Comma2Dot(myLng) & "," & Comma2Dot(myLat) & ",0 "
'}
Wend
End Function

Google Maps already has a straightforward method for this, polylineWithPath:, so I prefer this snippet.
- (void)drawPathFrom:(CLLocation *)source toDestination:(CLLocation *)destination {
    NSString *baseUrl = [NSString stringWithFormat:@"http://maps.googleapis.com/maps/api/directions/json?origin=%f,%f&destination=%f,%f&sensor=true", source.coordinate.latitude, source.coordinate.longitude, destination.coordinate.latitude, destination.coordinate.longitude];
    NSURL *url = [NSURL URLWithString:[baseUrl stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]];
    NSLog(@"Url: %@", url);
    NSURLRequest *request = [NSURLRequest requestWithURL:url];

    [NSURLConnection sendAsynchronousRequest:request queue:[NSOperationQueue mainQueue] completionHandler:^(NSURLResponse *response, NSData *data, NSError *connectionError) {
        if (!connectionError) {
            NSDictionary *result = [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
            NSArray *routes = [result objectForKey:@"routes"];
            NSDictionary *firstRoute = [routes objectAtIndex:0];
            NSString *encodedPath = [firstRoute[@"overview_polyline"] objectForKey:@"points"];

            GMSPolyline *polyPath = [GMSPolyline polylineWithPath:[GMSPath pathFromEncodedPath:encodedPath]];
            polyPath.strokeColor = [UIColor redColor];
            polyPath.strokeWidth = 3.5f;
            polyPath.map = _mapView;
        }
    }];
}

Swift 4.2 / Swift 5
let gmsPolyline = GMSPolyline(path: GMSPath(fromEncodedPath: encodedPolyline))
gmsPolyline.map = map

This is my own revisitation of Sedate Alien's answer.
It is the same implementation save for removing duplicated code and using NSMutableData instead of manually allocating stuff.
@implementation MKPolyline (EncodedString)

+ (float)decodeBytes:(const char *)bytes atPos:(NSUInteger *)idx toValue:(float *)value {
    char byte = 0;
    int res = 0;
    char shift = 0;

    do {
        byte = bytes[(*idx)++] - 0x3F;
        res |= (byte & 0x1F) << shift;
        shift += 5;
    }
    while (byte >= 0x20);

    (*value) += ((res & 1) ? ~(res >> 1) : (res >> 1));

    return (*value) * 1E-5;
}

+ (MKPolyline *)polylineWithEncodedString:(NSString *)encodedString {
    const char *bytes = [encodedString UTF8String];
    NSUInteger length = [encodedString lengthOfBytesUsingEncoding:NSUTF8StringEncoding];
    NSUInteger idx = 0;

    NSMutableData *data = [NSMutableData data];

    float lat = 0;
    float lon = 0;
    CLLocationCoordinate2D coords = CLLocationCoordinate2DMake(0, 0);

    while (idx < length) {
        coords.latitude = [self decodeBytes:bytes atPos:&idx toValue:&lat];
        coords.longitude = [self decodeBytes:bytes atPos:&idx toValue:&lon];
        [data appendBytes:&coords length:sizeof(CLLocationCoordinate2D)];
    }

    return [MKPolyline polylineWithCoordinates:(CLLocationCoordinate2D *)data.bytes count:data.length / sizeof(CLLocationCoordinate2D)];
}

@end

The other answers here seem to be about using Apple Maps; for Google Maps, I found I had to make some modifications to @SedateAlien's great category.
MODIFIED CATEGORY
+ (GMSPolyline *)polylineWithEncodedString:(NSString *)encodedString {
    const char *bytes = [encodedString UTF8String];
    NSUInteger length = [encodedString lengthOfBytesUsingEncoding:NSUTF8StringEncoding];
    NSUInteger idx = 0;

    NSUInteger count = length / 4;
    CLLocationCoordinate2D *coords = calloc(count, sizeof(CLLocationCoordinate2D));
    NSUInteger coordIdx = 0;

    float latitude = 0;
    float longitude = 0;
    while (idx < length) {
        char byte = 0;
        int res = 0;
        char shift = 0;
        do {
            byte = bytes[idx++] - 63;
            res |= (byte & 0x1F) << shift;
            shift += 5;
        } while (byte >= 0x20);
        float deltaLat = ((res & 1) ? ~(res >> 1) : (res >> 1));
        latitude += deltaLat;

        shift = 0;
        res = 0;
        do {
            byte = bytes[idx++] - 0x3F;
            res |= (byte & 0x1F) << shift;
            shift += 5;
        } while (byte >= 0x20);
        float deltaLon = ((res & 1) ? ~(res >> 1) : (res >> 1));
        longitude += deltaLon;

        float finalLat = latitude * 1E-5;
        float finalLon = longitude * 1E-5;

        CLLocationCoordinate2D coord = CLLocationCoordinate2DMake(finalLat, finalLon);
        coords[coordIdx++] = coord;

        if (coordIdx == count) {
            NSUInteger newCount = count + 10;
            coords = realloc(coords, newCount * sizeof(CLLocationCoordinate2D));
            count = newCount;
        }
    }

    GMSMutablePath *path = [[GMSMutablePath alloc] init];
    int i;
    for (i = 0; i < coordIdx; i++)
    {
        [path addCoordinate:coords[i]];
    }

    GMSPolyline *polyline = [GMSPolyline polylineWithPath:path];
    free(coords);

    return polyline;
}
USAGE
// Here I make the call to the Google Maps API to get the routes between two points...
....
// Get the encoded array of points.
NSString *points = routes[@"routes"][0][@"overview_polyline"][@"points"];
// Use the modified category to get a polyline from the points.
GMSPolyline *polyline = [GMSPolyline polylineWithEncodedString:points];
// Add the polyline to the map.
polyline.strokeColor = [UIColor redColor];
polyline.strokeWidth = 10.f;
polyline.map = theMapView;
}

If anybody else is trying to do this in Swift, here's @RootCode's answer adapted to Swift (2.3):
let path = GMSMutablePath()
let steps = directionsToShowOnMap.steps
for (idx, step) in steps.enumerate() {
    path.addCoordinate(coordinateFromJson(step["start_location"]))
    if let polylinePoints = step["polyline"].string, subpath = GMSPath(fromEncodedPath: polylinePoints) {
        for c in 0 ..< subpath.count() {
            path.addCoordinate(subpath.coordinateAtIndex(c))
        }
    }
    if idx == steps.count - 1 {
        path.addCoordinate(coordinateFromJson(step["end_location"]))
    }
}

let polyline = GMSPolyline(path: path)
polyline.strokeColor = UIColor.blueColor()
polyline.strokeWidth = 3
polyline.map = mapView
and then:
private func coordinateFromJson(location: JSON) -> CLLocationCoordinate2D {
    return CLLocationCoordinate2DMake(location["lat"].double!, location["lng"].double!)
}

Related

How to access Salesforce Attachment Body (base64 binary data) in iOS?

I am working on a native iOS app that fetches attachments from Salesforce.
I have to show the Salesforce attachments in my iPhone app for a particular object, like Leads, Contacts, etc. For that I am using the REST API and got a response body. But the response body contains a URL, whereas I want the binary data of the attachment body.
Here is my code:
My rest api request
NSString *attachments = [NSString stringWithFormat:@"select Name, Body, ContentType from Attachment"];
SFRestRequest *request = [[SFRestAPI sharedInstance] requestForQuery:attachments];
[[SFRestAPI sharedInstance] send:request delegate:self];
I get the response body in the following format:
{
Body = "/services/data/v23.0/sobjects/Attachment/00P90000004SRFlEAO/Body";
ContentType = "application/video";
Name = "Video.MOV";
attributes = {
type = Attachment;
url = "/services/data/v23.0/sobjects/Attachment/00P90000004SRFlEAO";
};
}
I use this code to download the attachment after getting the Body URL:
SFRestRequest *downloadRequest = [self requestForFileContents:@"/services/data/v23.0/sobjects/Attachment/00P90000004SRFlEAO/Body"];
- (SFRestRequest *)requestForFileContents:(NSString *)path {
    NSMutableDictionary *params = [NSMutableDictionary dictionary];
    SFRestRequest *request = [SFRestRequest requestWithMethod:SFRestMethodGET path:path queryParams:params];
    request.parseResponse = NO;
    return request;
}
You have to make a GET request to the URL returned in the Body field to fetch the actual binary content.
Check this code:
id url = @"http://blogs.independent.co.uk/wp-content/uploads/2012/12/google-zip.jpg";
[self getImageBase64:url];
-( NSString *) AFBase64EncodedStringFromString: (NSData*) data
{
NSUInteger length = [data length];
NSMutableData *mutableData = [NSMutableData dataWithLength:((length + 2) / 3) * 4];
uint8_t *input = (uint8_t *)[data bytes];
uint8_t *output = (uint8_t *)[mutableData mutableBytes];
for (NSUInteger i = 0; i < length; i += 3) {
NSUInteger value = 0;
for (NSUInteger j = i; j < (i + 3); j++) {
value <<= 8;
if (j < length) {
value |= (0xFF & input[j]);
}
}
static uint8_t const kAFBase64EncodingTable[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
NSUInteger idx = (i / 3) * 4;
output[idx + 0] = kAFBase64EncodingTable[(value >> 18) & 0x3F];
output[idx + 1] = kAFBase64EncodingTable[(value >> 12) & 0x3F];
output[idx + 2] = (i + 1) < length ? kAFBase64EncodingTable[(value >> 6) & 0x3F] : '=';
output[idx + 3] = (i + 2) < length ? kAFBase64EncodingTable[(value >> 0) & 0x3F] : '=';
}
return [[NSString alloc] initWithData:mutableData encoding:NSASCIIStringEncoding];
}
-(NSString *) getImageBase64:(NSString *) url
{
NSURLRequest * imageUrlRequest = [NSURLRequest requestWithURL:[NSURL URLWithString:url]];
NSURLResponse * response=nil;
NSError * error =nil;
NSData * data = [NSURLConnection sendSynchronousRequest:imageUrlRequest returningResponse:&response error:&error];
if(error == nil)
{
return [self AFBase64EncodedStringFromString:data];
}
return nil;
}
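Alternatively, staying inside the Salesforce SDK, you can send the downloadRequest built above and receive the raw bytes through the SFRestDelegate callback. A rough sketch, with the delegate method name assumed from the SDK's SFRestDelegate protocol of that era (verify against your SDK version):
[[SFRestAPI sharedInstance] send:downloadRequest delegate:self];

- (void)request:(SFRestRequest *)request didLoadResponse:(id)dataResponse {
    // Because parseResponse is NO, the response should arrive as raw NSData rather than parsed JSON
    NSData *attachmentData = (NSData *)dataResponse;
    NSString *base64Body = [self AFBase64EncodedStringFromString:attachmentData];
    NSLog(@"Downloaded %lu bytes", (unsigned long)attachmentData.length);
}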

-[NSCFDictionary length]: unrecognized selector

I have the following problem with this code:
NSDictionary *imagen = [[NSDictionary alloc] initWithDictionary:[envio resultValue]];
NSString *imagenS = [imagen valueForKey:@"/Result"];
[Base64 initialize];
NSData *imagenDecode = [Base64 decode:imagenS];
NSLog(@"%@", [imagenS length]);
//SAVE IMAGE
NSArray *sysPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docDirectory = [sysPaths objectAtIndex:0];
NSString *filePath = [NSString stringWithFormat:@"%@david.png", docDirectory];
[imagenDecode writeToFile:filePath atomically:YES];
[envio resultValue] --> returns an NSDictionary with one image in Base64 encoding.
I want to decode and save this image, but the console shows this message:
2011-08-23 20:15:36.539 WSStub[39226:a0f] -[NSCFDictionary length]: unrecognized selector sent to instance 0xd00ee0
The Base 64 code is:
//
// Base64.m
// CryptTest
//
// Created by Kiichi Takeuchi on 4/20/10.
// Copyright 2010 ObjectGraph LLC. All rights reserved.
//
#import "Base64.h"
@implementation Base64
#define ArrayLength(x) (sizeof(x)/sizeof(*(x)))
static char encodingTable[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
static char decodingTable[128];
+ (void) initialize
{
if (self == [Base64 class])
{
memset(decodingTable, 0, ArrayLength(decodingTable));
for (NSInteger i = 0; i < ArrayLength(encodingTable); i++) {
decodingTable[encodingTable[i]] = i;
}
}
}
+ (NSData*) decode:(const char*) string length:(NSInteger) inputLength
{
if ((string == NULL) || (inputLength % 4 != 0))
{
return nil;
}
while (inputLength > 0 && string[inputLength - 1] == '=')
{
inputLength--;
}
NSInteger outputLength = inputLength * 3 / 4;
NSMutableData* data = [NSMutableData dataWithLength:outputLength];
uint8_t* output = data.mutableBytes;
NSInteger inputPoint = 0;
NSInteger outputPoint = 0;
while (inputPoint < inputLength)
{
char i0 = string[inputPoint++];
char i1 = string[inputPoint++];
char i2 = inputPoint < inputLength ? string[inputPoint++] : 'A'; /* 'A' will decode to \0 */
char i3 = inputPoint < inputLength ? string[inputPoint++] : 'A';
output[outputPoint++] = (decodingTable[i0] << 2) | (decodingTable[i1] >> 4);
if (outputPoint < outputLength)
{
output[outputPoint++] = ((decodingTable[i1] & 0xf) << 4) | (decodingTable[i2] >> 2);
}
if (outputPoint < outputLength)
{
output[outputPoint++] = ((decodingTable[i2] & 0x3) << 6) | decodingTable[i3];
}
}
return data;
}
+ (NSData*) decode:(NSString*) string
{
return [self decode:[string cStringUsingEncoding:NSASCIIStringEncoding] length:[string length]];
}
@end
The line
NSString *imagenS = [imagen valueForKey:@"/Result"];
is returning a dictionary, not a string. You'll have to inspect your data source to determine whether this is correct or not.
Your NSLog() call is wrong. To show a length, it should be:
NSLog(@"%lu", [imagenS length]);
But that is probably not the problem.
You seem to be invoking length on an NSDictionary. Hard to tell where you do that, since you don't show the piece of code where that happens. It could be that imagenS is not an NSString, but an NSDictionary instead.
Try to do:
NSLog(@"%@", [imagenS class]);
and see what is displayed. It probably tells you that imagenS is not a string, but an NSCFDictionary or similar instead.
FWIW, NSCFDictionary is one of the subclasses of NSDictionary that actually implement the different versions of the main class. This is called a class cluster.
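To make that concrete, a small defensive sketch (the nested key name Result is only a guess at how the payload might be shaped, so adjust it to your actual response):
id resultValue = [imagen objectForKey:@"/Result"];
NSLog(@"class = %@, value = %@", [resultValue class], resultValue);
if ([resultValue isKindOfClass:[NSDictionary class]]) {
    // Drill into the nested dictionary to find the actual Base64 string
    resultValue = [(NSDictionary *)resultValue objectForKey:@"Result"];
}
NSString *imagenS = [resultValue isKindOfClass:[NSString class]] ? resultValue : nil;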

iOS : Access JPEG metadata at Marker APP3

I have been using the Exif tags to store some data on jpeg files in the following manner:
CGImageSourceRef source = CGImageSourceCreateWithURL(baseURL, NULL);
NSDictionary *metadata = (NSDictionary *) CGImageSourceCopyPropertiesAtIndex(source,0,NULL);
NSMutableDictionary *metadataAsMutable = [[metadata mutableCopy]autorelease];
NSMutableDictionary *EXIFDictionary = [[[metadataAsMutable objectForKey:(NSString *)kCGImagePropertyExifDictionary]mutableCopy]autorelease];
[EXIFDictionary setObject:[NSString stringWithFormat:@"%d", tag] forKey:(NSString *)kCGImagePropertyExifUserComment];
Now, I would like to use a custom Application Marker (APP3 at 0xFFE3) instead of Exif Marker.
(Refer - http://www.ozhiker.com/electronics/pjmt/jpeg_info/app_segments.html)
Could someone point me in the right direction.
PS: I am using a jailbroken iPad for this app.
Okay, it seems we have to go the file-handle way for this. Here is what I did, though there may be a better way to do it.
Create a File handle:
NSString *filePath = currentImageObject.myFilePath;
NSFileHandle *fileHandle = [NSFileHandle fileHandleForUpdatingAtPath:filePath];
if(!fileHandle){
return;
}
[fileHandle seekToEndOfFile];
unsigned long long eofOffset = [fileHandle offsetInFile];
Then iterate the file contents till you find a desired tag:
BOOL markerFound = NO;
BOOL dqtFound = NO;
while ((!markerFound) && (!dqtFound) && ([fileHandle offsetInFile] < eofOffset)) {
currentOffset += 1;
[fileHandle seekToFileOffset:currentOffset];
NSData *markerData = [fileHandle readDataOfLength:1];
currentOffset += 1;
NSInteger markerValue = (unsigned char)*(unsigned char *)[markerData bytes];
if (0xe0 == markerValue) {
currentOffset += 14;
[fileHandle seekToFileOffset:currentOffset];
NSData *xThumbnailData = [fileHandle readDataOfLength:1];
currentOffset += 1;
NSData *yThumbnailData = [fileHandle readDataOfLength:1];
currentOffset += 1;
NSInteger xThumbnail = (unsigned char)*(unsigned char *)[xThumbnailData bytes];
NSInteger yThumbnail = (unsigned char)*(unsigned char *)[yThumbnailData bytes];
NSInteger thumbnailSize = 3 * xThumbnail * yThumbnail;
currentOffset += thumbnailSize;
[fileHandle seekToFileOffset:currentOffset];
} else if (0xe3 == markerValue) {
markerFound = YES;
break;
} else if (0xdb == markerValue) {
dqtFound = YES;
break;
} else {
NSData *lengthData = [fileHandle readDataOfLength:2];
currentOffset += 2;
NSInteger length = (unsigned short)*(unsigned short *)[lengthData bytes];
length = NSSwapBigShortToHost(length);
length -= 2;
currentOffset += length;
[fileHandle seekToFileOffset:currentOffset];
}
}
This gives you the offset of the APP3 marker; in case you need to add your own APP3 marker, you can use a similar approach.
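For the "add your own APP3 marker" case, the segment layout is the standard JPEG one: 0xFF 0xE3, then a two-byte big-endian length that includes the length field itself, then the payload. A rough NSFileHandle sketch of splicing a segment in at an insertion offset you have already computed (illustrative only, not production code):
NSData *payload = [@"my custom data" dataUsingEncoding:NSUTF8StringEncoding];
uint8_t marker[2] = {0xFF, 0xE3};
uint16_t segLength = NSSwapHostShortToBig((uint16_t)(payload.length + 2)); // length covers itself plus payload
[fileHandle seekToFileOffset:insertionOffset];
NSData *tail = [fileHandle readDataToEndOfFile];   // keep the rest of the file
[fileHandle seekToFileOffset:insertionOffset];
[fileHandle writeData:[NSData dataWithBytes:marker length:2]];
[fileHandle writeData:[NSData dataWithBytes:&segLength length:2]];
[fileHandle writeData:payload];
[fileHandle writeData:tail];                        // re-append the remaining JPEG data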

Drawing waveform with AVAssetReader

I am reading a song from the iPod library using its asset URL (in the code it is named audioUrl).
I can play it many ways, I can cut it, I can do some processing with it, but...
I really don't understand what I am supposed to do with this CMSampleBufferRef to get data for drawing a waveform! I need info about peak values; how can I get it this (or maybe another) way?
AVAssetTrack * songTrack = [audioUrl.tracks objectAtIndex:0];
AVAssetReaderTrackOutput * output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:nil];
[reader addOutput:output];
[output release];
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];
while (reader.status == AVAssetReaderStatusReading){
AVAssetReaderTrackOutput * trackOutput =
(AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (sampleBufferRef){/* what I gonna do with this? */}
Please help me!
I was searching for a similar thing and decided to "roll my own."
I realize this is an old post, but in case anyone else is in search of this, here is my solution. It is relatively quick and dirty and normalizes the image to "full scale".
The images it creates are "wide", i.e. you need to put them in a UIScrollView or otherwise manage the display.
This is based on some answers given to this question.
Sample Output
EDIT: I have added a logarithmic version of the averaging and render methods, see the end of this message for the alternate version & comparison outputs. I personally prefer the original linear version, but have decided to post it, in case someone can improve on the algorithm used.
You'll need these imports:
#import <MediaPlayer/MediaPlayer.h>
#import <AVFoundation/AVFoundation.h>
First, a generic rendering method that takes a pointer to averaged sample data,
and returns a UIImage. Note these samples are not playable audio samples.
-(UIImage *) audioImageGraph:(SInt16 *) samples
normalizeMax:(SInt16) normalizeMax
sampleCount:(NSInteger) sampleCount
channelCount:(NSInteger) channelCount
imageHeight:(float) imageHeight {
CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
UIGraphicsBeginImageContext(imageSize);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
CGContextSetAlpha(context,1.0);
CGRect rect;
rect.size = imageSize;
rect.origin.x = 0;
rect.origin.y = 0;
CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
CGColorRef rightcolor = [[UIColor redColor] CGColor];
CGContextFillRect(context, rect);
CGContextSetLineWidth(context, 1.0);
float halfGraphHeight = (imageHeight / 2) / (float) channelCount ;
float centerLeft = halfGraphHeight;
float centerRight = (halfGraphHeight*3) ;
float sampleAdjustmentFactor = (imageHeight/ (float) channelCount) / (float) normalizeMax;
for (NSInteger intSample = 0 ; intSample < sampleCount ; intSample ++ ) {
SInt16 left = *samples++;
float pixels = (float) left;
pixels *= sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerLeft-pixels);
CGContextAddLineToPoint(context, intSample, centerLeft+pixels);
CGContextSetStrokeColorWithColor(context, leftcolor);
CGContextStrokePath(context);
if (channelCount==2) {
SInt16 right = *samples++;
float pixels = (float) right;
pixels *= sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerRight - pixels);
CGContextAddLineToPoint(context, intSample, centerRight + pixels);
CGContextSetStrokeColorWithColor(context, rightcolor);
CGContextStrokePath(context);
}
}
// Create new image
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
// Tidy up
UIGraphicsEndImageContext();
return newImage;
}
Next, a method that takes a AVURLAsset, and returns PNG image data
- (NSData *) renderPNGAudioPictogramForAsset:(AVURLAsset *)songAsset {
NSError * error = nil;
AVAssetReader * reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];
AVAssetTrack * songTrack = [songAsset.tracks objectAtIndex:0];
NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
// [NSNumber numberWithInt:44100.0],AVSampleRateKey, /*Not Supported*/
// [NSNumber numberWithInt: 2],AVNumberOfChannelsKey, /*Not Supported*/
[NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
nil];
AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];
[reader addOutput:output];
[output release];
UInt32 sampleRate,channelCount;
NSArray* formatDesc = songTrack.formatDescriptions;
for(unsigned int i = 0; i < [formatDesc count]; ++i) {
CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
if(fmtDesc ) {
sampleRate = fmtDesc->mSampleRate;
channelCount = fmtDesc->mChannelsPerFrame;
// NSLog(@"channels:%u, bytes/packet: %u, sampleRate %f",fmtDesc->mChannelsPerFrame, fmtDesc->mBytesPerPacket,fmtDesc->mSampleRate);
}
}
UInt32 bytesPerSample = 2 * channelCount;
SInt16 normalizeMax = 0;
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];
UInt64 totalBytes = 0;
SInt64 totalLeft = 0;
SInt64 totalRight = 0;
NSInteger sampleTally = 0;
NSInteger samplesPerPixel = sampleRate / 50;
while (reader.status == AVAssetReaderStatusReading){
AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (sampleBufferRef){
CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);
size_t length = CMBlockBufferGetDataLength(blockBufferRef);
totalBytes += length;
NSAutoreleasePool *wader = [[NSAutoreleasePool alloc] init];
NSMutableData * data = [NSMutableData dataWithLength:length];
CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);
SInt16 * samples = (SInt16 *) data.mutableBytes;
int sampleCount = length / bytesPerSample;
for (int i = 0; i < sampleCount ; i ++) {
SInt16 left = *samples++;
totalLeft += left;
SInt16 right;
if (channelCount==2) {
right = *samples++;
totalRight += right;
}
sampleTally++;
if (sampleTally > samplesPerPixel) {
left = totalLeft / sampleTally;
SInt16 fix = abs(left);
if (fix > normalizeMax) {
normalizeMax = fix;
}
[fullSongData appendBytes:&left length:sizeof(left)];
if (channelCount==2) {
right = totalRight / sampleTally;
SInt16 fix = abs(right);
if (fix > normalizeMax) {
normalizeMax = fix;
}
[fullSongData appendBytes:&right length:sizeof(right)];
}
totalLeft = 0;
totalRight = 0;
sampleTally = 0;
}
}
[wader drain];
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
}
}
NSData * finalData = nil;
if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown){
// Something went wrong. return nil
return nil;
}
if (reader.status == AVAssetReaderStatusCompleted){
NSLog(@"rendering output graphics using normalizeMax %d",normalizeMax);
UIImage *test = [self audioImageGraph:(SInt16 *)
fullSongData.bytes
normalizeMax:normalizeMax
sampleCount:fullSongData.length / 4
channelCount:2
imageHeight:100];
finalData = imageToData(test);
}
[fullSongData release];
[reader release];
return finalData;
}
Advanced Option:
Finally, if you want to be able to play the audio using AVAudioPlayer, you'll need to cache
it to your app's cache folder. Since I was doing that, I decided to cache the image data
also, and wrapped the whole thing into a UIImage category. You need to include this open source offering to extract the audio, and some code from here to handle some background threading features.
first, some defines, and a few generic class methods for handling path names etc
//#define imgExt @"jpg"
//#define imageToData(x) UIImageJPEGRepresentation(x,4)
#define imgExt @"png"
#define imageToData(x) UIImagePNGRepresentation(x)
+ (NSString *) assetCacheFolder {
NSArray *assetFolderRoot = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
return [NSString stringWithFormat:@"%@/audio", [assetFolderRoot objectAtIndex:0]];
}
+ (NSString *) cachedAudioPictogramPathForMPMediaItem:(MPMediaItem*) item {
NSString *assetFolder = [[self class] assetCacheFolder];
NSNumber * libraryId = [item valueForProperty:MPMediaItemPropertyPersistentID];
NSString *assetPictogramFilename = [NSString stringWithFormat:@"asset_%@.%@",libraryId,imgExt];
return [NSString stringWithFormat:@"%@/%@", assetFolder, assetPictogramFilename];
}
+ (NSString *) cachedAudioFilepathForMPMediaItem:(MPMediaItem*) item {
NSString *assetFolder = [[self class] assetCacheFolder];
NSURL * assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
NSNumber * libraryId = [item valueForProperty:MPMediaItemPropertyPersistentID];
NSString *assetFileExt = [[[assetURL path] lastPathComponent] pathExtension];
NSString *assetFilename = [NSString stringWithFormat:@"asset_%@.%@",libraryId,assetFileExt];
return [NSString stringWithFormat:@"%@/%@", assetFolder, assetFilename];
}
+ (NSURL *) cachedAudioURLForMPMediaItem:(MPMediaItem*) item {
NSString *assetFilepath = [[self class] cachedAudioFilepathForMPMediaItem:item];
return [NSURL fileURLWithPath:assetFilepath];
}
Now the init method that does "the business"
- (id) initWithMPMediaItem:(MPMediaItem*) item
completionBlock:(void (^)(UIImage* delayedImagePreparation))completionBlock {
NSFileManager *fman = [NSFileManager defaultManager];
NSString *assetPictogramFilepath = [[self class] cachedAudioPictogramPathForMPMediaItem:item];
if ([fman fileExistsAtPath:assetPictogramFilepath]) {
NSLog(@"Returning cached waveform pictogram: %@",[assetPictogramFilepath lastPathComponent]);
self = [self initWithContentsOfFile:assetPictogramFilepath];
return self;
}
NSString *assetFilepath = [[self class] cachedAudioFilepathForMPMediaItem:item];
NSURL *assetFileURL = [NSURL fileURLWithPath:assetFilepath];
if ([fman fileExistsAtPath:assetFilepath]) {
NSLog(@"scanning cached audio data to create UIImage file: %@",[assetFilepath lastPathComponent]);
[assetFileURL retain];
[assetPictogramFilepath retain];
[NSThread MCSM_performBlockInBackground: ^{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
NSData *waveFormData = [self renderPNGAudioPictogramForAsset:asset];
[waveFormData writeToFile:assetPictogramFilepath atomically:YES];
[assetFileURL release];
[assetPictogramFilepath release];
if (completionBlock) {
[waveFormData retain];
[NSThread MCSM_performBlockOnMainThread:^{
UIImage *result = [UIImage imageWithData:waveFormData];
NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data in UIImage %0.0f x %0.0f pixels)",waveFormData.length,[imgExt uppercaseString],result.size.width,result.size.height);
completionBlock(result);
[waveFormData release];
}];
}
}];
return nil;
} else {
NSString *assetFolder = [[self class] assetCacheFolder];
[fman createDirectoryAtPath:assetFolder withIntermediateDirectories:YES attributes:nil error:nil];
NSLog(@"Preparing to import audio asset data %@",[assetFilepath lastPathComponent]);
[assetPictogramFilepath retain];
[assetFileURL retain];
TSLibraryImport* import = [[TSLibraryImport alloc] init];
NSURL * assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
[import importAsset:assetURL toURL:assetFileURL completionBlock:^(TSLibraryImport* import) {
//check the status and error properties of
//TSLibraryImport
if (import.error) {
NSLog (@"audio data import failed:%@",import.error);
} else{
NSLog (@"Creating waveform pictogram file: %@", [assetPictogramFilepath lastPathComponent]);
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
NSData *waveFormData = [self renderPNGAudioPictogramForAsset:asset];
[waveFormData writeToFile:assetPictogramFilepath atomically:YES];
if (completionBlock) {
[waveFormData retain];
[NSThread MCSM_performBlockOnMainThread:^{
UIImage *result = [UIImage imageWithData:waveFormData];
NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data in UIImage %0.0f x %0.0f pixels)",waveFormData.length,[imgExt uppercaseString],result.size.width,result.size.height);
completionBlock(result);
[waveFormData release];
}];
}
}
[assetPictogramFilepath release];
[assetFileURL release];
} ];
return nil;
}
}
An example of invoking this :
-(void) importMediaItem {
MPMediaItem* item = [self mediaItem];
// since we will be needing this for playback, save the url to the cached audio.
[url release];
url = [[UIImage cachedAudioURLForMPMediaItem:item] retain];
[waveFormImage release];
waveFormImage = [[UIImage alloc ] initWithMPMediaItem:item completionBlock:^(UIImage* delayedImagePreparation){
waveFormImage = [delayedImagePreparation retain];
[self displayWaveFormImage];
}];
if (waveFormImage) {
[waveFormImage retain];
[self displayWaveFormImage];
}
}
Logarithmic version of averaging and render methods
#define absX(x) (x<0?0-x:x)
#define minMaxX(x,mn,mx) (x<=mn?mn:(x>=mx?mx:x))
#define noiseFloor (-90.0)
#define decibel(amplitude) (20.0 * log10(absX(amplitude)/32767.0))
-(UIImage *) audioImageLogGraph:(Float32 *) samples
normalizeMax:(Float32) normalizeMax
sampleCount:(NSInteger) sampleCount
channelCount:(NSInteger) channelCount
imageHeight:(float) imageHeight {
CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
UIGraphicsBeginImageContext(imageSize);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
CGContextSetAlpha(context,1.0);
CGRect rect;
rect.size = imageSize;
rect.origin.x = 0;
rect.origin.y = 0;
CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
CGColorRef rightcolor = [[UIColor redColor] CGColor];
CGContextFillRect(context, rect);
CGContextSetLineWidth(context, 1.0);
float halfGraphHeight = (imageHeight / 2) / (float) channelCount ;
float centerLeft = halfGraphHeight;
float centerRight = (halfGraphHeight*3) ;
float sampleAdjustmentFactor = (imageHeight/ (float) channelCount) / (normalizeMax - noiseFloor) / 2;
for (NSInteger intSample = 0 ; intSample < sampleCount ; intSample ++ ) {
Float32 left = *samples++;
float pixels = (left - noiseFloor) * sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerLeft-pixels);
CGContextAddLineToPoint(context, intSample, centerLeft+pixels);
CGContextSetStrokeColorWithColor(context, leftcolor);
CGContextStrokePath(context);
if (channelCount==2) {
Float32 right = *samples++;
float pixels = (right - noiseFloor) * sampleAdjustmentFactor;
CGContextMoveToPoint(context, intSample, centerRight - pixels);
CGContextAddLineToPoint(context, intSample, centerRight + pixels);
CGContextSetStrokeColorWithColor(context, rightcolor);
CGContextStrokePath(context);
}
}
// Create new image
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
// Tidy up
UIGraphicsEndImageContext();
return newImage;
}
- (NSData *) renderPNGAudioPictogramLogForAsset:(AVURLAsset *)songAsset {
NSError * error = nil;
AVAssetReader * reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];
AVAssetTrack * songTrack = [songAsset.tracks objectAtIndex:0];
NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
// [NSNumber numberWithInt:44100.0],AVSampleRateKey, /*Not Supported*/
// [NSNumber numberWithInt: 2],AVNumberOfChannelsKey, /*Not Supported*/
[NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
nil];
AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];
[reader addOutput:output];
[output release];
UInt32 sampleRate,channelCount;
NSArray* formatDesc = songTrack.formatDescriptions;
for(unsigned int i = 0; i < [formatDesc count]; ++i) {
CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
if(fmtDesc ) {
sampleRate = fmtDesc->mSampleRate;
channelCount = fmtDesc->mChannelsPerFrame;
// NSLog(@"channels:%u, bytes/packet: %u, sampleRate %f",fmtDesc->mChannelsPerFrame, fmtDesc->mBytesPerPacket,fmtDesc->mSampleRate);
}
}
UInt32 bytesPerSample = 2 * channelCount;
Float32 normalizeMax = noiseFloor;
NSLog(@"normalizeMax = %f",normalizeMax);
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];
UInt64 totalBytes = 0;
Float64 totalLeft = 0;
Float64 totalRight = 0;
Float32 sampleTally = 0;
NSInteger samplesPerPixel = sampleRate / 50;
while (reader.status == AVAssetReaderStatusReading){
AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (sampleBufferRef){
CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);
size_t length = CMBlockBufferGetDataLength(blockBufferRef);
totalBytes += length;
NSAutoreleasePool *wader = [[NSAutoreleasePool alloc] init];
NSMutableData * data = [NSMutableData dataWithLength:length];
CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);
SInt16 * samples = (SInt16 *) data.mutableBytes;
int sampleCount = length / bytesPerSample;
for (int i = 0; i < sampleCount ; i ++) {
Float32 left = (Float32) *samples++;
left = decibel(left);
left = minMaxX(left,noiseFloor,0);
totalLeft += left;
Float32 right;
if (channelCount==2) {
right = (Float32) *samples++;
right = decibel(right);
right = minMaxX(right,noiseFloor,0);
totalRight += right;
}
sampleTally++;
if (sampleTally > samplesPerPixel) {
left = totalLeft / sampleTally;
if (left > normalizeMax) {
normalizeMax = left;
}
// NSLog(@"left average = %f, normalizeMax = %f",left,normalizeMax);
[fullSongData appendBytes:&left length:sizeof(left)];
if (channelCount==2) {
right = totalRight / sampleTally;
if (right > normalizeMax) {
normalizeMax = right;
}
[fullSongData appendBytes:&right length:sizeof(right)];
}
totalLeft = 0;
totalRight = 0;
sampleTally = 0;
}
}
[wader drain];
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
}
}
NSData * finalData = nil;
if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown){
// Something went wrong. Handle it.
}
if (reader.status == AVAssetReaderStatusCompleted){
// You're done. It worked.
NSLog(@"rendering output graphics using normalizeMax %f",normalizeMax);
UIImage *test = [self audioImageLogGraph:(Float32 *) fullSongData.bytes
normalizeMax:normalizeMax
sampleCount:fullSongData.length / (sizeof(Float32) * 2)
channelCount:2
imageHeight:100];
finalData = imageToData(test);
}
[fullSongData release];
[reader release];
return finalData;
}
comparison outputs
Linear plot for start of "Warm It Up" by Acme Swing Company
Logarithmic plot for start of "Warm It Up" by Acme Swing Company
You should be able to get a buffer of audio from your sampleBuffRef and then iterate through those values to build your waveform:
CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer( sampleBufferRef );
CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(sampleBufferRef);
AudioBufferList audioBufferList;
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
sampleBufferRef,
NULL,
&audioBufferList,
sizeof(audioBufferList),
NULL,
NULL,
kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
&buffer
);
// this copies your audio out to a temp buffer but you should be able to iterate through this buffer instead
SInt32* readBuffer = (SInt32 *)malloc(numSamplesInBuffer * sizeof(SInt32));
memcpy( readBuffer, audioBufferList.mBuffers[0].mData, numSamplesInBuffer*sizeof(SInt32));
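Continuing that idea, a small sketch of pulling a peak value out of that temp buffer (this assumes the same SInt32 sample layout as the snippet above, which in turn depends on your reader's output settings):
SInt32 peak = 0;
for (CMItemCount i = 0; i < numSamplesInBuffer; i++) {
    SInt32 v = readBuffer[i];
    if (v < 0) v = (v == INT32_MIN) ? INT32_MAX : -v; // absolute value, guarding the overflow case
    if (v > peak) peak = v;
}
free(readBuffer); // the temp buffer was malloc'd above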
Another approach using Swift 5 and using AVAudioFile:
///Gets the audio file from an URL, downsaples and draws into the sound layer.
func drawSoundWave(fromURL url:URL, fromPosition:Int64, totalSeconds:UInt32, samplesSecond:CGFloat) throws{
print("\(logClassName) Drawing sound from \(url)")
do{
waveViewInfo.samplesSeconds = samplesSecond
//Get audio file and format from URL
let audioFile = try AVAudioFile(forReading: url)
waveViewInfo.format = audioFile.processingFormat
audioFile.framePosition = fromPosition * Int64(waveViewInfo.format.sampleRate)
//Getting the buffer
let frameCapacity:UInt32 = totalSeconds * UInt32(waveViewInfo.format.sampleRate)
guard let audioPCMBuffer = AVAudioPCMBuffer(pcmFormat: waveViewInfo.format, frameCapacity: frameCapacity) else{ throw AppError("Unable to get the AVAudioPCMBuffer") }
try audioFile.read(into: audioPCMBuffer, frameCount: frameCapacity)
let audioPCMBufferFloatValues:[Float] = Array(UnsafeBufferPointer(start: audioPCMBuffer.floatChannelData?.pointee,
count: Int(audioPCMBuffer.frameLength)))
waveViewInfo.points = []
waveViewInfo.maxValue = 0
for index in stride(from: 0, to: audioPCMBufferFloatValues.count, by: Int(audioFile.fileFormat.sampleRate) / Int(waveViewInfo.samplesSeconds)){
let aSample = CGFloat(audioPCMBufferFloatValues[index])
waveViewInfo.points.append(aSample)
let fix = abs(aSample)
if fix > waveViewInfo.maxValue{
waveViewInfo.maxValue = fix
}
}
print("\(logClassName) Finished the points - Count = \(waveViewInfo.points.count) / Max = \(waveViewInfo.maxValue)")
populateSoundImageView(with: waveViewInfo)
}
catch{
throw error
}
}
///Converts the sound wave in to a UIImage
func populateSoundImageView(with waveViewInfo:WaveViewInfo){
let imageSize:CGSize = CGSize(width: CGFloat(waveViewInfo.points.count),//CGFloat(waveViewInfo.points.count) * waveViewInfo.sampleSpace,
height: frame.height)
let drawingRect = CGRect(origin: .zero, size: imageSize)
UIGraphicsBeginImageContextWithOptions(imageSize, false, 0)
defer {
UIGraphicsEndImageContext()
}
print("\(logClassName) Converting sound view in rect \(drawingRect)")
guard let context:CGContext = UIGraphicsGetCurrentContext() else{ return }
context.setFillColor(waveViewInfo.backgroundColor.cgColor)
context.setAlpha(1.0)
context.fill(drawingRect)
context.setLineWidth(1.0)
// context.setLineWidth(waveViewInfo.lineWidth)
let sampleAdjustFactor = imageSize.height / waveViewInfo.maxValue
for pointIndex in waveViewInfo.points.indices{
let pixel = waveViewInfo.points[pointIndex] * sampleAdjustFactor
context.move(to: CGPoint(x: CGFloat(pointIndex), y: middleY - pixel))
context.addLine(to: CGPoint(x: CGFloat(pointIndex), y: middleY + pixel))
context.setStrokeColor(waveViewInfo.strokeColor.cgColor)
context.strokePath()
}
// for pointIndex in waveViewInfo.points.indices{
//
// let pixel = waveViewInfo.points[pointIndex] * sampleAdjustFactor
//
// context.move(to: CGPoint(x: CGFloat(pointIndex) * waveViewInfo.sampleSpace, y: middleY - pixel))
// context.addLine(to: CGPoint(x: CGFloat(pointIndex) * waveViewInfo.sampleSpace, y: middleY + pixel))
//
// context.setStrokeColor(waveViewInfo.strokeColor.cgColor)
// context.strokePath()
//
// }
// var xIncrement:CGFloat = 0
// for point in waveViewInfo.points{
//
// let normalizedPoint = point * sampleAdjustFactor
//
// context.move(to: CGPoint(x: xIncrement, y: middleY - normalizedPoint))
// context.addLine(to: CGPoint(x: xIncrement, y: middleX + normalizedPoint))
// context.setStrokeColor(waveViewInfo.strokeColor.cgColor)
// context.strokePath()
//
// xIncrement += waveViewInfo.sampleSpace
//
// }
guard let soundWaveImage = UIGraphicsGetImageFromCurrentImageContext() else{ return }
soundWaveImageView.image = soundWaveImage
// //In case of handling sample space in for
// updateWidthConstraintValue(soundWaveImage.size.width)
updateWidthConstraintValue(soundWaveImage.size.width * waveViewInfo.sampleSpace)
}
WHERE
class WaveViewInfo {
var format:AVAudioFormat!
var samplesSeconds:CGFloat = 50
var lineWidth:CGFloat = 0.20
var sampleSpace:CGFloat = 0.20
var strokeColor:UIColor = .red
var backgroundColor:UIColor = .clear
var maxValue:CGFloat = 0
var points:[CGFloat] = [CGFloat]()
}
At the moment it only draws one sound wave, but it can be extended. The good part is that you can render an audio track in parts.
A little bit of refactoring of the above answers (using AVAudioFile):
import AVFoundation
import CoreGraphics
import Foundation
import UIKit
class WaveGenerator {
private func readBuffer(_ audioUrl: URL) -> UnsafeBufferPointer<Float> {
let file = try! AVAudioFile(forReading: audioUrl)
let audioFormat = file.processingFormat
let audioFrameCount = UInt32(file.length)
guard let buffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount)
else { return UnsafeBufferPointer<Float>(start: nil, count: 0) }
do {
try file.read(into: buffer)
} catch {
print(error)
}
// Note: the returned pointer refers to memory owned by `buffer`, which is not retained
// beyond this call; copy the samples (e.g. the Array(...) line below) if they must outlive it.
// let floatArray = Array(UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength)))
let floatArray = UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength))
return floatArray
}
private func generateWaveImage(
_ samples: UnsafeBufferPointer<Float>,
_ imageSize: CGSize,
_ strokeColor: UIColor,
_ backgroundColor: UIColor
) -> UIImage? {
let drawingRect = CGRect(origin: .zero, size: imageSize)
UIGraphicsBeginImageContextWithOptions(imageSize, false, 0)
let middleY = imageSize.height / 2
guard let context: CGContext = UIGraphicsGetCurrentContext() else { return nil }
context.setFillColor(backgroundColor.cgColor)
context.setAlpha(1.0)
context.fill(drawingRect)
context.setLineWidth(0.25)
let max: CGFloat = CGFloat(samples.max() ?? 0)
let heightNormalizationFactor = imageSize.height / max / 2
let widthNormalizationFactor = imageSize.width / CGFloat(samples.count)
for index in 0 ..< samples.count {
let pixel = CGFloat(samples[index]) * heightNormalizationFactor
let x = CGFloat(index) * widthNormalizationFactor
context.move(to: CGPoint(x: x, y: middleY - pixel))
context.addLine(to: CGPoint(x: x, y: middleY + pixel))
context.setStrokeColor(strokeColor.cgColor)
context.strokePath()
}
guard let soundWaveImage = UIGraphicsGetImageFromCurrentImageContext() else { return nil }
UIGraphicsEndImageContext()
return soundWaveImage
}
func generateWaveImage(from audioUrl: URL, in imageSize: CGSize) -> UIImage? {
let samples = readBuffer(audioUrl)
let img = generateWaveImage(samples, imageSize, UIColor.blue, UIColor.white)
return img
}
}
Usage
let waveGenerator = WaveGenerator()
let url = Bundle.main.url(forResource: "TEST1", withExtension: "mp3")!
let img = waveGenerator.generateWaveImage(from: url, in: CGSize(width: 600, height: 200))

OBJ-C: Getting the minimum/maximum value in an NSMutableArray

I want to get the maximum and minimum values of an NSMutableArray so I can create a Core Plot plot space and graph based around those max and min values.
My code is as follows:
NSMutableArray *contentArray = [NSMutableArray arrayWithCapacity:100];
NSUInteger i;
for (i=0; i<60; i++){
id x = [NSNumber numberWithFloat:i*0.05];
id y = [NSNumber numberWithFloat:1.2*rand()/(float)RAND_MAX + 0.6];
[contentArray addObject:[NSMutableDictionary dictionaryWithObjectsAndKeys:x, @"x", y, @"y", nil]];
}
self.dataForPlot = contentArray;
CPXYPlotSpace *plotSpace = (CPXYPlotSpace *)graph.defaultPlotSpace;
plotSpace.xRange = [CPPlotRange plotRangeWithLocation:CPDecimalFromFloat() length:CPDecimalFromFloat()];
plotSpace.yRange = [CPPlotRange plotRangeWithLocation:CPDecimalFromFloat() length:CPDecimalFromFloat()];
What do I fill in the blanks for xRange and yRange assignment if I want the graph to span only the space of what's specified in contentArray?
In this case, you should use -[CPPlotSpace scaleToFitPlots:]. For more general calculations on array values, read on...
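A minimal sketch of that call, assuming the CP-prefixed Core Plot version from your snippet and that the plot has already been added to the graph (allPlots is assumed to be the graph's accessor in that version):
// After dataForPlot is filled and the plot has reloaded its data,
// let Core Plot size both ranges to fit the plotted values.
CPXYPlotSpace *plotSpace = (CPXYPlotSpace *)graph.defaultPlotSpace;
[plotSpace scaleToFitPlots:[graph allPlots]];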
This is an ideal use for Key-Value coding and associated array operators. In your example,
float maxX = [[contentArray valueForKeyPath:@"@max.x"] floatValue];
float minX = [[contentArray valueForKeyPath:@"@min.x"] floatValue];
float maxY = [[contentArray valueForKeyPath:@"@max.y"] floatValue];
float minY = [[contentArray valueForKeyPath:@"@min.y"] floatValue];
The array operators call valueForKeyPath: on the contentArray, which gives an array built by calling the same valueForKeyPath: on each member of the array with the key path to the right of the array operator (i.e. @min and @max). The original call then applies the given operator to the resulting array. You could easily define a category on NSArray to give you back a struct of min/max values:
typedef struct {
float minX;
float minY;
float maxX;
float maxY;
} ArrayValueSpace;
@implementation NSArray (PlotSpaceAdditions)
- (ArrayValueSpace)psa_arrayValueSpace {
ArrayValueSpace result;
result.maxX = [[self valueForKeyPath:@"@max.x"] floatValue];
result.minX = [[self valueForKeyPath:@"@min.x"] floatValue];
result.maxY = [[self valueForKeyPath:@"@max.y"] floatValue];
result.minY = [[self valueForKeyPath:@"@min.y"] floatValue];
return result;
}
@end
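Usage, tying it back to the blanks in your snippet, might then look like this (just a sketch; you may want to pad the ranges a little so the data doesn't touch the edges):
ArrayValueSpace space = [contentArray psa_arrayValueSpace];
CPXYPlotSpace *plotSpace = (CPXYPlotSpace *)graph.defaultPlotSpace;
plotSpace.xRange = [CPPlotRange plotRangeWithLocation:CPDecimalFromFloat(space.minX)
length:CPDecimalFromFloat(space.maxX - space.minX)];
plotSpace.yRange = [CPPlotRange plotRangeWithLocation:CPDecimalFromFloat(space.minY)
length:CPDecimalFromFloat(space.maxY - space.minY)];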
You can also find the max and min values while you fill the array, but that only works within your snippet. If you want something more generic, simply walk the list and look for them.
// contentArray already defined
int maxX = 0, minX = 1000, maxY = 0, minY = 1000;
for (int i = 0; i < [contentArray count]; ++i)
{
int curX = [[[contentArray objectAtIndex:i] objectForKey:@"x"] intValue];
int curY = [[[contentArray objectAtIndex:i] objectForKey:@"y"] intValue];
if (curX < minX)
minX = curX;
else if (curX > maxX)
maxX = curX;
if (curY < minY)
minY = curY;
else if (curY > maxY)
maxY = curY;
}
You could use fast enumeration through the array.
NSUInteger maxX = 0;
NSUInteger minX = NSUIntegerMax;
NSUInteger maxY = 0;
NSUInteger minY = NSUIntegerMax;
for ( NSDictionary *dict in contentArray )
{
NSUInteger newX = [[dict objectForKey:@"x"] integerValue];
NSUInteger newY = [[dict objectForKey:@"y"] integerValue];
if (newX > maxX) maxX = newX;
if (newX < minX) minX = newX;
if (newY > maxY) maxY = newY;
if (newY < minY) minY = newY;
}
If you have an NSMutableArray of NSDictionary objects you can use this:
- (float)findMax:(NSArray *)array arrayKey:(id)key {
float max = [[[array objectAtIndex:0] objectForKey:key] floatValue];
for ( NSDictionary *dict in array ) {
if (max < [[dict objectForKey:key] floatValue]) max = [[dict objectForKey:key] floatValue];
}
return max;
}
- (float)findMin:(NSArray *)array arrayKey:(id)key {
float min = [[[array objectAtIndex:0] objectForKey:key] floatValue];
for ( NSDictionary *dict in array ) {
if (min > [[dict objectForKey:key] floatValue]) min = [[dict objectForKey:key] floatValue];
}
return min;
}
You would access the methods like this from another class:
GraphData *c=[[GraphData alloc] init];
float maxY=[c findMax:plotData arrayKey:[NSNumber numberWithInt:1]]; //1 or 0 depending on axis
[c release];
// This gives the max and min value in an NSMutableArray
- (void)findMinAndMaxValueInNSMutableArray {
NSMutableArray *array = [[NSMutableArray alloc] initWithObjects:@"0.987",@"0.951",@"0.881",@"0.784",@"0.662",@"0.522",@"0.381",@"-0.265",@"-0.197",@"0.189",@"-0.233",@"0.310",@"0.402",@"0.402",@"0.988",@"0.633",@"0.661",@"0.656",@"0.617",@"0.634",@"0.690",@"0.767",@"0.836",nil];
NSLog(@"The Array Value is %@",array);
NSLog(@"The Array Count is %lu",(unsigned long)array.count);
NSNumber *maxValue = [array valueForKeyPath:@"@max.doubleValue"];
NSLog(@"The maxValue is %@",maxValue);
// Note: indexOfObject: works here only because the formatted number matches the stored string exactly.
NSString *str = [NSString stringWithFormat:@"%@",maxValue];
NSInteger path = [array indexOfObject:str];
NSIndexPath *indexPath = [NSIndexPath indexPathForItem:path inSection:0];
NSLog(@"Max Value = %@ and index = %ld",maxValue,(long)indexPath.row);
NSNumber *minValue = [array valueForKeyPath:@"@min.doubleValue"];
NSLog(@"The minValue is %@",minValue);
NSString *str1 = [NSString stringWithFormat:@"%@",minValue];
NSInteger path1 = [array indexOfObject:str1];
NSIndexPath *indexPath1 = [NSIndexPath indexPathForItem:path1 inSection:0];
NSLog(@"Min Value = %@ and index = %ld",minValue,(long)indexPath1.row);
}