Issue with file manager in Swift 3.1

I have the below code that worked fine until the release of Swift 3.1.
func loadImage() {
    id = userPhotoModel.id
    let fileManager = FileManager.default
    let imagePath = (self.getDirectoryPath() as NSString).appendingPathComponent(photoName)
    if fileManager.fileExists(atPath: imagePath) {
        let imageFromPath = resizeImage(named: (contentsOfFile: imagePath))
        print("name of photo retrieved: \(photoName)")
        self.userPhoto.image = imageFromPath
    } else {
        print("No Image")
    }
}
Now, Swift 3.1 wants to add as! String:
let imageFromPath = resizeImage(named: (contentsOfFile: imagePath) as! String)
However, when I run the app, it crashes at this point with no error message.
What is causing this?
EDIT: Here is the resizeImage func
fileprivate func resizeImage(named name: String) -> UIImage
{
    var image = UIImage(named: name)
    if image!.size.height > image!.size.width
    {
        self.userPhoto.contentMode = .scaleAspectFit
    }
    else
    {
        image = image!.resizeTo(self.userPhoto.bounds)
    }
    return image!
}

The issue is the confusing syntax in the line:
let imageFromPath = resizeImage(named: (contentsOfFile: imagePath))
That should simply be:
let imageFromPath = resizeImage(named: imagePath)
No cast is required, and this is correct in any Swift 3.x.
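If it still crashes after that fix, note that resizeImage uses UIImage(named:), which only looks in the asset catalog and main bundle; for a full file path into the documents directory it returns nil, and the force unwraps crash. A minimal sketch of a file-path variant, assuming the same userPhoto outlet and resizeTo extension shown above:

fileprivate func resizeImage(atPath path: String) -> UIImage?
{
    // UIImage(contentsOfFile:) loads from an absolute file path,
    // unlike UIImage(named:), which searches the bundle.
    guard let image = UIImage(contentsOfFile: path) else { return nil }
    if image.size.height > image.size.width
    {
        self.userPhoto.contentMode = .scaleAspectFit
        return image
    }
    return image.resizeTo(self.userPhoto.bounds)
}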

Related

How to fetch images from Core Data correctly?

This is how I save images in Core Data:
lazy var dataImage = profileIcon.jpegData(compressionQuality: 1.0)

//MARK: - Saving the Workout Image
func savePUValues() {
    // Identify the context
    guard let appDelegate = UIApplication.shared.delegate as? AppDelegate else {
        return
    }
    let context = appDelegate.persistentContainer.viewContext
    let entityName = "PushUps" // table name in the data model
    // Create a new record:
    guard let newEntity = NSEntityDescription.entity(forEntityName: entityName, in: context) else {
        return
    }
    let newSet = NSManagedObject(entity: newEntity, insertInto: context)
    let savingImage = dataImage
    newSet.setValue(savingImage, forKey: "image")
    // Save the record
    do {
        try context.save()
    } catch {
        print("An error appeared")
    }
}
Now I want to fetch the image data and load it into a UITableView:
With this code:
func loadValues() -> [CellData] {
    // Identify the context
    let appDelegate = UIApplication.shared.delegate as! AppDelegate
    // Make the request
    let context = appDelegate.persistentContainer.viewContext
    let entityName = "PushUps"
    //MARK: - Request
    // Use a specific fetch request
    let request = NSFetchRequest<PushUps>(entityName: entityName)
    // add a sort descriptor to sort the items by count descending
    request.sortDescriptors = [NSSortDescriptor(key: "count", ascending: false)]
    do {
        // results is an array of PushUps instances, no type cast needed
        let results = try context.fetch(request)
        for r in results {
            if let result = r as? NSManagedObject {
                let titletext = "\(count!) Push-Ups"
                var cellTitleLabel = UILabel()
                cellTitleLabel.text = titletext
                let workoutImage = result.value(forKey: "image") as? UIImageView
                print("Image was loaded into the array")
                var Data1 = CellData(imageData: workoutImage ?? oImageView, titleData: cellTitleLabel)
                print("Image: \(String(describing: workoutImage)), TitLe: \(cellTitleLabel) ")
                cellDataArray.append(Data1)
            }
        }
    } catch {
        print(error)
    }
    return cellDataArray
}
The problem is that it's loading the image as nil.
The console is giving me this back: "Image: nil, TitLe: <UILabel: 0x115f17980; frame = (0 0; 0 0); text = '0.0 Push-Ups'; userInteractionEnabled = NO; layer = <_UILabelLayer: 0x28249cfa0>>"
So where is the problem? What is wrong with my code?
Thanks for your help!
The problem is that you are saving Data and trying to retrieve it as a UIImageView, which is doomed to failure:
lazy var dataImage = profileIcon.jpegData(compressionQuality: 1.0)
// Data
let savingImage = dataImage
newSet.setValue(savingImage, forKey: "image")
// still Data
// ---------------------
let workoutImage = result.value(forKey: "image") as? UIImageView
// UIImageView?????
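The conditional cast to UIImageView always fails, so workoutImage is always nil. Retrieve the attribute as Data and decode it; a minimal sketch, assuming CellData is adjusted to hold a UIImage (or that you assign the result to an image view):

// Read the stored JPEG bytes back out as Data, then decode into a UIImage
let imageData = result.value(forKey: "image") as? Data
let workoutImage = imageData.flatMap { UIImage(data: $0) }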
However, don't save image data into Core Data in the first place. Save to disk and store the partial path (e.g. the file name).
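A sketch of that approach on the saving side, assuming a hypothetical fileName string attribute on the PushUps entity:

// Write the JPEG data into Documents and keep only the file name in Core Data
let fileName = UUID().uuidString + ".jpg"
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
if let data = dataImage {
    try? data.write(to: documentsURL.appendingPathComponent(fileName))
    newSet.setValue(fileName, forKey: "fileName")
}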

Rewriting metadata in a .jpg file using Swift 2.2 on OS X

I'm trying to rewrite the metadata of a JPG file. I want to add one keyword to the metadata. Xcode doesn't give any errors, but the file is not changed.
Here is my code:
var pathToOpenFile:NSURL?
Next, I write the file's path into the variable "pathToOpenFile".
When the user presses Enter in an NSTextField, this action runs:
@IBAction func endEditKeys(sender: AnyObject) {
    if (pathToOpenFile != nil) {
        let imageSource = CGImageSourceCreateWithURL(pathToOpenFile!, nil)
        let imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource!, 0, nil)! as NSDictionary
        let exifDict = imageProperties.valueForKey("{IPTC}") as! NSDictionary
        var Keywords: [String] = exifDict.valueForKey("Keywords") as! [String]
        Keywords.append("ANY")
        exifDict.setValue(Keywords, forKey: "Keywords")
        let type = CGImageSourceGetType(imageSource!)
        let count = CGImageSourceGetCount(imageSource!)
        let mutableData = NSMutableData(contentsOfURL: pathToOpenFile!)
        let destination = CGImageDestinationCreateWithData(mutableData!, type!, count, nil)
        let removeExifProperties: CFDictionary = exifDict
        for i in 0..<count {
            CGImageDestinationAddImageFromSource(destination!, imageSource!, i, removeExifProperties)
        }
        CGImageDestinationFinalize(destination!)
    }
}
Can you help me figure out why it doesn't work (the metadata isn't changed)? Thank you!
Thanks all!
It is working; you just need to write the .JPG file back at the end:
if let _ = try? mutableData!.writeToURL(pathToOpenFile!, options: NSDataWritingOptions.AtomicWrite) {
    // if you need to, do anything here. For example: print("Saving file")
}

Swift: save images (screenshots) to NSUserDefaults

I have a program where the user "creates" an image, and then the program takes a screenshot of the screen. I would then like to save this screenshot to a database, preferably NSUserDefaults, since I am accessing it later in a table view. Any other suggestions on how to approach this are more than welcome :)
The code is like this
let screenshot = getScreenshot() // saves the screenshot
var imagePaths = [String]()

// get the array of previous screenshots
if let _ = NSUserDefaults.standardUserDefaults().objectForKey(theKey)
{
    imagePaths = NSUserDefaults.standardUserDefaults().objectForKey(theKey) as! [String]
}

// then I want to get a path to the image, something like
let imagePath = screenshot.getPath() // although this is not a valid method, this is basically what I want

// add the imagePath
imagePaths.append(imagePath)

// finally I save the image
NSUserDefaults.standardUserDefaults().setObject(imagePaths, forKey: theKey)
You can create a directory in Documents and save the screenshots there as regular files. A filename can be generated from the date and time for uniqueness.
func saveImage(imageData: NSData)
{
    let dateFormatter = NSDateFormatter()
    dateFormatter.dateFormat = "dd-MM-yyyy hh.mm.ss"
    let filename = "\(dateFormatter.stringFromDate(NSDate())).png"
    let documentsDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true).first as! String
    let imagesDirectory = documentsDirectory.stringByAppendingPathComponent("Images")
    let filePath = imagesDirectory.stringByAppendingPathComponent(filename)
    if !NSFileManager.defaultManager().fileExistsAtPath(imagesDirectory)
    {
        var error: NSError?
        NSFileManager.defaultManager().createDirectoryAtPath(imagesDirectory, withIntermediateDirectories: false, attributes: nil, error: &error)
        if error != nil
        {
            println("\(error!.localizedDescription)")
            return
        }
    }
    imageData.writeToFile(filePath, atomically: true)
}

func getImagesPaths() -> [String]?
{
    let documentsDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true).first as! String
    let imagesDirectory = documentsDirectory.stringByAppendingPathComponent("Images")
    if let filenames = NSFileManager.defaultManager().contentsOfDirectoryAtPath(imagesDirectory, error: nil)
    {
        let imagePaths = filenames.map{"\(imagesDirectory)/\($0)"}.filter(){$0.pathExtension == "png"}
        return imagePaths.count > 0 ? imagePaths : nil
    }
    return nil
}
To save an image, simply use saveImage(data). To get the image paths, use getImagesPaths().
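For example, a quick sketch of saving the screenshot from above, assuming screenshot is a UIImage:

// Convert the UIImage to PNG data, then hand it to saveImage
if let data = UIImagePNGRepresentation(screenshot)
{
    saveImage(data)
}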
If you need array of UIImage, you can get it by follow way:
var images: [UIImage] = []
if let imagePaths = getImagesPaths()
{
    for path in imagePaths
    {
        if let image = UIImage(contentsOfFile: path)
        {
            images.append(image)
        }
    }
}

Displaying Artwork for .MP3 file

I am currently trying to display the album artwork for a locally stored .MP3 track in a UIImageView. Does anyone know how to fetch this artwork in Swift in order to accomplish this?
I have found this solution (iOS AVFoundation: How do I fetch artwork from an mp3 file?), but the code is written in Objective-C. I simply want to grab the image embedded in my MP3 and display it in my UIImageView.
I've looked at the API documentation for MPMediaItemArtwork and found an example that also accomplishes what I am trying to do, likewise in Objective-C, here (http://www.codeitive.com/0zHjkUjUWX/not-able-to-get-the-uiimage-from-mpmediaitempropertyartwork.html), but cannot come up with a solution. My code is as follows:
import UIKit
import AVFoundation
import MediaPlayer

class ViewController: UIViewController {
    let audioPath: NSURL! = NSBundle.mainBundle().URLForResource("SippinOnFire", withExtension: "mp3")

    @IBOutlet var artistImage: UIImageView!
    @IBOutlet var trackLabel: UILabel!
    @IBOutlet var artistLabel: UILabel!
    @IBOutlet var sliderValue: UISlider!

    var player: AVAudioPlayer = AVAudioPlayer()

    @IBAction func play(sender: AnyObject) {
        let audioInfo = MPNowPlayingInfoCenter.defaultCenter()
        println(audioInfo)
        player.play()
        //println("Playing \(audioPath)")
        let playerItem = AVPlayerItem(URL: audioPath)
        let metadataList = playerItem.asset.metadata as! [AVMetadataItem]
        for item in metadataList {
            if let stringValue = item.value {
                println(item.commonKey)
                if item.commonKey == "title" {
                    trackLabel.text = stringValue as? String
                }
                if item.commonKey == "artist" {
                    artistLabel.text = stringValue as? String
                }
                if item.commonKey == "artwork" {
                    if let audioImage = UIImage(data: item.value as! NSData) {
                        let audioArtwork = MPMediaItemArtwork(image: audioImage)
                        println(audioImage.description)
                    }
                }
            }
        }
    }

    @IBAction func pause(sender: AnyObject) {
        player.pause()
    }

    @IBAction func stop(sender: AnyObject) {
        player.stop()
        player.currentTime = 0
    }

    @IBAction func sliderChanged(sender: AnyObject) {
        player.volume = sliderValue.value
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        var error: NSError? = nil
        player = AVAudioPlayer(contentsOfURL: audioPath!, error: &error)
        player.volume = 0.5
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Here is a screenshot of my sample .mp3 file. As you can see, there is indeed album artwork visible in the "Get Info" section of Finder. I've also opened the .mp3 in iTunes to make sure, and have confirmed there is artwork in its "Get Info" section as well as under the "Artwork" tab.
However, when trying to use the commonKey to assign the image to my image view, I find that there is no commonKey for "artwork".
Thanks
Change your snippet of code to this (I already tested it):
I added commented-out println lines at places of interest; feel free to uncomment them to see what is happening.
for item in metadataList {
    if item.commonKey == nil {
        continue
    }
    if let key = item.commonKey, let value = item.value {
        //println(key)
        //println(value)
        if key == "title" {
            trackLabel.text = value as? String
        }
        if key == "artist" {
            artistLabel.text = value as? String
        }
        if key == "artwork" {
            if let audioImage = UIImage(data: value as! NSData) {
                //println(audioImage.description)
                artistImage.image = audioImage
            }
        }
    }
}
UPDATE: a bit of cleanup of this code:
for item in metadataList {
    guard let key = item.commonKey, let value = item.value else {
        continue
    }
    switch key {
    case "title":  trackLabel.text = value as? String
    case "artist": artistLabel.text = value as? String
    case "artwork" where value is NSData: artistImage.image = UIImage(data: value as! NSData)
    default:
        continue
    }
}
UPDATE: For Swift 4
for item in metadataList {
    guard let key = item.commonKey?.rawValue, let value = item.value else {
        continue
    }
    switch key {
    case "title":  trackLabel.text = value as? String
    case "artist": artistLabel.text = value as? String
    case "artwork" where value is Data: artistImage.image = UIImage(data: value as! Data)
    default:
        continue
    }
}
edit/update Swift 4 or later:
import MediaPlayer

var nowPlayingInfo: [String: Any] = [:]
let playerItem = AVPlayerItem(url: url)
let metadataList = playerItem.asset.metadata

for item in metadataList {
    switch item.commonKey {
    case .commonKeyTitle?:
        nowPlayingInfo[MPMediaItemPropertyTitle] = item.stringValue ?? ""
    case .commonKeyType?:
        nowPlayingInfo[MPMediaItemPropertyGenre] = item.stringValue ?? ""
    case .commonKeyAlbumName?:
        nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = item.stringValue ?? ""
    case .commonKeyArtist?:
        nowPlayingInfo[MPMediaItemPropertyArtist] = item.stringValue ?? ""
    case .commonKeyArtwork?:
        if let data = item.dataValue,
            let image = UIImage(data: data) {
            nowPlayingInfo[MPMediaItemPropertyArtwork] = MPMediaItemArtwork(boundsSize: image.size) { _ in image }
        }
    case .none: break
    default: break
    }
}

let audioInfo = MPNowPlayingInfoCenter.default()
audioInfo.nowPlayingInfo = nowPlayingInfo
Note: You will have to invoke beginReceivingRemoteControlEvents() otherwise it will not work on the actual device. You will also need to set your app Background Modes (Audio and AirPlay) and set your AVAudioSession category to AVAudioSessionCategoryPlayback and set it active:
do {
    try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
    print("Playback OK")
    try AVAudioSession.sharedInstance().setActive(true)
    print("Session is Active")
} catch {
    print(error)
}
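For completeness, a minimal sketch of the remote-control call mentioned above, typically placed in viewDidLoad or application(_:didFinishLaunchingWithOptions:):

// Start receiving remote control events (lock screen / Control Center transport controls)
UIApplication.shared.beginReceivingRemoteControlEvents()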
Try this:
It appears that sometimes iOS 8 returns nil on the first attempt at obtaining this info:
if let audioCenter = MPNowPlayingInfoCenter.defaultCenter() {
    if let audioInfo = audioCenter.nowPlayingInfo {
        if let artwork = audioInfo[MPMediaItemPropertyArtwork] as? MPMediaItemArtwork {
            var image: UIImage? = artwork.imageWithSize(artwork.bounds.size)
            if image == nil {
                image = artwork.imageWithSize(artwork.bounds.size)
            }
            if image != nil {
                println("image loaded")
            }
        }
    }
}

Swift iOS Google Map, path to coordinate

I am trying to create a function in my app that will guide the user to a marker I have created.
This is the code I am using. It works great: it gets the user's current location and shows it on the map. But how can I get directions to a marker?
Any answer will be helpful.
class Karta: UIViewController, CLLocationManagerDelegate {
    @IBOutlet var mapView: GMSMapView!
    let locationManager = CLLocationManager()

    override func viewDidLoad() {
        super.viewDidLoad()
        // allow app to track user
        locationManager.delegate = self
        locationManager.requestWhenInUseAuthorization()
        // set out a marker on the map
        var marker = GMSMarker()
        marker.position = CLLocationCoordinate2DMake(56.675907, 12.858798)
        marker.appearAnimation = kGMSMarkerAnimationPop
        marker.icon = UIImage(named: "flag_icon")
        marker.map = mapView
    }

    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
        if segue.identifier == "Types Segue" {
            let navigationController = segue.destinationViewController as UINavigationController
        }
    }

    func locationManager(manager: CLLocationManager!, didChangeAuthorizationStatus status: CLAuthorizationStatus) {
        // If map is being used
        if status == .AuthorizedWhenInUse {
            var myLocation = mapView
            locationManager.startUpdatingLocation()
            mapView.myLocationEnabled = true
            mapView.settings.myLocationButton = true
        }
    }

    func locationManager(manager: CLLocationManager!, didUpdateLocations locations: [AnyObject]!) {
        if let location = locations.first as? CLLocation {
            mapView.camera = GMSCameraPosition(target: location.coordinate, zoom: 15, bearing: 0, viewingAngle: 0)
            locationManager.stopUpdatingLocation()
        }
    }
}
So I recently solved this issue; here is my Swift 3 implementation using the latest version of Alamofire (4.3):
func fetchMapData() {
    let directionURL = "https://maps.googleapis.com/maps/api/directions/json?" +
        "origin=\(originAddressLat),\(originAddressLng)&destination=\(destinationAddressLat),\(destinationAddressLong)&" +
        "key=YOUROWNSERVERKEY"
    Alamofire.request(directionURL).responseJSON { response in
        if let JSON = response.result.value {
            let mapResponse: [String: AnyObject] = JSON as! [String: AnyObject]
            let routesArray = (mapResponse["routes"] as? Array) ?? []
            let routes = (routesArray.first as? Dictionary<String, AnyObject>) ?? [:]
            let overviewPolyline = (routes["overview_polyline"] as? Dictionary<String, AnyObject>) ?? [:]
            let polypoints = (overviewPolyline["points"] as? String) ?? ""
            let line = polypoints
            self.addPolyLine(encodedString: line)
        }
    }
}

func addPolyLine(encodedString: String) {
    let path = GMSMutablePath(fromEncodedPath: encodedString)
    let polyline = GMSPolyline(path: path)
    polyline.strokeWidth = 5
    polyline.strokeColor = .blue
    polyline.map = whateverYourMapViewObjectIsCalled
}
Disclaimer: Swift 2
func addOverlayToMapView() {
    let directionURL = "https://maps.googleapis.com/maps/api/directions/json?origin=\(srcLocation.coordinate.latitude),\(srcLocation.coordinate.longitude)&destination=\(destLocation.coordinate.latitude),\(destLocation.coordinate.longitude)&key=Your Server Key"
    Alamofire.request(.GET, directionURL, parameters: nil).responseJSON { response in
        switch response.result {
        case .Success(let data):
            let json = JSON(data)
            print(json)
            let errornum = json["error"]
            if (errornum == true) {
            } else {
                let routes = json["routes"].array
                if routes != nil {
                    let overViewPolyLine = routes![0]["overview_polyline"]["points"].string
                    print(overViewPolyLine)
                    if overViewPolyLine != nil {
                        self.addPolyLineWithEncodedStringInMap(overViewPolyLine!)
                    }
                }
            }
        case .Failure(let error):
            print("Request failed with error: \(error)")
        }
    }
}
Using the points, we now create the path between the two points using fromEncodedPath:
func addPolyLineWithEncodedStringInMap(encodedString: String) {
    let path = GMSMutablePath(fromEncodedPath: encodedString)
    let polyLine = GMSPolyline(path: path)
    polyLine.strokeWidth = 5
    polyLine.strokeColor = UIColor.yellowColor()
    polyLine.map = mapView
}
Unlike Apple's MapKit, the Google Maps SDK for iOS does not natively include a way to perform route calculations.
Instead, you need to use the Google Directions API: https://developers.google.com/maps/documentation/directions/. It is an HTTP-only API, and Google does not provide any SDK as of today, but you can easily write your own wrapper (a minimal sketch follows this list), or choose one of the many options available on GitHub:
https://github.com/sudeepjaiswal/GoogleDirections
https://github.com/sebk/GoogleDirections
https://github.com/iamamused/iOS-DirectionKit
https://github.com/marciniwanicki/OCGoogleDirectionsAPI
and many others...
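A minimal sketch of such a wrapper in modern Swift, assuming a YOUR_API_KEY placeholder and a completion handler that receives the encoded overview polyline:

import CoreLocation
import Foundation

func fetchOverviewPolyline(from origin: CLLocationCoordinate2D,
                           to destination: CLLocationCoordinate2D,
                           completion: @escaping (String?) -> Void) {
    let urlString = "https://maps.googleapis.com/maps/api/directions/json?" +
        "origin=\(origin.latitude),\(origin.longitude)&" +
        "destination=\(destination.latitude),\(destination.longitude)&" +
        "key=YOUR_API_KEY"
    guard let url = URL(string: urlString) else { completion(nil); return }
    URLSession.shared.dataTask(with: url) { data, _, _ in
        // Dig routes[0].overview_polyline.points out of the JSON response
        guard let data = data,
              let json = (try? JSONSerialization.jsonObject(with: data)) as? [String: Any],
              let routes = json["routes"] as? [[String: Any]],
              let overview = routes.first?["overview_polyline"] as? [String: Any],
              let points = overview["points"] as? String else {
            completion(nil)
            return
        }
        completion(points)
    }.resume()
}

The returned string can then be fed to GMSMutablePath(fromEncodedPath:) exactly as in the answers below.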
It's very simple:
If you have added the Google Maps SDK to your iOS project and want to draw direction lines between two locations, I have made demo code in Swift 2.3 that is easy to understand; try it, modify it, and use it!
Note: Don't forget to change the latitude/longitude values to the ones you want, and the API key (you may use an API key with no restrictions, set in the Google API Manager credentials section).
func callWebService() {
    let url = NSURL(string: "https://maps.googleapis.com/maps/api/directions/json?origin=\(18.5235),\(73.7184)&destination=\(18.7603),\(73.8630)&key=AIzaSyDxSgGQX6jrn4iq6dyIWAKEOTneZ3Z8PtU")
    let request = NSURLRequest(URL: url!)
    let config = NSURLSessionConfiguration.defaultSessionConfiguration()
    let session = NSURLSession(configuration: config)
    let task = session.dataTaskWithRequest(request, completionHandler: { (data, response, error) in
        // notice that I can omit the types of data, response and error
        do {
            if let jsonResult = try NSJSONSerialization.JSONObjectWithData(data!, options: NSJSONReadingOptions.MutableContainers) as? NSDictionary {
                //print(jsonResult)
                let routes = jsonResult.valueForKey("routes")
                //print(routes)
                let overViewPolyLine = routes![0]["overview_polyline"]!!["points"] as! String
                print(overViewPolyLine)
                if overViewPolyLine != "" {
                    // Call on main thread
                    dispatch_async(dispatch_get_main_queue()) {
                        self.addPolyLineWithEncodedStringInMap(overViewPolyLine)
                    }
                }
            }
        }
        catch {
            print("Something went wrong")
        }
    })
    // do whatever you need with the task, e.g. run it
    task.resume()
}
func addPolyLineWithEncodedStringInMap(encodedString: String) {
    let camera = GMSCameraPosition.cameraWithLatitude(18.5204, longitude: 73.8567, zoom: 10.0)
    let mapView = GMSMapView.mapWithFrame(CGRect.zero, camera: camera)
    mapView.myLocationEnabled = true

    let path = GMSMutablePath(fromEncodedPath: encodedString)
    let polyLine = GMSPolyline(path: path)
    polyLine.strokeWidth = 5
    polyLine.strokeColor = UIColor.yellowColor()
    polyLine.map = mapView

    let smarker = GMSMarker()
    smarker.position = CLLocationCoordinate2D(latitude: 18.5235, longitude: 73.7184)
    smarker.title = "Lavale"
    smarker.snippet = "Maharshtra"
    smarker.map = mapView

    let dmarker = GMSMarker()
    dmarker.position = CLLocationCoordinate2D(latitude: 18.7603, longitude: 73.8630)
    dmarker.title = "Chakan"
    dmarker.snippet = "Maharshtra"
    dmarker.map = mapView

    view = mapView
}