Swift 5: how to display only the limited photo library after the user has granted limited access to select some photos

At WWDC 2020, the new PHPickerViewController and the new .limited case of PHAuthorizationStatus were introduced, but I ran into the issue below:
When the user taps a button to show Apple's multiple-image picker, I request authorization with this code:
let requiredAccessLevel: PHAccessLevel = .readWrite
PHPhotoLibrary.requestAuthorization(for: requiredAccessLevel) { (authorizationStatus) in
    switch authorizationStatus {
    case .authorized:
        DispatchQueue.main.async {
            self.presentImagePicker()
        }
    case .limited:
        DispatchQueue.main.async {
            self.presentImagePicker()
        }
    default:
        break
    }
}
The presentImagePicker() function:
func presentImagePicker() {
    var configuration = PHPickerConfiguration(photoLibrary: .shared())
    configuration.filter = .images
    configuration.selectionLimit = self.imageCountMax - self.images.count
    let picker = PHPickerViewController(configuration: configuration)
    picker.delegate = self
    let accessLevel: PHAccessLevel = .readWrite
    let authorizationStatus = PHPhotoLibrary.authorizationStatus(for: accessLevel)
    switch authorizationStatus {
    case .authorized:
        DispatchQueue.main.async {
            self.present(picker, animated: true)
        }
    case .limited:
        DispatchQueue.main.async {
            // Here I don't know how to display only limited photo library to users (after user has selected some photos through the limited access)
        }
    default:
        break
    }
}
My issue: in the second snippet, inside case .limited: DispatchQueue.main.async { }, I think I should present the limited photo library from that block, but I don't know how to display only the limited photo library to users.

You can call the PHPhotoLibrary.shared().presentLimitedLibraryPicker(from: self) method inside the .limited case:
DispatchQueue.main.async {
    // Register as a change observer, then present the picker that manages the limited selection
    PHPhotoLibrary.shared().register(self)
    PHPhotoLibrary.shared().presentLimitedLibraryPicker(from: self)
}
After that, you have to use the delegate method to get the updated images:
func photoLibraryDidChange(_ changeInstance: PHChange) {
    let fetchOptions = PHFetchOptions()
    self.allPhotos = PHAsset.fetchAssets(with: .image, options: fetchOptions)
}
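Putting the pieces together, here is a minimal sketch of how the .limited path could look inside a view controller (the class name, the allPhotos property, and the commented-out reload are placeholders, not part of the original answer; note that photoLibraryDidChange arrives on a background queue, so UI work must be dispatched back to the main queue):
import Photos
import UIKit

class GalleryViewController: UIViewController, PHPhotoLibraryChangeObserver {
    var allPhotos: PHFetchResult<PHAsset>?

    func handleLimitedAccess() {
        // Observe library changes so we hear about edits to the limited selection.
        PHPhotoLibrary.shared().register(self)
        // Present the system sheet that lets the user add or remove photos
        // from the limited selection (iOS 14+).
        PHPhotoLibrary.shared().presentLimitedLibraryPicker(from: self)
    }

    func photoLibraryDidChange(_ changeInstance: PHChange) {
        // With .limited access this fetch returns only the assets the user selected.
        allPhotos = PHAsset.fetchAssets(with: .image, options: PHFetchOptions())
        DispatchQueue.main.async {
            // Placeholder: refresh whatever UI displays the photos, e.g.
            // self.collectionView.reloadData()
        }
    }
}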

Related

Images not saving in Custom Folder in Gallery in iOS swift

I am trying to save my images into a custom folder in the iPhone gallery, but instead of saving the original image it saves only a white, blank image on a real device. When I run the same code on the simulator, it works fine.
The code I am trying is:
import Photos
class CustomPhotoAlbum: NSObject {
static let albumName = "App Name" // here put your album name
static let sharedInstance = CustomPhotoAlbum()
var assetCollection: PHAssetCollection!
private var collection: PHFetchResult<PHAssetCollection>!
override init() {
super.init()
if let assetCollection = fetchAssetCollectionForAlbum() {
self.assetCollection = assetCollection
return
}
if PHPhotoLibrary.authorizationStatus() != PHAuthorizationStatus.authorized {
PHPhotoLibrary.requestAuthorization({ (status: PHAuthorizationStatus) -> Void in
()
})
}
if PHPhotoLibrary.authorizationStatus() == PHAuthorizationStatus.authorized {
self.createAlbum()
} else {
PHPhotoLibrary.requestAuthorization(requestAuthorizationHandler)
}
}
func requestAuthorizationHandler(status: PHAuthorizationStatus) {
if PHPhotoLibrary.authorizationStatus() == PHAuthorizationStatus.authorized {
// ideally this ensures the creation of the photo album even if authorization wasn't prompted till after init was done
print("trying again to create the album")
self.createAlbum()
} else {
print("should really prompt the user to let them know it's failed")
}
}
func createAlbum() {
PHPhotoLibrary.shared().performChanges({
PHAssetCollectionChangeRequest.creationRequestForAssetCollection(withTitle: CustomPhotoAlbum.albumName) // create an asset collection with the album name
}) { success, error in
if success {
self.assetCollection = self.fetchAssetCollectionForAlbum()
} else {
print("error \(error)")
}
}
}
func fetchAssetCollectionForAlbum() -> PHAssetCollection? {
var firstObject: PHAssetCollection?
let fetchOptions = PHFetchOptions()
fetchOptions.predicate = NSPredicate(format: "title = %@", CustomPhotoAlbum.albumName)
DispatchQueue.main.async {
self.collection = PHAssetCollection.fetchAssetCollections(with: .album, subtype: .any, options: fetchOptions)
if let _: AnyObject = self.collection.firstObject {
firstObject = self.collection.firstObject
}
}
if firstObject != nil {
return firstObject
} else {
return nil
}
}
func save(image: UIImage) {
if assetCollection == nil {
return // if there was an error upstream, skip the save
}
PHPhotoLibrary.shared().performChanges({
let assetChangeRequest = PHAssetChangeRequest.creationRequestForAsset(from: image)
let assetPlaceHolder = assetChangeRequest.placeholderForCreatedAsset
let albumChangeRequest = PHAssetCollectionChangeRequest(for: self.assetCollection)
let enumeration: NSArray = [assetPlaceHolder!]
albumChangeRequest!.addAssets(enumeration)
}, completionHandler: nil)
}
}
And the code I use to save the image is:
func saveImage() {
let snapShot:UIView = backgroundImage.snapshotView(afterScreenUpdates: true)!
UIGraphicsBeginImageContext(backgroundImage.bounds.size)
snapShot.drawHierarchy(in: backgroundImage.bounds, afterScreenUpdates: true)
let image:UIImage = UIGraphicsGetImageFromCurrentImageContext()!
CustomPhotoAlbum.sharedInstance.save(image: image)
self.view.snapshotView(afterScreenUpdates: true)
}
I also added the permissions in my Info.plist.
The code runs fine on the simulator but not on a real device. On a real device it also logs an error in the console:
"Error returned from daemon: Error Domain=com.apple.accounts Code=7
"(null)"" 2022-08-30 17:25:21.043516+0500 App Name[19316:1400881]
[PAAccessLogger] Failed to log access with error: access=<PATCCAccess
0x28247e5b0> accessor:<<PAApplication 0x2809d98b0
identifierType:auditToken identifier:{pid:19316, version:56046}>>
identifier:08A3A297-406E-45A2-8D0D-4C443A3F2835 kind:intervalEnd
timestampAdjustment:0 tccService:kTCCServicePhotos, error=Error
Domain=PAErrorDomain Code=10 "Possibly incomplete access interval
automatically ended by daemon"
I am using Xcode 13.3. Almost the same question was asked here; I tried every answer but nothing helped.
Can someone please help me fix this issue? Thanks.

Image gallery not showing after requesting access permission

I am trying to show the image gallery after the user grants permission to allow all photos, but the gallery does not show. When I go back to the previous controller and navigate back again, the gallery shows up, but that's not what I want: I want the images to show up right after the user allows access.
This is my setup:
private var allPhotos: [PHAsset] = []
override func viewDidLoad() {
super.viewDidLoad()
PHPhotoLibrary.shared().register(self)
setupCollectionView()
checkPhotoLibraryPermission()
bindViewModel()
}
deinit {
PHPhotoLibrary.shared().unregisterChangeObserver(self)
}
private func bindViewModel() {
let dataSource = Observable.just(allPhotos)
dataSource.asObservable()
.bind(to: collectionView.rx.items(cellIdentifier: GalleryCollectionViewCell.cellId, cellType: GalleryCollectionViewCell.self)) { row, asset, cell in
let imageRequestOptions = PHImageRequestOptions()
imageRequestOptions.resizeMode = .exact
self.imageManager.requestImageDataAndOrientation(for: asset, options: imageRequestOptions) { imageData, _, orientation, info in
guard let imageData = imageData else { return }
cell.setup(imageData: imageData)
}
}.disposed(by: disposedBag)
collectionView.rx.itemSelected
.subscribe(onNext: { [weak self] indexPath in
guard let strongSelf = self else { return }
let asset = strongSelf.allPhotos[indexPath.row]
asset.requestContentEditingInput(with: PHContentEditingInputRequestOptions()) { editingInput, info in
guard let path = editingInput?.fullSizeImageURL?.path.replacingOccurrences(of: "HEIC", with: "PNG") else { return }
self?.imageManager.requestImageDataAndOrientation(for: asset, options: self?.imageRequestOptions) { imageData, _, orientation, info in
guard let imageData = imageData else { return }
self?.goToCropImage(from: imageData, and: path.lastPathComponent)
}
}
}).disposed(by: disposedBag)
}
private func fetchAllPhotos() {
let allPhotosOptions = PHFetchOptions()
allPhotosOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
let fetchResult = PHAsset.fetchAssets(with: .image, options: allPhotosOptions)
allPhotos = fetchResult.objects(at: IndexSet(0..<fetchResult.count))
}
private func checkPhotoLibraryPermission() {
let status = PHPhotoLibrary.authorizationStatus()
switch status {
case .authorized:
fetchAllPhotos()
DispatchQueue.main.async {
self.collectionView.reloadData()
}
case .denied, .restricted:
//handle denied status
gotoAppSettings()
case .notDetermined:
// ask for permissions
PHPhotoLibrary.requestAuthorization { status in
switch status {
case .authorized:
self.fetchAllPhotos()
case .denied, .restricted:
// as above
self.gotoAppSettings()
case .notDetermined:
// won't happen but still
break
case .limited:
break
@unknown default:
fatalError("Failed to get user permission to access photo")
}
}
case .limited:
fetchAllPhotos()
@unknown default:
fatalError("Failed to get user permission to access photo")
}
}
func photoLibraryDidChange(_ changeInstance: PHChange) {
let allPhotosOptions = PHFetchOptions()
let fetchResult = PHAsset.fetchAssets(with: .image, options: allPhotosOptions)
DispatchQueue.main.async {
self.allPhotos = fetchResult.objects(at: IndexSet(0..<fetchResult.count))
self.collectionView.reloadData()
}
}
I already tried to reload the collectionView, but it still does not show up.
The way that UICollectionView.rx.items works is that it observes its dataSource. When the dataSource emits a new array, the items operator will reload the collection view and call its closure for each item.
Since you are using Observable.just as your data source, only one array is ever emitted and the collection view never changes. You have to tie the source to the change observer to get it to work. Here is a working example:
extension PhotosViewController { // a UICollectionViewController
func connect(disposeBag: DisposeBag) {
// initial fetch result
let allPhotosOptions = PHFetchOptions()
allPhotosOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
let initialFetchResult = PHAsset.fetchAssets(with: allPhotosOptions)
let assets = PHPhotoLibrary.shared().rx.registerChangeObserver()
// when a change is observed, we need to update the fetchResult
.scan(initialFetchResult) { oldResult, change in
guard let changes = change.changeDetails(for: oldResult) else { return oldResult }
return changes.fetchResultAfterChanges
}
// but first send the initial asset fetch to the collection view
.startWith(initialFetchResult)
// and get the assets out of the fetch result.
.map { $0.objects(at: IndexSet(0 ..< $0.count)) }
collectionView.dataSource = nil
assets
.observe(on: MainScheduler.instance)
.bind(to: collectionView.rx.items(cellIdentifier: "GridViewCell", cellType: GridViewCell.self)) { _, asset, cell in
cell.configure(asset: asset)
}
.disposed(by: disposeBag)
}
}
extension Reactive where Base: PHPhotoLibrary {
// not actually needed, but I provided it as an example.
static func requestAuthorization() -> Observable<PHAuthorizationStatus> {
Observable.create { observer in
Base.requestAuthorization { status in
observer.onNext(status)
observer.onCompleted()
}
return Disposables.create()
}
}
// this sets up the change observer. Note, your VC isn't the observer.
func registerChangeObserver() -> Observable<PHChange> {
Observable.create { [base] observer in
let changeObserver: RxPhotoLibraryChangeObserver = .init(observer: observer)
base.register(changeObserver)
return Disposables.create { base.unregisterChangeObserver(changeObserver) }
}
}
}
// this is the change observer used in the above.
final class RxPhotoLibraryChangeObserver: NSObject, PHPhotoLibraryChangeObserver {
let observer: AnyObserver<PHChange>
init(observer: AnyObserver<PHChange>) {
self.observer = observer
}
func photoLibraryDidChange(_ changeInstance: PHChange) {
observer.onNext(changeInstance)
}
}
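For completeness, a minimal usage sketch, assuming PhotosViewController is the UICollectionViewController the extension above is written for and that it owns a DisposeBag (neither detail is shown in the answer):
import RxSwift
import UIKit

final class PhotosViewController: UICollectionViewController {
    // Assumed property; the answer does not show where the bag lives.
    private let disposeBag = DisposeBag()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Wire up the Rx pipeline once; any later photo-library change re-emits
        // the fetch result and the collection view reloads automatically.
        connect(disposeBag: disposeBag)
    }
}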

Where to put Firebase Performance trace

I am trying to determine the best location for a Firebase Performance trace. I want to see how long it takes my app to pull data.
In my VC I have the following:
func pullAllUsersCards() {
// 1 Start Here?
FirebaseUtility.shared.getCards { (cards, errMessage) in
if let theCards = cards {
if theCards.count < 1 {
if let addVC = self.storyboard?.instantiateViewController(withIdentifier: StoryboardKeys.addCardViewControllerStoryboardID) as? AddCardViewController {
let addNavigation = UINavigationController(rootViewController: addVC)
if UIDevice.current.userInterfaceIdiom == .pad {
self.splitViewController?.present(addNavigation, animated: true, completion: nil)
} else {
self.present(addNavigation, animated: true, completion: nil)
}
}
} else {
// 2 Start Here?
MBProgressHUD.showAdded(to: self.view, animated: true)
self.cardArray = theCards
self.tableView.reloadData()
MBProgressHUD.hide(for: self.view, animated: true)
}
}
}
}
Originally I wanted to put the trace in my singleton class FirebaseUtility, where the getCards method is:
func getCards(completion: @escaping (_ cards: [Card]?, _ errorMessage: String?) -> Void) {
// let testTrace = Performance.startTrace(name: "Test")
guard let userID = user?.uid else {
let error = "Unknown error occured! User is not logged in."
completion(nil, error)
return
}
let userCardRef = ref.child(FirebaseKeys.newCards).child(userID)
userCardRef.observe(.value, with: { (snapshot) in // changed from Single Event
let enumerator = snapshot.children
var cards = [Card]()
while let cardSnapshot = enumerator.nextObject() as? DataSnapshot {
if let cardDict = cardSnapshot.value as? [String : Any] {
let card = Card(id: cardSnapshot.key, cardDict: cardDict)
cards.append(card)
}
}
completion(cards, nil)
})
// testTrace?.stop()
}
However, when I try to use it there, I get an error saying "Firebase Performance does not support Extensions at this time".
Are you using Firebase Performance in the context of an app extension (e.g. Watch, keyboard, Today, etc.)? That message is triggered by this line in the FirebasePerformance.h file:
NS_EXTENSION_UNAVAILABLE("FirebasePerformance does not support app extensions at this time.")
Firebase Performance currently only supports normal applications on iOS.
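As for where the trace itself should go, assuming the code runs in the main app target rather than an extension: a trace around an asynchronous Firebase call should be stopped inside the completion/observer block, not right after attaching the observer, otherwise it measures almost nothing. A minimal sketch of getCards with the commented-out trace re-enabled (Card, ref, FirebaseKeys and user are the question's own types; the trace name is arbitrary):
import FirebaseDatabase
import FirebasePerformance

func getCards(completion: @escaping (_ cards: [Card]?, _ errorMessage: String?) -> Void) {
    // Start the trace before kicking off the asynchronous work.
    let cardsTrace = Performance.startTrace(name: "get_cards")
    guard let userID = user?.uid else {
        cardsTrace?.stop()
        completion(nil, "Unknown error occurred! User is not logged in.")
        return
    }
    let userCardRef = ref.child(FirebaseKeys.newCards).child(userID)
    userCardRef.observe(.value, with: { snapshot in
        var cards = [Card]()
        for case let cardSnapshot as DataSnapshot in snapshot.children {
            if let cardDict = cardSnapshot.value as? [String: Any] {
                cards.append(Card(id: cardSnapshot.key, cardDict: cardDict))
            }
        }
        // Stop the trace only once the data has actually arrived. With a repeating
        // .observe handler you would normally guard against stopping twice, or use
        // observeSingleEvent(of:with:) instead.
        cardsTrace?.stop()
        completion(cards, nil)
    })
}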

Post Video to Facebook with swift SDK

I have been trying to figure this out all day and all of last night, but no luck. I can confirm that LinkShareContent works, but when I try to share a video file it gives me an error code "reserved" and nothing else.
This is the code for the link:
var content = LinkShareContent(url: URL(string: "https://google.com")!)
showShareDialog(content)
And this is the code for the video, which does not work at all:
let video = Video(url: url)
var content = VideoShareContent(video: video, previewPhoto: Photo(image: inProgressItem.firstImage, userGenerated: true))
showShareDialog(content)
This will show the share sheet on the controller:
func showShareDialog<C: ContentProtocol>(_ content: C, mode: ShareDialogMode = .shareSheet) {
let dialog = ShareDialog(content: content)
dialog.presentingViewController = self
dialog.mode = mode
do{
try dialog.show()
}
catch (let error){
print(error)
}
}
I have confirmed that the video is at the local path, and I'm testing the app on an iPhone 8 running 11.1.2.
Had exactly the same issue. It was working for LinkShareContent but didn't work for VideoShareContent.
The solution:
Make sure you are getting the right URL for the video. The right one is the URL for the key "UIImagePickerControllerReferenceURL" from the info dictionary that comes from the UIImagePickerController delegate method.
Working Code:
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String: Any]) {
picker.dismiss(animated: true)
if let videoURL = info["UIImagePickerControllerReferenceURL"] as? URL {
let video = Video(url: videoURL)
let content = VideoShareContent(video: video)
let dialog = ShareDialog(content: content)
dialog.failsOnInvalidData = true
dialog.mode = .native
dialog.presentingViewController = self
do {
try dialog.show()
} catch {
print(error)
}
}
}
Extra info: initially I did not use the key "UIImagePickerControllerReferenceURL" because it's deprecated; Apple advises using UIImagePickerControllerPHAsset instead. But the URL from there also returns the reserved error. Another attempt was the key "UIImagePickerControllerMediaURL", but that didn't succeed either.
I use PHPickerViewController instead of UIImagePickerController.
private lazy var videoPickerController: PHPickerViewController = {
let photoLibrary = PHPhotoLibrary.shared()
var configuration = PHPickerConfiguration(photoLibrary: photoLibrary)
configuration.selectionLimit = 1
configuration.filter = .any(of: [.videos])
let picker = PHPickerViewController(configuration: configuration)
picker.delegate = self
return picker
}()
Then I use the PHAsset to initialise ShareVideo(videoAsset:).
private func facebookShare(content: Content) {
guard let schemaUrl = URL(string: "fb://") else {
return
}
if UIApplication.shared.canOpenURL(schemaUrl) {
let video = ShareVideo(videoAsset: content)
let content = ShareVideoContent()
content.video = video
let dialog = ShareDialog(
viewController: self,
content: content,
delegate: self
)
do {
try dialog.validate()
} catch let error as NSError {
presentAlert(message: (error.userInfo[ErrorDeveloperMessageKey] as? String) ?? error.localizedDescription)
} catch {
presentAlert(message: error.localizedDescription)
}
if dialog.canShow {
dialog.show()
}
} else {
presentAlert(message: "FB app not installed")
}
}
And the PHPickerViewControllerDelegate looks something like this (I always select only one asset, which is why I use fetchResult.firstObject):
func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
picker.dismiss(animated: true)
let identifiers = results.compactMap(\.assetIdentifier)
let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: identifiers, options: nil)
guard let videoAsset = fetchResult.firstObject else { return }
}
This solution works on iOS 14 and higher, and only if the Facebook app is installed on your device.
Also, before uploading the video I log in via FB.

WKWebView Screenshots

I am trying to capture the image that the web view is displaying to the user, so I can do some color analysis of the web page. When I try to get the image from its parent, I am basically getting a white box, even though the page has rendered:
func makeImageSnapshot()-> (NSImage)
{
let imgSize = self.view.bounds.size
let bir = self.view.bitmapImageRepForCachingDisplayInRect(self.webView!.view.bounds)
bir.size = imgSize
self.webView.cacheDisplayInRect(self.view.bounds, toBitmapImageRep:bir)
let image = NSImage(size:imgSize)
image.addRepresentation(bir)
self.image = image
return image
}
func saveSnapshot()
{
let imgRep = self.image!.representations[0]
let data = imgRep.representationUsingType(NSBitmapImageFileType.NSPNGFileType, properties: nil)
data.writeToFile("/tmp/file.png", atomically: false)
}
It looks to me like I can't get access to the properties of the actual view (in this case the bounds) inside of the webView. When I try to access it, the compiler barfs:
/Users/josh/Canary/MacOsCanary/canary/canary/Modules/Overview/Overview.swift:55:37: '(NSView!, stringForToolTip: NSToolTipTag, point: NSPoint, userData: UnsafePointer<()>) -> String!' does not have a member named 'bounds'
My guess is that this is happening due to the extensions approach used by OS X and iOS. Any ideas, or should I just go back to using the legacy WebView?
I realise the question was for Mac OS X, but I found this page whilst searching for an iOS solution. My answer below doesn't work on Mac OS X as the drawViewHierarchyInRect() API call is currently iOS only, but I put it here for reference for other iOS searchers.
This Stack Overflow answer solved it for me on iOS 8 with a WKWebView. That answer's sample code is in Objective-C, but the Swift equivalent, to go in a UIView subclass or extension, would be along the lines of the code below. The code ignores the return value of drawViewHierarchyInRect(), but you may want to pay attention to it.
func imageSnapshot() -> UIImage
{
UIGraphicsBeginImageContextWithOptions(self.bounds.size, true, 0);
self.drawViewHierarchyInRect(self.bounds, afterScreenUpdates: true);
let snapshotImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return snapshotImage;
}
Swift 3
extension WKWebView {
func screenshot() -> UIImage? {
UIGraphicsBeginImageContextWithOptions(self.bounds.size, true, 0);
self.drawHierarchy(in: self.bounds, afterScreenUpdates: true);
let snapshotImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return snapshotImage;
}
}
Note: This solution only works on iOS.
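Usage of the extension above is then a one-liner from whichever view controller owns the web view; a small sketch (the webView property and the analysis step are assumptions, not part of the original answer):
import UIKit
import WebKit

final class BrowserViewController: UIViewController {
    // Assumed: a web view created elsewhere (storyboard outlet or in code).
    var webView: WKWebView!

    func captureForAnalysis() {
        // screenshot() returns nil if the view hierarchy could not be drawn.
        guard let image = webView.screenshot() else { return }
        // Hypothetical next step: run color analysis or persist the PNG data.
        _ = image.pngData()
    }
}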
Found myself in the same boat today but found a solution (by using private APIs).
If you're not targeting the App Store and generally are not afraid of using private APIs, here's a way to capture screenshots of WKWebView's on OS X:
https://github.com/lemonmojo/WKWebView-Screenshot
You will need access to a writable target location (the snapshotURL), such as the desktop, so we provide a handler for that:
func registerSnaphotsURL(_ sender: NSMenuItem, handler: @escaping (URL) -> Void) {
var targetURL : URL
// 1st around authenticate and cache sandbox data if needed
if isSandboxed, desktopData == nil {
targetURL =
UserSettings.SnapshotsURL.value.count == 0
? getDesktopDirectory()
: URL.init(fileURLWithPath: UserSettings.SnapshotsURL.value, isDirectory: true)
let openPanel = NSOpenPanel()
openPanel.message = "Authorize access to "
openPanel.prompt = "Authorize"
openPanel.canChooseFiles = false
openPanel.canChooseDirectories = true
openPanel.canCreateDirectories = true
openPanel.directoryURL = targetURL
openPanel.begin() { (result) -> Void in
if (result == .OK) {
targetURL = openPanel.url!
// Since we do not have data, clear any bookmark
if self.storeBookmark(url: targetURL, options: self.rwOptions) {
self.desktopData = self.bookmarks[targetURL]
UserSettings.SnapshotsURL.value = targetURL.absoluteString
if !self.saveBookmarks() {
print("Yoink, unable to save snapshot bookmark")
}
self.desktopData = self.bookmarks[targetURL]
handler(targetURL)
}
}
else
{
return
}
}
}
else
{
targetURL =
UserSettings.SnapshotsURL.value.count == 0
? getDesktopDirectory()
: URL.init(fileURLWithPath: UserSettings.SnapshotsURL.value, isDirectory: true)
handler(targetURL)
}
}
We wanted to allow capturing a single view (view controller) and all current views (app delegate), so there are two actions in their respective files, both making use of the register handler.
App Delegate
@objc @IBAction func snapshotAllPress(_ sender: NSMenuItem) {
registerSnaphotsURL(sender) { (snapshotURL) in
// If we have a return object just call them, else notify all
if let wvc : WebViewController = sender.representedObject as? WebViewController {
sender.representedObject = snapshotURL
wvc.snapshot(sender)
}
else
{
sender.representedObject = snapshotURL
let notif = Notification(name: Notification.Name(rawValue: "SnapshotAll"), object: sender)
NotificationCenter.default.post(notif)
}
}
}
View Controller
override func viewDidLoad() {
super.viewDidLoad()
NotificationCenter.default.addObserver(
self,
selector: #selector(WebViewController.snapshotAll(_:)),
name: NSNotification.Name(rawValue: "SnapshotAll"),
object: nil)
}
@objc func snapshotAll(_ note: Notification) {
snapshot(note.object as! NSMenuItem)
}
view singleton action
@objc @IBAction func snapshotPress(_ sender: NSMenuItem) {
guard let url = webView.url, url != webView.homeURL else { return }
guard let snapshotURL = sender.representedObject as? URL else {
// Dispatch to app delegate to handle a singleton
sender.representedObject = self
appDelegate.snapshotAllPress(sender)
return
}
sender.representedObject = snapshotURL
snapshot(sender)
}
the webView interaction to capture an image
@objc func snapshot(_ sender: NSMenuItem) {
guard let url = webView.url, url != webView.homeURL else { return }
guard var snapshotURL = sender.representedObject as? URL else { return }
// URL has only destination, so add name and extension
let filename = String(format: "%@ Snapshot at %@",
(url.lastPathComponent as NSString).deletingPathExtension,
String.prettyStamp())
snapshotURL.appendPathComponent(filename)
snapshotURL = snapshotURL.appendingPathExtension("png")
webView.takeSnapshot(with: nil) { image, error in
if let image = image {
self.webImageView.image = image
DispatchQueue.main.async {
self.processSnapshotImage(image, to: snapshotURL)
}
}
else
{
self.userAlertMessage("Failed taking snapshot", info: error?.localizedDescription)
self.webImageView.image = nil
}
}
}
and the capture to the targeted area
func processSnapshotImage(_ image: NSImage, to snapshotURL: URL) {
guard let tiffData = image.tiffRepresentation else { NSSound(named: "Sosumi")?.play(); return }
let bitmapImageRep = NSBitmapImageRep(data: tiffData)
do
{
try bitmapImageRep?.representation(using: .png, properties: [:])?.write(to: snapshotURL)
// https://developer.apple.com/library/archive/qa/qa1913/_index.html
if let asset = NSDataAsset(name:"Grab") {
do {
// Use NSDataAsset's data property to access the audio file stored in Sound.
let player = try AVAudioPlayer(data:asset.data, fileTypeHint:"caf")
// Play the above sound file.
player.play()
} catch {
print("no sound for you")
}
}
if snapshotURL.hideFileExtensionInPath(), let name = snapshotURL.lastPathComponent.removingPercentEncoding {
print("snapshot => \(name)")
}
} catch let error {
appDelegate.userAlertMessage("Snapshot failed", info: error.localizedDescription)
}
}