SwiftUI can't get image from download url - swift

I have the following code to load an image from a download URL and display it as a UIImage.
I expected it to work, but somehow only the placeholder image 'ccc' is displayed, not the actual image from the download URL. Why is that?
My URLs are fetched from a database and look like this:
https://firebasestorage.googleapis.com/v0/b/.../o/P...alt=media&token=...-579f...da
struct ShelterView: View {
var title: String
var background: String
var available: Bool
var distance: Double
var gender: String
@ObservedObject private var imageLoader: Loader
init(title: String, background: String, available: Bool, distance: Double, gender: String) {
self.title = title
self.background = background
self.available = available
self.distance = distance
self.gender = gender
self.imageLoader = Loader(background)
}
var image: UIImage? {
imageLoader.data.flatMap(UIImage.init)
}
var body: some View {
VStack {
VStack(alignment: .leading) {
VStack(alignment: .leading, spacing: 0) {
Text(title)
.font(Font.custom("Helvetica Now Display Bold", size: 30))
.foregroundColor(.white)
.padding(15)
.lineLimit(2)
HStack(spacing: 25) {
IconInfo(image: "bed.double.fill", text: String(available), color: .white)
if gender != "" {
IconInfo(image: "person.fill", text: gender, color: .white)
}
}
.padding(.leading, 15)
}
Spacer()
IconInfo(image: "mappin.circle.fill", text: String(distance) + " miles away", color: .white)
.padding(15)
}
Spacer()
}
.background(
Image(uiImage: image ?? UIImage(named: "ccc")!) // <-- HERE
.brightness(-0.11)
.frame(width: 255, height: 360)
)
.frame(width: 255, height: 360)
.cornerRadius(30)
.shadow(color: Color("shadow"), radius: 10, x: 0, y: 10)
}
}
final class Loader: ObservableObject {
var task: URLSessionDataTask!
@Published var data: Data? = nil
init(_ urlString: String) {
print(urlString)
let url = URL(string: urlString)
task = URLSession.shared.dataTask(with: url!, completionHandler: { data, _, _ in
DispatchQueue.main.async {
self.data = data
}
})
task.resume()
}
deinit {
task.cancel()
}
}

Your image is a plain old var that happens to be nil when the View is built. SwiftUI only rebuilds itself in response to changes in @ObservedObject, @State, or @Binding, so move your image into a @Published property on your imageLoader and it will work. Here is my caching image View:
import SwiftUI
import Combine
import UIKit
class ImageCache {
enum Error: Swift.Error {
case dataConversionFailed
case sessionError(Swift.Error)
}
static let shared = ImageCache()
private let cache = NSCache<NSURL, UIImage>()
private init() { }
static func image(for url: URL?) -> AnyPublisher<UIImage?, ImageCache.Error> {
guard let url = url else {
return Empty().eraseToAnyPublisher()
}
guard let image = shared.cache.object(forKey: url as NSURL) else {
return URLSession
.shared
.dataTaskPublisher(for: url)
.tryMap { (tuple) -> UIImage in
let (data, _) = tuple
guard let image = UIImage(data: data) else {
throw Error.dataConversionFailed
}
shared.cache.setObject(image, forKey: url as NSURL)
return image
}
.mapError({ error in Error.sessionError(error) })
.eraseToAnyPublisher()
}
return Just(image)
.mapError({ _ in fatalError() })
.eraseToAnyPublisher()
}
}
class ImageModel: ObservableObject {
@Published var image: UIImage? = nil
var cacheSubscription: AnyCancellable?
init(url: URL?) {
cacheSubscription = ImageCache
.image(for: url)
.replaceError(with: nil)
.receive(on: RunLoop.main, options: .none)
.assign(to: \.image, on: self)
}
}
struct RemoteImage : View {
@ObservedObject var imageModel: ImageModel
private let contentMode: ContentMode
init(url: URL?, contentMode: ContentMode = .fit) {
imageModel = ImageModel(url: url)
self.contentMode = contentMode
}
var body: some View {
imageModel
.image
.map { Image(uiImage:$0).resizable().aspectRatio(contentMode: contentMode) }
?? Image(systemName: "questionmark").resizable().aspectRatio(contentMode: contentMode)
}
}
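Applied to the original ShelterView, the background can then come straight from the loader. A minimal sketch, assuming background still holds the download URL string from the question:
.background(
    RemoteImage(url: URL(string: background), contentMode: .fill)
        .brightness(-0.11)
        .frame(width: 255, height: 360)
)
Because RemoteImage observes its ImageModel, the card re-renders on its own once the download finishes.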


How to use Photo library in SwiftUI

Xcode version is 14.2.
The target is iOS 16.0.
I want to upload an image from the photo library using PHPickerConfiguration().
It doesn't appear in the UserForm view.
struct UserForm: View {
@State var profileImage: Image?
@Binding var selectedImage: UIImage?
var body: some View {
if profileImage == nil{
Button(action:{
isShowAction.toggle()
}) {
Image(systemName: "person.crop.rectangle.badge.plus.fill")
.resizable()
.scaledToFill()
.frame(width: 100, height: 100)
.foregroundColor(.gray)
.padding(.vertical)
}
}else if let image = profileImage{
image
.resizable()
.scaledToFill()
.frame(width: 100,height: 100)
.clipShape(Circle())
.clipped()
.foregroundColor(.gray)
}
.sheet(isPresented: $imagePickerPresented, onDismiss: loadImage, content:{
AlbumPicker(image: $selectedImage)
}
)
}
}
extension UserForm{
func loadImage(){
guard let selectedImage = selectedImage else{ return }
profileImage = Image(uiImage: selectedImage)
}
}
import SwiftUI
import PhotosUI
struct AlbumPicker: UIViewControllerRepresentable {
@Binding var image: UIImage?
@Environment(\.presentationMode) var mode
func makeUIViewController(context: UIViewControllerRepresentableContext<AlbumPicker>) -> PHPickerViewController {
var configuration = PHPickerConfiguration()
configuration.filter = .images
configuration.selectionLimit = 1
configuration.preferredAssetRepresentationMode = .current
let picker = PHPickerViewController(configuration: configuration)
picker.delegate = context.coordinator
return picker
}
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
func updateUIViewController(_ uiViewController: PHPickerViewController, context: UIViewControllerRepresentableContext<AlbumPicker>) {}
class Coordinator: NSObject, UINavigationControllerDelegate, PHPickerViewControllerDelegate {
var parent: AlbumPicker
init(_ parent: AlbumPicker) {
self.parent = parent
}
func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
guard let itemProvider = results.first?.itemProvider else {
return
}
let typeChecked = itemProvider.registeredTypeIdentifiers.map { itemProvider.hasItemConformingToTypeIdentifier($0) }
guard !typeChecked.contains(false) else {
return
}
itemProvider.loadFileRepresentation(forTypeIdentifier: UTType.image.identifier) { (url, error) in
guard let url = url else {
return
}
guard let imageData = try? Data(contentsOf: url) else {
return
}
self.parent.image = UIImage(data: imageData)
self.parent.mode.wrappedValue.dismiss()
}
}
}
}
When I push the button, I immediately get findWriterForTypeAndAlternateType:119: unsupported file format 'public.heic' on an actual device.
Also, the view does not hold profileImage after I choose an image from the photo library.
How can this be fixed?
Thank you
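A sketch of one commonly suggested change (not from the original post, so treat the details as assumptions): ask the item provider for a decoded UIImage via loadObject(ofClass:) instead of a file representation, which sidesteps the HEIC file path, and push the result back to the binding on the main thread:
func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
    picker.dismiss(animated: true)
    guard let itemProvider = results.first?.itemProvider,
          itemProvider.canLoadObject(ofClass: UIImage.self) else { return }
    itemProvider.loadObject(ofClass: UIImage.self) { object, _ in
        guard let uiImage = object as? UIImage else { return }
        DispatchQueue.main.async {
            // Update the binding on the main thread so UserForm sees the change.
            self.parent.image = uiImage
        }
    }
}
Note that the .sheet modifier in UserForm also needs a single container (for example a Group or VStack) around the if/else, so there is one view to attach it to.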

SwiftUI LazyVGrid(MacOS) and AsyncImage loads again when scrolling away and back [duplicate]

I'm new to SwiftUI and was looking at how to download images from a URL. I found out that in iOS 15 you can use AsyncImage to handle all the phases of an image. The code looks like this.
AsyncImage(url: URL(string: urlString)) { phase in
switch phase {
case .success(let image):
image
.someModifers
case .empty:
Image(systemName: "Placeholder Image")
.someModifers
case .failure(_):
Image(systemName: "Error Image")
.someModifers
@unknown default:
Image(systemName: "Placeholder Image")
.someModifers
}
}
I would launch my app, and every time I scrolled up and down on my List it would download the images again. So how would I be able to add a cache? I was trying to add a cache the way I did in Swift, something like this.
struct DummyStruct {
var imageCache = NSCache<NSString, UIImage>()
func downloadImageFromURLString(_ urlString: String) {
guard let url = URL(string: urlString) else { return }
URLSession.shared.dataTask(with: url) { data, response, error in
if let _ = error {
fatalError()
}
guard let data = data, let image = UIImage(data: data) else { return }
imageCache.setObject(image, forKey: NSString(string: urlString))
}
.resume()
}
}
But it didn't go too well. So I was wondering, is there a way to add caching to AsyncImage? Thanks, I would appreciate any help.
I had the same problem as you. I solved it by writing a CachedAsyncImage that kept the same API as AsyncImage, so that they could be interchanged easily, also in view of future native cache support in AsyncImage.
I made a Swift Package to share it.
CachedAsyncImage has the exact same API and behavior as AsyncImage, so you just have to change this:
AsyncImage(url: logoURL)
to this:
CachedAsyncImage(url: logoURL)
In addition to the AsyncImage initializers, you have the possibility to specify the cache you want to use (by default URLCache.shared is used):
CachedAsyncImage(url: logoURL, urlCache: .imageCache)
// URLCache+imageCache.swift
extension URLCache {
static let imageCache = URLCache(memoryCapacity: 512*1000*1000, diskCapacity: 10*1000*1000*1000)
}
Remember that when setting the cache, the response (in this case our image) must be no larger than about 5% of the disk capacity (see this discussion).
Here is the repo.
Hope this can help others.
I found this great video which talks about using the code below to build an async image cache for your own use.
import SwiftUI
struct CacheAsyncImage<Content>: View where Content: View{
private let url: URL
private let scale: CGFloat
private let transaction: Transaction
private let content: (AsyncImagePhase) -> Content
init(
url: URL,
scale: CGFloat = 1.0,
transaction: Transaction = Transaction(),
@ViewBuilder content: @escaping (AsyncImagePhase) -> Content
){
self.url = url
self.scale = scale
self.transaction = transaction
self.content = content
}
var body: some View{
if let cached = ImageCache[url]{
let _ = print("cached: \(url.absoluteString)")
content(.success(cached))
}else{
let _ = print("request: \(url.absoluteString)")
AsyncImage(
url: url,
scale: scale,
transaction: transaction
){phase in
cacheAndRender(phase: phase)
}
}
}
func cacheAndRender(phase: AsyncImagePhase) -> some View{
if case .success (let image) = phase {
ImageCache[url] = image
}
return content(phase)
}
}
fileprivate class ImageCache{
static private var cache: [URL: Image] = [:]
static subscript(url: URL) -> Image?{
get{
ImageCache.cache[url]
}
set{
ImageCache.cache[url] = newValue
}
}
}
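Since CacheAsyncImage keeps AsyncImage's closure-based initializer, the phase-handling code from the question only needs the type name swapped. A sketch, assuming url is a non-optional URL (the init above requires one):
CacheAsyncImage(url: url) { phase in
    switch phase {
    case .success(let image):
        image
            .resizable()
            .scaledToFit()
    case .failure:
        Image(systemName: "exclamationmark.triangle")
    default:
        // Covers .empty and any future unknown phases.
        ProgressView()
    }
}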
AsyncImage uses the default URLCache under the hood. The simplest way to manage the cache is to change the properties of the default URLCache:
URLCache.shared.memoryCapacity = 50_000_000 // ~50 MB memory space
URLCache.shared.diskCapacity = 1_000_000_000 // ~1GB disk cache space
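These properties only need to be set once, before the first image request. One place to do that is the app's initializer; a sketch, where the App and root view names are just illustrative:
@main
struct MyApp: App {
    init() {
        // Enlarge the shared cache that AsyncImage's underlying URLSession uses.
        URLCache.shared.memoryCapacity = 50_000_000   // ~50 MB
        URLCache.shared.diskCapacity = 1_000_000_000  // ~1 GB
    }
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}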
Use it like this:
ImageView(url: URL(string: "https://wallpaperset.com/w/full/d/2/b/115638.jpg"))
.frame(width: 300, height: 300)
.cornerRadius(20)
ImageView(url: URL(string: "https://ba")) {
// Placeholder
Text("⚠️")
.font(.system(size: 120))
}
.frame(width: 300, height: 300)
.cornerRadius(20)
ImageView.swift
import SwiftUI
struct ImageView<Placeholder>: View where Placeholder: View {
// MARK: - Value
// MARK: Private
@State private var image: Image? = nil
@State private var task: Task<(), Never>? = nil
@State private var isProgressing = false
private let url: URL?
private let placeholder: () -> Placeholder?
// MARK: - Initializer
init(url: URL?, @ViewBuilder placeholder: @escaping () -> Placeholder) {
self.url = url
self.placeholder = placeholder
}
init(url: URL?) where Placeholder == Color {
self.init(url: url, placeholder: { Color("neutral9") })
}
// MARK: - View
// MARK: Public
var body: some View {
GeometryReader { proxy in
ZStack {
placeholderView
imageView
progressView
}
.frame(width: proxy.size.width, height: proxy.size.height)
.task {
task?.cancel()
task = Task.detached(priority: .background) {
await MainActor.run { isProgressing = true }
do {
let image = try await ImageManager.shared.download(url: url)
await MainActor.run {
isProgressing = false
self.image = image
}
} catch {
await MainActor.run { isProgressing = false }
}
}
}
.onDisappear {
task?.cancel()
}
}
}
// MARK: Private
@ViewBuilder
private var imageView: some View {
if let image = image {
image
.resizable()
.scaledToFill()
}
}
@ViewBuilder
private var placeholderView: some View {
if !isProgressing, image == nil {
placeholder()
}
}
@ViewBuilder
private var progressView: some View {
if isProgressing {
ProgressView()
.progressViewStyle(.circular)
}
}
}
#if DEBUG
struct ImageView_Previews: PreviewProvider {
static var previews: some View {
let view = VStack {
ImageView(url: URL(string: "https://wallpaperset.com/w/full/d/2/b/115638.jpg"))
.frame(width: 300, height: 300)
.cornerRadius(20)
ImageView(url: URL(string: "https://wallpaperset.com/w/full/d/2/b/115638")) {
Text("⚠️")
.font(.system(size: 120))
}
.frame(width: 300, height: 300)
.cornerRadius(20)
}
view
.previewDevice("iPhone 11 Pro")
.preferredColorScheme(.light)
}
}
#endif
ImageManager.swift
import SwiftUI
import Combine
import Photos
final class ImageManager {
// MARK: - Singleton
static let shared = ImageManager()
// MARK: - Value
// MARK: Private
private lazy var imageCache = NSCache<NSString, UIImage>()
private var loadTasks = [PHAsset: PHImageRequestID]()
private let queue = DispatchQueue(label: "ImageDataManagerQueue")
private lazy var imageManager: PHCachingImageManager = {
let imageManager = PHCachingImageManager()
imageManager.allowsCachingHighQualityImages = true
return imageManager
}()
private lazy var downloadSession: URLSession = {
let configuration = URLSessionConfiguration.default
configuration.httpMaximumConnectionsPerHost = 90
configuration.timeoutIntervalForRequest = 90
configuration.timeoutIntervalForResource = 90
return URLSession(configuration: configuration)
}()
// MARK: - Initializer
private init() {}
// MARK: - Function
// MARK: Public
func download(url: URL?) async throws -> Image {
guard let url = url else { throw URLError(.badURL) }
if let cachedImage = imageCache.object(forKey: url.absoluteString as NSString) {
return Image(uiImage: cachedImage)
}
let data = (try await downloadSession.data(from: url)).0
guard let image = UIImage(data: data) else { throw URLError(.badServerResponse) }
queue.async { self.imageCache.setObject(image, forKey: url.absoluteString as NSString) }
return Image(uiImage: image)
}
}
Maybe late to the party, but I ran into this exact problem regarding the poor performance of AsyncImage when used in conjunction with ScrollView / LazyVStack layouts.
According to this thread, it seems that the problem is in some way due to Apple's current implementation and will be solved sometime in the future.
I think the most future-proof approach we can use is something similar to the response from Ryan Fung but, unfortunately, it uses an old syntax and misses the overloaded inits (with and without placeholder).
I extended the solution, covering the missing cases, in this GitHub Gist. You can use it like the current AsyncImage implementation, so that when it supports caching consistently you can swap it out.

Running into the Error: keyNotFound, DecodingError when trying to fetch messages from Firebase Database

I get this Error:
keyNotFound(CodingKeys(stringValue: "email", intValue: nil),
Swift.DecodingError.Context(codingPath: [], debugDescription: "No value associated with
key CodingKeys(stringValue: \"email\", intValue: nil) (\"email\").", underlyingError: nil))
I am not sure what this error is trying to tell me. I read through this question: Swift Error - keyNotFound(CodingKeys(stringValue:, intValue: nil), Swift.DecodingError.Context, which has the same error, though I couldn't figure out how to change my ChatUser so that my app works.
I am trying to fetch messages from my Firestore database.
I tried debugging and I am pretty sure
ForEach(vm.recentMessages) { recentMessage in } leads to the error.
This is my ViewModel:
class MessagesViewModel: ObservableObject {
@Published var errorMessage = ""
@Published var chatUser: ChatUser?
init() {
fetchCurrentUser()
fetchRecentMessages()
}
@Published var recentMessages = [RecentMessage]()
private var firestoreListener: ListenerRegistration?
func fetchRecentMessages() {
guard let uid = FirebaseManager.shared.auth.currentUser?.uid else { return }
firestoreListener?.remove()
self.recentMessages.removeAll()
firestoreListener = FirebaseManager.shared.firestore
.collection(FirebaseConstants.recentMessages)
.document(uid)
.collection(FirebaseConstants.messages)
.order(by: FirebaseConstants.timestamp)
.addSnapshotListener { querySnapshot, error in
if let error = error {
self.errorMessage = "Failed to listen for recent messages: \(error)"
print(error)
return
}
querySnapshot?.documentChanges.forEach({ change in
let docId = change.document.documentID
if let index = self.recentMessages.firstIndex(where: { rm in
return rm.id == docId
}) {
self.recentMessages.remove(at: index)
}
do {
let rm = try change.document.data(as: RecentMessage.self)
self.recentMessages.insert(rm, at: 0)
} catch {
print(error)
}
})
}
}
func fetchCurrentUser() {
guard let uid = FirebaseManager.shared.auth.currentUser?.uid else {
self.errorMessage = "Could not find firebase uid"
return
}
FirebaseManager.shared.firestore.collection("users").document(uid).getDocument { snapshot, error in
if let error = error {
self.errorMessage = "Failed to fetch current user: \(error)"
print("Failed to fetch current user:", error)
return
}
self.chatUser = try? snapshot?.data(as: ChatUser.self)
FirebaseManager.shared.currentUser = self.chatUser
}
}
}
This is the View the messages should appear in:
struct MessagesView: View {
@ObservedObject private var vm = MessagesViewModel()
private var chatLogViewModel = ChatLogViewModel(chatUser: FirebaseManager.shared.currentUser)
@State var chatUser: ChatUser?
var body: some View {
VStack {
HStack {
Button() {
} label: {
Image("search").resizable()
.frame(width: 32, height: 32)
.padding(.leading, 11)
}
Spacer()
Image("AppIcon").resizable()
.frame(width: 32, height: 32)
.scaledToFill()
Spacer()
Button() {
} label: {
Image("dots").resizable()
.renderingMode(.template)
.frame(width: 32, height: 32)
.foregroundColor(Color(.init(red: 0.59, green: 0.62, blue: 0.67, alpha: 1)))
.padding(.trailing, 9)
.offset(y: -4)
}
}.padding(.init(top: 0, leading: 8, bottom: 0, trailing: 8))
NavigationView {
VStack {
messagesView
}
}
}
}
private var messagesView: some View {
ScrollView {
ForEach(vm.recentMessages) { recentMessage in
VStack {
Button {
let uid = FirebaseManager.shared.auth.currentUser?.uid == recentMessage.fromId ? recentMessage.toId : recentMessage.fromId
self.chatUser = .init(id: uid, uid: uid, email: recentMessage.email, profileImageUrl: recentMessage.profileImageUrl)
self.chatLogViewModel.chatUser = self.chatUser
self.chatLogViewModel.fetchMessages()
} label: {
HStack(spacing: 16) {
WebImage(url: URL(string: recentMessage.profileImageUrl))
.resizable()
.scaledToFill()
.frame(width: 64, height: 64)
.clipped()
.cornerRadius(64)
.overlay(RoundedRectangle(cornerRadius: 64)
.stroke(Color.black, lineWidth: 1))
.shadow(radius: 5)
VStack(alignment: .leading, spacing: 8) {
Text(recentMessage.username)
.font(.system(size: 16, weight: .bold))
.foregroundColor(Color(.label))
.multilineTextAlignment(.leading)
Text(recentMessage.text)
.font(.system(size: 14))
.foregroundColor(Color(.darkGray))
.multilineTextAlignment(.leading)
}
Spacer()
Text(recentMessage.timeAgo)
.font(.system(size: 14, weight: .semibold))
.foregroundColor(Color(.label))
}
}
Divider()
.padding(.vertical, 8)
}.padding(.horizontal)
}.padding(.bottom, 50)
}
}
}
ChatUser looks like this:
import FirebaseFirestoreSwift
struct ChatUser: Codable, Identifiable {
@DocumentID var id: String?
let uid, email, profileImageUrl: String
}
struct RecentMessage: Codable, Identifiable {
@DocumentID var id: String?
let text, email: String
let fromId, toId: String
let profileImageUrl: String
let timestamp: Date
var username: String {
email.components(separatedBy: "@").first ?? email
}
var timeAgo: String {
let formatter = RelativeDateTimeFormatter()
formatter.unitsStyle = .abbreviated
return formatter.localizedString(for: timestamp, relativeTo: Date())
}
}
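The error itself means that a document being decoded has no email field, so decoding into a non-optional String fails. One way to tolerate such documents (a sketch, not part of the original post) is to make the field optional:
struct RecentMessage: Codable, Identifiable {
    @DocumentID var id: String?
    let text: String
    let email: String?          // optional: older documents may not carry an email
    let fromId, toId: String
    let profileImageUrl: String
    let timestamp: Date
    var username: String {
        // Fall back to an empty name when the field is missing.
        (email ?? "").components(separatedBy: "@").first ?? ""
    }
}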
Update
I made some adjustments and added the RecentMessage portion. Now it works entirely for me. I can get the current user (ChatUser) from Firestore and I can also get the RecentMessages. Right now it loads all RecentMessages; this should be changed to load only those related to the current user.
Models
ChatUser
import Foundation
import SwiftUI
import Firebase
struct ChatUser: Codable, Identifiable, Hashable {
var id: String?
var email: String
var profileImageUrl: String
var uid: String
init(email: String, profileImageUrl: String, uid: String, id: String?) {
self.id = id
self.email = email
self.profileImageUrl = profileImageUrl
self.uid = uid
}
init?(document: DocumentSnapshot) {
let data = document.data()
let email = data!["email"] as? String ?? ""
let profileImageUrl = data!["profileImageUrl"] as? String ?? ""
let uid = data!["uid"] as? String ?? ""
id = document.documentID
self.email = email
self.profileImageUrl = profileImageUrl
self.uid = uid
}
enum CodingKeys: String, CodingKey {
case id
case email
case profileImageUrl
case uid
}
}
extension ChatUser: Comparable {
static func == (lhs: ChatUser, rhs: ChatUser) -> Bool {
return lhs.id == rhs.id
}
static func < (lhs: ChatUser, rhs: ChatUser) -> Bool {
return lhs.email < rhs.email
}
}
RecentMessage
struct RecentMessage: Codable, Identifiable, Hashable {
var id: String?
var text: String
var email: String
var fromId: String
var toId: String
var profileImageUrl: String
var timestamp: Date
init(text: String, email: String, fromId: String, toId: String, profileImageUrl: String, timestamp: Date, id: String?) {
self.id = id
self.text = text
self.email = email
self.fromId = fromId
self.toId = toId
self.profileImageUrl = profileImageUrl
self.timestamp = timestamp
}
init?(document: DocumentSnapshot) {
let data = document.data()
let text = data!["text"] as? String ?? ""
let email = data!["email"] as? String ?? ""
let fromId = data!["fromId"] as? String ?? ""
let toId = data!["toId"] as? String ?? ""
let profileImageUrl = data!["profileImageUrl"] as? String ?? ""
let timestamp = data!["timestamp"] as? Date ?? Date()
id = document.documentID
self.text = text
self.email = email
self.fromId = fromId
self.toId = toId
self.profileImageUrl = profileImageUrl
self.timestamp = timestamp
}
enum CodingKeys: String, CodingKey {
case id
case text
case email
case fromId
case toId
case profileImageUrl
case timestamp
}
func getUsernameFromEmail() -> String {
return email.components(separatedBy: "@").first ?? email
}
func getElapsedTime() -> String {
let formatter = RelativeDateTimeFormatter()
formatter.unitsStyle = .abbreviated
return formatter.localizedString(for: timestamp, relativeTo: Date())
}
}
extension RecentMessage: Comparable {
static func == (lhs: RecentMessage, rhs: RecentMessage) -> Bool {
return lhs.id == rhs.id
}
static func < (lhs: RecentMessage, rhs: RecentMessage) -> Bool {
return lhs.timestamp < rhs.timestamp
}
}
ViewModels
One thing in advance: instead of using FirebaseManager.shared, I declared this once in the ViewModel:
let db = Firestore.firestore()
I split the existing MessagesViewModel into MessagesViewModel and UserViewModel
I added a completion block to the fetchCurrentUser function.
It must complete first (successfully) before the fetchRecentMessage function runs. With that block we do two things:
make sure the fetchCurrentUser function has completed. I assume that, even if the id is saved to the database, in your init function the fetchRecentMessage function is called before you have the data for the currentUser; the call is asynchronous and your code does not wait for Firestore to finish.
make sure that chatUser really is initialized with the values from Firestore.
UserViewModel
class UsersViewModel: ObservableObject {
let db = Firestore.firestore()
@Published var errorMessage = ""
@Published var chatUser: ChatUser?
func fetchCurrentUser(_ completion: @escaping (Bool, String) -> Void) {
guard let uid = Auth.auth().currentUser?.uid else {
self.errorMessage = "Could not find firebase uid"
completion(false, self.errorMessage)
return
}
let docRef = self.db.collection("chatUsers").document(uid)
docRef.getDocument { (document, error) in
if let document = document, document.exists {
self.chatUser = ChatUser(document: document)
completion(true, "ChatUser set up from Firestore db")
} else {
self.errorMessage = "ChatUser document not found"
completion(false, self.errorMessage)
}
}
}
}
MessagesViewModel
class MessagesViewModel: ObservableObject {
let db = Firestore.firestore()
@Published var errorMessage = ""
@Published var recentMessages: [RecentMessage] = []
private var firestoreListener: ListenerRegistration?
func fetchData(_ completion: @escaping (Bool, String) -> Void) {
self.recentMessages.removeAll()
db.collection("recentMessages").addSnapshotListener { (querySnapshot, error) in
guard let documents = querySnapshot?.documents else {
self.errorMessage = "No documents"
completion(true, self.errorMessage)
return
}
self.recentMessages = documents.map { queryDocumentSnapshot -> RecentMessage in
return RecentMessage(document: queryDocumentSnapshot)!
}
completion(true, "Data fetched")
}
}
}
Views
ContentView
The ContentView just opens the next view, MusicBandFanView(). When MusicBandFanView() appears, it first calls the fetchCurrentUser() function, and once that function's completion block has run successfully it calls the messages view model's fetchData() function.
struct ContentView: View {
@Environment(\.managedObjectContext) private var viewContext
@EnvironmentObject var usersViewModel: UsersViewModel
@EnvironmentObject var messagesViewModel: MessagesViewModel
@EnvironmentObject var authViewModel: AuthViewModel
var body: some View {
MusicBandFanView()
.onAppear {
usersViewModel.fetchCurrentUser({ (success, logMessage) -> Void in
if success {
print(logMessage)
messagesViewModel.fetchData({ (success, logMessage) -> Void in
print(logMessage)
})
} else {
print(logMessage)
}
})
}
}
}
MusicBandFanView
This is the first part of your view. I just changed the messagesView from a var within MusicBandFanView to a distinct view.
Since I don't have the images you use, I used SF Symbols instead.
struct MusicBandFanView: View {
@Environment(\.managedObjectContext) private var viewContext
@EnvironmentObject var usersViewModel: UsersViewModel
@EnvironmentObject var messagesViewModel: MessagesViewModel
@EnvironmentObject var authViewModel: AuthViewModel
var body: some View {
VStack() {
HStack {
Button() {
} label: {
Image(systemName: "magnifyingglass")
.frame(width: 32, height: 32)
.padding(.leading, 11)
}
Spacer()
Image(systemName: "applelogo")
.frame(width: 32, height: 32)
.scaledToFill()
Spacer()
Button() {
} label: {
Image(systemName: "ellipsis")
.renderingMode(.template)
.frame(width: 32, height: 32)
.foregroundColor(Color(.init(red: 0.59, green: 0.62, blue: 0.67, alpha: 1)))
.padding(.trailing, 9)
.offset(y: -4)
}
}.padding(.init(top: 0, leading: 8, bottom: 0, trailing: 8))
NavigationView {
VStack {
Text("messages")
MessagesView()
}
}
}
}
}
MessagesView
That is what you had put into a var.
I didn't really get what you were trying to do with the button, but I guess you can add that portion in the same way as the existing code.
Since I didn't install SDWebImage in this test environment, I just used an SF Symbol instead.
struct MessagesView: View {
@Environment(\.managedObjectContext) private var viewContext
@EnvironmentObject var usersViewModel: UsersViewModel
@EnvironmentObject var messagesViewModel: MessagesViewModel
@EnvironmentObject var authViewModel: AuthViewModel
var body: some View {
ScrollView {
VStack {
ForEach(messagesViewModel.recentMessages, id: \.self){ recentMessage in
Button {
// I don't really get what you try to do here. The user is already logged in and the fetch for the chatUser (current user) has already been performed.
} label: {
HStack(spacing: 16) {
Image(systemName: "person.crop.circle.fill")
.resizable()
.scaledToFill()
.frame(width: 64, height: 64)
.clipShape(Circle())
.overlay(Circle()
.stroke(Color.black, lineWidth: 1))
.shadow(radius: 5)
VStack(alignment: .leading, spacing: 8) {
Text(recentMessage.getUsernameFromEmail())
.font(.system(size: 16, weight: .bold))
.foregroundColor(Color(.label))
.multilineTextAlignment(.leading)
Text(recentMessage.text)
.font(.system(size: 14))
.foregroundColor(Color(.darkGray))
.multilineTextAlignment(.leading)
}
Spacer()
Text("\(recentMessage.getElapsedTime())")
.font(.system(size: 14, weight: .semibold))
.foregroundColor(Color(.label))
}
}
Divider()
.padding(.vertical, 8)
}
.padding(.horizontal)
}
.padding(.bottom, 50)
}
}
}
Screenshots
App Screenshot from simulator
Firestore Console Output ChatUser
Firestore Console Output RecentMessage

Should I pass the viewModel or only the model to a view to upload an image/PDF document in SwiftUI with MVVM? Do I need to keep progress for each document?

I have a situation where I can add multiple image or video URLs to an array one by one. Similarly, a separate view (AssetView) is modified based on the array elements added. Now, the status of the image/video/PDF upload changes and needs to reflect upload progress and completion. But here, I am unable to apply MVVM.
I am confused about where I should call the upload function.
Should I pass the viewModel to AssetView, or only the view?
I am adding the source code below to show my problem.
This is the first screen, where the user gets an option to show the card. On the card, the user can select an image, video, or PDF document.
struct ContentView: View {
@State var cardShown = false
var body: some View {
NavigationView {
ZStack {
Button(action: {
cardShown.toggle()
}, label: {
Text("Show Card")
.bold()
.foregroundColor(Color.white)
.background(Color.blue)
.frame(width: 200, height: 50)
})
BottomCard(cardShown: $cardShown, height: 400, content: {
CardContent()
.padding()
})
}
}
}
}
This is the CardContent view, where the user will add documents.
enum ActionType {
case ImageButtonAction
case VideoButtonAction
case None
}
struct CardContent: View {
@State private var text = ""
@State private var image: Image? = Image("UserProfilePlaceholder")
@State private var shouldPresentImagePicker = false
@State private var shouldPresentActionScheet = false
@State private var shouldPresentCamera = false
@State private var galleryAssetTypeSelected = GalleryAssetType.None
@State private var actionType = ActionType.None
@StateObject var messageAttachmentViewModel = MessageAttachmentViewModel()
// Document
@State private var shouldPresentDocumentPicker = false
var body: some View {
VStack {
Text("Photo Collage")
.bold()
.font(.system(size: 30))
.padding()
Text("You can create awesome photo grids and share them with all of your friends")
.font(.system(size: 18))
.multilineTextAlignment(.center)
TextEditor(text: $text)
.frame(height: 40)
ScrollView(.horizontal, showsIndicators: false) {
HStack {
ForEach(0..<self.messageAttachmentViewModel.commonMessageAttachmentModel.count, id: \.self) { i in
AssetView(messageAttachmentViewModel: messageAttachmentViewModel, index: i)
}
}
}
.background(Color.white)
.frame(height: 140)
HStack {
Button(action: {
self.shouldPresentActionScheet = true
self.actionType = .ImageButtonAction
}, label: {
Text("IMAGE")
})
Button(action: {
self.shouldPresentActionScheet = true
self.actionType = .VideoButtonAction
}, label: {
Text("VIDEO")
})
Button(action: {
self.galleryAssetTypeSelected = .PDF
self.shouldPresentDocumentPicker = true
}, label: {
Text("PDF")
})
Spacer()
Text("500")
.font(.system(size: 18))
Button(action: {
}, label: {
Text("SEND")
})
}
}
.padding()
.sheet(isPresented: $shouldPresentImagePicker) {
ImagePicker(sourceType: self.shouldPresentCamera ? .camera : .photoLibrary, image: self.$image, isPresented: self.$shouldPresentImagePicker, galleryAssetType: $galleryAssetTypeSelected, messageAttachmentViewModel: messageAttachmentViewModel)
}.actionSheet(isPresented: $shouldPresentActionScheet) { () -> ActionSheet in
ActionSheet(title: Text("Choose mode"), message: Text("Please choose your preferred mode to set your profile image"), buttons: [ActionSheet.Button.default(Text("Camera"), action: {
self.shouldPresentImagePicker = true
self.shouldPresentCamera = true
self.galleryAssetTypeSelected = .None
}), ActionSheet.Button.default(Text(actionType == ActionType.ImageButtonAction ? "Photo Library" : "Video"), action: {
self.shouldPresentImagePicker = true
self.shouldPresentCamera = false
self.galleryAssetTypeSelected = (actionType == ActionType.ImageButtonAction) ? GalleryAssetType.Photo : GalleryAssetType.Video
self.galleryAssetTypeSelected = actionType == ActionType.ImageButtonAction ? .Photo : .Video
}), ActionSheet.Button.cancel()])
}
// .sheet(isPresented: $shouldPresentDocumentPicker) {
// DocumentPicker(isDocumentPickerPresented: $shouldPresentDocumentPicker, galleryAssetType: $galleryAssetTypeSelected, commentAttachments: $commentAttachments)
// }
}
}
Below is the ImagePicker struct used to select an image or video from the gallery.
struct ImagePicker: UIViewControllerRepresentable {
var sourceType: UIImagePickerController.SourceType = .photoLibrary
@Binding var image: Image?
@Binding var isPresented: Bool
@Binding var galleryAssetType: GalleryAssetType
@ObservedObject var messageAttachmentViewModel: MessageAttachmentViewModel
func makeCoordinator() -> ImagePickerViewCoordinator {
return ImagePickerViewCoordinator(image: $image, isPresented: $isPresented, galleryAssetType: $galleryAssetType, messageAttachmentViewModel: messageAttachmentViewModel)
}
func makeUIViewController(context: Context) -> UIImagePickerController {
let pickerController = UIImagePickerController()
pickerController.sourceType = sourceType
pickerController.delegate = context.coordinator
if galleryAssetType == .Photo {
pickerController.mediaTypes = ["public.image"]
} else if galleryAssetType == .Video {
pickerController.mediaTypes = ["public.movie"]
pickerController.videoQuality = .typeHigh
}
return pickerController
}
func updateUIViewController(_ uiViewController: UIImagePickerController, context: Context) {
// Nothing to update here
}
}
class ImagePickerViewCoordinator: NSObject, UINavigationControllerDelegate, UIImagePickerControllerDelegate {
@Binding var image: Image?
@Binding var isPresented: Bool
@Binding var galleryAssetType: GalleryAssetType
@ObservedObject var messageAttachmentViewModel: MessageAttachmentViewModel
init(image: Binding<Image?>, isPresented: Binding<Bool>, galleryAssetType: Binding<GalleryAssetType>, messageAttachmentViewModel: MessageAttachmentViewModel) {
self._image = image
self._isPresented = isPresented
self._galleryAssetType = galleryAssetType
self.messageAttachmentViewModel = messageAttachmentViewModel
}
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let image = info[UIImagePickerController.InfoKey.originalImage] as? UIImage {
self.image = Image(uiImage: image)
}
if galleryAssetType == .Photo {
if let imageURL = info[UIImagePickerController.InfoKey(rawValue: "UIImagePickerControllerImageURL") ] as? URL {
let image = info[UIImagePickerController.InfoKey(rawValue: "UIImagePickerControllerOriginalImage")] as? UIImage
let messageAttachmentModel = MessageAttachmentModel(assetType: .Photo, assetUrl: imageURL, image: image, uploadStatus: false)
self.messageAttachmentViewModel.commonMessageAttachmentModel.append(messageAttachmentModel)
}
if let image = info[UIImagePickerController.InfoKey.originalImage] as? UIImage {
self.image = Image(uiImage: image)
}
} else if galleryAssetType == .Video {
if let videoURL = info[UIImagePickerController.InfoKey(rawValue: "UIImagePickerControllerMediaURL") ] as? URL {
let messageAttachmentModel = MessageAttachmentModel(assetType: .Video, assetUrl: videoURL, uploadStatus: false)
self.messageAttachmentViewModel.commonMessageAttachmentModel.append(messageAttachmentModel)
}
}
self.isPresented = false
}
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
self.isPresented = false
}
}
This is the VideoThumbnail view, which shows only a thumbnail after selection. The actual video has to be uploaded to the server.
struct VideoThumbnail: View {
private enum LoadState {
case loading, success, failure
}
private class Loader: ObservableObject {
var videoThumbnail = UIImage()
var state = LoadState.loading
init(url: URL) {
if url.pathComponents.isEmpty {
self.state = .failure
return
}
let asset = AVAsset(url: url)
let avAssetImageGenerator = AVAssetImageGenerator(asset: asset)
avAssetImageGenerator.appliesPreferredTrackTransform = true
avAssetImageGenerator.maximumSize = CGSize(width: 150, height: 150)
let thumbnailTime = CMTimeMake(value: 2, timescale: 1)
do {
let cgThumbImage = try avAssetImageGenerator.copyCGImage(at: thumbnailTime, actualTime: nil)
self.videoThumbnail = UIImage(cgImage: cgThumbImage)
self.state = .success
} catch {
print(error.localizedDescription)
self.state = .failure
}
DispatchQueue.main.async {
self.objectWillChange.send()
}
}
}
@StateObject private var loader: Loader
var loading: Image
var failure: Image
var body: some View {
selectImage()
.resizable()
.aspectRatio(contentMode: .fit)
}
init(url: URL, loading: Image = Image(systemName: "photo"), failure: Image = Image(systemName: "multiply.circle")) {
_loader = StateObject(wrappedValue: Loader(url: url))
self.loading = loading
self.failure = failure
}
private func selectImage() -> Image {
switch loader.state {
case .loading:
return loading
case .failure:
return failure
default:
return Image(uiImage: loader.videoThumbnail)
}
}
}
Below is PDFThumbnail View.
struct PdfThumbnailView: View {
private enum LoadState {
case loading, success, failure
}
private class Loader: ObservableObject {
var pdfThumbnail = UIImage()
var state = LoadState.loading
init(url: URL) {
if url.pathComponents.isEmpty {
self.state = .failure
return
}
let pdfDocument = PDFDocument(url: url)
if let pdfDocumentPage = pdfDocument?.page(at: 1) {
pdfThumbnail = pdfDocumentPage.thumbnail(of: CGSize(width: 150, height: 150), for: PDFDisplayBox.trimBox)
self.state = .success
} else {
self.state = .failure
}
}
}
@StateObject private var loader: Loader
var loading: Image
var failure: Image
var body: some View {
selectImage()
.resizable()
.aspectRatio(contentMode: .fit)
}
init(url: URL, loading: Image = Image(systemName: "photo"), failure: Image = Image(systemName: "multiply.circle")) {
_loader = StateObject(wrappedValue: Loader(url: url))
self.loading = loading
self.failure = failure
}
private func selectImage() -> Image {
switch loader.state {
case .loading:
return loading
case .failure:
return failure
default:
return Image(uiImage: loader.pdfThumbnail)
}
}
}
MessageAttachmentModel: This Model is created when Image/Video/Pdf is selected.
struct MessageAttachmentModel {
var assetType = GalleryAssetType.None
var assetUrl: URL
var image: UIImage?
var uploadStatus: Bool
init(assetType: GalleryAssetType, assetUrl: URL, image: UIImage? = nil, uploadStatus: Bool) {
self.assetType = assetType
self.assetUrl = assetUrl
self.image = image
self.uploadStatus = uploadStatus
}
}
MessageAttachmentViewModel: this ViewModel contains an array of MessageAttachmentModel as a published property to reflect changes.
class MessageAttachmentViewModel: ObservableObject {
@Published var commonMessageAttachmentModel: [MessageAttachmentModel] = []
@Published var isUploadedLeft: Bool = false
func getIsUploadedStatus() {
let leftToUpload = commonMessageAttachmentModel.filter({ $0.uploadStatus == false })
isUploadedLeft = (leftToUpload.count > 0) ? true : false
}
func updateData() {
for var model in commonMessageAttachmentModel {
if model.uploadStatus == false {
if let endUsedId = getEndUserDataId(), let data = getDataFromURL(url: model.assetUrl) {
let timestamp = Date().timeIntervalSince1970
let key = "u_me_\(endUsedId)_\(timestamp))"
var assetType = (model.assetType == .Photo) ? ("Image") : ((model.assetType == .Video) ? "Video" : "Files")
uploadFileData(assetType: assetType, key: key, data: data) { status, urlString in
if status {
model.uploadStatus = true
}
}
}
}
}
}
func uploadFileData(assetType: String, key: String, data: Data, completion: @escaping (Bool, String) -> Void) {
/// Server Data Upload
}
func getEndUserDataId() -> String? {
var endUserId: String?
return "5"
}
}
I have to show the progress of the image/video/PDF upload in AssetView. I am unable to work out how I can achieve it.
I am adding a simulator screenshot as well to make the situation clearer.
I am struggling to decide whether I need to keep progress for each document. Please help.
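Keeping one progress value per attachment is the usual way to drive a per-item progress indicator. A sketch, not a drop-in implementation: the upload callback signature below is an assumption, standing in for whatever your server API reports.
struct MessageAttachmentModel: Identifiable {
    let id = UUID()
    var assetType = GalleryAssetType.None
    var assetUrl: URL
    var image: UIImage?
    var uploadStatus: Bool
    var uploadProgress: Double = 0   // 0...1, one value per document
}
class MessageAttachmentViewModel: ObservableObject {
    @Published var commonMessageAttachmentModel: [MessageAttachmentModel] = []
    // Assumed signature: the uploader reports fractional progress, then completion.
    func upload(at index: Int,
                progress: @escaping (Double) -> Void,
                completion: @escaping (Bool) -> Void) { /* server upload */ }
    func uploadAll() {
        for index in commonMessageAttachmentModel.indices where !commonMessageAttachmentModel[index].uploadStatus {
            upload(at: index, progress: { fraction in
                DispatchQueue.main.async {
                    // Mutating an element of a @Published array publishes a change, so AssetView refreshes.
                    self.commonMessageAttachmentModel[index].uploadProgress = fraction
                }
            }, completion: { success in
                DispatchQueue.main.async {
                    self.commonMessageAttachmentModel[index].uploadStatus = success
                }
            })
        }
    }
}
AssetView can then keep taking the view model plus an index (as it already does) and render something like ProgressView(value: messageAttachmentViewModel.commonMessageAttachmentModel[index].uploadProgress).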

Update view with observed objects of observed array in swiftui

I have an image holder that loads the thumbnail on init and allows for download later on. My issue is that the view is not updated with the images after I load them. After pressing the load button a second time, my first images are then displayed.
I'm having trouble finding the reason behind this behaviour.
The image holder:
class MyImage: ObservableObject {
private static let sessionProcessingQueue = DispatchQueue(label: "SessionProcessingQueue")
@Published var thumbnail: UIImage?
@Published var loaded: Bool
var fullName: String {
"\(folderName)/\(fileName)"
}
var onThumbnailSet: ((UIImage?) -> Void)
private var folderName: String
private var fileName: String
private var cancelableThumbnail: AnyCancellable?
private var thumbnailUrl: URL? {
return URL(string: "\(BASE_URL)/thumbnail/\(fullName)")
}
private var downloadUrl: URL? {
return URL(string: "\(BASE_URL)/download/\(fullName)")
}
init(folderName: String, fileName: String) {
self.folderName = folderName
self.fileName = fileName
self.loaded = false
self.loadThumbnail()
}
private func loadThumbnail() {
guard let requestUrl = thumbnailUrl else { fatalError() }
self.cancelableThumbnail = URLSession.shared.dataTaskPublisher(for: requestUrl)
.subscribe(on: Self.sessionProcessingQueue)
.map { UIImage(data: $0.data) }
.receive(on: DispatchQueue.main)
.sink(receiveCompletion: { (suscriberCompletion) in
switch suscriberCompletion {
case .finished:
break
case .failure(let error):
print(error.localizedDescription)
}
}, receiveValue: { [weak self] (value) in
self?.objectWillChange.send()
self?.loaded.toggle()
self?.thumbnail = value
})
}
}
The view:
struct MyView: View {
@ObservedObject var imagesHolder: ImagesHolder = ImagesHolder()
var body: some View {
VStack {
Button(action: {
self.loadImages()
}, label: {
Text("Load images")
})
ForEach(imagesHolder.images, id: \.self) { image in
if image.loaded {
Image(uiImage: image.thumbnail!)
.frame(width: 600, height: 600)
} else {
Text("Not loaded")
}
}
}
}
private func loadImages() -> Void {
loadMediaList(
onLoadDone: { myImages in
myImages.forEach { image in
imagesHolder.append(image)
}
}
)
}
}
The observed object containing the array of loaded images:
class ImagesHolder: ObservableObject {
@Published var images: [MyImage] = []
func append(_ myImage: MyImage) {
objectWillChange.send()
images.append(myImage)
}
}
And finally my data loader:
func loadMediaList(onLoadDone: @escaping (([MyImage]) -> Void)) -> AnyCancellable {
let url = URL(string: "\(BASE_URL)/medias")
guard let requestUrl = url else { fatalError() }
return URLSession.shared.dataTaskPublisher(for: requestUrl)
.subscribe(on: Self.sessionProcessingQueue)
.map { parseJSON(data: $0.data) }
.receive(on: DispatchQueue.main)
.sink(receiveCompletion: { (suscriberCompletion) in
switch suscriberCompletion {
case .finished:
break
case .failure(let error):
print(error.localizedDescription)
}
}, receiveValue: { images in
onLoadDone(images);
})
}
What I ended up doing, and what worked great for me, was having a separate view for the display of my image, like this:
struct MyImageView: View {
@ObservedObject var image: MyImage
init(image: MyImage) {
self.image = image
}
var body: some View {
if image.loaded {
Image(uiImage: image.thumbnail!)
.resizable()
} else {
ProgressView()
.progressViewStyle(CircularProgressViewStyle())
.frame(width: 100, height: 100, alignment: .center)
}
}
}
struct MyView: View {
@ObservedObject var imagesHolder: ImagesHolder = ImagesHolder()
var body: some View {
VStack {
Button(action: {
self.loadImages()
}, label: {
Text("Load images")
})
ForEach(imagesHolder.images, id: \.self) { image in
MyImageView(image: image)
}
}
}
private func loadImages() -> Void {
loadMediaList(
onLoadDone: { myImages in
myImages.forEach { image in
imagesHolder.append(image)
}
}
)
}
}
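The child view works because ImagesHolder only publishes when the array itself changes; mutations inside each MyImage (a reference type) are invisible to it. If a single container view is preferred, the holder can instead forward its children's change notifications. A sketch, not part of the original answer:
import Combine
class ImagesHolder: ObservableObject {
    @Published var images: [MyImage] = []
    private var cancellables: [AnyCancellable] = []
    func append(_ myImage: MyImage) {
        images.append(myImage)   // @Published already publishes this change
        // Re-publish whenever the nested ObservableObject changes (e.g. its thumbnail loads).
        myImage.objectWillChange
            .sink { [weak self] _ in self?.objectWillChange.send() }
            .store(in: &cancellables)
    }
}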