Swift and ObjectMapper: NSDate with min value

I'm using ObjectMapper to map JSON into objects. My problem is that the NSDate property is not being mapped correctly. Here is the JSON:
{
    "Id": 4775,
    "Cor": {
        "Id": 2,
        "Nome": "Amarelo",
        "HTMLCode": "FFFB00"
    },
    "Data": "2016-07-25T09:35:00",
    "Texto": "test test test",
    "Kilometro": 547.0
}
And here is my Mappable class:
class RoadWarning: Mappable {
    var id: Int?
    var color: RoadWarningColor?
    var date: NSDate?
    var text: String?
    var kilometer: Float?

    required init?(_ map: Map) {
    }

    func mapping(map: Map) {
        id <- map["Id"]
        color <- map["Cor"]
        text <- map["Texto"]
        kilometer <- map["Kilometro"]
        date <- (map["Data"], DateTransform())
    }
}
The problem is that the date property is always 1970-01-01. I can't see yet what I am missing. Can you see what is wrong in this mapping?
Thanks

ObjectMapper's default DateTransform does not convert a date String into an NSDate properly; it expects a Unix timestamp in seconds, which is why you always end up around 1970-01-01. You have to work around it by specifying the NSDate format you need and converting the String yourself:
func mapping(map: Map) {
    id <- map["Id"]
    color <- map["Cor"]
    text <- map["Texto"]
    kilometer <- map["Kilometro"]

    let dateFormatter = NSDateFormatter()
    dateFormatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss"
    if let dateString = map["Data"].currentValue as? String, let _date = dateFormatter.dateFromString(dateString) {
        date = _date
    }
}
I hope this helps you.
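Alternatively, if your ObjectMapper version includes CustomDateFormatTransform (newer releases ship one), you can pass the format string directly instead of building the formatter by hand. A sketch, assuming that transform is available in your version:
date <- (map["Data"], CustomDateFormatTransform(formatString: "yyyy-MM-dd'T'HH:mm:ss"))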

You could just create a TransformType class that takes a dateFormat as a parameter:
// DateFormatTransform.swift
import Foundation
import ObjectMapper

public class DateFormatTransform: TransformType {
    public typealias Object = NSDate
    public typealias JSON = String

    // Defaults to "yyyy-MM-dd HH:mm:ss"; pass another format to the convenience init to override it.
    var dateFormatter: NSDateFormatter = {
        let formatter = NSDateFormatter()
        formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
        return formatter
    }()

    convenience init(dateFormat: String) {
        self.init()
        self.dateFormatter.dateFormat = dateFormat
    }

    public func transformFromJSON(value: AnyObject?) -> Object? {
        if let dateString = value as? String {
            return self.dateFormatter.dateFromString(dateString)
        }
        return nil
    }

    public func transformToJSON(value: NSDate?) -> JSON? {
        if let date = value {
            return self.dateFormatter.stringFromDate(date)
        }
        return nil
    }
}
And use it like this:
func mapping(map: Map) {
    id <- map["Id"]
    color <- map["Cor"]
    text <- map["Texto"]
    kilometer <- map["Kilometro"]
    date <- (map["Data"], DateFormatTransform(dateFormat: "yyyy-MM-dd'T'HH:mm:ss"))
}

RodolfoAntonici's answer rewritten for Swift 4, using the SwiftDate library:
import SwiftDate
import Foundation
import ObjectMapper

public class DateFormatTransform: TransformType {
    public typealias Object = Date
    public typealias JSON = String

    public func transformFromJSON(_ value: Any?) -> Object? {
        if let dateString = value as? String {
            return dateString.toDate()?.date
        }
        return nil
    }

    public func transformToJSON(_ value: Date?) -> JSON? {
        if let date = value {
            return date.toString()
        }
        return nil
    }
}
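A minimal usage sketch with the Swift 4 mapping syntax (assuming the same "Data" key and a Date property, as in the question):
func mapping(map: Map) {
    date <- (map["Data"], DateFormatTransform())
}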

Here is another variant of the transform whose JSON type is Double: it accepts either a Unix timestamp or a formatted date string when decoding, and always encodes back to a timestamp.
import Foundation
import ObjectMapper

public class DateFormatTransform: TransformType {
    public typealias Object = Date
    public typealias JSON = Double

    var dateFormat = DateFormatter()

    convenience init(_ format: String) {
        self.init()
        self.dateFormat.dateFormat = format
    }

    public func transformFromJSON(_ value: Any?) -> Date? {
        // A numeric value is treated as a Unix timestamp...
        if let timeInt = value as? Double {
            return Date(timeIntervalSince1970: TimeInterval(timeInt))
        }
        // ...and a string is parsed with the configured date format.
        if let timeStr = value as? String {
            return self.dateFormat.date(from: timeStr)
        }
        return nil
    }

    public func transformToJSON(_ value: Date?) -> Double? {
        if let date = value {
            return Double(date.timeIntervalSince1970)
        }
        return nil
    }
}

Related

guard let number as NSString and NSNumber

I am getting data from different sources; the variable could be a number or a string containing a number. How do I make sure that "(number as? NSString)" or "(number as? NSNumber)" always succeeds? I want something similar to Java's optInt, which never fails even if the number is a String. See the example below:
func testNumber() {
    var number = 123
    guard let a = (number as? NSNumber)?.intValue else { print("1"); return }
}

func testNumberString() {
    var number = "123"
    guard let a = (number as? NSNumber)?.intValue else { print("2"); return } // this failed.
}

func testNumberToString() {
    var number = 123
    guard let a = (number as? NSString)?.intValue else { print("2"); return } // this sometimes failed too, depending on the data source.
}
As I understand your question, you want an integer value at the end, no matter whether the input is a string or an integer.
You can achieve this by using ExpressibleByStringLiteral.
Here is the demo:
extension Int: ExpressibleByStringLiteral {
    public typealias StringLiteralType = String

    public init(stringLiteral value: StringLiteralType) {
        self = Int(value) ?? 0
    }
}
This Int extension lets you assign a string value to an Int and get an integer value back. If the conversion fails, it gives you 0 by default.
Example
func testInt() {
    let numberOne: Int = "5656"
    let numberTwo: Int = 1234
    print(numberOne)
    print(numberTwo)
}
Another way is to create your own ExpressibleByStringLiteral type, which lets you supply whatever default value you want.
struct StringInt: ExpressibleByStringLiteral {
    var value: Int?

    init(stringLiteral value: String) {
        self.value = Int("\(value)")
    }

    func wrapped(with defaultValue: Int) -> Int {
        return self.value ?? defaultValue
    }
}
Example
func testInt() {
    var numberThree: StringInt = "5656"
    print(numberThree.value as Any)      // with nil or optional value
    numberThree = "asf"
    print(numberThree.wrapped(with: 15)) // with default value
    /**
     Output
     Optional(5656)
     15
     */
}
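If you would rather not conform Int to ExpressibleByStringLiteral, a small helper in the spirit of Java's optInt is another option. A minimal sketch (the intValue(from:default:) name and shape are mine, not from the answers above):
import Foundation

func intValue(from any: Any?, default defaultValue: Int = 0) -> Int {
    if let n = any as? Int { return n }                      // already an integer
    if let n = any as? NSNumber { return n.intValue }        // any other numeric type
    if let s = any as? String, let n = Int(s) { return n }   // numeric string
    return defaultValue                                      // fall back, like optInt
}

print(intValue(from: 123))   // 123
print(intValue(from: "123")) // 123
print(intValue(from: "abc")) // 0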

How to Format Number Converted From String Without Losing Decimal Precision

I have the following JSON payload that I need to convert to numbers and subsequently format for display.
{
    "kilometers_per_second": "14.4578929636",
    "kilometers_per_hour": "52048.4146691173",
    "miles_per_hour": "32340.8607703746"
}
Using Codable, I created the following structure:
struct RelativeVelocity: Codable, Equatable {
    let kilometersPerSecond: String?
    let kilometersPerHour: String?
    let milesPerHour: String?

    enum CodingKeys: String, CodingKey {
        case kilometersPerSecond = "kilometers_per_second"
        case kilometersPerHour = "kilometers_per_hour"
        case milesPerHour = "miles_per_hour"
    }
}
The properties are String instances because that's what the API returns. I am learning to use view models for the first time, so I would like the view model to convert the String instances into numbers before returning formatted String instances.
My view model has the following structure:
struct RelativeVelocityViewModel {
    private let relativeVelocity: RelativeVelocity

    init(relativeVelocity: RelativeVelocity) {
        self.relativeVelocity = relativeVelocity
    }
}

extension RelativeVelocityViewModel {
    var formattedKilometersPerHour: String? {
        guard
            let stringValue = relativeVelocity.kilometersPerHour,
            let decimalValue = Decimal(string: stringValue),
            let formatted = NumberFormatter.relativeVelocityFormatter.string(from: decimalValue as NSNumber)
        else { return nil }
        return formatted
    }

    var formattedKilometersPerSecond: String? {
        guard
            let stringValue = relativeVelocity.kilometersPerSecond,
            let decimalValue = Decimal(string: stringValue),
            let formatted = NumberFormatter.relativeVelocityFormatter.string(from: decimalValue as NSNumber)
        else { return nil }
        return formatted
    }

    var formattedMilesPerHour: String? {
        guard
            let stringValue = relativeVelocity.milesPerHour,
            let decimalValue = Decimal(string: stringValue),
            let formatted = NumberFormatter.relativeVelocityFormatter.string(from: decimalValue as NSNumber)
        else { return nil }
        return formatted
    }
}
As you can see, it converts the String instances into Decimal instances, and the Decimal instances are then formatted by the following NumberFormatter:
extension NumberFormatter {
    static let relativeVelocityFormatter: NumberFormatter = {
        let formatter = NumberFormatter()
        formatter.maximumFractionDigits = .max
        formatter.numberStyle = .decimal
        formatter.usesGroupingSeparator = true
        return formatter
    }()
}
My XCTestCase subclass for testing my view models is:
class Tests_RelativeVelocityViewModel: XCTestCase {
    let kilometersPerSecond = "14.4578929636"
    let kilometersPerHour = "52048.4146691173"
    let milesPerHour = "32340.8607703746"

    var populatedViewModel: RelativeVelocityViewModel!
    var emptyViewModel: RelativeVelocityViewModel!

    override func setUpWithError() throws {
        try super.setUpWithError()

        let populatedRelativeVelocity = RelativeVelocity(
            kilometersPerSecond: kilometersPerSecond,
            kilometersPerHour: kilometersPerHour,
            milesPerHour: milesPerHour
        )
        populatedViewModel = RelativeVelocityViewModel(relativeVelocity: populatedRelativeVelocity)

        let emptyRelativeVelocity = RelativeVelocity(
            kilometersPerSecond: nil,
            kilometersPerHour: nil,
            milesPerHour: nil
        )
        emptyViewModel = RelativeVelocityViewModel(relativeVelocity: emptyRelativeVelocity)
    }

    override func tearDownWithError() throws {
        emptyViewModel = nil
        populatedViewModel = nil
        try super.tearDownWithError()
    }

    func test_RelativeVelocityViewModel_ReturnsNilFormattedKilometersPerHour_WhenValueIsMissing() {
        XCTAssertNil(emptyViewModel.formattedKilometersPerHour)
    }

    func test_RelativeVelocityViewModel_ReturnsFormattedKilometersPerHour_WhenValueIsPresent() {
        let expected = "52,048.4146691173"
        XCTAssertEqual(populatedViewModel.formattedKilometersPerHour, expected)
    }

    func test_RelativeVelocityViewModel_ReturnsNilFormattedKilometersPerSecond_WhenValueIsMissing() {
        XCTAssertNil(emptyViewModel.formattedKilometersPerSecond)
    }

    func test_RelativeVelocityViewModel_ReturnsNilFormattedMilesPerHour_WhenValueIsMissing() {
        XCTAssertNil(emptyViewModel.formattedMilesPerHour)
    }
}
The following test...
func test_RelativeVelocityViewModel_ReturnsFormattedKilometersPerHour_WhenValueIsPresent() {
    let expected = "52,048.4146691173"
    XCTAssertEqual(populatedViewModel.formattedKilometersPerHour, expected)
}
...produces the following failure:
XCTAssertEqual failed: ("Optional("52,048.414669")") is not equal to ("Optional("52,048.4146691173")")
I know that I can use XCTAssertEqual(_:_:accuracy:_:file:line:), but I want to retain all of the decimal values.
What am I doing incorrectly that causes the formatted result to be rounded, losing the value's precision?
Try this:
class MyProjectTests: XCTestCase {
    func testExample() throws {
        let stringValue = "52048.12345678911111"
        let decimalValue = Decimal(string: stringValue)!
        let formatted = NumberFormatter
            .relativeVelocityFormatter(maxFractionDigits: decimalValue.significantFractionalDecimalDigits)
            .string(from: decimalValue as NSNumber)
        XCTAssert(formatted == stringValue)
    }
}

extension NumberFormatter {
    static func relativeVelocityFormatter(maxFractionDigits: Int) -> NumberFormatter {
        let formatter = NumberFormatter()
        formatter.maximumFractionDigits = maxFractionDigits
        formatter.numberStyle = .none
        formatter.usesGroupingSeparator = true
        return formatter
    }
}

extension Decimal {
    var significantFractionalDecimalDigits: Int {
        return max(-exponent, 0)
    }
}
Anyway, there is always a limit:
33 decimal digits.
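For reference, a sketch of how that parameterized formatter could be plugged into the view model from the question, replacing the earlier formattedKilometersPerHour property (my adaptation, untested against the author's project):
extension RelativeVelocityViewModel {
    var formattedKilometersPerHour: String? {
        guard
            let stringValue = relativeVelocity.kilometersPerHour,
            let decimalValue = Decimal(string: stringValue),
            // Size the fraction digits to the value instead of using .max.
            let formatted = NumberFormatter
                .relativeVelocityFormatter(maxFractionDigits: decimalValue.significantFractionalDecimalDigits)
                .string(from: decimalValue as NSNumber)
        else { return nil }
        return formatted
    }
}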

App Crashing with error: generic parameter 'T' could not be inferred

I'm trying to get a custom object, which is Hashable, from UserDefaults.
My custom model is defined below:
class WorkerProfileResponse: Mappable, Hashable {
    static func == (lhs: WorkerProfileResponse, rhs: WorkerProfileResponse) -> Bool {
        return lhs.id == rhs.id
    }

    var hashValue: Int {
        return self.id!
    }

    var id, loginStatus, lastLogin, lastActive: Int?
    var username, email, mobileNumber: String?
    var userCategories: [String]?
    var userSubCategories: [String]?
    var biometricToken: String?
    var accessToken: AccessToken?
    var userStatus: UserStatus?
    var userProfile: UserProfile?

    required init?(map: Map) {
    }

    func mapping(map: Map) {
        id <- map["id"]
        loginStatus <- map["is_logged_in"]
        lastLogin <- map["last_login"]
        lastActive <- map["last_active"]
        biometricToken <- map["biometricToken"]
        username <- map["username"]
        email <- map["email"]
        mobileNumber <- map["mobile_number"]
        accessToken <- map["accessToken"]
        userStatus <- map["userStatus"]
        userCategories <- map["userCategories"]
        userSubCategories <- map["userSubCategories"]
        userProfile <- map["userProfile"]
    }
}
My UserDefaults method is:
class func getModel<T: Hashable>(key: String) -> T {
    let decoded = UserDefaults.standard.data(forKey: key)
    let decodedModel = NSKeyedUnarchiver.unarchiveObject(with: decoded!) as! T
    return decodedModel
}
And I'm calling it like this:
UserDefault.getModel(key: "workerProfile")
The app crashes when I call this method, and I don't understand the reason. The error is:
error: generic parameter 'T' could not be inferred
I'm answering my own question, in case it helps anyone in the future.
It was crashing while decoding because there was no value present in UserDefaults.
This line had the issue because of the force cast:
let decodedModel = NSKeyedUnarchiver.unarchiveObject(with: decoded!) as! T
I've changed this method:
class func getModel<T: Hashable>(key: String) -> T {
    let decoded = UserDefaults.standard.data(forKey: key)
    let decodedModel = NSKeyedUnarchiver.unarchiveObject(with: decoded!) as! T
    return decodedModel
}
To this:
class func getModel<T: Hashable>(key: String) -> T? {
    let decoded = UserDefaults.standard.data(forKey: key)
    if decoded != nil {
        let decodedModel = NSKeyedUnarchiver.unarchiveObject(with: decoded!) as! T
        return decodedModel
    } else {
        return nil
    }
}
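Note that the error in the title, "generic parameter 'T' could not be inferred", is reported at the call site: without a type annotation the compiler has no way to know what T should be. A minimal sketch of a call that supplies that information (using the optional-returning version above and the question's WorkerProfileResponse type):
let profile: WorkerProfileResponse? = UserDefault.getModel(key: "workerProfile")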

Getting "Must have a uuid if no _objectID" exception when inserting object into dictionary

I'm writing a unit test for a class that uses the PHAsset type. I mocked it as below:
class PHAssetMock: PHAsset {
    let date: Date
    let uuid: UUID

    init(dateStr: String) {
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = "MM-dd-yyyy HH:mm"
        self.date = dateFormatter.date(from: dateStr)!
        self.uuid = UUID()
    }

    override var creationDate: Date? {
        return date
    }

    override var hash: Int {
        let hash = Int(self.date.timeIntervalSinceNow)
        return hash
    }

    static func == (lhs: PHAssetMock, rhs: PHAssetMock) -> Bool {
        return lhs.date.timeIntervalSinceNow == rhs.date.timeIntervalSinceNow
    }
}
When a function that uses the mocked objects tries to insert one into a dictionary, I get an exception:
func foo(assets: [PHAsset]) {
    var label = [PHAsset: String]()
    for asset in assets {
        label[asset] = "undefined" // Exception: "NSInternalInconsistencyException", "Must have a uuid if no _objectID"
    }
}
When debugging, the overridden hash var is being called.
I had the same issue with PHAsset when unit testing the Photos framework. Overriding the isEqual function helped get rid of the exception.
class Mock: PHAsset {
    let _localIdentifier: String = UUID().uuidString
    let _hash: Int = UUID().hashValue

    override var localIdentifier: String {
        return _localIdentifier
    }

    override var hash: Int {
        return _hash
    }

    override func isEqual(_ object: Any?) -> Bool {
        guard let object = object as? Mock else {
            return false
        }
        return self.localIdentifier == object.localIdentifier
    }
}
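A quick usage sketch of the fix (my own, not from the original answer), assuming PHAsset's inherited initializer can be used to create in-memory instances for a test, as the mocks above already do:
let assets: [PHAsset] = [Mock(), Mock()]
var label = [PHAsset: String]()
for asset in assets {
    // With localIdentifier, hash and isEqual overridden, the insertion should no longer
    // raise "Must have a uuid if no _objectID".
    label[asset] = "undefined"
}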

Decoding generics with phantom types

I am trying to define a Currency type that would prevent numeric and alphabetic currency codes from getting mixed up:
import Foundation

public protocol ISO4217Type {}
public enum ISO4217Alpha: ISO4217Type {}
public enum ISO4217Num: ISO4217Type {}

public struct Currency<T: ISO4217Type> {
    public let value: String
}

extension Currency where T == ISO4217Alpha {
    public init?(value: String) {
        let isLetter = CharacterSet.letters.contains
        guard value.unicodeScalars.allSatisfy(isLetter) else { return nil }
        self.value = value
    }
}

extension Currency where T == ISO4217Num {
    public init?(value: String) {
        let isDigit = CharacterSet.decimalDigits.contains
        guard value.unicodeScalars.allSatisfy(isDigit) else { return nil }
        self.value = value
    }
}
This works great. Now, is it possible to add a Codable conformance that would throw a decoding error when trying to decode a currency code with the wrong payload? (For example, decoding USD as a numeric currency code.)
The key revelation was that it’s possible to customize the behaviour using static functions on the phantom type:
import Foundation

public protocol ISO4217Type {
    static func isValidCode(_ code: String) -> Bool
}

public enum ISO4217Alpha: ISO4217Type {
    public static func isValidCode(_ code: String) -> Bool {
        let isLetter = CharacterSet.letters.contains
        return code.unicodeScalars.allSatisfy(isLetter)
    }
}

public enum ISO4217Num: ISO4217Type {
    public static func isValidCode(_ code: String) -> Bool {
        let isDigit = CharacterSet.decimalDigits.contains
        return code.unicodeScalars.allSatisfy(isDigit)
    }
}

public struct Currency<T: ISO4217Type> {
    public let value: String

    private init(uncheckedValue value: String) {
        self.value = value
    }

    public init?(value: String) {
        guard T.isValidCode(value) else { return nil }
        self.value = value
    }
}
extension Currency: Codable {
    public func encode(to encoder: Encoder) throws {
        var c = encoder.singleValueContainer()
        try c.encode(value)
    }

    public init(from decoder: Decoder) throws {
        let c = try decoder.singleValueContainer()
        let value = try c.decode(String.self)
        guard T.isValidCode(value) else {
            throw DecodingError.dataCorruptedError(
                in: c,
                debugDescription: "Invalid \(type(of: T.self)) code"
            )
        }
        self.init(uncheckedValue: value)
    }
}
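A small decoding sketch of the behaviour described above (my own illustration; the value is wrapped in a JSON array only to avoid relying on top-level fragment decoding):
import Foundation

let json = Data(#"["USD"]"#.utf8)

// Decodes: "USD" passes ISO4217Alpha.isValidCode.
let alpha = try? JSONDecoder().decode([Currency<ISO4217Alpha>].self, from: json)

// Throws DecodingError.dataCorrupted, so this is nil: "USD" is not a numeric code.
let num = try? JSONDecoder().decode([Currency<ISO4217Num>].self, from: json)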