Swift - Codable Decode array of arrays of CLLocation - swift

I have a struct that contains an array of arrays of CLLocations. This is to support a multipolyline (in other words, a bunch of potentially discontiguous lines). I wish to encode and decode this data. I am having trouble writing the encoding and decoding methods as CLLocation is not codable by default.
struct MyTrack {
let coords: [[CLLocation]]?
enum CodingKeys: String, CodingKey {
case coords
}
}
extension MyTrack: Decodable {
init(from decoder: Decoder) throws {
let values = try decoder.container(keyedBy: CodingKeys.self)
coords = try values.decodeIfPresent([[CLLocation]].self, forKey: .coords)?
.map { ($0 as AnyObject).map { CLLocation(model: $0) } }
}
}
It's currently throwing two errors in Xcode:
Cannot convert value of type '[[CLLocation]].Type' to expected argument type '[Any?].Type'
Value of type 'AnyObject' has no member 'map'
Any help much appreciated!
Because CLLocation is not Codable by default, I followed a tutorial to create a wrapper struct around it, the code goes like this:
extension CLLocation: Encodable {
enum CodingKeys: String, CodingKey {
case latitude
case longitude
case altitude
case horizontalAccuracy
case verticalAccuracy
case speed
case course
case timestamp
}
public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encode(coordinate.latitude, forKey: .latitude)
try container.encode(coordinate.longitude, forKey: .longitude)
try container.encode(altitude, forKey: .altitude)
try container.encode(horizontalAccuracy, forKey: .horizontalAccuracy)
try container.encode(verticalAccuracy, forKey: .verticalAccuracy)
try container.encode(speed, forKey: .speed)
try container.encode(course, forKey: .course)
try container.encode(timestamp, forKey: .timestamp)
}
}
struct Location: Codable {
let latitude: CLLocationDegrees
let longitude: CLLocationDegrees
let altitude: CLLocationDistance
let horizontalAccuracy: CLLocationAccuracy
let verticalAccuracy: CLLocationAccuracy
let speed: CLLocationSpeed
let course: CLLocationDirection
let timestamp: Date
}
extension CLLocation {
convenience init(model: Location) {
self.init(coordinate: CLLocationCoordinate2DMake(model.latitude, model.longitude), altitude: model.altitude, horizontalAccuracy: model.horizontalAccuracy, verticalAccuracy: model.verticalAccuracy, course: model.course, speed: model.speed, timestamp: model.timestamp)
}
}

You are decoding CLLocation, not your wrapper struct. You should decode your wrapper struct instead. Also, you shouldn't cast to AnyObject.
// Decode the Codable wrapper type, then map each wrapper back to CLLocation.
extension MyTrack: Decodable {
init(from decoder: Decoder) throws {
let values = try decoder.container(keyedBy: CodingKeys.self)
// decodeIfPresent yields [[Location]]?; optional chaining keeps coords nil
// when the key is absent. Otherwise each inner array is mapped through the
// CLLocation(model:) convenience initializer shown earlier.
coords = try values.decodeIfPresent([[Location]].self, forKey: .coords)?
.map { $0.map(CLLocation.init) }
}
}

Related

Issue converting custom type array into data | Swift

Essentially I would like to convert an array of the following custom struct into data for easier saving in CoreData as Binary Data. How can the following be converted into data to then be ready to decode back:
Custom Struct
struct Place: Codable, Identifiable {
var id = UUID()
var coordinate: Coordinate
struct Coordinate: Codable {
let latitude: Double
let longitude: Double
func locationCoordinate() -> CLLocationCoordinate2D {
return CLLocationCoordinate2D(latitude: self.latitude,
longitude: self.longitude)
}
}
}
Adding to Custom Struct
var mapAddresses = [Place]()
Task {
mapAddresses.append(Place(coordinate: try await getCoordinate(from:
post.location)))
}
The issue I am having is converting the array mapAddresses with the custom structure into Binary Data, that can then be decoded back into the custom array.
In comments, your phrasing indicates that you want a binary encoding. I'll get to that, but first let's just encode/decode JSON.
Given
struct Place: Codable {...}
let mapAddresses: [Place] = ...
The following code encodes mapAddresses, then immediately decodes the resulting Data:
guard let data = try? JSONEncoder().encode(mapAddresses) else {
fatalError("Failed to encode")
}
guard let decodedMapAddresses =
try? JSONDecoder().decode([Place].self, from: data)
else { fatalError("Failed to decode") }
Whether data is binary is a matter of interpretation. data will just contain the text of the JSON representing mapAddresses. Technically text is binary, but then again so is anything else in a computer. What we normally mean is non-text, that is some more compact or more directly machine-friendly encoding that is not so human-friendly. JSONEncoder doesn't provide such a facility, but PropertyListEncoder does, via its outputFormat property. To use it:
// PropertyListEncoder (note the spelling) supports a compact binary plist format.
let encoder = PropertyListEncoder()
encoder.outputFormat = .binary
// encode(_:) throws, so use try? to match the guard-let pattern used above.
guard let data = try? encoder.encode(mapAddresses) else {
    fatalError("Failed to encode")
}
guard let decodedMapAddresses =
    try? PropertyListDecoder().decode([Place].self, from: data)
else { fatalError("Failed to decode") }
Note there is no need to specify that it's binary in the decoder, because PropertyListDecoder knows how to detect that.
Like others have said in the comments, you don't have a fully-formed question yet. But first, you don't need Coordinate. Just make CLLocationCoordinate2D Codable, like Apple should have.
import struct CoreLocation.CLLocationCoordinate2D
public extension CLLocationCoordinate2D {
enum CodingKey: Swift.CodingKey {
case latitude
case longitude
}
}
extension CLLocationCoordinate2D: Decodable {
public init(from decoder: Decoder) throws {
try self.init(
Self.init, (CodingKey.latitude, .longitude),
decoder: decoder
)
}
}
extension CLLocationCoordinate2D: Encodable {
public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKey.self)
try container.encode(latitude, forKey: .latitude)
try container.encode(longitude, forKey: .longitude)
}
}
public extension Decodable {
/// Initialize using 2 keyed, decoded arguments.
/// - Parameters:
/// - init: An initializer (or factory function) whose arguments are the decoded values.
/// - keys: `CodingKey` instances, matching the arguments.
init<
Parameter0: Decodable, Parameter1: Decodable, Key: CodingKey
>(
_ init: (Parameter0, Parameter1) -> Self,
_ keys: (Key, Key),
decoder: Decoder
) throws {
let container = try decoder.container(keyedBy: Key.self)
self = try `init`(
container.decode(forKey: keys.0),
container.decode(forKey: keys.1)
)
}
}
public extension KeyedDecodingContainerProtocol {
/// Decode, relying on the return type, to avoid having to explicitly use a metatype argument.
func decode<Decodable: Swift.Decodable>(forKey key: Key) throws -> Decodable {
try decode(Decodable.self, forKey: key)
}
}

Swift struct with custom encoder and decoder cannot conform to 'Encodable'

[Edited to provide a minimal reproducible example ]
This is the complete struct without non relevant vars and functions
InstrumentSet.swift
import Foundation
struct InstrumentsSet: Identifiable, Codable {
private enum CodingKeys: String, CodingKey {
case name = "setName"
case tracks = "instrumentsConfig"
}
var id: String { name }
var name: String
var tracks: [Track]
}
Track.swift
import Foundation
extension InstrumentsSet {
struct Track: Identifiable, Encodable {
private enum TrackKeys: String, CodingKey {
case id = "trackId"
case effects
}
let id: String
var effects: [Effect]?
}
}
extension InstrumentsSet.Track: Decodable {
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: TrackKeys.self)
id = try container.decode(String.self, forKey: .id)
effects = try container.decodeIfPresent([Effect].self, forKey: .effects)
}
}
Effect.swift
import Foundation
import AudioKit
import SoundpipeAudioKit
extension InstrumentsSet.Track {
enum Effect: Decodable {
private enum EffectKeys: String, CodingKey {
case effectType = "effectName"
case cutoffFrequency
case resonance
}
case lowPassFilter(LowPassFilterEffect)
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: EffectKeys.self)
let effectType = try container.decode(Effect.EffectType.self, forKey: .effectType)
switch effectType {
case .lowPassFilter:
let cutOffFrequency = try container.decode(ValueAndRange.self, forKey: .cutoffFrequency)
let resonance = try container.decode(ValueAndRange.self, forKey: .resonance)
self = .lowPassFilter(LowPassFilterEffect(cutOffFrequency: cutOffFrequency, resonance: resonance))
default:
fatalError("Not implemented!")
}
}
}
}
extension InstrumentsSet.Track.Effect {
enum EffectType: String, Decodable {
case lowPassFilter
}
}
extension InstrumentsSet.Track.Effect: Encodable {
func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: EffectKeys.self)
//FIXME: This is the location of the error: Type 'ValueAndRange.Type' cannot conform to 'Encodable'
try container.encode(ValueAndRange.self, forKey: .cutoffFrequency)
}
}
The problem is ValueAndRange.self not conforming to Encodable
I've followed multiple examples to get to this implementation:
import Foundation
import AudioKit
struct ValueAndRange: Encodable {
private enum ValueRangeKeys: String, CodingKey {
case value
case range
}
static var zero: ValueAndRange { .init(value: 0, range: [0, 0]) }
var value: AUValue
var range: [Double]
func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: ValueRangeKeys.self)
try container.encode(value, forKey: .value)
try container.encode(range, forKey: .range)
}
}
extension ValueAndRange: Decodable {
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: ValueRangeKeys.self)
value = try container.decode(AUValue.self, forKey: .value)
range = try container.decode([Double].self, forKey: .range)
}
}
I cannot see why this struct should not conform to Encodable. Maybe any of you have better eyes (and brains) than I do?
Your encode function is incorrectly trying to encode a type, ValueAndRange.self, rather than a value.
Looking at the init(from:) method I think your encode function should look something like this
// Encode a concrete value taken from the case's associated payload — never a
// type (encoding `ValueAndRange.self` was the source of the compile error).
func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: EffectKeys.self)
// Switch over self to reach the associated value of each case.
switch self {
case .lowPassFilter(let effect):
try container.encode(effect.cutOffFrequency, forKey: .cutoffFrequency)
try container.encode(effect.resonance, forKey: .resonance)
}
}
I didn't include .effectType in this code since I am uncertain of its usage (isn't it always the same hard coded string?).

Trying to make this class codable and decodable

Trying to make this class codable and decodable
import Foundation
class Attribute : Decodable {
struct Att: Decodable {
var number: Int16
var label: String
var comments: String
// Everything from here on is generated for you by the compiler
init(from decoder: Decoder) throws {
let keyedContainer = try decoder.container(keyedBy: CodingKeys.self)
number = try keyedContainer.decode(Int16.self, forKey: .number)
label = try keyedContainer.decode(String.self, forKey: .label)
comments = try keyedContainer.decode(String.self, forKey: .comments)
}
enum CodingKeys: String, CodingKey {
case number
case label
case comments
}
}
}
extension Attribute: Encodable {
public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encode(number, forKey: .number)
try container.encode(label, forKey: .label)
try container.encode(comments, forKey: .comments)
}
}
I have an error on these lines
try container.encode(number, forKey: .number)
try container.encode(label, forKey: .label)
try container.encode(comments, forKey: .comments)
with the message
Use of unresolved identifier 'number'
Use of unresolved identifier 'label'
Use of unresolved identifier 'comments'
How do I solve that?
Why do you have an empty class with a nested struct in it? The error comes from the fact that those properties are defined on Att rather than Attribute, so you need to encode those when extending Att to conform to Encodable.
Btw, you don't have any special encoding/decoding, so you don't need to declare the encoder/decoder functions manually, the compiler can synthesise them for you.
// No custom (de)coding logic is involved, so the compiler can synthesise
// full Codable conformance for both the outer class and the nested struct.
class Attribute: Codable {
struct Att: Codable {
var number: Int16
var label: String
var comments: String
}
}
I may have missed something, but the following should work, or at least compile:
class Attribute : Decodable {
var number: Int16
var label: String
var comments: String
// Everything from here on is generated for you by the compiler
required init(from decoder: Decoder) throws {

Creating a CLLocationCoordinate2D using Codable protocol

I'm trying to conform a custom class to the MKAnnotation protocol. To create this class, I'm decoding from a JSON feed using the Codable protocol.
class CustomClass: NSObject, Codable, MKAnnotation {
var id: String
var name: String
var lat: Double?
var lon: Double?
var coordinate: CLLocationCoordinate2D
// Note, the coordinate var is not a part of the decoded JSON file.
// It is derived from the lat and lon attributes, which are in the
// JSON file.
enum CodingKeys: String, CodingKey {
case id
case name
case lat
case lon
}
required init(from decoder: Decoder) throws {
let values = try decoder.container(keyedBy: CodingKeys.self)
self.id = try values.decode(String.self, forKey: .id)
self.name = try values.decodeIfPresent(String.self, forKey: .name)
self.lat = try values.decodeIfPresent(Double.self, forKey: .lat)
self.lon = try values.decodeIfPresent(Double.self, forKey: .lon)
self.coordinate = CLLocationCoordinate2D(latitude: self.lat!, longitude: self.lon!)
}
}
When I run this code, I get the following error in the line where I set the self.coordinate var:
Thread 2: Fatal error: Unexpectedly found nil while unwrapping an Optional value
It looks like the coordinate variable is getting set before the lat and lon variables are decoded. How can I set the coordinate var using the decoded lat and lon vars inside my init method?

Making NSDecimalNumber Codable

Is it possible to extend NSDecimalNumber to conform Encodable & Decodable protocols?
It is not possible to extend NSDecimalNumber to conform to Encodable & Decodable protocols. Jordan Rose explains it in the following swift evolution email thread.
If you need NSDecimalValue type in your API you can build computed property around Decimal.
struct YourType: Codable {
// Computed bridge: expose NSDecimalNumber to callers while the stored,
// Codable-synthesised property remains the Swift-native Decimal.
var decimalNumber: NSDecimalNumber {
get { return NSDecimalNumber(decimal: decimalValue) }
set { decimalValue = newValue.decimalValue }
}
private var decimalValue: Decimal
}
Btw. If you are using NSNumberFormatter for parsing, beware of a known bug that causes precision loss in some cases.
let f = NumberFormatter()
f.generatesDecimalNumbers = true
f.locale = Locale(identifier: "en_US_POSIX")
let z = f.number(from: "8.3")!
// z.decimalValue._exponent is not -1
// z.decimalValue._mantissa is not (83, 0, 0, 0, 0, 0, 0, 0)
Parse strings this way instead:
NSDecimalNumber(string: "8.3", locale: Locale(identifier: "en_US_POSIX"))
In swift you should use Decimal type. This type confirms to protocols Encodable & Decodable from the box.
If you have NSDecimalNumber type in your code it's easy to cast it to Decimal
let objcDecimal = NSDecimalNumber(decimal: 10)
let swiftDecimal = (objcDecimal as Decimal)
With Swift 5.1 you can use property wrappers to avoid the boilerplate of writing a custom init(from decoder: Decoder) / encode(to encoder: Encoder).
/// Property wrapper that (de)serializes an `NSDecimalNumber` as its string
/// representation, preserving full decimal precision in JSON/plists.
/// Note: the attribute is spelled `@propertyWrapper` — `#propertyWrapper`
/// in the original is invalid Swift (a mangled `@`).
@propertyWrapper
struct NumberString {
    // The string form actually stored in the encoded payload.
    private let value: String
    var wrappedValue: NSDecimalNumber

    init(wrappedValue: NSDecimalNumber) {
        self.wrappedValue = wrappedValue
        value = wrappedValue.stringValue
    }
}

extension NumberString: Decodable {
    init(from decoder: Decoder) throws {
        // Decoded from a single string value, e.g. "8.3".
        value = try String(from: decoder)
        wrappedValue = NSDecimalNumber(string: value)
    }
}

extension NumberString: Encodable {
    func encode(to encoder: Encoder) throws {
        var container = encoder.singleValueContainer()
        try container.encode(wrappedValue.stringValue)
    }
}

extension NumberString: Equatable {}
Usage:
/// Example usage: `value` is (de)serialized via its string form.
/// The wrapper attribute must be `@NumberString`, not `#NumberString`.
struct Foo: Codable {
    @NumberString var value: NSDecimalNumber
}
In my case, we are maintaining legacy code in both Objective-C and Swift.
One of the modules needed a property of type NSNumber (for internal API reasons), which is not supported by Codable.
So we use Codable for almost all supported data types, and NSCoding with the help of NSKeyedUnarchiver for the unsupported types.
I am sharing a sample of the code here, as a reference that might help
someone who has such a scenario.
/// Mixed Codable/NSCoding bridge: `NSNumber` is not `Codable`, so its value is
/// archived with `NSKeyedArchiver` into a `Data` blob that *is* Codable.
class Branch: NSObject, Codable {
    @objc var discountMaxLimit: NSNumber?

    private enum CodingKeys: String, CodingKey {
        case discountInfoKeys
    }

    private enum CorporateDiscountInfoKeys: String, CodingKey {
        case discountMaxLimit = "discount_max_limit"
    }

    func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        // The nested container must be keyed by the *type name*,
        // CorporateDiscountInfoKeys — `discountInfoKeys.self` does not exist.
        var discountInfoK = container.nestedContainer(keyedBy: CorporateDiscountInfoKeys.self, forKey: .discountInfoKeys)
        if let value = discountMaxLimit {
            let data = try NSKeyedArchiver.archivedData(withRootObject: value, requiringSecureCoding: false)
            try discountInfoK.encode(data, forKey: .discountMaxLimit)
        }
    }

    required init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        let discountInfo = try container.nestedContainer(keyedBy: CorporateDiscountInfoKeys.self, forKey: .discountInfoKeys)
        // decodeIfPresent mirrors the conditional encode above: a nil limit
        // round-trips as nil instead of throwing .keyNotFound.
        if let data = try discountInfo.decodeIfPresent(Data.self, forKey: .discountMaxLimit) {
            discountMaxLimit = try NSKeyedUnarchiver.unarchiveTopLevelObjectWithData(data) as? NSNumber
        }
    }
}