I know I can do this with a superclass, but Swift doesn't support abstract classes, and I want to use a protocol instead. However, when there are many property requirements, I find it really hard to avoid duplicating the self.xxx = xxx code. Example:
protocol ManyProperties {
    var a: Int { get }
    var b: Int { get }
    var c: Int { get }
    var d: Int { get }
}
struct S: ManyProperties {
    let a: Int
    let b: Int
    let c: Int
    let d: Int
    init(a: Int, b: Int, c: Int, d: Int) {
        self.a = a
        self.b = b
        self.c = c
        self.d = d
    }
}
class C: ManyProperties {
    let a: Int
    let b: Int
    let c: Int
    let d: Int
    // duplicate initializing
    init(a: Int, b: Int, c: Int, d: Int) {
        self.a = a
        self.b = b
        self.c = c
        self.d = d
    }
}
I really want to be able to type something like super.init(), but I do not want to use inheritance. How can I accomplish this?
Structs get a free memberwise initializer, so you don't need to write this kind of init for them:
struct S: ManyProperties {
    let a, b, c, d: Int
}
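For example, you can call the compiler-generated memberwise initializer directly:
let s = S(a: 1, b: 2, c: 3, d: 4)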
But for classes, you have a few options:
1- Use a base class and inherit from it instead of conforming to the protocol:
class ManyPropertiesClass: ManyProperties {
    let a: Int
    let b: Int
    let c: Int
    let d: Int
    // duplicate initializing
    init(a: Int, b: Int, c: Int, d: Int) {
        self.a = a
        self.b = b
        self.c = c
        self.d = d
    }
}
class C: ManyPropertiesClass {
}
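Since C declares no designated initializers of its own, it inherits the superclass initializer, so no further code is needed:
let instance = C(a: 1, b: 2, c: 3, d: 4)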
2- Add an init requirement to the protocol, which forces you to implement it (with a little autocompletion help):
protocol ManyProperties: AnyObject {
    var a: Int { get }
    var b: Int { get }
    var c: Int { get }
    var d: Int { get }
    init(a: Int, b: Int, c: Int, d: Int)
}
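A conforming class still writes the assignments itself, but the compiler now dictates the exact signature, and the initializer must be marked required so that subclasses satisfy the protocol too:
class C: ManyProperties {
    let a: Int
    let b: Int
    let c: Int
    let d: Int
    required init(a: Int, b: Int, c: Int, d: Int) {
        self.a = a
        self.b = b
        self.c = c
        self.d = d
    }
}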
3- Define a parameterless initializer requirement in the protocol and make the variables settable, so the compiler knows that all properties are initialized. Then you can extend the protocol to provide the memberwise initializer:
protocol ManyProperties: AnyObject {
    var a: Int { get set }
    var b: Int { get set }
    var c: Int { get set }
    var d: Int { get set }
    init()
}
extension ManyProperties {
    init(a: Int, b: Int, c: Int, d: Int) {
        self.init()
        self.a = a
        self.b = b
        self.c = c
        self.d = d
    }
}
class C: ManyProperties {
    var a: Int = 0
    var b: Int = 0
    var c: Int = 0
    var d: Int = 0
    required init() {}
}
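The extension initializer can then be used directly. The trade-off is that the properties must be settable vars with default values, and the conforming class must provide a required parameterless init:
let instance = C(a: 1, b: 2, c: 3, d: 4)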
I am studying the Swift programming language, and right now I'm looking at how opaque types work. Why does this code produce the error "type of expression is ambiguous without more context"? (Swift 5.1)
protocol NumericExpression {
    associatedtype TNumeric: Numeric
    var value: TNumeric { get }
}
struct Expression {
    static func sum<TA: NumericExpression, TB: NumericExpression>(_ a: TA, _ b: TB) -> some NumericExpression
        where TA.TNumeric == TB.TNumeric {
        return NumericExpressionSum(a: a, b: b)
    }
    static func multi<TA: NumericExpression, TB: NumericExpression>(_ a: TA, _ b: TB) -> some NumericExpression
        where TA.TNumeric == TB.TNumeric {
        return NumericExpressionMulti(a: a, b: b)
    }
}
extension Int: NumericExpression {
    var value: Self {
        return self
    }
}
struct NumericExpressionSum<TA: NumericExpression, TB: NumericExpression>: NumericExpression
    where TA.TNumeric == TB.TNumeric {
    let a: TA
    let b: TB
    var value: TA.TNumeric { a.value + b.value }
}
struct NumericExpressionMulti<TA: NumericExpression, TB: NumericExpression>: NumericExpression
    where TA.TNumeric == TB.TNumeric {
    let a: TA
    let b: TB
    var value: TA.TNumeric { a.value * b.value }
}
This compiles without error. Sample code that produces the compile error:
var s1 = Expression.sum(1, 2)
var s2 = Expression.sum(2, 3)
var m1 = Expression.multi(s1, s2)
var m2 = Expression.multi(3, 4)
print(m1.value) // 15
print(m2.value) // 12
print(type(of: m1)) // NumericExpressionMulti<NumericExpressionSum<Int, Int>, NumericExpressionSum<Int, Int>>
print(type(of: m2)) // NumericExpressionMulti<Int, Int>
print(type(of: m1).TNumeric) // Int
print(type(of: m2).TNumeric) // Int
let m3 = Expression.multi(m1, m2) // Error: type of expression is ambiguous without more context
Does this work in Swift 5.5? Thanks!
I have a generic binary search tree based on Comparable:
public class BSTree<T: Comparable> {
    public func insert(_ val: T, _ n: Int) {
        // ...
    }
    @discardableResult
    public func delete(_ val: T, _ n: Int) -> Int {
        // ...
    }
}
I want to add the ability to provide the sum of the values, if T is an arithmetic type. I tried the following:
public class BSTree<T: Comparable> {
    private var sumStorage: T?
    public func insert(_ val: T, _ n: Int) {
        if let arithVal = val as? AdditiveArithmetic {
            for _ in 0 ..< n { sumStorage += arithVal }
        }
        // ...
    }
    @discardableResult
    public func delete(_ val: T, _ n: Int) -> Int {
        // ...
        numDeleted = ...
        if let arithVal = val as? AdditiveArithmetic {
            for _ in 0 ..< numDeleted { sumStorage -= arithVal }
        }
    }
}
extension BSTree where T: AdditiveArithmetic {
    public var sum: T {
        sumStorage as? T ?? T.zero
    }
}
Of course, when I try to cast val as AdditiveArithmetic I get “Protocol 'AdditiveArithmetic' can only be used as a generic constraint because it has Self or associated type requirements”. Plus sumStorage isn’t AdditiveArithmetic, so I can’t add to it, and I can’t make it a stored property of the extension, because ... you just can’t.
What I finally came up with was to use inheritance:
class SummedBSTree<T>: BSTree<T> where T: AdditiveArithmetic & Comparable {
    public var sum = T.zero
    override public func insert(_ val: T, _ n: Int) {
        super.insert(val, n)
        for _ in 0 ..< n { sum += val }
    }
    @discardableResult
    override public func delete(_ val: T, _ n: Int) -> Int {
        let numDeleted = super.delete(val, n)
        for _ in 0 ..< numDeleted { sum -= val }
        return numDeleted
    }
}
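A quick sanity check of the subclass (assuming BSTree has a parameterless initializer, since its storage is elided above):
let tree = SummedBSTree<Int>()
tree.insert(5, 3)
print(tree.sum) // 15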
This works, but it seems like it’s a case of using a sledgehammer where a jeweler’s screwdriver should be able to do the trick. It’s frustrating that something that would be so easy to do in Objective-C (and other less strongly typed languages) is so difficult in Swift. Can someone come up with a way of adding the summing capability without subclassing?
// https://stackoverflow.com/questions/61784548/swift-extended-capability-for-a-more-restrictive-generic
protocol ON1Speedable {
    associatedtype Item: Comparable
    var sumStorage: Item? { get set }
}
public class BSTree<T: Comparable> {
    var sumStorage: T?
    init(sumStorage: T? = nil) {
        self.sumStorage = sumStorage
    }
}
extension ON1Speedable where Item: AdditiveArithmetic, Item: Strideable, Item.Stride: SignedInteger {
    mutating func insert(_ val: Item, _ n: Int) {
        sumStorage = sumStorage ?? Item.zero
        for _ in 0..<n { sumStorage! += val }
    }
    @discardableResult
    mutating func delete(_ val: Item, _ n: Int) -> Item? {
        sumStorage = sumStorage ?? Item.zero
        for _ in 0..<n { sumStorage! -= val }
        return sumStorage
    }
}
extension BSTree: ON1Speedable { }
var g2 = BSTree<Int>(sumStorage: 0)
g2.sumStorage
g2.insert(5, 5)
g2.sumStorage // 25
g2.delete(5, 5) // 0
var g3 = BSTree<String>()
g3.sumStorage // nil
//g3.insert("foo", 5) // Error: Referencing instance method 'insert' on 'ON1Speedable' requires that 'String.Stride' conform to 'SignedInteger'
g3.sumStorage // nil
//g3.delete("bar", 5) // Error: Referencing instance method 'delete' on 'ON1Speedable' requires that 'String.Stride' conform to 'SignedInteger'
Please take a look at the following code:
class A {
    let a: String
    let b: String
    init(a: String, b: String) {
        self.a = a
        self.b = b
    }
}
class B: A {
    let c: Bool
    private let aExpectedValue = "a"
    private let bExpectedValue = "b"
    override init(a: String, b: String) {
        c = (a == aExpectedValue && b == bExpectedValue)
        super.init(a: a, b: b)
    }
}
This causes an error in B.init: "'self' captured by a closure before all members were initialized".
However, if I change it either to c = (a == aExpectedValue) or c = (b == bExpectedValue) then it compiles correctly.
Does anybody know why that is?
The problem is in bExpectedValue. That's an instance property on B. That interacts with the definition of && on Bool:
static func && (lhs: Bool, rhs: @autoclosure () throws -> Bool) rethrows -> Bool
The @autoclosure makes the b == bExpectedValue into a closure, capturing it as self.bExpectedValue. That's not allowed before initialization is complete. (The closure here is to allow short-circuiting: the rhs closure is not evaluated if lhs is false.)
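A minimal sketch of the mechanism, with a hypothetical and function standing in for the real && operator: an @autoclosure parameter wraps its argument expression in a closure so its evaluation can be deferred, which is what enables short-circuiting:
func and(_ lhs: Bool, _ rhs: @autoclosure () -> Bool) -> Bool {
    return lhs ? rhs() : false // rhs is only evaluated when lhs is true
}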
This is pretty awkward (see SR-944 that MartinR references for a little discussion about it).
If bExpectedValue were static, or if it were moved outside the class definition, then this wouldn't be an issue. The following approach will also fix it:
override init(a: String, b: String) {
    let goodA = a == aExpectedValue
    let goodB = b == bExpectedValue
    c = goodA && goodB
    super.init(a: a, b: b)
}
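For completeness, here is a sketch of the static variant mentioned above; since a static property is not accessed through self, the autoclosure no longer captures an uninitialized instance:
class B: A {
    let c: Bool
    private static let aExpectedValue = "a"
    private static let bExpectedValue = "b"
    override init(a: String, b: String) {
        c = (a == B.aExpectedValue && b == B.bExpectedValue)
        super.init(a: a, b: b)
    }
}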
You need to create a new initializer with different parameter names, or call super.init(a:b:) before any expression that uses these properties.
Call this:
override init(a: String, b: String) {
    super.init(a: a, b: b)
    c = (a == aExpectedValue && b == bExpectedValue)
}
or change it to:
init(newA: String, newB: String) {
    c = (newA == aExpectedValue && newB == bExpectedValue)
    super.init(a: newA, b: newB)
}
Can I define a convenience subscript setter in Swift?
For instance let's say I've:
extension Matrix {
    subscript(_ i: Int, _ j: Int) -> Double {
        get {
            return value(atRow: i, column: j)
        }
        set {
            setValue(newValue, row: i, column: j)
        }
    }
}
and I also want to define a setter that accepts Float since I'm tired of casting manually. I'd like to do:
extension Matrix {
    subscript(_ i: Int, _ j: Int) -> Double {
        get {
            return value(atRow: i, column: j)
        }
        set {
            setValue(newValue, row: i, column: j)
        }
    }
    subscript(_ i: Int, _ j: Int) -> Float {
        set {
            setValue(Double(newValue), row: i, column: j)
        }
    }
}
But I can't do this since the second subscript has no getter.
Since Swift allows overloading on return value (unlike Java and C++), you could add a Float getter:
extension Matrix {
    subscript(_ i: Int, _ j: Int) -> Double {
        get { return value(atRow: i, column: j) }
        set { setValue(newValue, row: i, column: j) }
    }
    subscript(_ i: Int, _ j: Int) -> Float {
        get { return Float(value(atRow: i, column: j)) }
        set { setValue(Double(newValue), row: i, column: j) }
    }
}
However, you'll run into trouble when you try to use the subscript operator in a context that allows either Float or Double. Example:
20. let m = Matrix()
21. let x = m[0, 0]
error: repl.swift:21:10: error: ambiguous use of 'subscript'
let x = m[0, 0]
^
repl.swift:9:5: note: found this candidate
subscript(_ i: Int, _ j: Int) -> Double {
^
repl.swift:14:5: note: found this candidate
subscript(_ i: Int, _ j: Int) -> Float {
^
You can disambiguate by specifying the type:
let x: Float = m[0, 0]
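Coercing the expression with as also works for picking an overload:
let y = m[0, 0] as Float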
If I have a struct, I can implement ExpressibleByIntegerLiteral to implicitly create an instance of it from an integer literal.
struct MyStruct: ExpressibleByIntegerLiteral {
    let value: Int
    init(integerLiteral value: Int) {
        self.value = value
    }
}
let a: MyStruct = 1
Nice.
Is there a way to do the opposite i.e. implicitly convert from a MyStruct to an Int?
I know that I can implement an init method in an extension on Int to get explicit casting to work, like this:
extension Int {
    init(_ myStruct: MyStruct) {
        self = myStruct.value
    }
}
let b = Int(a)
But is there anything I can do to get this line to compile:
let c: Int = a
The best way to do that in one line is to access the value property:
let c: Int = a.value
Because you cannot overload the assignment operator "=" in Swift, an alternative is to overload a compound assignment operator such as +=:
struct MyStruct: ExpressibleByIntegerLiteral {
    let value: Int
    init(integerLiteral value: Int) {
        self.value = value
    }
}
func += (left: inout Int, right: MyStruct) {
    left = right.value
}
let a: MyStruct = 1
var c: Int = 0
c += a
Otherwise you would need something like a custom "ExpressibleByStructLiteral", which Swift does not provide, so there is no way to make let c: Int = a compile.