I have a type with a custom + operator defined for it. I was wondering whether it's possible for Swift to automatically derive a definition for +=, namely a += b --> a = a + b. Ideally I would not have to write the corresponding assignment operator for each operator I write.
Sample code:
class A {
    var value: Int
    init(_ value: Int) {
        self.value = value
    }
    static func +(lhs: A, rhs: A) -> A {
        return A(lhs.value + rhs.value)
    }
}
var a = A(42)
let b = A(10)
// OK
let c = a + b
// error: binary operator '+=' cannot be applied to two 'A' operands
// Ideally, this would work out of the box by turning it into a = a+b
a += b
Usually you have to define += when you define +.
You could create a Summable protocol that declares both + and +=, but you'll still have to define the + function since addition of arbitrary types has no concrete meaning. Here's an example:
protocol Summable {
    static func +(lhs: Self, rhs: Self) -> Self
    static func +=(lhs: inout Self, rhs: Self)
}

extension Summable {
    static func +=(lhs: inout Self, rhs: Self) { lhs = lhs + rhs }
}
struct S: Summable { }
func +(lhs: S, rhs: S) -> S {
    // return whatever it means to add a type of S to S
    return S()
}
func f() {
    let s0 = S()
    let s1 = S()
    let _ = s0 + s1

    var s3 = S()
    s3 += S() // you get this "for free" because S is Summable
}
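Applied to the A class from the question, it looks as follows (a minimal sketch, assuming the Summable protocol above is in scope; note the class must be final so the static + can satisfy the protocol's Self requirement):

final class A: Summable {
    var value: Int
    init(_ value: Int) { self.value = value }

    static func +(lhs: A, rhs: A) -> A {
        return A(lhs.value + rhs.value)
    }
}

var a = A(42)
let b = A(10)
a += b          // provided by the Summable extension
print(a.value)  // 52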
Related
I need to write a function that swaps the contents of any two variables. This is what I have currently but I am getting the error "Cannot assign to value: 'a' is a 'let' constant".
func swap<T>(a: T, b: T) {
    (a, b) = (b, a);
}
Can somebody please explain why this doesn't work and suggest how to fix it?
Thank you.
You need to make the parameters inout to be able to do that, like this:
func swap<T>(_ a: inout T, _ b: inout T) {
    (a, b) = (b, a)
}
var a = 0, b = 1
swap(&a, &b)
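Note that the Swift standard library already provides a swap(_:_:) function with exactly this signature, so you can also call it directly instead of defining your own:

var x = 0, y = 1
swap(&x, &y)   // standard library swap
print(x, y)    // 1 0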
The parameters inside your function are immutable, so they cannot be swapped, just as if you tried to swap two let values:

let x = 5
let y = 6
// if you try to swap these values, Xcode will tell you to change their declarations to var

Here it's almost the same: to be able to change the values, you have to declare the parameters inout, as below:
func swapTwoValues<T>(_ a: inout T, _ b: inout T) {
    (a, b) = (b, a)
}
// call it like this
var x = 1
var y = 2
swapTwoValues(&x, &y)
// Note that you cannot call it like this: swapTwoValues(&1, &2)
That is because function arguments are constants and are treated as local values, unless you mark them inout, in which case they act as "pass-through" values that aren't confined to the scope of the function.
var kiwi = "kiwi"
var mango = "mango"
func swap<T>(_ a: inout T, _ b: inout T) {
    (a, b) = (b, a)
}
swap(&kiwi, &mango)
If having to put & everywhere or the verbosity of the function annoys you, you can always use a descriptive operator like this:
infix operator <->: AssignmentPrecedence

func <-> <A>(lhs: inout A, rhs: inout A) {
    (lhs, rhs) = (rhs, lhs)
}
var kenobi = "Hello there!"
var helloThere = "Kenobi..."
helloThere <-> kenobi
print(helloThere, kenobi, separator: "\n")
// Hello there!
// Kenobi...
Which I feel looks much nicer. You don't need to use & in operators with inout parameters – that's how the += and -= operators work:
public extension AdditiveArithmetic {
    static func +=(lhs: inout Self, rhs: Self) {
        lhs = lhs + rhs
    }

    static func -=(lhs: inout Self, rhs: Self) {
        lhs = lhs - rhs
    }
}
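A quick check of that behavior with a plain Int:

var total = 10
total += 5      // no & needed; the compiler passes `total` inout
print(total)    // 15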
I have a generic binary search tree based on Comparable:
public class BSTree<T: Comparable> {
    public func insert(_ val: T, _ n: Int) {
        // ...
    }

    @discardableResult
    public func delete(_ val: T, _ n: Int) -> Int {
        // ...
    }
}
I want to add the ability to provide the sum of the values, if T is an arithmetic type. I tried the following:
public class BSTree<T: Comparable> {
    private var sumStorage: T?

    public func insert(_ val: T, _ n: Int) {
        if let arithVal = val as? AdditiveArithmetic {
            for _ in 0 ..< n { sumStorage += arithVal }
        }
        // ...
    }

    @discardableResult
    public func delete(_ val: T, _ n: Int) -> Int {
        // ...
        numDeleted = ...
        if let arithVal = val as? AdditiveArithmetic {
            for _ in 0 ..< numDeleted { sumStorage -= arithVal }
        }
    }
}

extension BSTree where T: AdditiveArithmetic {
    public var sum: T {
        sumStorage as? T ?? T.zero
    }
}
Of course, when I try to cast val as AdditiveArithmetic I get “Protocol 'AdditiveArithmetic' can only be used as a generic constraint because it has Self or associated type requirements”. Plus sumStorage isn’t AdditiveArithmetic, so I can’t add to it, and I can’t make it a stored property of the extension, because ... you just can’t.
What I finally came up with was to use inheritance:
class SummedBSTree<T>: BSTree<T> where T: AdditiveArithmetic & Comparable {
    public var sum = T.zero

    override public func insert(_ val: T, _ n: Int) {
        super.insert(val, n)
        for _ in 0 ..< n { sum += val }
    }

    @discardableResult
    override public func delete(_ val: T, _ n: Int) -> Int {
        let numDeleted = super.delete(val, n)
        for _ in 0 ..< numDeleted { sum -= val }
        return numDeleted
    }
}
This works, but it seems like it’s a case of using a sledgehammer where a jeweler’s screwdriver should be able to do the trick. It’s frustrating that something that would be so easy to do in Objective-C (and other less strongly typed languages) is so difficult in Swift. Can someone come up with a way of adding the summing capability without subclassing?
import UIKit
// https://stackoverflow.com/questions/61784548/swift-extended-capability-for-a-more-restrictive-generic

protocol ON1Speedable {
    associatedtype Item: Comparable
    var sumStorage: Item? { get set }
}

public class BSTree<T: Comparable> {
    var sumStorage: T?

    init(sumStorage: T? = nil) {
        self.sumStorage = sumStorage
    }
}

extension ON1Speedable where Item: AdditiveArithmetic, Item: Strideable, Item.Stride: SignedInteger {
    mutating func insert(_ val: Item, _ n: Int) {
        sumStorage = sumStorage ?? Item.zero
        for _ in 0..<n { sumStorage! += val }
    }

    @discardableResult
    mutating func delete(_ val: Item, _ n: Int) -> Item? {
        sumStorage = sumStorage ?? Item.zero
        // Note: this loops over Item.zero..<val, ignoring n; it only
        // subtracts n times when val == n, as in the example below.
        for _ in Item.zero..<val { sumStorage! -= val }
        return sumStorage
    }
}
extension BSTree: ON1Speedable { }
var g2 = BSTree<Int>(sumStorage: 0)
g2.sumStorage
g2.insert(5, 5)
g2.sumStorage // 25
g2.delete(5, 5) // 0
var g3 = BSTree<String>()
g3.sumStorage // nil
//g3.insert("foo", 5) // Error: Referencing instance method 'insert' on 'ON1Speedable' requires that 'String.Stride' conform to 'SignedInteger'
g3.sumStorage // nil
//g3.delete("bar", 5) // Error: Referencing instance method 'delete' on 'ON1Speedable' requires that 'String.Stride' conform to 'SignedInteger'
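If O(1) sums aren't a hard requirement, another option is to compute the sum on demand in a constrained extension, with no stored state at all. This is only a sketch: the elements sequence of (value, count) pairs is hypothetical, so substitute whatever traversal your BSTree actually provides.

extension BSTree where T: AdditiveArithmetic {
    public var sum: T {
        var total = T.zero
        // `elements` is a hypothetical accessor yielding (value, count) pairs.
        for (value, count) in elements {
            for _ in 0..<count { total += value }
        }
        return total
    }
}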
Please take a look at the following code:
class A {
    let a: String
    let b: String

    init(a: String, b: String) {
        self.a = a
        self.b = b
    }
}

class B: A {
    let c: Bool
    private let aExpectedValue = "a"
    private let bExpectedValue = "b"

    override init(a: String, b: String) {
        c = (a == aExpectedValue && b == bExpectedValue)
        super.init(a: a, b: b)
    }
}
This causes an error in B.init: "'self' captured by a closure before all members were initialized".
However, if I change it either to c = (a == aExpectedValue) or c = (b == bExpectedValue) then it compiles correctly.
Does anybody know why that is?
The problem is in bExpectedValue. That's an instance property on B. That interacts with the definition of && on Bool:
static func && (lhs: Bool, rhs: @autoclosure () throws -> Bool) rethrows -> Bool
The @autoclosure turns b == bExpectedValue into a closure, capturing it as self.bExpectedValue. That's not allowed before initialization is complete. (The closure exists to allow short-circuiting: the rhs closure is not evaluated if lhs is false.)
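You can see that short-circuiting in isolation (a minimal sketch, unrelated to the initializer problem):

func expensive() -> Bool {
    print("evaluated")
    return true
}

let flag = false
print(flag && expensive())  // prints "false"; "evaluated" never appears, because the autoclosure is not called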
This is pretty awkward (see SR-944 that MartinR references for a little discussion about it).
If bExpectedValue were static, or if it were moved outside the class definition, then this wouldn't be an issue. The following approach will also fix it:
override init(a: String, b: String) {
    let goodA = a == aExpectedValue
    let goodB = b == bExpectedValue
    c = goodA && goodB
    super.init(a: a, b: b)
}
You need to either call super.init(a:b:) before any expression that touches these instance properties, or keep the instance properties out of the && autoclosure.
For the first option, c can no longer be a let constant (it would have to be assigned after super.init), so declare it as var c: Bool = false and call this:

override init(a: String, b: String) {
    super.init(a: a, b: b)
    c = (a == aExpectedValue && b == bExpectedValue)
}

or make the expected values static, so the autoclosure no longer captures self:

private static let aExpectedValue = "a"
private static let bExpectedValue = "b"

override init(a: String, b: String) {
    c = (a == B.aExpectedValue && b == B.bExpectedValue)
    super.init(a: a, b: b)
}
If I have a struct, I can implement ExpressibleByIntegerLiteral to implicitly create an instance of it from an integer literal.
struct MyStruct: ExpressibleByIntegerLiteral {
    let value: Int

    init(integerLiteral value: Int) {
        self.value = value
    }
}
let a: MyStruct = 1
Nice.
Is there a way to do the opposite i.e. implicitly convert from a MyStruct to an Int?
I know that I can implement an init method in an extension on Int to get explicit casting to work, like this:
extension Int {
    init(_ myStruct: MyStruct) {
        self = myStruct.value
    }
}
let b = Int(a)
But is there anything I can do to get this line to compile:
let c: Int = a
The best way to do it in one line is to access the value property:
let c: Int = a.value
Because you cannot overload the = operator, you could try something like this:

struct MyStruct: ExpressibleByIntegerLiteral {
    let value: Int

    init(integerLiteral value: Int) {
        self.value = value
    }
}

// Note: this += replaces the left-hand value instead of adding to it;
// it only simulates assignment, since = itself cannot be overloaded.
func += (left: inout Int, right: MyStruct) {
    left = right.value
}

let a: MyStruct = 1
var c: Int = 0
c += a
Otherwise you would need a custom "ExpressibleByStructLiteral", which doesn't exist: Swift has no user-defined implicit conversions.
I'm porting a Java library to Swift 2.0 and am having some trouble with generics.
I have the following protocol hierarchy:
public protocol Graph {
    typealias V: Hashable
    typealias E: Hashable

    func getAllEdges(sourceVertex: V, targetVertex: V) -> Set<E>?
    func getEdge(sourceVertex: V, targetVertex: V) -> E?
    func getEdgeFactory() -> EdgeFactory<V, E>?
    func addEdge(sourceVertex: V, targetVertex: V) -> E?
    func addEdge(sourceVertex: V, targetVertex: V, e: E) -> Bool
    func addVertex(v: V) -> Bool
    func containsEdge(sourceVertex: V, targetVertex: V) -> Bool
    func containsEdge(e: E) -> Bool
    func containsVertex(v: V) -> Bool
    func edgeSet() -> Set<E>
    func edgesOf(v: V) -> Set<E>
    func removeAllEdges<T: CollectionType where T.Generator.Element == E>(edges: T) -> Bool
    func removeAllEdges(sourceVertex: V, targetVertex: V) -> Set<E>?
    func removeAllVertices<T: CollectionType where T.Generator.Element == V>(vertices: T) -> Bool
    func removeEdge(sourceVertex: V, targetVertex: V)
    func removeEdge(e: E) -> Bool
    func removeVertex(v: V) -> Bool
    func vertexSet() -> Set<V>
    func getEdgeSource(e: E) -> V
    func getEdgeTarget(e: E) -> V
    func getEdgeWeight(e: E) -> Double
}
public protocol DirectedGraph: Graph {
    func inDegreeOf(vertex: V) -> Int
    func incomingEdgesOf(vertex: V) -> Set<E>
    func outDegreeOf(vertex: V) -> Int
    func outgoingEdgesOf(vertex: V) -> Set<E>
}

public protocol UndirectedGraph: Graph {
    func degreeOf(vertex: V) -> Int
}
And here's the definition of the class that causes trouble:
public class CrossComponentIterator
    <V: Hashable, E: Hashable, D, G: Graph where G.V == V, G.E == E>
    : AbstractGraphIterator<V, E>
Namely, it has a method that should initialize one of its variables based on the actual type of the graph passed in: DirectedGraph or UndirectedGraph.
I've tried to solve this by declaring multiple overloads of the function:
func createGraphSpecifics<DG: Graph where DG: DirectedGraph, DG.V == V, DG.E == E>(graph: DG)
    -> CrossComponentIteratorSpecifics<V, E>
{
    return DirectedSpecifics<V, E, DG>(graph: graph)
}

func createGraphSpecifics<UG: Graph where UG: UndirectedGraph, UG.V == V, UG.E == E>(graph: UG)
    -> CrossComponentIteratorSpecifics<V, E>
{
    return UndirectedSpecifics<V, E, UG>(graph: graph)
}

func createGraphSpecifics<GG: Graph where GG.V == V, GG.E == E>(graph: GG)
    -> CrossComponentIteratorSpecifics<V, E>
{
    fatalError("Unknown graph type instance")
}
Unfortunately, only the last version of the function is called for any graph instance (even one that conforms to DirectedGraph or UndirectedGraph).
I know I could probably solve this by converting the DirectedGraph and UndirectedGraph protocols to abstract classes
(by that I mean classes with fatalError() in each declared function, since Swift doesn't support abstract classes de jure).
But maybe there is another, more elegant and Swifty solution?
In Java this is trivial, since conformance to an interface is checked at runtime:
if (g instanceof DirectedGraph<?, ?>) {
    return new DirectedSpecifics<V, E>((DirectedGraph<V, E>) g);
} else {
    return new UndirectedSpecifics<V, E>(g);
}
Edit: here's minimal code for what I'm trying to achieve:
protocol P {
    // This typealias makes it impossible to use 'P'
    // (or its descendants) as a type.
    // It can be used only as a generic constraint.
    typealias A

    // myfunc is needed for the compiler to infer 'A'
    func myfunc(a: A)
}
protocol P1: P {
    func p1specific(a: A)
}

protocol P2: P {
    func p2specific(a: A)
}

struct S<T: P> {
    init(t: T) {
        // TODO: check if 't' conforms to 'P1', 'P2', both or neither
    }
}
// Examples of concrete implementations of 'P1' and 'P2'
struct S1<X>: P1 {
    func myfunc(a: X) {}
    func p1specific(a: X) {}
}

struct S2<X>: P2 {
    func myfunc(a: X) {}
    func p2specific(a: X) {}
}
Is there any way to determine sub-protocol conformance for an instance conforming to a generic protocol in Swift 2 (at runtime or at compile time)?
Yes.
Here's my trick for achieving type erasure, hence allowing one to utilize the runtime. Observe _P:
protocol _P {
    static var _A: Any.Type { get }
    func _myfunc(_a: Any) -> Void?
}

extension _P where Self: P {
    static var _A: Any.Type {
        return A.self
    }
    func _myfunc(_a: Any) -> Void? {
        return (_a as? A).map(myfunc)
    }
}

protocol P {
    typealias A
    func myfunc(a: A)
}

protocol _P1: _P {
    func _p1specific(_a: Any) -> Void?
}

extension _P1 where Self: P1 {
    func _p1specific(_a: Any) -> Void? {
        return (_a as? A).map(p1specific)
    }
}

protocol P1: _P1, P {
    func p1specific(a: A)
}

protocol _P2: _P {
    func _p2specific(_a: Any) -> Void?
}

extension _P2 where Self: P2 {
    func _p2specific(_a: Any) -> Void? {
        return (_a as? A).map(p2specific)
    }
}

protocol P2: _P2, P {
    func p2specific(a: A)
}
You can now determine whether a value conforms to P1 or P2, and force cast accordingly. Additionally, the generic parameter A is now available via the opaque Any.Type.
x is _P1 // true if the underlying type conforms to P1
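For example, with the S1 and S2 types from the question adopting this layered hierarchy (a sketch assuming the definitions above are in scope):

let s1 = S1<Int>()
print(s1 is _P1)   // true: S1 conforms to P1
print(s1 is _P2)   // false
print(S1<Int>._A)  // Int, the associated type recovered at runtime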
import XCPlayground
import Foundation
protocol P {}
protocol P1: P {}
protocol P2: P {}

struct S1: P1 {}
struct S2: P2 {}

struct S<T: P> {
    var p1: P1?
    var p2: P2?

    init(t: T) {
        p1 = t as? P1
        p2 = t as? P2
    }
}
let p1 = S1()
let p2 = S2()
let s1 = S(t: p1)
let s2 = S(t: p2)
dump(s1)
dump(s2)
/*
▿ S<S1>
  ▿ p1: S1
    - Some: S1
  - p2: nil
▿ S<S2>
  - p1: nil
  ▿ p2: S2
    - Some: S2
*/
use
g is Type // true or false
and
let v2 = v1 as? Type // v2 is v1 cast to Type, or nil
in Swift.
update
protocol P {
    typealias A
}
protocol P1: P {}
protocol P2: P {}

struct S1: P1 {
    typealias A = Int
}
struct S2: P2 {
    typealias A = Double
}

struct S<T: P> {
    var p1: S1?
    var p2: S2?

    init(t: T) {
        p1 = t as? S1
        p2 = t as? S2
    }
}

let p1 = S1()
let p2 = S2()
let s1 = S(t: p1)
let s2 = S(t: p2)
.....
protocol P {
    // This typealias makes it impossible to use 'P'
    // (or its descendants) as a type.
    // It can be used only as a generic constraint.
    typealias A

    // myfunc is needed for the compiler to infer 'A'
    func myfunc(a: A)
}

protocol P1: P {}
protocol P2: P {}

// aka 'abstract' conforming to P1
struct S1: P1 {
    typealias A = AnyObject
    func myfunc(a: A) {}
}

// aka 'abstract' conforming to P2
struct S2: P2 {
    typealias A = Int
    func myfunc(a: A) {}
}

// generic struct with type conforming to P
struct S<T: P> {
    init(t: T) {
        // TODO: check if 't' conforms to 'P1', 'P2', both or neither
        if t is S1 {
            print("t conforms to P1, because it is type S1")
        }
        if t is S2 {
            print("t conforms to P2, because it is type S2")
        }
    }
}

let s1 = S(t: S1()) // t conforms to P1, because it is type S1
let s2 = S(t: S2()) // t conforms to P2, because it is type S2
// WARNING !!!!!!
// var s = s1
// s = s2 // error: cannot assign value of type 'S<S2>' to type 'S<S1>'