Swift: how to make a set with multiplicity (multiset)

I am subtracting two arrays of integers. I did the subtraction using sets:
let numbersA = [1, 2, 3]
let numbersB = [3, 4, 5]
Set(numbersA).subtracting(numbersB)
but I then realized that I have multiplicity in my arrays, and I should take it into account. What is the data structure for multisets in Swift?

In Swift 5, there is no Multiset implementation in the standard library.
You can reproduce the multiset behavior by using a dictionary of type [Element: Multiplicity].
Your code will become:
let numbersA = [1, 2, 3, 3]
let numbersB = [3, 4, 5]
Multiset(numbersA).subtracting(numbersB) // [1, 2, 3]
As pointed out by Leo Dabus in a comment, this gives you an unordered collection as a result.
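To see what the dictionary representation does before reaching for the full type below, here is a minimal hand-rolled sketch (my own illustration, reusing the same numbersA and numbersB):
// Count occurrences of each element of numbersA: [1: 1, 2: 1, 3: 2]
var counts = numbersA.reduce(into: [Int: Int]()) { $0[$1, default: 0] += 1 }

// Subtracting decrements one count per removal and drops keys that hit zero.
for element in numbersB {
    if let count = counts[element] {
        counts[element] = count > 1 ? count - 1 : nil
    }
}
// counts == [1: 1, 2: 1, 3: 1]; one of the two 3s survives, unlike with Set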
You can find a good tutorial on multisets at https://github.com/raywenderlich/swift-algorithm-club/tree/master/Multiset
Here is a ready-to-use implementation of a multiset. I added the subtracting implementation to it. Notice that it can be considered an incomplete implementation of a Multiset since, for example, it doesn't conform to the Collection protocol.
//
// Multiset.swift
// Multiset
//
// Created by Simon Whitaker on 28/08/2017.
//
// Extended by Jeremy Cochoy on 17/11/2019
import Foundation
public struct Multiset<T: Hashable> {
    private var storage: [T: UInt] = [:]
    public private(set) var count: UInt = 0

    public init() {}

    public init<C: Collection>(_ collection: C) where C.Element == T {
        for element in collection {
            self.add(element)
        }
    }

    public mutating func add(_ elem: T) {
        storage[elem, default: 0] += 1
        count += 1
    }

    public mutating func remove(_ elem: T) {
        if let currentCount = storage[elem] {
            if currentCount > 1 {
                storage[elem] = currentCount - 1
            } else {
                storage.removeValue(forKey: elem)
            }
            count -= 1
        }
    }

    public func isSubSet(of superset: Multiset<T>) -> Bool {
        for (key, count) in storage {
            let supersetCount = superset.storage[key] ?? 0
            if count > supersetCount {
                return false
            }
        }
        return true
    }

    public func count(for key: T) -> UInt {
        return storage[key] ?? 0
    }

    public var allItems: [T] {
        var result = [T]()
        for (key, count) in storage {
            for _ in 0 ..< count {
                result.append(key)
            }
        }
        return result
    }

    public func subtracting(_ elems: [T]) -> Multiset<T> {
        var resultSet = self
        elems.forEach { resultSet.remove($0) }
        return resultSet
    }
}

// MARK: - Equatable
extension Multiset: Equatable {
    public static func == (lhs: Multiset<T>, rhs: Multiset<T>) -> Bool {
        if lhs.storage.count != rhs.storage.count {
            return false
        }
        for (lkey, lcount) in lhs.storage {
            let rcount = rhs.storage[lkey] ?? 0
            if lcount != rcount {
                return false
            }
        }
        return true
    }
}

// MARK: - ExpressibleByArrayLiteral
extension Multiset: ExpressibleByArrayLiteral {
    public init(arrayLiteral elements: T...) {
        self.init(elements)
    }
}

As you say, Swift does not have Multiset as a native type.
I wrote a fairly thorough implementation of Multiset in Swift here:
https://github.com/Cortado-J/Multiset
and full documentation of that library with usage examples here:
https://cortado-j.github.io/Multiset/Structs/Multiset.html

Related

How can I implement a CountedSet (NSCountedSet) in Swift?

Create a generic CountedSet struct that is constrained to Hashable elements. A counted set is an unordered collection of unique elements that may appear more than once in the collection. Use a private dictionary as your backing storage for set members and their counts.
struct CountedSet<Element> {
    private(set) var elements: [Element]

    mutating func insert(_ element: Element) {
        elements.append(element)
    }

    mutating func remove() -> Element? {
        guard elements.isEmpty == false else { return nil }
        return elements.removeFirst()
    }

    subscript(_ member: Element) -> Int {
        return 0
    }
}
I don't understand what the real objective is here. The instructions are very confusing, at least to me.
1) Make your generic struct's Element conform to Hashable; this is necessary because dictionary keys are required to conform to Hashable.
struct CountedSet<Element: Hashable>
2) The backing storage you have used is an ordered array, not a dictionary. Switch it to a dictionary and initialize it with an empty one.
private(set) var elements: [Element: Int] = [:]
3) Your subscript method needs to return the count for the counted set member, or zero if its value is nil.
return elements[member] ?? 0
4) Your insert and remove methods need to first check the count of a member in the backing dictionary before adding or removing an element from it.
So your CountedSet should look like this:
struct CountedSet<Element: Hashable> {
    private(set) var elements: [Element: Int] = [:]

    mutating func insert(_ member: Element) {
        elements[member, default: 0] += 1
    }

    mutating func remove(_ member: Element) -> Element? {
        guard var count = elements[member], count > 0 else { return nil }
        count -= 1
        elements[member] = count == 0 ? nil : count
        return member
    }

    subscript(_ member: Element) -> Int {
        elements[member] ?? 0
    }
}
var countedSet = CountedSet<Int>()
countedSet.insert(3)
countedSet.insert(3)
countedSet.insert(4)
countedSet.elements // [4: 1, 3: 2]
countedSet.remove(4)
countedSet.elements // [3: 2]
countedSet.remove(4) // nil
Expanding on that, you can also make your CountedSet conform to ExpressibleByArrayLiteral to allow you to initialize your CountedSet with an array, and to CustomStringConvertible to allow you to print its elements:
extension CountedSet: ExpressibleByArrayLiteral, CustomStringConvertible {
    typealias ArrayLiteralElement = Element

    init<S: Sequence>(_ sequence: S) where S.Element == Element {
        self.elements = sequence.reduce(into: [:]) { $0[$1, default: 0] += 1 }
    }

    init(arrayLiteral elements: Element...) { self.init(elements) }

    var description: String { .init(describing: elements) }
}
var countedSet: CountedSet = [1,2,2,3,3,3,4,4,5,5,5]
print(countedSet) // "[5: 3, 2: 2, 3: 3, 4: 2, 1: 1]\n"

Adopting CollectionType (Collection) in Swift

I'm writing a graphics library to display data in a graph. Since most of the projects I do tend to have a large learning component in them, I decided to create a generically typed struct to manage my data set DataSet<T: Plottable> (note here that Plottable is also Comparable).
In trying to conform to MutableCollectionType, I've run across an error. I'd like to use the default implementation of sort(), but the compiler is giving the following error when trying to use the sorting function.
Ambiguous reference to member 'sort()'
Here's a code example:
var data = DataSet<Int>(elements: [1,2,3,4])
data.sort() //Ambiguous reference to member 'sort()'
The compiler suggests two candidates, but will not actually display them to me. Note that the compiler error goes away if I explicitly implement sort() on my struct.
But the bigger question remains for me. What am I not seeing that I expect the default implementation to be providing? Or am I running across a bug in Swift 3 (this rarely is the case... usually I have overlooked something).
Here's the balance of the struct:
struct DataSet<T: Plottable>: MutableCollection, BidirectionalCollection {
    typealias Element = T
    typealias Iterator = DataSetIterator<T>
    typealias Index = Int

    /**
     The list of elements in the data set. Private.
     */
    private var elements: [Element] = []

    /**
     Initialize the data set with an array of data.
     */
    init(elements data: [T] = []) {
        self.elements = data
    }

    //MARK: Sequence Protocol
    func makeIterator() -> DataSetIterator<T> {
        return DataSetIterator(self)
    }

    //MARK: Collection Protocol
    subscript(_ index: DataSet<T>.Index) -> DataSet<T>.Iterator.Element {
        set {
            elements[index] = newValue
        }
        get {
            return elements[index]
        }
    }

    subscript(_ inRange: Range<DataSet<T>.Index>) -> DataSet<T> {
        set {
            elements.replaceSubrange(inRange, with: newValue)
        }
        get {
            return DataSet<T>(elements: Array(elements[inRange]))
        }
    }

    //required index for MutableCollection and BidirectionalCollection
    var endIndex: Int {
        return elements.count
    }

    var startIndex: Int {
        return 0
    }

    func index(after i: Int) -> Int {
        return i+1
    }

    func index(before i: Int) -> Int {
        return i-1
    }

    mutating func append(_ newElement: T) {
        elements.append(newElement)
    }

//    /**
//     Sorts the elements of the DataSet from lowest value to highest value.
//     Commented because I'd like to use the default implementation.
//     - note: This is equivalent to calling `sort(by: { $0 < $1 })`
//     */
//    mutating func sort() {
//        self.sort(by: { $0 < $1 })
//    }
//
//    /**
//     Sorts the elements of the DataSet by an arbitrary block.
//     */
//    mutating func sort(by areInIncreasingOrder: (T, T) -> Bool) {
//        self.elements = self.elements.sorted(by: areInIncreasingOrder)
//    }

    /**
     Returns a `DataSet<T>` with the elements sorted by a provided block.
     This is the default implementation `sort()` modified to return `DataSet<T>` rather than `Array<T>`.
     - returns: A sorted `DataSet<T>` by the provided block.
     */
    func sorted(by areInIncreasingOrder: (T, T) -> Bool) -> DataSet<T> {
        return DataSet<T>(elements: self.elements.sorted(by: areInIncreasingOrder))
    }

    func sorted() -> DataSet<T> {
        return self.sorted(by: { $0 < $1 })
    }
}
Your DataSet is a BidirectionalCollection. The sort() you're trying to use requires a RandomAccessCollection. The most important thing you need to add is an Indices typealias.
typealias Indices = Array<Element>.Indices
Here's my version of your type:
protocol Plottable: Comparable {}
extension Int: Plottable {}

struct DataSet<Element: Plottable>: MutableCollection, RandomAccessCollection {
    private var elements: [Element] = []

    typealias Indices = Array<Element>.Indices

    init(elements data: [Element] = []) {
        self.elements = data
    }

    var startIndex: Int {
        return elements.startIndex
    }

    var endIndex: Int {
        return elements.endIndex
    }

    func index(after i: Int) -> Int {
        return elements.index(after: i)
    }

    func index(before i: Int) -> Int {
        return elements.index(before: i)
    }

    subscript(position: Int) -> Element {
        get {
            return elements[position]
        }
        set {
            elements[position] = newValue
        }
    }

    subscript(bounds: Range<Int>) -> DataSet<Element> {
        get {
            return DataSet(elements: Array(elements[bounds]))
        }
        set {
            elements[bounds] = ArraySlice(newValue.elements)
        }
    }
}
var data = DataSet(elements: [4,2,3,1])
data.sort()
print(data.elements) // [1,2,3,4]
You don't actually need an Iterator if you don't want one. Swift will give you Sequence automatically if you implement Collection.
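To illustrate that point, here is a minimal sketch (the Squares type is my own made-up example, not from the question): implement just the four Collection requirements and iteration comes along automatically via the default IndexingIterator.
// A minimal Collection: no makeIterator(), no custom Iterator type.
struct Squares: Collection {
    var startIndex: Int { return 0 }
    var endIndex: Int { return 5 }
    func index(after i: Int) -> Int { return i + 1 }
    subscript(position: Int) -> Int { return position * position }
}

// Sequence conformance comes for free from Collection:
for square in Squares() {
    print(square, terminator: " ") // 0 1 4 9 16
}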

Conforming a new protocol to Sequence with a default makeIterator() implementation

I made a (very basic) BinaryTree protocol:
public enum BinaryTreeChildSide {
    case left, right
}

public protocol BinaryTree {
    associatedtype Element
    associatedtype Index

    func child(of index: Index, side: BinaryTreeChildSide) -> Index?
    var rootIndex: Index? { get }

    subscript(position: Index) -> Element { get }
}
For a basic iterative in-order traversal, I made a BinaryTreeIterator (note that I don't implement Sequence just yet):
public extension BinaryTree {
    func makeIterator() -> BinaryTreeIterator<Self> {
        return BinaryTreeIterator(self)
    }
}

public struct BinaryTreeIterator<Tree: BinaryTree>: IteratorProtocol {
    private let tree: Tree
    private var stack: [Tree.Index]
    private var index: Tree.Index?

    private init(_ tree: Tree) {
        self.tree = tree
        stack = []
        index = tree.rootIndex
    }

    public mutating func next() -> Tree.Element? {
        while let theIndex = index {
            stack.append(theIndex)
            index = tree.child(of: theIndex, side: .left)
        }
        guard let currentIndex = stack.popLast() else { return nil }
        defer { index = tree.child(of: currentIndex, side: .right) }
        return tree[currentIndex]
    }
}
Conforming a binary heap to this protocol is also pretty straightforward:
public struct BinaryHeap<Element> {
    private var elements: [Element]

    public init(_ elements: [Element]) {
        self.elements = elements
    }
}

extension BinaryHeap: BinaryTree {
    private func safeIndexOrNil(_ index: Int) -> Int? {
        return elements.indices.contains(index) ? index : nil
    }

    public func child(of index: Int, side: BinaryTreeChildSide) -> Int? {
        switch side {
        case .left: return safeIndexOrNil(index * 2 + 1)
        case .right: return safeIndexOrNil(index * 2 + 2)
        }
    }

    public var rootIndex: Int? { return safeIndexOrNil(0) }

    public subscript(position: Int) -> Element {
        return elements[position]
    }
}
So far, so good. I can now make a simple heap and iterate through its elements:
let heap = BinaryHeap([4, 2, 6, 1, 3, 5, 7])
var iterator = heap.makeIterator()
while let next = iterator.next() {
    print(next, terminator: " ")
}
// 1 2 3 4 5 6 7
This works, but of course the goal of implementing makeIterator() is to conform to Sequence. However, if I replace
public protocol BinaryTree {
with
public protocol BinaryTree: Sequence {
then the compiler complains that BinaryHeap doesn't implement Sequence because the associated type Iterator couldn't be inferred. If I manually specify the Iterator type with
extension BinaryHeap: BinaryTree {
    typealias Iterator = BinaryTreeIterator<BinaryHeap>
    ...
}
then the compiler shows an error that Iterator circularly references itself. So that might be why the Iterator type couldn't be inferred.
Interestingly, it works if I wrap my custom BinaryTreeIterator in an AnyIterator instance:
public extension BinaryTree {
    func makeIterator() -> AnyIterator<Element> {
        return AnyIterator(BinaryTreeIterator(self))
    }
}
let heap = BinaryHeap([4, 2, 6, 1, 3, 5, 7])
for number in heap {
    print(number, terminator: " ")
}
// 1 2 3 4 5 6 7
Apple's own IndexingIterator seems to work in a similar fashion to my BinaryTreeIterator:
public struct IndexingIterator<
    Elements : IndexableBase
    // FIXME(compiler limitation):
    // Elements : Collection
> : IteratorProtocol, Sequence {
    ...
}
From the source code. Maybe the problem I'm facing is also due to the compiler limitation mentioned there, but I don't know for sure.
Is there a way to conform BinaryTree to Sequence without using AnyIterator?
This is the farthest I could take it. The compiler will still complain that the heap contains no makeIterator member (which I thought came for free once a type conforms to Sequence; wrong, it turns out a type must conform to both Sequence and IteratorProtocol to get the default implementation) and no next method, but once you add those two methods it's smooth sailing.
So: a makeIterator plus a next method to keep the compiler happy.
public enum BinaryTreeChildSide {
    case left, right
}

public struct BinaryTreeIterator<Tree: BinaryTree>: Sequence, IteratorProtocol {
    private let tree: Tree
    private var stack: [Tree.Index]
    private var index: Tree.Index?

    private init(_ tree: Tree) {
        self.tree = tree
        stack = []
        index = tree.rootIndex
    }

    public mutating func next() -> Tree.Element? {
        while let theIndex = index {
            stack.append(theIndex)
            index = tree.child(of: theIndex, side: .left)
        }
        guard let currentIndex = stack.popLast() else { return nil }
        defer { index = tree.child(of: currentIndex, side: .right) }
        return tree[currentIndex]
    }
}

public protocol BinaryTree: Sequence {
    associatedtype Element
    associatedtype Index

    func child(of index: Index, side: BinaryTreeChildSide) -> Index?
    var rootIndex: Index? { get }

    subscript(position: Index) -> Element { get }
}

extension BinaryTree {
    func makeIterator() -> BinaryTreeIterator<Self> {
        return BinaryTreeIterator(self)
    }
}

extension BinaryHeap {
    private func safeIndexOrNil(_ index: Int) -> Int? {
        return elements.indices.contains(index) ? index : nil
    }

    public func child(of index: Int, side: BinaryTreeChildSide) -> Int? {
        switch side {
        case .left: return safeIndexOrNil(index * 2 + 1)
        case .right: return safeIndexOrNil(index * 2 + 2)
        }
    }

    public var rootIndex: Int? { return safeIndexOrNil(0) }

    public subscript(position: Int) -> Element {
        return elements[position]
    }
}

public struct BinaryHeap<Element> {
    private var elements: [Element]

    public init(_ elements: [Element]) {
        self.elements = elements
    }
}
let heap = BinaryHeap([4, 2, 6, 1, 3, 5, 7])
var iterator = heap.makeIterator()
while let next = iterator.next() {
    print(next, terminator: " ")
}
Apparently it was a Swift bug: my code compiles fine using Swift 3.1.

Hashing problems using a wrapper class around NSUUID as the key

** REWRITE **
OK, it turns out I'm really asking a different question. I understand about hashValue and ==, so that's not relevant.
I would like my wrapper class BUUID to "do the right thing" and act just like NSUUIDs act in a Dictionary.
See below, where they don't.
import Foundation
class BUUID: NSObject {
    init?(str: String) {
        if let uuid = NSUUID(UUIDString: str) {
            _realUUID = uuid
        }
        else {
            return nil
        }
    }

    override init() {
        _realUUID = NSUUID()
    }

    private var _realUUID: NSUUID

    override var description: String { get { return _realUUID.UUIDString } }
    override var hashValue: Int { get { return _realUUID.hashValue } }
    var UUIDString: String { get { print("WARNING Use description or .str instead"); return _realUUID.UUIDString } }
    var str: String { get { return _realUUID.UUIDString } }
}

func ==(lhs: BUUID, rhs: BUUID) -> Bool { return lhs._realUUID == rhs._realUUID }
let a = BUUID()
let b = BUUID(str: a.str)!
print("a: \(a)\nb: \(b)")
print("a === b: \(a === b)")
print("a == b: \(a == b)")
var d = [a: "Hi"]
print("\(d[a]) \(d[b])")
let nA = NSUUID()
let nB = NSUUID(UUIDString: nA.UUIDString)!
print("na: \(nA)\nnB: \(nB)")
print("nA === nB: \(nA === nB)")
print("nA == nB: \(nA == nB)")
var nD = [nA: "Hi"]
print("\(nD[nA]) \(nD[nB])")
Results. Note that I can look up using NSUUID nB and get back what I put under nA. Not so with my BUUID.
a: 9DE6FE91-D4B5-4A6B-B912-5AAF34DB41C8
b: 9DE6FE91-D4B5-4A6B-B912-5AAF34DB41C8
a === b: false
a == b: true
Optional("Hi") nil
nA: <__NSConcreteUUID 0x7fa193c39500> BB9F9851-93CF-4263-B98A-5015810E4286
nB: <__NSConcreteUUID 0x7fa193c37dd0> BB9F9851-93CF-4263-B98A-5015810E4286
nA === nB: false
nA == nB: true
Optional("Hi") Optional("Hi")
Because BUUID inherits from NSObject, Dictionary compares keys through NSObject's Equatable conformance, which calls isEqual(_:) (identity by default) rather than your == overload. So you also need to override isEqual(object: AnyObject?) -> Bool:
import Foundation
class BUUID: NSObject {
    init?(str: String) {
        if let uuid = NSUUID(UUIDString: str) {
            _realUUID = uuid
        }
        else {
            return nil
        }
    }

    override init() {
        _realUUID = NSUUID()
    }

    private var _realUUID: NSUUID

    override func isEqual(object: AnyObject?) -> Bool {
        guard let buuid = object as? BUUID else {
            return false
        }
        return buuid._realUUID == _realUUID
    }

    override var description: String { get { return _realUUID.UUIDString } }
    override var hashValue: Int { get { return _realUUID.hashValue } }
    var UUIDString: String { get { print("WARNING Use description or .str instead"); return _realUUID.UUIDString } }
    var str: String { get { return _realUUID.UUIDString } }
}

func ==(lhs: BUUID, rhs: BUUID) -> Bool { return lhs._realUUID == rhs._realUUID }
let a = BUUID()
let b = BUUID(str: a.str)!
print("a: \(a)\nb: \(b)")
print("a === b: \(a === b)")
print("a == b: \(a == b)")
var d = [a: "Hi"]
print("\(d[a]) \(d[b])")
let nA = NSUUID()
let nB = NSUUID(UUIDString: nA.UUIDString)!
print("na: \(nA)\nnB: \(nB)")
print("nA === nB: \(nA === nB)")
print("nA == nB: \(nA == nB)")
var nD = [nA: "Hi"]
print("\(nD[nA]) \(nD[nB])")
So the answer is to not make BUUID inherit from NSObject, since NSObject undercuts the Swiftiness of overriding ==. Instead:
extension BUUID: Hashable {}

class BUUID: CustomStringConvertible {
    // take away all 'override' keywords, nothing to override
    // otherwise same as above
}
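Fleshed out, the NSObject-free version might look like this (a sketch assembled from the code above; same Swift 2-era API, nothing new invented):
class BUUID: CustomStringConvertible {
    init?(str: String) {
        if let uuid = NSUUID(UUIDString: str) {
            _realUUID = uuid
        }
        else {
            return nil
        }
    }

    init() {
        _realUUID = NSUUID()
    }

    private var _realUUID: NSUUID

    var description: String { return _realUUID.UUIDString }
    var str: String { return _realUUID.UUIDString }
}

// Swift-native Hashable: Dictionary now uses this hashValue and the == below directly.
extension BUUID: Hashable {
    var hashValue: Int { return _realUUID.hashValue }
}

func ==(lhs: BUUID, rhs: BUUID) -> Bool { return lhs._realUUID == rhs._realUUID }
With this version, d[b] finds the value stored under a, just like the NSUUID case.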
Interesting!
This answer is relevant to the initially asked question: why it's possible to get two key-value pairs with identical key hashes in a dictionary.
This example illustrates that keys in a Dictionary can have identical hashes, as long as the equality operation returns false for different keys:
func ==(lhs: FooKey, rhs: FooKey) -> Bool {
    return unsafeAddressOf(lhs) == unsafeAddressOf(rhs)
}

class FooKey: Hashable, Equatable {
    var hashValue: Int {
        get {
            return 123
        }
    }
}

var d = Dictionary<FooKey, String>()
let key1 = FooKey()
let key2 = FooKey()
d[key1] = "value1"
d[key2] = "value2"
Output
[FooKey: "value1", FooKey: "value2"]
It's definitely not good to have all keys share the same hash. In that case we hit the worst case, where lookup degrades to O(n) (exhaustive search). But it will work.
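Indeed, lookups on the dictionary built above still resolve correctly, because equality (identity here) tells the colliding keys apart:
d[key1] // Optional("value1")
d[key2] // Optional("value2")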

Swift for-in loop with enumerate on custom Array2D class?

How would I go about implementing a custom enumerate function that makes something like this work (Swift 2):
for ((column, row), item) in Array2D.enumerate() { ... }
In my simple Array2D struct:
struct Array2D<T> : SequenceType {
    let columns: Int
    let rows: Int
    private var array: Array<T?>

    init(columns: Int, rows: Int) {
        self.columns = columns
        self.rows = rows
        array = Array(count: rows*columns, repeatedValue: nil)
    }

    subscript(column: Int, row: Int) -> T? {
        get {
            return array[columns*row + column]
        }
        set {
            array[columns*row + column] = newValue
        }
    }

    func generate() -> AnyGenerator<T?> {
        var column = 0
        var row = 0
        return anyGenerator() {
            guard row < self.rows else {
                return nil
            }
            let item = self[column, row]
            if ++column == self.columns {
                column = 0
                ++row
            }
            return item
        }
    }
}
I couldn't find any good explanation of how to implement an enumerate function in Swift.
The enumerate() function in Swift returns integers starting from 0 as the first part of its tuple. Those integers have nothing to do with the indices of the sequence you're enumerating over. So, for instance, this won't work:
let word = "hello".characters
for (index, letter) in word.enumerate() {
print(word[index])
}
Because the indices of a CharacterView are String.Index values, not Ints.
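(As a quick aside, a sketch of a direct workaround with the Swift 2-era API: zip pairs each element with its true String.Index, though the rest of this answer builds something more general.)
let word = "hello".characters
for (index, letter) in zip(word.indices, word) {
    print(letter, word[index]) // the same Character, twice
}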
So there are several ways to get what you're going for. The first is to just overload enumerate() for your struct. Again, there are a few ways you could do this. First off, how about a function that uses your own generator, with its own logic to figure out the coordinates? This could work:
func enumerate() -> AnyGenerator<((Int, Int), T?)> {
    let g = self.generate()
    var coord = -1
    return anyGenerator {
        g.next().map { ((++coord % self.columns, coord / self.columns), $0) }
    }
}
But you're duplicating code there, especially from your generate method. Seeing as you're already using coordinates to return each element, why not make your enumerate method the primary one, and have your generate method call it? Something like this:
// Original generate method, now returns the coords it used
func enumerate() -> AnyGenerator<((Int, Int), T?)> {
    var column = 0
    var row = 0
    return anyGenerator() {
        guard row < self.rows else {
            return nil
        }
        let item = self[column, row]
        let coord = (column, row) // capture before advancing past this element
        if ++column == self.columns {
            column = 0
            ++row
        }
        return (coord, item)
    }
}

// uses enumerate, ignores coords
func generate() -> AnyGenerator<T?> {
    let g = self.enumerate()
    return anyGenerator {
        g.next().map { $1 }
    }
}
If you wanted to go a little overboard, you could write an enumerate function that enumerates the specific indices of its base. Call it specEnumerate:
public struct SpecEnumerateGen<Base : CollectionType> : GeneratorType {
    private var eG: Base.Generator
    private let sI: Base.Index
    private var i : Base.Index?

    public mutating func next() -> (Base.Index, Base.Generator.Element)? {
        i?._successorInPlace() ?? { self.i = self.sI }()
        return eG.next().map { (i!, $0) }
    }

    private init(g: Base.Generator, i: Base.Index) {
        self.eG = g
        self.sI = i
        self.i = nil
    }
}

public struct SpecEnumerateSeq<Base : CollectionType> : SequenceType {
    private let col: Base

    public func generate() -> SpecEnumerateGen<Base> {
        return SpecEnumerateGen(g: col.generate(), i: col.startIndex)
    }
}

public extension CollectionType {
    func specEnumerate() -> SpecEnumerateSeq<Self> {
        return SpecEnumerateSeq(col: self)
    }
}
With this function, this would work:
let word = "hello".characters
for (index, letter) in word.specEnumerate() {
print(word[index])
}
But your matrix struct is still a SequenceType, with no specific indices. For that, you'll have to implement your own MatrixIndex:
public struct MatrixIndex: BidirectionalIndexType {
    public let x, y : Int
    private let columns: Int

    public func successor() -> MatrixIndex {
        return (x + 1 == columns) ?
            MatrixIndex(x: 0, y: y + 1, columns: columns) :
            MatrixIndex(x: x + 1, y: y, columns: columns)
    }

    public func predecessor() -> MatrixIndex {
        return (x == 0) ?
            MatrixIndex(x: columns - 1, y: y - 1, columns: columns) :
            MatrixIndex(x: x - 1, y: y, columns: columns)
    }
}

public func == (lhs: MatrixIndex, rhs: MatrixIndex) -> Bool {
    return lhs.x == rhs.x && lhs.y == rhs.y
}

extension MatrixIndex : CustomDebugStringConvertible {
    public var debugDescription: String {
        return "\(x), \(y)"
    }
}

extension MatrixIndex: RandomAccessIndexType {
    public func advancedBy(n: Int) -> MatrixIndex {
        let total = (y * columns) + x + n
        return MatrixIndex(x: total % columns, y: total / columns, columns: columns)
    }

    public func distanceTo(other: MatrixIndex) -> Int {
        return (other.x - x) + (other.y - y) * columns
    }
}
Right. Now you'll need another matrix struct:
public struct Matrix2D<T> : MutableCollectionType {
    public var contents: [[T]]

    public subscript(index: MatrixIndex) -> T {
        get {
            return contents[index.y][index.x]
        }
        set {
            self.contents[index.y][index.x] = newValue
        }
    }

    public var count: Int { return contents[0].count * contents.count }

    public var startIndex: MatrixIndex {
        return MatrixIndex(x: 0, y: 0, columns: contents[0].count)
    }

    public var endIndex: MatrixIndex {
        return MatrixIndex(x: 0, y: contents.endIndex, columns: contents[0].count)
    }
}
Right. So now, after all of that, this works:
let myMatrix = Matrix2D(contents: [[1, 2], [3, 4]])
for (coordinate, value) in myMatrix.specEnumerate() {
    value == myMatrix[coordinate] // True every time
}
It might suffice to define your own enumerate, taking advantage of what you already have:
func enumerate() -> AnyGenerator<((Int, Int), T?)> {
    var index = 0
    var g = array.generate()
    return anyGenerator() {
        if let item = g.next() {
            let column = index % self.columns
            let row = index / self.columns
            ++index
            return ((column, row), item)
        }
        return nil
    }
}
Notice that in this case you could avoid conforming to SequenceType, since I use generate from the private array. Still, keeping the conformance is consistent.
Here is how you could then use it:
var a2d = Array2D<Int>(columns: 2, rows: 4)
a2d[0,1] = 4
for ((column, row), item) in a2d.enumerate() {
    print("[\(column) : \(row)] = \(item)")
}
Hope this helps