Is there a better way to have a Swift array with names associated to index numbers?

In Swift, I want to have an array of items and then to be able to access those items by name or by index. The array is a fixed size array with 3 elements.
I have the following implementation using a struct
struct Boxes {
    var array: [Int]!
    var a: Int {
        get { return array[0] }
        set { array[0] = newValue }
    }
    var b: Int {
        get { return array[1] }
        set { array[1] = newValue }
    }
    var c: Int {
        get { return array[2] }
        set { array[2] = newValue }
    }
    init() {
        self.array = Array<Int>(repeating: 0, count: 3)
    }
}
And then, use it as follows:
var box = Boxes()
box.a = 1
box.b = box.array[0]
box.array[0] = 2
box.b = box.a
Is there a simpler, more elegant/concise form to do this?

struct Boxes {
    var array: [Int] {
        get { return [a, b, c] }
        set { (a, b, c) = (newValue[0], newValue[1], newValue[2]) }
    }
    var (a, b, c) = (0, 0, 0)
}
Note that this relies on the requirement stated in the question: a fixed-size array with 3 elements.
Exact same usage:
var box = Boxes()
box.a = 1
box.b = box.array[0]
box.array[0] = 2
box.b = box.a
Extra:
You can add a custom initializer if you want to initialize with an array:
init(array: [Int] = [0, 0, 0]) {
    self.array = array
}

How about using an enum to store the name-to-index associations, and adding a subscript to Boxes to access the array:
enum BoxName: Int {
    case a, b, c
}
struct Boxes {
    var array: [Int]!
    subscript(_ name: BoxName) -> Int {
        get { return array[name.rawValue] }
        set { array[name.rawValue] = newValue }
    }
    init() {
        self.array = Array<Int>(repeating: 0, count: 3)
    }
}
// usage:
var box = Boxes()
box[.a] = 1
box[.b] = box.array[0]
box.array[0] = 2
box[.b] = box[.a]
Now, when you want to add a new element and a new name to the array, you just need to add a new enum case.
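If you are on Swift 4.2 or later, you can also make the enum CaseIterable and size the array from it, so adding a case automatically grows the array; a sketch along those lines (my addition, not part of the answer above):
enum BoxName: Int, CaseIterable {
    case a, b, c
}
struct Boxes {
    // Sized from the number of enum cases, so adding a case grows the array.
    var array = [Int](repeating: 0, count: BoxName.allCases.count)
    subscript(_ name: BoxName) -> Int {
        get { return array[name.rawValue] }
        set { array[name.rawValue] = newValue }
    }
}

var box = Boxes()
box[.a] = 1
box[.c] = box[.a]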

Related

Union-find: largest component size by common factor algorithm gives different results on every run

I was practicing data structure algorithms and wrote a Union-Find solution for the question.
The problem is that the code seems OK to me, but when I run it in an Xcode playground, it shows different answers for the same input.
For example, when I pass the array [4, 6, 15, 35] to the function largestComponentSize, it shows 2, 3, or 4 as the answer. I don't understand what's happening behind the scenes.
class Solution {
    var uf = UnionFind()

    func largestComponentSize(_ nums: [Int]) -> Int {
        var maxNum: Int = 0
        var numFactorMap = [Int: Int]()
        var factorAdded = Set<Int>()
        for num in nums {
            var pFactors = getPrimeFactors(num)
            numFactorMap[num] = pFactors[0]
            for (i, val) in pFactors.enumerated() {
                if !factorAdded.contains(val) {
                    uf.addSet(val)
                    factorAdded.insert(val)
                }
                if i > 0 {
                    uf.union(pFactors[i-1], val)
                }
            }
        }
        var groupCountMap = [Int: Int]()
        for num in nums {
            var groupId = uf.find(numFactorMap[num]!)!
            if groupCountMap.keys.contains(groupId) {
                groupCountMap[groupId]! += 1
            } else {
                groupCountMap[groupId] = 1
            }
            maxNum = max(maxNum, groupCountMap[groupId]!)
        }
        return maxNum
    }

    func getPrimeFactors(_ num: Int) -> [Int] {
        var ans: Set<Int> = []
        if num == 1 {
            return []
        }
        var crrNum = num
        var deno = 2
        while crrNum >= deno {
            if crrNum % deno == 0 {
                ans.insert(deno)
                crrNum = crrNum / deno
            } else {
                deno = deno + 1
            }
        }
        return Array(ans)
    }

    class UnionFind {
        var index = [Int: Int]()
        var parent: [Int]
        var size: [Int]

        init() {
            parent = []
            size = []
        }

        func addSet(_ ele: Int) {
            index[ele] = parent.count
            parent.append(parent.count)
            size.append(1)
        }

        func getSetSize(_ ele: Int) -> Int {
            if let found = find(ele) {
                return size[found]
            }
            return 0
        }

        func find(_ ele: Int) -> Int? {
            if let indexOfEle = index[ele] {
                if parent[indexOfEle] == indexOfEle {
                    return indexOfEle
                } else {
                    if let found = find(parent[indexOfEle]) {
                        parent[indexOfEle] = found
                    }
                    return parent[indexOfEle]
                }
            } else {
                return nil //never come here
            }
        }

        func union(_ first: Int, _ second: Int) {
            guard let indexOfFirst = index[first], let indexOfSecond = index[second] else {
                return
            }
            if parent[indexOfFirst] == parent[indexOfSecond] {
                return
            }
            var indexOfLarger = indexOfFirst
            var indexOfSmaller = indexOfSecond
            if size[indexOfFirst] < size[indexOfSecond] {
                indexOfLarger = indexOfSecond
                indexOfSmaller = indexOfFirst
            }
            parent[indexOfSmaller] = indexOfLarger
            size[indexOfLarger] += size[indexOfSmaller]
            return
        }
    }
}
var sol = Solution()
var nums = [4, 6, 15, 35]
var ans = sol.largestComponentSize(nums)
Thank you for your help in advance!
I just tried it in an Xcode playground.
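One observation that may be relevant (my reading, not a confirmed diagnosis): getPrimeFactors returns Array(ans) built from a Set, and a Set's iteration order is not stable across runs because Swift randomizes its hash seed per process. Any logic that depends on the order of those factors (for example, which factor ends up in numFactorMap[num], or the order in which union is called) can therefore behave differently from run to run. A minimal sketch of that effect:
// Run this a few times: the printed order typically changes from run to run,
// even though the set's contents are identical.
let factors: Set<Int> = [2, 3, 5, 7]
print(Array(factors))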

Set does not work despite copying from Developer Documentation

Whilst attempting to create a set, I get the error stating "Cannot convert value of type '[Int]' to specified type 'Set'". This occurs even though I write the code exactly as in the Developer Documentation (DD): https://developer.apple.com/documentation/swift/set
var intSet2 : Set = [2, 3, 5, 7]
// Cannot convert value of type '[Int]' to specified type 'Set'
I have now switched to 'NSSet' (which I found by accident) and now it appears to work. How come using the normal 'Set' does not work? When I go to the DD of NSSet it says: "An object representing a static, unordered, uniquing collection, for use instead of a Set constant in cases that require reference semantics." Is this about reference types? The DD also states that arrays are value types. I'm at a loss as to when to use Set or NSSet (or even NSMutableSet for that matter).
var intSet2 : NSSet = [2, 3, 5, 7]
// Works
Lastly, when I try to convert an array of colors with type [CardColor] to a Set or NSSet, I receive the error message: "Cannot convert value of type '[CardColor]' to specified type 'NSSet'".
let colorCheck = selectedCards.map { $0.color }
var colorCheckSet : NSSet = colorCheck
// Cannot convert value of type '[CardColor]' to specified type 'NSSet'
Thanks for your help in advance.
Set model:
import Foundation
class Set {
    // MARK: properties
    var deck = [Card]()
    var tableCards = [Card]()
    var matchedCards = [Card]()
    var selectedCards: [Card] {
        get {
            var cards = [Card]()
            for card in tableCards.indices {
                if tableCards[card].isSelected == true {
                    cards.append(tableCards[card])
                }
            }
            return cards
        }
    }
    var unmatchedCards = 12
    var score = 0

    // MARK: functions
    // Selects the card. If this is the third card to be selected, proceeds to check for matches
    func selectCard(at index: Int) {
        if tableCards[index].isSelected == false {
            if selectedCards.count < 3 {
                tableCards[index].isSelected = true
                if selectedCards.count == 3 {
                    checkIfCardsMatch()
                }
            }
        }
        else if tableCards[index].isSelected == true {
            tableCards[index].isSelected = false
        }
    }

    func checkIfCardsMatch() {
        let colorCheck = selectedCards.map { $0.color }
        var colorCheckSet : NSSet = colorCheck
        // Cannot convert value of type '[CardColor]' to specified type 'NSSet'
        var intSet2 : Set = [2, 3, 5, 7]
        // Cannot convert value of type '[Int]' to specified type 'Set'
        // for item in colorCheck {
        //     colorCheckSet.insert()
        // }
        // let symbolCheck: Set = selectedCards.map() { $0.symbol }
        // let numberCheck: Set = selectedCards.map() { $0.number }
        // let shadingCheck: Set = selectedCards.map() { $0.shading }
    }

    // MARK: functions
    func dealThreeMoreCards() {
        if unmatchedCards <= 21 {
            unmatchedCards += 3
        }
        print(unmatchedCards)
    }

    // MARK: initialization
    init() {
        for cardcolor in CardColor.allCases {
            for cardsymbol in CardSymbol.allCases {
                for cardnumber in CardNumber.allCases {
                    for cardshading in CardShading.allCases {
                        let card = Card(initcolor: cardcolor, initsymbol: cardsymbol, initnumber: cardnumber, initshading: cardshading)
                        deck.append(card)
                    }
                }
            }
        }
    }
}
Card model:
import Foundation
struct Card {
    var identifier: Int = 0
    var isSelected = false
    var color: CardColor
    var symbol: CardSymbol
    var number: CardNumber
    var shading: CardShading
    static var identifierFactory = 0

    init(initcolor: CardColor, initsymbol: CardSymbol, initnumber: CardNumber, initshading: CardShading) {
        color = initcolor
        symbol = initsymbol
        number = initnumber
        shading = initshading
        Card.identifierFactory += 1
        self.identifier = Card.identifierFactory
    }
}

enum CardColor: Int, CaseIterable {
    case red = 1
    case green = 2
    case purple = 3
    // static let all = [cardColor.red, cardColor.green, cardColor.purple]
}

enum CardSymbol: Int, CaseIterable {
    case ovals = 1
    case squiggles = 2
    case diamonds = 3
    // static let all = [cardSymbol.ovals, cardSymbol.squiggles, cardSymbol.diamonds]
}

enum CardNumber: Int, CaseIterable {
    case one = 1
    case two = 2
    case three = 3
    // static let all = [cardNumber.one, cardNumber.two, cardNumber.three]
}

enum CardShading: Int, CaseIterable {
    case solid = 1
    case open = 2
    case striped = 3
    // static let all = [cardShading.solid, cardShading.open, cardShading.striped]
}

// Not every Card variable has been included below. Could cause issues later.
extension Card: Hashable {
    static func == (lhs: Card, rhs: Card) -> Bool {
        return lhs.identifier == rhs.identifier &&
            lhs.isSelected == rhs.isSelected &&
            lhs.color == rhs.color &&
            lhs.symbol == rhs.symbol &&
            lhs.number == rhs.number &&
            lhs.shading == rhs.shading
    }

    func hash(into hasher: inout Hasher) {
        hasher.combine(identifier)
        hasher.combine(isSelected)
        hasher.combine(color)
        hasher.combine(symbol)
        hasher.combine(number)
        hasher.combine(shading)
    }
}
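For what it's worth, a likely cause of the first two errors (my reading; it is not stated in the question) is that the model class above is itself named Set, so within this file the bare name Set refers to that class instead of the standard library's Set type. A minimal sketch of the same lines with the standard library type qualified explicitly and the mapped array converted through an initializer rather than assigned directly:
// Inside a file that declares its own `class Set`, qualify the standard library type:
var intSet2: Swift.Set<Int> = [2, 3, 5, 7]            // works

// Build a set from a mapped array with the Set initializer (sample values):
let colorCheck: [CardColor] = [.red, .red, .green]
let colorCheckSet = Swift.Set(colorCheck)             // Set<CardColor>, count == 2
Renaming the model class (for example, to SetGame) would avoid the name collision altogether.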

Subscript of a struct doesn't set values when created as an implicitly unwrapped optional

Why can't I change the "numbers" array using subscripts when "Foo" is an implicitly unwrapped optional?
struct Foo {
    var numbers = [0, 0, 0]
    subscript(index: Int) -> Int {
        get { return self.numbers[index] }
        set { self.numbers[index] = newValue }
    }
}
var fooA:Foo!
fooA = Foo()
fooA[1] = 1 // does not change numbers array
fooA[1] // returns 0
fooA.numbers[1] = 1 // this works
fooA[1] // returns 1
var fooB:Foo!
fooB = Foo()
fooB![1] = 1 // this works
fooB![1] // returns 1
For some reason it works when I make "Foo" a class (called "Goo" below)
class Goo {
    var numbers = [0, 0, 0]
    subscript(index: Int) -> Int {
        get { return self.numbers[index] }
        set { self.numbers[index] = newValue }
    }
}
var goo:Goo!
goo = Goo()
goo[1] = 1 // this works
goo[1] // returns 1
It looks like a bug (or I'm missing something important); check this:
struct Foo {
    var numbers = [0, 0, 0]
    subscript(index: Int) -> Int {
        get {
            return self.numbers[index]
        }
        set {
            numbers[index] = newValue
        }
    }
}
var fooA:Foo! = Foo()
// here is the difference
fooA?[1] = 1
fooA[1] // 1
fooA.numbers[1] = 1
fooA[1] // 1
A more 'complex' experiment:
struct Foo {
    var numbers = [0, 0, 0]
    subscript(index: Int) -> Int {
        get {
            return numbers[index]
        }
        set {
            print(numbers[index], newValue)
            numbers[index] = newValue
            print(numbers[index])
        }
    }
}
var fooA:Foo! = Foo()
fooA[1] = 1
fooA[1] // 0
// but prints
// 0 1
// 1
For more 'fun':
var fooA: Foo! = Foo()
if var foo = fooA {
    foo[1] = 1
    print(foo)
}
prints
"Foo(numbers: [0, 1, 0])\n"

Swift for-in loop with enumerate on custom Array2D class?

How would I go about implementing a custom enumerate function that makes something like this work (Swift 2):
for ((column, row), item) in Array2D.enumerate() { ... }
In my simple Array2D struct:
struct Array2D<T> : SequenceType {
    let columns: Int
    let rows: Int
    private var array: Array<T?>

    init(columns: Int, rows: Int) {
        self.columns = columns
        self.rows = rows
        array = Array(count: rows*columns, repeatedValue: nil)
    }

    subscript(column: Int, row: Int) -> T? {
        get {
            return array[columns*row + column]
        }
        set {
            array[columns*row + column] = newValue
        }
    }

    func generate() -> AnyGenerator<T?> {
        var column = 0
        var row = 0
        return anyGenerator() {
            guard row < self.rows else {
                return nil
            }
            let item = self[column, row]
            if ++column == self.columns {
                column = 0
                ++row
            }
            return item
        }
    }
}
I couldn't find a good explanation of how to implement an enumerate function in Swift.
The enumerate() function in Swift returns integers starting from 0 for the first part of its tuple. Those integers have nothing to do with the indices of the sequence you're enumerating over. So, for instance, this won't work:
let word = "hello".characters
for (index, letter) in word.enumerate() {
print(word[index])
}
That's because the indices of a character view are String.Index values, not Ints.
So there are several ways to get what you're going for. The first is to just overload enumerate() for your struct. Again, there are a few ways you could do this. First off, how about a function that uses your own generator and its own logic to figure out the coordinates? This could work:
func enumerate() -> AnyGenerator<((Int, Int), T?)> {
    let g = self.generate()
    var coord = -1
    return anyGenerator {
        g.next().map { ((++coord % self.columns, coord / self.columns), $0) }
    }
}
But you're duplicating code there, especially from your generate method. Seeing as you're already using coordinates to return each element, why not make your enumerate method the primary one and have your generate method call it? Something like this:
// Original generate method, now returns the coords it used
func enumerate() -> AnyGenerator<((Int, Int), T?)> {
    var column = 0
    var row = 0
    return anyGenerator() {
        guard row < self.rows else {
            return nil
        }
        // Capture the coordinates before advancing them, so the returned
        // tuple matches the item that was actually read.
        let coord = (column, row)
        let item = self[column, row]
        if ++column == self.columns {
            column = 0
            ++row
        }
        return (coord, item)
    }
}
// uses enumerate, ignores coords
func generate() -> AnyGenerator<T?> {
    let g = self.enumerate()
    return anyGenerator {
        g.next().map { $1 }
    }
}
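With those two methods in place, the loop from the question should work as written; a quick usage sketch (the sizes and values are mine):
var grid = Array2D<Int>(columns: 2, rows: 4)
grid[1, 0] = 7
for ((column, row), item) in grid.enumerate() {
    print("[\(column), \(row)] = \(item)")
}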
If you wanted to go a little overboard, you could write an enumerate function that enumerates the specific indices of its base. Call it specEnumerate:
public struct SpecEnumerateGen<Base : CollectionType> : GeneratorType {
    private var eG: Base.Generator
    private let sI: Base.Index
    private var i: Base.Index?

    public mutating func next() -> (Base.Index, Base.Generator.Element)? {
        i?._successorInPlace() ?? {self.i = self.sI}()
        return eG.next().map { (i!, $0) }
    }

    private init(g: Base.Generator, i: Base.Index) {
        self.eG = g
        self.sI = i
        self.i = nil
    }
}

public struct SpecEnumerateSeq<Base : CollectionType> : SequenceType {
    private let col: Base
    public func generate() -> SpecEnumerateGen<Base> {
        return SpecEnumerateGen(g: col.generate(), i: col.startIndex)
    }
}

public extension CollectionType {
    func specEnumerate() -> SpecEnumerateSeq<Self> {
        return SpecEnumerateSeq(col: self)
    }
}
With this function, this would work:
let word = "hello".characters
for (index, letter) in word.specEnumerate() {
print(word[index])
}
But your matrix struct is still a SequenceType, with no specific indices. For that, you'll have to implement your own MatrixIndex:
public struct MatrixIndex: BidirectionalIndexType {
    public let x, y: Int
    private let columns: Int

    public func successor() -> MatrixIndex {
        return (x + 1 == columns) ?
            MatrixIndex(x: 0, y: y + 1, columns: columns) :
            MatrixIndex(x: x + 1, y: y, columns: columns)
    }

    public func predecessor() -> MatrixIndex {
        return (x == 0) ?
            MatrixIndex(x: columns - 1, y: y - 1, columns: columns) :
            MatrixIndex(x: x - 1, y: y, columns: columns)
    }
}

public func == (lhs: MatrixIndex, rhs: MatrixIndex) -> Bool {
    return lhs.x == rhs.x && lhs.y == rhs.y
}

extension MatrixIndex : CustomDebugStringConvertible {
    public var debugDescription: String {
        return "\(x), \(y)"
    }
}

extension MatrixIndex: RandomAccessIndexType {
    public func advancedBy(n: Int) -> MatrixIndex {
        let total = (y * columns) + x + n
        return MatrixIndex(x: total % columns, y: total / columns, columns: columns)
    }
    public func distanceTo(other: MatrixIndex) -> Int {
        return (other.x - x) + (other.y - y) * columns
    }
}
Right. Now you'll need another matrix struct:
public struct Matrix2D<T> : MutableCollectionType {
    public var contents: [[T]]

    public subscript(index: MatrixIndex) -> T {
        get {
            return contents[index.y][index.x]
        }
        set {
            self.contents[index.y][index.x] = newValue
        }
    }

    public var count: Int { return contents[0].count * contents.count }

    public var startIndex: MatrixIndex {
        return MatrixIndex(x: 0, y: 0, columns: contents[0].count)
    }
    public var endIndex: MatrixIndex {
        return MatrixIndex(x: 0, y: contents.endIndex, columns: contents[0].count)
    }
}
Right. So now, after all of that, this works:
let myMatrix = Matrix2D(contents: [[1, 2], [3, 4]])
for (coordinate, value) in myMatrix.specEnumerate() {
    value == myMatrix[coordinate] // True every time
}
It might suffice to define your own enumerate, taking advantage of what you already have:
func enumerate() -> AnyGenerator<((Int, Int), T?)> {
    var index = 0
    var g = array.generate()
    return anyGenerator() {
        if let item = g.next() {
            let column = index % self.columns
            let row = index / self.columns
            ++index
            return ((column, row), item)
        }
        return nil
    }
}
Notice that in this case you could avoid conforming to SequenceType, since I use generate from the private array. It would still be consistent to conform, though.
Here is how you could then use it:
var a2d = Array2D<Int>(columns: 2, rows: 4)
a2d[0, 1] = 4
for ((column, row), item) in a2d.enumerate() {
    print("[\(column) : \(row)] = \(item)")
}
Hope this helps
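For reference, the answers above target Swift 2. Under current Swift the same idea can be expressed with Sequence instead of SequenceType and without the removed ++ operators; a minimal sketch (my adaptation, not part of either answer, with enumerated2D as a made-up method name):
struct Array2D<T>: Sequence {
    let columns: Int
    let rows: Int
    private var array: [T?]

    init(columns: Int, rows: Int) {
        self.columns = columns
        self.rows = rows
        array = Array(repeating: nil, count: rows * columns)
    }

    subscript(column: Int, row: Int) -> T? {
        get { return array[columns * row + column] }
        set { array[columns * row + column] = newValue }
    }

    // Yields ((column, row), item) tuples, like enumerate() in the answers above.
    func enumerated2D() -> AnySequence<((Int, Int), T?)> {
        return AnySequence(array.enumerated().lazy.map {
            (($0.offset % self.columns, $0.offset / self.columns), $0.element)
        })
    }

    func makeIterator() -> IndexingIterator<[T?]> {
        return array.makeIterator()
    }
}

var grid = Array2D<Int>(columns: 2, rows: 4)
grid[0, 1] = 4
for ((column, row), item) in grid.enumerated2D() {
    print("[\(column), \(row)] = \(String(describing: item))")
}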

How to sort an array by a Boolean field in Swift?

I have a class defined like this:
class someClass {
    var isCompleted = false
}
How do I sort a list of someClass if I want to move the completed items to the top of the list?
You can sort according to the Boolean property by converting the values to Int:
let arrayOfClasses = ... // array of classes
let sortedArrayOfClasses = sorted(arrayOfClasses) {
    Int($0.isCompleted) > Int($1.isCompleted)
}
Or with "in-situ" sort:
var arrayOfClasses = ... // array of classes
sort(&arrayOfClasses) {
    Int($0.isCompleted) > Int($1.isCompleted)
}
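On newer Swift versions the free sorted(_:_:) function is gone and there is no Int initializer that takes a Bool, so the same idea can be written with the ternary operator instead; a small sketch (Task is a hypothetical stand-in for someClass):
// Hypothetical element type standing in for someClass from the question.
class Task {
    var isCompleted = false
    init(_ completed: Bool) { isCompleted = completed }
}

let tasks = [Task(false), Task(true), Task(false)]

// Convert the Bool to an Int explicitly, then compare as before.
let sortedTasks = tasks.sorted {
    ($0.isCompleted ? 1 : 0) > ($1.isCompleted ? 1 : 0)
}
sortedTasks.map { $0.isCompleted }   // [true, false, false]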
The closure parameter for sort or sorted returns true iff the first element must be ordered before the second, and false otherwise.
So you can:
let array = [ ... ]
let sortedArray = array.sorted { a, b in
    a.isCompleted && !b.isCompleted
}
Or partition manually into two arrays and join them, keeping the completed items first:
var yourArrayOfBooleans: [SomeClass] = ...
var sortedArrayOfBooleans: [SomeClass] = []
var tmpArray: [SomeClass] = []
for ar in yourArrayOfBooleans {
    if ar.isCompleted {
        sortedArrayOfBooleans.append(ar)
    } else {
        tmpArray.append(ar)
    }
}
sortedArrayOfBooleans += tmpArray