Subscript of a struct doesn't set values when created as an implicitly unwrapped optional - swift

Why can't I change the "numbers" array using subscripts when "Foo" is an implicitly unwrapped optional?
struct Foo {
    var numbers = [0,0,0]
    subscript(index: Int) -> Int {
        get { return self.numbers[index] }
        set { self.numbers[index] = newValue }
    }
}
var fooA:Foo!
fooA = Foo()
fooA[1] = 1 // does not change numbers array
fooA[1] // returns 0
fooA.numbers[1] = 1 // this works
fooA[1] // returns 1
var fooB:Foo!
fooB = Foo()
fooB![1] = 1 // this works
fooB![1] // returns 1
For some reason it works when I make "Foo" a class (called "Goo" below)
class Goo {
    var numbers = [0,0,0]
    subscript(index: Int) -> Int {
        get { return self.numbers[index] }
        set { self.numbers[index] = newValue }
    }
}
var goo:Goo!
goo = Goo()
goo[1] = 1 // this works
goo[1] // returns 1

It looks like a bug (or I'm missing something important). Check this:
struct Foo {
    var numbers = [0,0,0]
    subscript(index: Int) -> Int {
        get {
            return self.numbers[index]
        }
        set {
            numbers[index] = newValue
        }
    }
}
var fooA:Foo! = Foo()
// here is the difference
fooA?[1] = 1
fooA[1] // 1
fooA.numbers[1] = 1
fooA[1] // 1
A more 'complex' experiment:
struct Foo {
    var numbers = [0,0,0]
    subscript(index: Int) -> Int {
        get {
            return numbers[index]
        }
        set {
            print(numbers[index],newValue)
            numbers[index] = newValue
            print(numbers[index])
        }
    }
}
var fooA:Foo! = Foo()
fooA[1] = 1
fooA[1] // 0
// but prints
// 0 1
// 1
And for more 'fun':
var fooA:Foo! = Foo()
if var foo = fooA {
    foo[1] = 1
    print(foo)
}
prints
"Foo(numbers: [0, 1, 0])\n"

Related

Union-find: largest component size by common factor algorithm gives different results on every run

I was practicing data structure algorithms and made a Union-Find solution for this question.
The problem is that the code seems OK to me, but when I run it in an Xcode playground, it shows different answers for the same input.
For example, I pass the array [4, 6, 15, 35] to the function largestComponentSize, and it shows 2, 3, or 4 as the answer. I don't understand what's happening behind the scenes.
class Solution {
    var uf = UnionFind()
    func largestComponentSize(_ nums: [Int]) -> Int {
        var maxNum:Int = 0
        var numFactorMap = [Int:Int]()
        var factorAdded = Set<Int>()
        for num in nums {
            var pFactors = getPrimeFactors(num)
            numFactorMap[num] = pFactors[0]
            for (i, val) in pFactors.enumerated() {
                if !factorAdded.contains(val) {
                    uf.addSet(val)
                    factorAdded.insert(val)
                }
                if i > 0 {
                    uf.union(pFactors[i-1], val)
                }
            }
        }
        var groupCountMap = [Int:Int]()
        for num in nums {
            var groupId = uf.find(numFactorMap[num]!)!
            if groupCountMap.keys.contains(groupId) {
                groupCountMap[groupId]! += 1
            } else {
                groupCountMap[groupId] = 1
            }
            maxNum = max(maxNum, groupCountMap[groupId]!)
        }
        return maxNum
    }
    func getPrimeFactors(_ num: Int) -> [Int] {
        var ans:Set<Int> = []
        if num == 1 {
            return []
        }
        var crrNum = num
        var deno = 2
        while crrNum >= deno {
            if crrNum % deno == 0 {
                ans.insert(deno)
                crrNum = crrNum / deno
            } else {
                deno = deno + 1
            }
        }
        return Array(ans)
    }
    class UnionFind {
        var index = [Int: Int]()
        var parent: [Int]
        var size: [Int]
        init() {
            parent = []
            size = []
        }
        func addSet(_ ele: Int) {
            index[ele] = parent.count
            parent.append(parent.count)
            size.append(1)
        }
        func getSetSize(_ ele: Int) -> Int {
            if let found = find(ele) {
                return size[found]
            }
            return 0
        }
        func find(_ ele: Int) -> Int? {
            if let indexOfEle = index[ele] {
                if parent[indexOfEle] == indexOfEle {
                    return indexOfEle
                } else {
                    if let found = find(parent[indexOfEle]) {
                        parent[indexOfEle] = found
                    }
                    return parent[indexOfEle]
                }
            } else {
                return nil //never come here
            }
        }
        func union(_ first: Int, _ second: Int) {
            guard let indexOfFirst = index[first], let indexOfSecond = index[second] else {
                return
            }
            if parent[indexOfFirst] == parent[indexOfSecond] {
                return
            }
            var indexOfLarger = indexOfFirst
            var indexOfSmaller = indexOfSecond
            if size[indexOfFirst] < size[indexOfSecond] {
                indexOfLarger = indexOfSecond
                indexOfSmaller = indexOfFirst
            }
            parent[indexOfSmaller] = indexOfLarger
            size[indexOfLarger] += size[indexOfSmaller]
            return
        }
    }
}
var sol = Solution()
var nums = [4, 6, 15, 35]
var ans = sol.largestComponentSize(nums)
Thank you for your help in advance!
I just tried it in an Xcode playground.
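Two things in the code would explain the varying answers. First, getPrimeFactors collects the factors in a Set<Int> and returns Array(ans); a Set's iteration order is not stable from run to run, so both numFactorMap[num] and the order of the uf.union calls change every time. Second, the UnionFind mixes up elements and array slots: union links the slots it is given directly (and its early-exit check compares parents rather than roots), and find recurses on parent[indexOfEle], which is a slot, as if it were an element. Depending on the factor order, groups therefore end up merged or split differently, hence 2, 3, or 4. Below is a sketch of a root-based UnionFind that keeps elements and slots separate; it is meant as a drop-in replacement for the inner class (everything else unchanged) and, if the reasoning above is right, it should give 4 for [4, 6, 15, 35] on every run:

class UnionFind {
    private var index = [Int: Int]()   // element -> slot in parent/size
    private var parent = [Int]()
    private var size = [Int]()

    func addSet(_ ele: Int) {
        guard index[ele] == nil else { return }   // ignore duplicates
        index[ele] = parent.count
        parent.append(parent.count)
        size.append(1)
    }

    func getSetSize(_ ele: Int) -> Int {
        guard let root = find(ele) else { return 0 }
        return size[root]
    }

    // Public find: element in, root slot out.
    func find(_ ele: Int) -> Int? {
        guard let slot = index[ele] else { return nil }
        return findRoot(slot)
    }

    // Private helper that works purely on slots, with path compression.
    private func findRoot(_ slot: Int) -> Int {
        if parent[slot] == slot { return slot }
        parent[slot] = findRoot(parent[slot])
        return parent[slot]
    }

    func union(_ first: Int, _ second: Int) {
        guard let a = find(first), let b = find(second), a != b else { return }
        // Union by size, always linking root to root.
        let (larger, smaller) = size[a] >= size[b] ? (a, b) : (b, a)
        parent[smaller] = larger
        size[larger] += size[smaller]
    }
}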

Can you explain the output of the following Swift Projected Value?

@propertyWrapper
struct smallNumber {
    private var num: Int
    private var maximum: Int
    var projectedValue: Bool
    var wrappedValue: Int {
        get { return num }
        set {
            if num > maximum {
                num = maximum
                projectedValue = true
            }
            else {
                num = newValue
                projectedValue = false
            }
        }
    }
    init() {
        num = 0
        maximum = 12
        projectedValue = false
    }
    init(wrappedValue: Int) {
        maximum = 12
        projectedValue = false
        num = min(wrappedValue, maximum)
    }
    init(wrappedValue: Int, maximum: Int) {
        self.maximum = maximum
        projectedValue = false
        num = max(wrappedValue, maximum)
    }
}
struct Rectangle {
    @smallNumber var height: Int
    @smallNumber var width: Int
    var area: Int {
        get {
            return height * width
        }
    }
}
var x = Rectangle()
x.height = 9
x.width = 89
//x.width = 78
print(x.$height, x.$width, x.area)
Hi, I am learning Swift and I am having trouble with the above code. In the Swift reference (Projected Values), it's written that if we set the value to something greater than 12, then projectedValue becomes true. But after running the above code, x.$width prints false. After removing the comment on the second-to-last line, x.$width prints true. Can someone explain to me how this works?
You copied the code incorrectly.
You have
if num > maximum {
but need
if newValue > maximum {
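For reference, here is a quick sketch of the corrected setter and what the test at the bottom should then print, assuming everything else in the posted code stays the same:

var wrappedValue: Int {
    get { return num }
    set {
        if newValue > maximum {   // compare the incoming value, not the stored one
            num = maximum
            projectedValue = true
        }
        else {
            num = newValue
            projectedValue = false
        }
    }
}

// x.height = 9  -> within the limit, so $height stays false
// x.width = 89  -> clamped to 12, so $width becomes true
// print(x.$height, x.$width, x.area) // false true 108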

Is there a better way to have a Swift array with names associated with index numbers?

In Swift, I want to have an array of items and then to be able to access those items by name or by index. The array is a fixed size array with 3 elements.
I have the following implementation using a struct
struct Boxes {
    var array: [Int]!
    var a: Int {
        get { return array[0] }
        set { array[0] = newValue }
    }
    var b: Int {
        get { return array[1] }
        set { array[1] = newValue }
    }
    var c: Int {
        get { return array[2] }
        set { array[2] = newValue }
    }
    init() {
        self.array = Array<Int>(repeating: 0, count: 3)
    }
}
And then, use it as follows:
var box = Boxes()
box.a = 1
box.b = box.array[0]
box.array[0] = 2
box.b = box.a
Is there a simpler, more elegant/concise form to do this?
struct Boxes {
    var array: [Int] {
        get { return [a, b, c] }
        set { (a, b, c) = (newValue[0], newValue[1], newValue[2]) }
    }
    var (a, b, c) = (0, 0, 0)
}
Note that this is based on this sentence from the question: "fixed size array with 3 elements".
Exact same usage:
var box = Boxes()
box.a = 1
box.b = box.array[0]
box.array[0] = 2
box.b = box.a
Extra:
You can add a custom initializer if you want to initialize it with an array:
init(array: [Int] = [0, 0, 0]) {
    self.array = array
}
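For example (the values are only for illustration):

var box = Boxes(array: [1, 2, 3])
box.b      // 2
box.array  // [1, 2, 3]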
How about using an enum to store name-to-index associations, and adding a subscript to Boxes to access the array:
enum BoxName : Int {
    case a, b, c
}
struct Boxes {
    var array: [Int]!
    subscript(_ name: BoxName) -> Int {
        get { return array[name.rawValue] }
        set { array[name.rawValue] = newValue }
    }
    init() {
        self.array = Array<Int>(repeating: 0, count: 3)
    }
}
// usage:
var box = Boxes()
box[.a] = 1
box[.b] = box.array[0]
box.array[0] = 2
box[.b] = box[.a]
Now when you want to add a new element and a new name in the array, you just need to add a new enum case.
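Not part of the original answer, but if BoxName also conforms to CaseIterable, the array size can follow the enum automatically, so adding a case really is the only change needed. A small sketch of that idea:

enum BoxName: Int, CaseIterable {
    case a, b, c, d   // adding a case automatically grows the array
}
struct Boxes {
    var array: [Int]
    subscript(_ name: BoxName) -> Int {
        get { return array[name.rawValue] }
        set { array[name.rawValue] = newValue }
    }
    init() {
        array = Array(repeating: 0, count: BoxName.allCases.count)
    }
}

var box = Boxes()
box[.d] = 4
box.array // [0, 0, 0, 4]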

Set does not work despite copying from Developer Documentation

Whilst attempting to create a set, I get the error stating "Cannot convert value of type '[Int]' to specified type 'Set'". This occurs even though I write the code exactly as in the DD: https://developer.apple.com/documentation/swift/set
var intSet2 : Set = [2, 3, 5, 7]
// Cannot convert value of type '[Int]' to specified type 'Set'
I have now switched to 'NSSet' (which I found by accident) and now it appears to work. How come using normal 'Set' does not work? When I go to the DD of NSSet it says: "An object representing a static, unordered, uniquing collection, for use instead of a Set constant in cases that require reference semantics." Is this about reference types? The DD also states that arrays are value types. I'm at a loss as to when to use Set or NSSet (or even NSMutableSet, for that matter).
var intSet2 : NSSet = [2, 3, 5, 7]
// Works
Lastly, when I try to convert an array of colors with type [CardColor] to a Set or NSSet, I receive an error message stating: "Cannot convert value of type '[CardColor]' to specified type 'NSSet'".
let colorCheck = selectedCards.map { $0.color }
var colorCheckSet : NSSet = colorCheck
// Cannot convert value of type '[CardColor]' to specified type 'NSSet'
Thanks for your help in advance.
Set model:
import Foundation
class Set {
    // MARK: properties
    var deck = [Card]()
    var tableCards = [Card]()
    var matchedCards = [Card]()
    var selectedCards: [Card] {
        get {
            var cards = [Card]()
            for card in tableCards.indices {
                if tableCards[card].isSelected == true {
                    cards.append(tableCards[card])
                }
            }
            return cards
        }
    }
    var unmatchedCards = 12
    var score = 0
    // MARK: functions
    // Selects the card. If this is the third card to be selected, proceeds to check for matches
    func selectCard(at index: Int) {
        if tableCards[index].isSelected == false {
            if selectedCards.count < 3 {
                tableCards[index].isSelected = true
                if selectedCards.count == 3 {
                    checkIfCardsMatch()
                }
            }
        }
        else if tableCards[index].isSelected == true {
            tableCards[index].isSelected = false
        }
    }
    func checkIfCardsMatch() {
        let colorCheck = selectedCards.map { $0.color }
        var colorCheckSet : NSSet = colorCheck
        // Cannot convert value of type '[CardColor]' to specified type 'NSSet'
        var intSet2 : Set = [2, 3, 5, 7]
        // Cannot convert value of type '[Int]' to specified type 'Set'
        // for item in colorCheck {
        //     colorCheckSet.insert()
        // }
        // let symbolCheck: Set = selectedCards.map() { $0.symbol }
        // let numberCheck: Set = selectedCards.map() { $0.number }
        // let shadingCheck: Set = selectedCards.map() { $0.shading }
    }
    // MARK: functions
    func dealThreeMoreCards() {
        if unmatchedCards <= 21 {
            unmatchedCards += 3
        }
        print(unmatchedCards)
    }
    // MARK: initialization
    init() {
        for cardcolor in CardColor.allCases {
            for cardsymbol in CardSymbol.allCases {
                for cardnumber in CardNumber.allCases {
                    for cardshading in CardShading.allCases {
                        let card = Card(initcolor: cardcolor, initsymbol: cardsymbol, initnumber: cardnumber, initshading: cardshading)
                        deck.append(card)
                    }
                }
            }
        }
    }
}
Card model:
import Foundation
struct Card {
    var identifier: Int = 0
    var isSelected = false
    var color: CardColor
    var symbol: CardSymbol
    var number: CardNumber
    var shading: CardShading
    static var identifierFactory = 0
    init(initcolor: CardColor, initsymbol: CardSymbol, initnumber: CardNumber, initshading: CardShading) {
        color = initcolor
        symbol = initsymbol
        number = initnumber
        shading = initshading
        Card.identifierFactory += 1
        self.identifier = Card.identifierFactory
    }
}
enum CardColor: Int, CaseIterable {
    case red = 1
    case green = 2
    case purple = 3
    // static let all = [cardColor.red, cardColor.green, cardColor.purple]
}
enum CardSymbol: Int, CaseIterable {
    case ovals = 1
    case squiggles = 2
    case diamonds = 3
    // static let all = [cardSymbol.ovals, cardSymbol.squiggles, cardSymbol.diamonds]
}
enum CardNumber: Int, CaseIterable {
    case one = 1
    case two = 2
    case three = 3
    // static let all = [cardNumber.one, cardNumber.two, cardNumber.three]
}
enum CardShading: Int, CaseIterable {
    case solid = 1
    case open = 2
    case striped = 3
    // static let all = [cardShading.solid, cardShading.open, cardShading.striped]
}
// Not every Card variable has been included below. Could cause issues later.
extension Card: Hashable {
    static func == (lhs: Card, rhs: Card) -> Bool {
        return lhs.identifier == rhs.identifier &&
            lhs.isSelected == rhs.isSelected &&
            lhs.color == rhs.color &&
            lhs.symbol == rhs.symbol &&
            lhs.number == rhs.number &&
            lhs.shading == rhs.shading
    }
    func hash(into hasher: inout Hasher) {
        hasher.combine(identifier)
        hasher.combine(isSelected)
        hasher.combine(color)
        hasher.combine(symbol)
        hasher.combine(number)
        hasher.combine(shading)
    }
}
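The most likely culprit is the model class being named Set: declaring your own class Set shadows Swift's generic Set<Element> inside this module, so var intSet2 : Set = [2, 3, 5, 7] tries to build the game class from an array literal and fails. NSSet still works because its name is not shadowed and it can be built from an array literal of bridgeable values such as Int; assigning an existing [CardColor] variable to an NSSet, however, is not an implicit conversion, which is why that line fails as well. Reference semantics are not really the issue here. Below is a sketch of checkIfCardsMatch that sidesteps the clash by spelling out Swift.Set (renaming the class, e.g. to SetGame, would be the cleaner fix; that name is just an illustration):

func checkIfCardsMatch() {
    // Inside this file `Set` refers to the game class, so qualify the standard library type.
    let intSet2: Swift.Set = [2, 3, 5, 7]                          // Set<Int>

    // CardColor is a raw-value enum, so it is Hashable and can go straight into a Swift Set.
    let colorCheckSet = Swift.Set(selectedCards.map { $0.color })  // Set<CardColor>

    print(intSet2, colorCheckSet)
}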

Swift/Xcode build error: Cannot convert value of type [Die] to expected argument type [Die]

Using Xcode 8.3.3, which I believe uses Swift 3.1, but I'm not 100% sure (how can you tell?). Here is the complete code. Note that when I paste this into a clean playground, I get no errors. But within an Xcode project, I get the build error on the line "let dice = Dice(withArrayOfDie: arrayOfDie)" in the unit test:
let defaultFaceCount = 6
let defaultDieCount = 6
func randomInt(withMaxValue maxValue: Int) -> Int {
    return Int(arc4random_uniform(UInt32(maxValue))) + 1
}
class Die
{
    private let m_faceCount: Int // Constant only set in init
    private var m_faceValue: Int?
    init(numFaces initialFaceCount: Int, withValue initialFaceValue: Int) {
        // Make sure number of faces is greater than 0.
        m_faceCount = (initialFaceCount > 0) ? initialFaceCount : defaultFaceCount
        // Make sure face value is in proper range.
        if initialFaceValue == 0 || initialFaceValue > m_faceCount {
            m_faceValue = randomInt(withMaxValue: m_faceCount)
        }
        else {
            m_faceValue = abs(initialFaceValue)
        }
    }
    convenience init(numFaces initialFaceCount: Int) {
        self.init(numFaces: initialFaceCount,
                  withValue: randomInt(withMaxValue: initialFaceCount))
    }
    convenience init() {
        self.init(numFaces: defaultFaceCount)
    }
    var faceValue: Int {
        get {
            return m_faceValue!
        }
    }
    var faceCount: Int {
        get {
            return m_faceCount
        }
    }
    func roll() {
        // face values are 1 based!
        m_faceValue = randomInt(withMaxValue: m_faceCount)
    }
}
class Dice {
    var m_dice: [Die]
    var m_occurrencesOf: [Int]
    // Init with a pre-initialized array of Die. Every Die in
    // the array must have the same face count.
    init(withArrayOfDie: [Die]) {
        var faceCount = defaultFaceCount
        if withArrayOfDie.isEmpty {
            // If there are no dice, add defaults.
            m_dice = [Die]()
            for _ in 1...defaultDieCount {
                m_dice.append(Die(numFaces: defaultFaceCount))
            }
        }
        else {
            m_dice = withArrayOfDie
            faceCount = m_dice[0].faceCount
        }
        // Keep trace of # of occurrences of each face value.
        m_occurrencesOf = Array(repeating: 0, count: faceCount)
        for die in m_dice {
            m_occurrencesOf[die.faceValue - 1] += 1
        }
    }
    // Init numDice dice, each with numFaces.
    convenience init(numDice count: Int, numFaces faceCount: Int) {
        var dice = [Die]()
        for _ in 1...count {
            dice.append(Die(numFaces: faceCount))
        }
        self.init(withArrayOfDie: dice)
    }
    // Init defaultDieCount dice, each with defaultFaceCount faces.
    convenience init() {
        self.init(numDice: defaultDieCount, numFaces: defaultFaceCount)
    }
    var count: Int {
        return m_dice.count
    }
    // Retrieve the die at the specified (0 based) index.
    func die(atIndex index: Int) -> Die? {
        if !m_dice.isEmpty && index >= 0 && index < m_dice.count {
            return m_dice[index]
        }
        return nil
    }
    subscript(index: Int) -> Die? {
        get {
            return die(atIndex: index)
        }
    }
}
// Unit Test
var arrayOfDie = [Die]()
for i in 1...6 {
    arrayOfDie.append(Die())
}
let dice = Dice(withArrayOfDie: arrayOfDie)
// XCTAssertEqual(6, dice.count)
I get the build error "Cannot convert value of type [Die] to expected argument type [Die]" on the line "let dice = Dice(withArrayOfDie: arrayOfDie)". I cannot figure out why the argument, which is an array of Die, does not match the expected init argument type.
Thanks!
Get rid of the () in var m_dice: [Die]():
class Dice {
    var m_dice: [Die]
    init(withArrayOfDie dice: [Die]) {
        m_dice = dice
    }
}