Swift return bool in method

I've made this method:
func checkScore(player: Int) -> Bool {
    var checkedFields: [Int] = []
    var won: Bool = false
    for var i = 0; i <= 9; i += 1 {
        if(winningCombinations[i] == player) {
            checkedFields.append(i)
        }
    }
    for value in winningCombinations {
        var hits = 0
        for n in checkedFields {
            if value.contains(n) {
                hits += 1
            }
        }
        if hits == 3 {
            won = true
        }
    }
    return won
}
But when I try to build it, everything becomes white and the build crashes. Am I doing something wrong here? I call it like this:
if self.checkScore(player) {
    print("Won!")
}
(I see no error message!)

Your func checkScore(player: Int) accepts player, which is of type Int.
In your code you also say: if(winningCombinations[i] == player), meaning that you expect the elements of the array winningCombinations to also be of type Int.
But then you say
for value in winningCombinations {
    var hits = 0
    for n in checkedFields {
        if value.contains(n) {
If value is an element of winningCombinations, that means value is an Int. So how can you say value.contains(n)? Int cannot perform a contains operation; arrays can.
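For reference, here is a minimal sketch of a consistently typed version. The fields board array and the [[Int]] shape of winningCombinations are assumptions for illustration, not taken from the original post:

// Assumed setup: fields[i] holds the player number occupying field i (0 = empty),
// and winningCombinations lists the index triples that win the game.
var fields = [Int](repeating: 0, count: 9)
let winningCombinations: [[Int]] = [
    [0, 1, 2], [3, 4, 5], [6, 7, 8],   // rows
    [0, 3, 6], [1, 4, 7], [2, 5, 8],   // columns
    [0, 4, 8], [2, 4, 6]               // diagonals
]

func checkScore(player: Int) -> Bool {
    // Collect the indices of the fields owned by this player.
    var checkedFields: [Int] = []
    for i in 0..<fields.count where fields[i] == player {
        checkedFields.append(i)
    }
    // The player has won if every index of some winning combination was checked.
    for combination in winningCombinations {
        var hits = 0
        for n in combination where checkedFields.contains(n) {
            hits += 1
        }
        if hits == combination.count {
            return true
        }
    }
    return false
}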


How to check if a value in a dictionary has duplicates?

The algorithm below checks whether an array contains at least one duplicate. It uses a dictionary to store the occurrences; the time complexity is linear, but it has to traverse the dictionary to see if any key occurs twice. In Swift, how can I look up a value to see if it occurs more than once in constant time?
func containsDuplicate(_ nums: [Int]) -> Bool {
    var frequencyTable = [Int:Int]()
    for num in nums {
        frequencyTable[num] = (frequencyTable[num] ?? 0) + 1
    }
    for value in frequencyTable.values {
        if value >= 2 {
            return true
        }
    }
    return false
}
containsDuplicate([1,1,2,3,3,3,3,4])
The second loop is not necessary if the first loop checks if the current element has already been inserted before, and returns from the function in that case:
func containsDuplicate(_ nums: [Int]) -> Bool {
    var frequencyTable = [Int:Int]()
    for num in nums {
        if frequencyTable[num] != nil {
            return true
        }
        frequencyTable[num] = 1
    }
    return false
}
Then it becomes apparent that we don't need a dictionary, a set is sufficient:
func containsDuplicate(_ nums: [Int]) -> Bool {
    var seen = Set<Int>()
    for num in nums {
        if seen.contains(num) {
            return true
        }
        seen.insert(num)
    }
    return false
}
This can be further simplified: The “insert and check if element was already present” operation can be done in a single call:
func containsDuplicate(_ nums: [Int]) -> Bool {
    var seen = Set<Int>()
    for num in nums {
        if !seen.insert(num).inserted {
            return true
        }
    }
    return false
}
This is similar to the solution from this answer
return nums.count != Set(nums).count
but possibly more efficient: The function returns immediately when a duplicate element has been detected.
Finally we can make the function generic, so that it works with all arrays of a hashable type:
func containsDuplicate<T: Hashable>(_ array: [T]) -> Bool {
    var seen = Set<T>()
    for element in array {
        if !seen.insert(element).inserted {
            return true
        }
    }
    return false
}
Example:
print(containsDuplicate([1,1,2,3,3,3,3,4])) // true
print(containsDuplicate(["A", "X"])) // false
Or as an extension for arbitrary collections of a hashable type:
extension Collection where Element: Hashable {
    func containsDuplicate() -> Bool {
        var seen = Set<Element>()
        for element in self {
            if !seen.insert(element).inserted {
                return true
            }
        }
        return false
    }
}
print([1,1,2,3,3,3,3,4].containsDuplicate())
print(["A", "X"].containsDuplicate())
If you just want to know whether it has duplicates, you can use a set and compare the counts:
func containsDuplicate(_ nums: [Int]) -> Bool {
    return Set(nums).count != nums.count
}
This works because the set removes the duplicate values.
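For example (a small usage sketch, not part of the original answer):

containsDuplicate([1, 1, 2, 3])   // true:  the Set has 3 elements, the array has 4
containsDuplicate([1, 2, 3])      // false: both counts are 3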

Cannot convert type of Int to expected value [MyCustomType]

Xcode 8.3.3 is giving me this Swift 3 error on this line
values2[index] = nextValue(currentValue)
Cannot convert value of type 'Int' to expected argument type 'Card'
Here's my code:
//
//  Card.swift
//  match
//
//  Created by quantum on 05/09/2017.
//  Copyright © 2017 Quantum Productions. All rights reserved.
//

import UIKit

class Card: NSObject {
    var quantity = 0
    var fill = 0
    var shape = 0
    var color = 0

    override var description: String {
        return "Q" + String(quantity) + "/F" + String(fill) + "/S" + String(shape) + "/C" + String(color)
    }

    override init() {
        super.init()
    }

    static func properties() -> [String] {
        return ["quantity", "fill", "shape", "color"]
    }

    static func isMatch(cards: [Card]) -> Bool {
        for property in self.properties() {
            var sum = 0
            for card in cards {
                sum = sum + (card.value(forKey: property) as! Int)
            }
            if !([3, 6, 9, 7].contains(sum)) {
                return false
            }
        }
        return true
    }

    static func deck(_ values: [Int], _ index: Int, _ max: Int, _ acc: [Card]) -> [Card] {
        let currentValue = values[index]
        var values2 = values
        if currentValue >= max {
            if index == 0 {
                return acc
            }
            values2[index] = 0
            values2[index-1] = values2[index-1] + 1
            return deck(values, index - 1, max, acc)
        } else {
            var acc2 = acc
            let card = Card()
            for (index, element) in self.properties().enumerated() {
                card.setValue(values[index], forKey: element)
            }
            acc2.append(Card())
            values2[index] = nextValue(currentValue)
            return deck(values2, index, max, acc2)
        }
    }

    func nextValue(_ v: Int) -> Int {
        if (v == 0) {
            return 1
        } else if (v == 1) {
            return 2
        }
        return 4
    }

    static func deck() -> [Card] {
        return deck([1,1,1,1], 4, 3, [Card]())
    }
}
This is inside my Card class.
Strangely, if I try (this is wrong, I'm testing the compiler error)
values2[index] = nextValue(Card())
I get the error Cannot assign the value of type (Int) -> Int to type 'Int'.
Swift thinks my Card is an Int? I'm confused as to what's happening.
I expected the call to nextValue with the variable currentValue, which should be an Int.
It's a bad error message from the compiler.
Your problem is that deck is declared static, but you're trying to call nextValue which is not declared static. This means that nextValue implicitly takes a hidden argument, self, but deck isn't providing it.
If you add static to the func nextValue declaration, it will work like you expect. (You'll get an error on the line referring to self.properties instead, but you'll be closer.)
To make this work properly, you probably want all these functions to be non-static instead. Just think about how this code gets called initially (i.e. how you get your first instance of Card).
A static method cannot call an instance method: the idea makes no sense, as there is no instance. Thus your reference to nextValue is impossible. That is why the line is problematic. How can a static method deck call an instance method nextValue?
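A minimal standalone sketch of the distinction (the names here are made up, not taken from the poster's code):

class Example {
    func instanceMethod(_ v: Int) -> Int {     // needs an instance: there is an implicit self
        return v + 1
    }

    static func staticMethod(_ v: Int) -> Int {
        // return instanceMethod(v)            // error: no instance available to supply self
        return Example().instanceMethod(v)     // works: an explicit instance provides self
    }
}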

Swift/XCode build error: Cannot convert value of type [Die] to expected argument type [Die]

Using Xcode 8.3.3, which I believe uses Swift 3.1, but I'm not 100% sure (how can you tell?). Here is the complete code. Note that when I paste this into a clean Playground, I get no errors, but within an Xcode project I get the build error on the line "let dice = Dice(withArrayOfDie: arrayOfDie)" in the unit test:
let defaultFaceCount = 6
let defaultDieCount = 6

func randomInt(withMaxValue maxValue: Int) -> Int {
    return Int(arc4random_uniform(UInt32(maxValue))) + 1
}

class Die
{
    private let m_faceCount: Int // Constant only set in init
    private var m_faceValue: Int?

    init(numFaces initialFaceCount: Int, withValue initialFaceValue: Int) {
        // Make sure number of faces is greater than 0.
        m_faceCount = (initialFaceCount > 0) ? initialFaceCount : defaultFaceCount
        // Make sure face value is in proper range.
        if initialFaceValue == 0 || initialFaceValue > m_faceCount {
            m_faceValue = randomInt(withMaxValue: m_faceCount)
        }
        else {
            m_faceValue = abs(initialFaceValue)
        }
    }

    convenience init(numFaces initialFaceCount: Int) {
        self.init(numFaces: initialFaceCount,
                  withValue: randomInt(withMaxValue: initialFaceCount))
    }

    convenience init() {
        self.init(numFaces: defaultFaceCount)
    }

    var faceValue: Int {
        get {
            return m_faceValue!
        }
    }

    var faceCount: Int {
        get {
            return m_faceCount
        }
    }

    func roll() {
        // face values are 1 based!
        m_faceValue = randomInt(withMaxValue: m_faceCount)
    }
}

class Dice {
    var m_dice: [Die]
    var m_occurrencesOf: [Int]

    // Init with a pre-initialized array of Die. Every Die in
    // the array must have the same face count.
    init(withArrayOfDie: [Die]) {
        var faceCount = defaultFaceCount
        if withArrayOfDie.isEmpty {
            // If there are no dice, add defaults.
            m_dice = [Die]()
            for _ in 1...defaultDieCount {
                m_dice.append(Die(numFaces: defaultFaceCount))
            }
        }
        else {
            m_dice = withArrayOfDie
            faceCount = m_dice[0].faceCount
        }
        // Keep track of # of occurrences of each face value.
        m_occurrencesOf = Array(repeating: 0, count: faceCount)
        for die in m_dice {
            m_occurrencesOf[die.faceValue - 1] += 1
        }
    }

    // Init numDice dice, each with numFaces.
    convenience init(numDice count: Int, numFaces faceCount: Int) {
        var dice = [Die]()
        for _ in 1...count {
            dice.append(Die(numFaces: faceCount))
        }
        self.init(withArrayOfDie: dice)
    }

    // Init defaultDieCount dice, each with defaultFaceCount faces.
    convenience init() {
        self.init(numDice: defaultDieCount, numFaces: defaultFaceCount)
    }

    var count: Int {
        return m_dice.count
    }

    // Retrieve the die at the specified (0 based) index.
    func die(atIndex index: Int) -> Die? {
        if !m_dice.isEmpty && index >= 0 && index < m_dice.count {
            return m_dice[index]
        }
        return nil
    }

    subscript(index: Int) -> Die? {
        get {
            return die(atIndex: index)
        }
    }
}

// Unit Test
var arrayOfDie = [Die]()
for i in 1...6 {
    arrayOfDie.append(Die())
}
let dice = Dice(withArrayOfDie: arrayOfDie)
// XCTAssertEqual(6, dice.count)
I get the build error "Cannot convert value of type [Die] to expected argument type [Die]" on the line "let dice = Dice(withArrayOfDie: arrayOfDie)". I can't figure out why the argument, which is an array of Die, does not match the expected init argument type.
Thanks!
Get rid of the () in var m_dice: [Die]():
class Dice {
    var m_dice: [Die]

    init(withArrayOfDie dice: [Die]) {
        m_dice = dice
    }
}
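For context, a minimal sketch of the two stored-property forms that do compile (the empty Die stub is only there to make the snippet self-contained). The declaration var m_dice: [Die]() mixes a type annotation with an initializer call, which is what the answer above removes:

class Die {}

class Dice {
    var diceA: [Die]        // type annotation only; must be assigned before init finishes
    var diceB = [Die]()     // default value from an initializer call; the type is inferred

    init(withArrayOfDie dice: [Die]) {
        diceA = dice
        diceB = dice
    }
}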

Selection sort algorithm. Swift.

There is a great resource for Swift algorithms by Ray Wenderlich:
https://github.com/raywenderlich/swift-algorithm-club
One of the basic ones is "Selection Sort":
func selectionSort(_ array: [Int]) -> [Int] {
    guard array.count > 1 else { return array }
    var a = array
    for x in 0 ..< a.count - 1 {
        var lowest = x
        for y in x + 1 ..< a.count {
            if a[y] < a[lowest] {
                lowest = y
            }
        }
        if x != lowest {
            swap(&a[x], &a[lowest])
        }
    }
    return a
}
I was trying to understand what's happening here and was confused by the var lowest = x step.
I decided to write my own solution to the problem and didn't use that step:
func selectSortArray1(_ array: [Int]) -> [Int] {
    guard array.count > 1 else { return array }
    var subArray = array
    for x in 0..<subArray.count - 1 {
        for y in x+1..<subArray.count {
            if subArray[y] < subArray[x] {
                swap(&subArray[x], &subArray[y])
            }
        }
    }
    return subArray
}
Seems to work as intended.
However, I doubt myself.
Is var lowest = x required for some edge cases?
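For comparison, a small instrumented sketch (not from the original post). Both variants produce a sorted array; the difference is that remembering lowest defers the swap, so the outer loop performs at most one swap per pass, whereas the variant above may swap every time it finds a smaller element:

func selectionSortCountingSwaps(_ array: [Int]) -> (sorted: [Int], swaps: Int) {
    guard array.count > 1 else { return (array, 0) }
    var a = array
    var swaps = 0
    for x in 0 ..< a.count - 1 {
        var lowest = x
        for y in x + 1 ..< a.count where a[y] < a[lowest] {
            lowest = y                // only remember the index, do not swap yet
        }
        if x != lowest {
            a.swapAt(x, lowest)       // Swift 4+ spelling of swap(&a[x], &a[lowest])
            swaps += 1
        }
    }
    return (a, swaps)
}

print(selectionSortCountingSwaps([5, 4, 3, 2, 1]))   // (sorted: [1, 2, 3, 4, 5], swaps: 2)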

Check prime number in array

I'm trying to write a check for prime numbers in an array. The array is populated randomly, but when the array is processed the code does not work... What am I doing wrong? Thank you!
Update 2
The array is now filled correctly, but something is wrong with the prime-number test. I can't say exactly what is wrong, but the rule for picking out prime numbers is not working.
import Foundation
func randomArray(var i: Int, var k: Int, var array: [Int] = [], var newArray: [Int] = []) -> Int {
    for i = 0; i < 10; i++ {
        array.append(Int(arc4random_uniform(10)))
    }
    for i = 0; i <= array.count; i++ {
        for k = 2; k < array[i]; k++ {
            if array[i] != 0 && array[i] != 1 && array[i] % k != 0 {
                newArray.append(array[i])
            } else {
                return 0
            }
        }
    }
    return newArray[i]
}
randomArray(0, k: 0)
It's not clear to me what you want to do.
a) If you want to generate an array of length k at random and filter it for prime numbers, use the code below. Note that this can give you an array of any length between 0 and k, because there may be no primes in the randomly generated array, or every number generated may be a prime:
func isPrime(num: Int) -> Bool {
    if num < 2 {
        return false
    }
    for i in 2..<num {
        if num % i == 0 {
            return false
        }
    }
    return true
}

func randomArray(len: Int) -> [Int] {
    var results = [Int]()
    for _ in 0..<len {
        results.append(Int(arc4random_uniform(10)))
    }
    return results.filter(isPrime)
}
b) If you want an array of k primes, use this instead:
func randomPrimeArray(len: Int) -> [Int] {
    var results = [Int]()
    while results.count < len {
        let x = Int(arc4random_uniform(10))
        if isPrime(x) {
            results.append(x)
        }
    }
    return results
}
If you want to filter the prime numbers out of an existing array, use this code:
let primeNumbers = myArray.filter { number in
    if number < 2 {
        return false
    }
    for i in 2..<number where number % i == 0 {
        return false
    }
    return true
}
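For example, reusing the isPrime function from answer (a) above (the input array here is made up for illustration):

let myArray = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
print(myArray.filter(isPrime))   // [2, 3, 5, 7]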