class A {
    let val: Int
    init(val: Int) {
        self.val = val
    }
}
I have these 3 strings:
let className = "A"
let argName = "val"
let argValue = "4"
How can I call A(val: 4) using these 3 strings?
Since you've noted in the comments that the types will all be subclasses of some supertype, that supertype can handle all the dispatching. In Cocoa, this is a pretty common pattern known as a class cluster.
class SuperA {
    enum SuperAError: Error {
        case cannotConstruct
    }

    static func create(className: String, argName: String, argValue: String) throws -> SuperA {
        switch className {
        case "A":
            guard argName == "val",
                  let value = Int(argValue)
            else { throw SuperAError.cannotConstruct }
            return A(val: value)
        default:
            throw SuperAError.cannotConstruct
        }
    }
}
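A hypothetical call site for this factory (it assumes A is actually declared as a subclass of SuperA, with the init(val:) from the question, so the result can be typed as SuperA):
// Hypothetical usage sketch; assumes `class A: SuperA` with the init(val:) shown above.
do {
    let instance = try SuperA.create(className: "A", argName: "val", argValue: "4")
    print(instance)   // an A whose val == 4
} catch {
    print("Could not construct the requested type: \(error)")
}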
Now, I don't particularly like this approach. This kind of subclassing tends to be poor Swift. Swift is fine with classes when you require a reference type, but it doesn't favor subclassing. I'd do this with a Buildable protocol and a Builder:
enum BuildableError: Error {
    case unknownType
    case badParameters
}

protocol Buildable {
    init(argName: String, argValue: String) throws
    // ... and the rest of the methods you require ...
}

struct A {
    var val: Int
}

extension A: Buildable {
    init(argName: String, argValue: String) throws {
        guard argName == "val", let value = Int(argValue) else {
            throw BuildableError.badParameters
        }
        self.init(val: value)
    }
}

final class Builder {
    var buildables: [String: Buildable.Type] = [:]

    func build(className: String, argName: String, argValue: String) throws -> Buildable {
        guard let buildable = buildables[className] else {
            throw BuildableError.unknownType
        }
        return try buildable.init(argName: argName, argValue: argValue)
    }
}
let builder = Builder()
builder.buildables["A"] = A.self
let a = try builder.build(className: "A", argName: "val", argValue: "4")
If this leads to duplicated code, there are straightforward ways to address that with other protocols. For example, if many of your types had init(val: Int), they could share code with another protocol:
protocol ValIntBuildable: Buildable {
    init(val: Int)
}

extension ValIntBuildable {
    init(argName: String, argValue: String) throws {
        guard argName == "val", let value = Int(argValue) else {
            throw BuildableError.badParameters
        }
        self.init(val: value)
    }
}
extension A: ValIntBuildable {}
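For instance, a hypothetical second type B with the same init(val: Int) shape would pick up the shared conformance for free:
// Hypothetical second type; like A, it only needs init(val:) (here the memberwise initializer).
struct B: ValIntBuildable {
    var val: Int
}

builder.buildables["B"] = B.self
let b = try builder.build(className: "B", argName: "val", argValue: "7")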
In native Swift alone, you can't. Swift is not dynamic in such a way that you can instantiate an arbitrary class based on a string name of the class, and so forth. Objective-C is dynamic and has ways to do this, so if this kind of thing is important to you, make A an NSObject subclass and write this part of the code in Objective-C (or use equivalent Cocoa/objc-runtime calls).
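For illustration, a minimal sketch of the Cocoa route (assumptions: A is re-declared here as an NSObject subclass, exposed to the Objective-C runtime under the name "A", and the "argument" is applied with key-value coding rather than by calling init(val:) directly):
import Foundation

@objc(A) class A: NSObject {
    @objc var val: Int = 0
}

let className = "A"
let argName = "val"
let argValue = "4"

if let cls = NSClassFromString(className) as? NSObject.Type {
    let instance = cls.init()                          // parameterless init via the runtime
    instance.setValue(Int(argValue), forKey: argName)  // KVC stands in for the init argument
    print((instance as? A)?.val ?? -1)                 // 4
}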
I was reading through the wonderful blog post by John Sundell where he demonstrates how one can use custom raw values with Swift enums.
I had a play around with his code and made up a bit of a contrived example to see how robust the Enums with custom raw types are.
I wondered if I could make a type, an instance of which can be used as a raw value whilst being expressible by both String and Integer literals. When I implemented ExpressibleByIntegerLiteral, the String part did not work the way I expected, and I'm not quite sure why.
Here is the code (the question follows the code):
import Foundation
struct Path: Equatable {
    var string: String
    var int: Int
}

enum Target: Path {
    case content
    case resources
    case images = "resources/images"
}

extension Path: ExpressibleByIntegerLiteral, ExpressibleByStringLiteral {
    init(stringLiteral: String) {
        string = stringLiteral
        int = 0
    }

    init(integerLiteral: Int) {
        string = ""
        int = integerLiteral
    }
}
if let stringTarget = Target(rawValue: "content") {
    print("stringTarget: \(stringTarget)")
}

if let intTarget = Target(rawValue: 1) {
    print("intTarget: \(intTarget)")
}
What I expected the above code to produce is both stringTarget and intTarget being initialized to the appropriate enum cases; however, the stringTarget one turns out to be nil.
If you remove the conformance to the ExpressibleByIntegerLiteral protocol (and the block of code that initializes intTarget), stringTarget automagically gets initialized as I expected: into the Target.content case.
Could someone please explain to me what is going on here?
Solving your Question
What I expected the above code to produce is both stringTarget and intTarget being initialized to the appropriate enum cases; however, the stringTarget one turns out to be nil.
The cases themselves are fine; the problem is that the raw values of .content and .resources contain "" rather than "content" and "resources", so the lookup by "content" finds nothing.
This happens because neither .content nor .resources is explicitly defined by a String literal. Because of that, they both take the ExpressibleByIntegerLiteral route and end up with string set to "":
init(integerLiteral: Int) {
    string = "" // see
    int = integerLiteral
}
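A quick way to see this claim (using the Path and Target definitions from the question):
print(Target.content.rawValue.string.isEmpty)   // true  – raw value came from init(integerLiteral:)
print(Target.images.rawValue.string)            // "resources/images" – raw value came from init(stringLiteral:)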
Solved for Int
Use this fancy method in place of Target(rawValue: 1):
func IntValue(_ this: Int) -> String? {
    let target = Target(rawValue: Path(string: "", int: this))
    return target != nil ? String(describing: target!) : nil
}
Solved for String
First, conform your enum to CaseIterable, like so:
enum Target: Path, CaseIterable {
Next, use this fancy method in place of Target(rawValue: "content"):
func StringValue(_ this: String) -> String? {
return Target.allCases.contains { this == String(describing: $0) } ? this : nil
}
Truly solved for String
Now, I've removed a crucial bug where a case like case foo = "bar" could be found both as 'foo' and as 'bar'. If you don't want that, use this code instead:
func StringValue(_ this: String) -> String? {
    // If `this` is itself a raw value, only accept it when it also matches the case name.
    if let a = Target(rawValue: Path(string: this, int: 0)) {
        return this == String(describing: a) ? this : nil
    }
    // Otherwise, accept `this` only if it matches a case name.
    return Target.allCases.contains { this == String(describing: $0) } ? this : nil
}
I am wondering if an enum can conform its rawValue to the ClosedRange struct, just like it can to String.
enum Foo: ClosedRange<Int> {
case bar = 1...4
}
Obviously this is not an option, since 1...4 is not a literal
This seems to work:
enum Foo: ClosedRange<Int> {
    case foo = "1...3"
    case bar = "1...4"

    func overlaps(_ with: Foo) -> Bool { return self.rawValue.overlaps(with.rawValue) }
}

extension ClosedRange: ExpressibleByStringLiteral {
    public typealias StringLiteralType = String

    public init(stringLiteral value: String) {
        let v = value.split(separator: ".")
        switch Bound.self {
        case is Int.Type: self = (Int(v[0])! as! Bound)...(Int(v[1])! as! Bound)
        default: fatalError()
        }
    }
}
It allows you to do this:
print(Foo.foo.overlaps(Foo.bar))
You can add more types, like Double or String, using this technique; a sketch follows.
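For example, a hedged sketch of the same extension grown to cover String bounds as well (this would replace the version above; note that fractional Double bounds would need smarter parsing than splitting on "."):
extension ClosedRange: ExpressibleByStringLiteral {
    public typealias StringLiteralType = String

    public init(stringLiteral value: String) {
        let v = value.split(separator: ".")
        switch Bound.self {
        case is Int.Type:
            self = (Int(v[0])! as! Bound)...(Int(v[1])! as! Bound)
        case is String.Type:
            // "a...f" splits into ["a", "f"]
            self = (String(v[0]) as! Bound)...(String(v[1]) as! Bound)
        default:
            fatalError("Unsupported Bound type")
        }
    }
}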
Side note: my attempt allows for non-unique rawValues (SR-13212), which is a shame. But I don't think that is fixable:
enum Foo: ClosedRange<Int> {
    case foo = "1...3"
    case bar = "1...4"
    case baz = "1...04" // Duplicate raw value, but Swift allows it.
}
Simplified example
Take a look at this simple protocol
protocol FooOwner {
static var foo: Self { get }
}
I would like an enum to conform to said protocol, and in my opinion this ought to work, since the static syntax SomeTypeConformingToFooOwner.foo should result in an instance of SomeTypeConformingToFooOwner, including the case where SomeTypeConformingToFooOwner is an enum.
enum Foo: FooOwner { // Type 'Foo' does not conform to protocol FooOwner
case foo
}
The workaround is this ugly thing:
protocol FooOwner {
static var fooOwner: Self { get }
}
enum Foo: FooOwner {
case foo
static var fooOwner: Foo {
return Foo.foo
}
}
Do you have a nicer workaround for enum conforming to protocols with static vars?
Real use case
protocol StringConvertibleError: Swift.Error {
    static var invalidCharactersError: Self { get }
}

protocol StringConvertibleErrorOwner {
    associatedtype Error: StringConvertibleError
}

protocol StringConvertible {
    var value: String { get }

    /// Calling this with an invalid String will result in a runtime crash.
    init(validated: String)

    init(string value: String) throws

    static func validate(_ string: String) throws -> String
}
// MARK: - Default Implementation Constrained
extension StringConvertible where Self: CharacterSetSpecifying, Self: StringConvertibleErrorOwner {
    static func validate(_ string: String) throws -> String {
        guard Self.allowedCharacters.isSuperset(of: CharacterSet(charactersIn: string)) else {
            throw Error.invalidCharactersError
        }
        // Valid
        return string
    }
}
struct HexString: StringConvertible, CharacterSetSpecifying, StringConvertibleErrorOwner {
    static var allowedCharacters = CharacterSet.hexadecimal

    let value: String

    init(validated unvalidated: String) {
        do {
            self.value = try HexString.validate(unvalidated)
        } catch {
            fatalError("Passed invalid string, error: \(error)")
        }
    }
}

extension HexString {
    enum Error: StringConvertibleError {
        static var invalidCharactersError: Error {
            return Error.invalidCharacters
        }

        case invalidCharacters
    }
}
So it's the last part that I would like to change to:
extension HexString {
enum Error: StringConvertibleError {
case invalidCharacters
}
}
I have many types similar to HexString. Of course I could use one shared Error enum, but I would like to have one specific enum per type.
SE-0280 solves this - if accepted. It is currently in review.
Edit 1
Great news, SE-0280 has been accepted.
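With SE-0280 in the language (Swift 5.3), an enum case with no associated values can itself act as the witness for a static property requirement, so the simplified example from above compiles as written (a minimal sketch assuming Swift 5.3 or later; note that the case name must match the requirement):
protocol FooOwner {
    static var foo: Self { get }
}

enum Foo: FooOwner {
    case foo   // the case itself satisfies the `static var foo` requirement
}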
This might not fully match OP's issue, but as it seems somewhat related, I'm throwing it out there in case it helps.
In my case, I cared about some cases, but other cases could be ignored (or handled in a common way).
I admit my approach is not elegant and requires a lot of boilerplate, but it got me past a similar problem.
// Given these enums, we want a protocol that matches enums with
// 'foo' and 'bar' cases.
enum FooFriends {
    case foo // conforms
    case bar // conforms
    case baz // don't really care
}

enum FooBar {
    case foo // conforms
    case bar // conforms
}

// We wish we could do this:
protocol FooBarWish {
    case foo // or static var foo: Self { get }
    case bar // or static var bar: Self { get }
}

// Workaround
// the boilerplate
enum FooBarProxy {
    case foo
    case bar
    case other
}

protocol FooBarProtocol {
    func getProxy() -> FooBarProxy
}

extension FooFriends: FooBarProtocol {
    func getProxy() -> FooBarProxy {
        switch self {
        case .foo:
            return .foo
        case .bar:
            return .bar
        default:
            return .other
        }
    }
}

extension FooBar: FooBarProtocol {
    func getProxy() -> FooBarProxy {
        switch self {
        case .foo:
            return .foo
        case .bar:
            return .bar
        }
    }
}
// Usage
// Instead of the ideal case (which won't work)
let fooBarOrFooFriend1: FooBarWish = FooFriends.foo
let fooBarOrFooFriend2: FooBarWish = FooBar.bar
// We can get by with
let fooBarProxy1 = FooFriends.foo.getProxy()
let fooBarProxy2 = FooBar.bar.getProxy()
// Verification
func checkIt(_ foobar: FooBarProxy) {
switch foobar {
case .foo:
print("it was foo")
case .bar:
print("it was bar")
case .other:
print("it was neither")
}
}
checkIt(fooBarProxy1)
checkIt(fooBarProxy2)
// =>
// it was foo
// it was bar
As an exercise in learning I'm rewriting my validation library in Swift.
I have a ValidationRule protocol that defines what individual rules should look like:
protocol ValidationRule {
typealias InputType
func validateInput(input: InputType) -> Bool
//...
}
The associated type InputType defines the type of input to be validated (e.g String). It can be explicit or generic.
Here are two rules:
struct ValidationRuleLength: ValidationRule {
typealias InputType = String
//...
}
struct ValidationRuleCondition<T>: ValidationRule {
typealias InputType = T
// ...
}
Elsewhere, I have a function that validates an input with a collection of ValidationRules:
static func validate<R: ValidationRule>(input i: R.InputType, rules rs: [R]) -> ValidationResult {
let errors = rs.filter { !$0.validateInput(i) }.map { $0.failureMessage }
return errors.isEmpty ? .Valid : .Invalid(errors)
}
I thought this was going to work but the compiler disagrees.
In the following example, even though the input is a String, rule1's InputType is a String, and rule2's InputType is a String...
func testThatItCanEvaluateMultipleRules() {
let rule1 = ValidationRuleCondition<String>(failureMessage: "message1") { $0.characters.count > 0 }
let rule2 = ValidationRuleLength(min: 1, failureMessage: "message2")
let invalid = Validator.validate(input: "", rules: [rule1, rule2])
XCTAssertEqual(invalid, .Invalid(["message1", "message2"]))
}
... I'm getting this extremely helpful error message:
_ is not convertible to ValidationRuleLength
which is cryptic but suggests that the types should be exactly equal?
So my question is... how do I append different types that all conform to a protocol with an associated type into a collection?
Unsure how to achieve what I'm attempting, or if it's even possible?
EDIT
Here's it is without context:
protocol Foo {
    typealias FooType
    func doSomething(thing: FooType)
}

class Bar<T>: Foo {
    typealias FooType = T

    func doSomething(thing: T) {
        print(thing)
    }
}

class Baz: Foo {
    typealias FooType = String

    func doSomething(thing: String) {
        print(thing)
    }
}

func doSomethingWithFoos<F: Foo>(thing: [F]) {
    print(thing)
}

let bar = Bar<String>()
let baz = Baz()

let foos: [Foo] = [bar, baz]
doSomethingWithFoos(foos)
Here we get:
Protocol Foo can only be used as a generic constraint because it has
Self or associated type requirements.
I understand that. What I need to say is something like:
doSomethingWithFoos<F: Foo where F.FooType == F.FooType>(thing: [F]) {
}
Protocols with type aliases cannot be used this way. Swift doesn't have a way to talk directly about meta-types like ValidationRule or Array. You can only deal with instantiations like ValidationRule where... or Array<String>. With typealiases, there's no way to get there directly. So we have to get there indirectly with type erasure.
Swift has several type-erasers. AnySequence, AnyGenerator, AnyForwardIndex, etc. These are generic versions of protocols. We can build our own AnyValidationRule:
struct AnyValidationRule<InputType>: ValidationRule {
    private let validator: (InputType) -> Bool

    init<Base: ValidationRule where Base.InputType == InputType>(_ base: Base) {
        validator = base.validate
    }

    func validate(input: InputType) -> Bool { return validator(input) }
}
The deep magic here is validator. It's possible that there's some other way to do type erasure without a closure, but that's the best way I know. (I also hate the fact that Swift cannot handle validate being a closure property. In Swift, property getters aren't proper methods. So you need the extra indirection layer of validator.)
With that in place, you can make the kinds of arrays you wanted:
let len = ValidationRuleLength()
len.validate("stuff")
let cond = ValidationRuleCondition<String>()
cond.validate("otherstuff")
let rules = [AnyValidationRule(len), AnyValidationRule(cond)]
let passed = rules.reduce(true) { $0 && $1.validate("combined") }
Note that type erasure doesn't throw away type safety. It just "erases" a layer of implementation detail. AnyValidationRule<String> is still different from AnyValidationRule<Int>, so this will fail:
let len = ValidationRuleLength()
let condInt = ValidationRuleCondition<Int>()
let badRules = [AnyValidationRule(len), AnyValidationRule(condInt)]
// error: type of expression is ambiguous without more context
I have a method which does exactly the same thing for two types of data in Swift.
To keep things simple (and without duplicating a method) I pass AnyObject as an argument to my method, which can be either of these two types. How do I unwrap it with an || (OR) statement so I can proceed? Or maybe this should be done otherwise?
func myFunc(data:AnyObject) {
if let data = data as? TypeOne {
// This works fine. But I need it to look something like unwrapping below
}
if let data = data as? TypeOne || let data = data as? TypeTwo { // <-- I need something like this
// Do my stuff here, but this doesn't work
}
}
I'm sure this is trivial in Swift, I just can't figure out how to make it work.
You can't unify two different casts of the same thing. You have to keep them separate because they are two different casts to two different types which the compiler needs to treat in two different ways.
var x = "howdy" as AnyObject
// x = 1 as AnyObject
// so x could have an underlying String or Int
switch x {
case let x as String:
print(x)
case let x as Int:
print(x)
default: break
}
You can call the same method from within those two different cases, if you have a way of passing a String or an Int to it; but that's the best you can do.
func printAnything(what:Any) {
print(what)
}
switch x {
case let x as String:
printAnything(x)
case let x as Int:
printAnything(x)
default: break
}
Of course you can ask
if (x is String || x is Int) {
but the problem is that you are no closer to performing an actual cast. The casts will still have to be performed separately.
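For example, continuing with the x from above (a small sketch):
if x is String || x is Int {
    // `x` is still statically AnyObject here; each use still needs its own cast
    if let s = x as? String { print("String:", s) }
    if let i = x as? Int { print("Int:", i) }
}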
Building on Clashsoft's comment, I think a protocol is the way to go here. Rather than pass in AnyObject and unwrap, you can represent the needed functionality in a protocol to which both types conform.
This ought to make the code easier to maintain, since you're coding toward specific behaviors rather than specific classes.
I mocked up some code in a playground that shows how this would work.
Hopefully it will be of some help!
protocol ObjectBehavior {
    var nickname: String { get set }
}

class TypeOne: ObjectBehavior {
    var nickname = "Type One"
}

class TypeTwo: ObjectBehavior {
    var nickname = "Type Two"
}

func myFunc(data: ObjectBehavior) -> String {
    return data.nickname
}

let object1 = TypeOne()
let object2 = TypeTwo()

println(myFunc(object1))
println(myFunc(object2))
Find out whether that shared code is exactly the same for both types. If yes:
protocol TypeOneOrTypeTwo {}
extension TypeOneOrTypeTwo {
func thatSharedCode() {
print("Hello, I am instance of \(self.dynamicType).")
}
}
extension TypeOne: TypeOneOrTypeTwo {}
extension TypeTwo: TypeOneOrTypeTwo {}
If not:
protocol TypeOneOrTypeTwo {
func thatSharedMethod()
}
extension TypeOne: TypeOneOrTypeTwo {
func thatSharedMethod() {
// code here:
}
}
extension TypeTwo: TypeOneOrTypeTwo {
func thatSharedMethod() {
// code here:
}
}
And here you go:
func myFunc(data: AnyObject) {
    if let data = data as? TypeOneOrTypeTwo {
        data.thatSharedCode() // Or `thatSharedMethod()` if your implementation differs for types.
    }
}
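Hypothetical call sites (assuming, as in the question, that TypeOne and TypeTwo are classes, and keeping the same pre-Swift 3 syntax as the snippet above):
myFunc(TypeOne())   // prints: Hello, I am instance of TypeOne.
myFunc(TypeTwo())   // prints: Hello, I am instance of TypeTwo.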
You mean like this?
enum IntOrString {
    case int(value: Int)
    case string(value: String)
}

func parseInt(_ str: String) -> IntOrString {
    if let intValue = Int(str) {
        return IntOrString.int(value: intValue)
    }
    return IntOrString.string(value: str)
}

switch parseInt("123") {
case .int(let value):
    print("int value \(value)")
case .string(let value):
    print("string value \(value)")
}

switch parseInt("abc") {
case .int(let value):
    print("int value \(value)")
case .string(let value):
    print("string value \(value)")
}
output:
int value 123
string value abc
I have tried to boil this issue down to its simplest form with the following.
Setup
Xcode Version 6.1.1 (6A2008a)
An enum defined in MyEnum.swift:
internal enum MyEnum: Int {
    case Zero = 0, One, Two
}

extension MyEnum {
    init?(string: String) {
        switch string.lowercaseString {
        case "zero": self = .Zero
        case "one": self = .One
        case "two": self = .Two
        default: return nil
        }
    }
}
and code that initializes the enum in another file, MyClass.swift:
internal class MyClass {
    let foo = MyEnum(rawValue: 0) // Error
    let fooStr = MyEnum(string: "zero")

    func testFunc() {
        let bar = MyEnum(rawValue: 1) // Error
        let barStr = MyEnum(string: "one")
    }
}
}
Error
Xcode gives me the following error when attempting to initialize MyEnum with its raw-value initializer:
Cannot convert the expression's type '(rawValue: IntegerLiteralConvertible)' to type 'MyEnum?'
Notes
Per the Swift Language Guide:
If you define an enumeration with a raw-value type, the enumeration automatically receives an initializer that takes a value of the raw value’s type (as a parameter called rawValue) and returns either an enumeration member or nil.
The custom initializer for MyEnum was defined in an extension to test whether the enum's raw-value initializer was being removed because of the following case from the Language Guide. However, it achieves the same error result.
Note that if you define a custom initializer for a value type, you will no longer have access to the default initializer (or the memberwise initializer, if it is a structure) for that type. [...]
If you want your custom value type to be initializable with the default initializer and memberwise initializer, and also with your own custom initializers, write your custom initializers in an extension rather than as part of the value type’s original implementation.
Moving the enum definition to MyClass.swift resolves the error for bar but not for foo.
Removing the custom initializer resolves both errors.
One workaround is to include the following function in the enum definition and use it in place of the provided raw-value initializer. So it seems as if adding a custom initializer has a similar effect to marking the raw-value initializer private.
init?(raw: Int) {
self.init(rawValue: raw)
}
Explicitly declaring protocol conformance to RawRepresentable in MyClass.swift resolves the inline error for bar, but results in a linker error about duplicate symbols (because raw-value type enums implicitly conform to RawRepresentable).
extension MyEnum: RawRepresentable {}
Can anyone provide a little more insight into what's going on here? Why isn't the raw-value initializer accessible?
This bug is solved in Xcode 7 and Swift 2
extension TemplateSlotType {
    init?(rawString: String) {
        // Check if string contains 'carrousel'
        if rawString.rangeOfString("carrousel") != nil {
            self.init(rawValue: "carrousel")
        } else {
            self.init(rawValue: rawString)
        }
    }
}
In your case this would result in the following extension:
extension MyEnum {
    init?(string: String) {
        switch string.lowercaseString {
        case "zero":
            self.init(rawValue: 0)
        case "one":
            self.init(rawValue: 1)
        case "two":
            self.init(rawValue: 2)
        default:
            return nil
        }
    }
}
You can even make the code simpler and usable without switch cases in the initializer; this way you don't need to add more cases to it when you add a new type.
enum VehicleType: Int, CustomStringConvertible {
    case car = 4
    case moped = 2
    case truck = 16
    case unknown = -1

    // MARK: - Helpers
    public var description: String {
        switch self {
        case .car: return "Car"
        case .truck: return "Truck"
        case .moped: return "Moped"
        case .unknown: return "unknown"
        }
    }

    static let all: [VehicleType] = [car, moped, truck]

    init?(rawDescription: String) {
        guard let type = VehicleType.all.first(where: { $0.description == rawDescription })
        else { return nil }
        self = type
    }
}
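A hypothetical quick check of that initializer:
let parsed = VehicleType(rawDescription: "Car")
print(parsed == .car)                              // true
print(VehicleType(rawDescription: "Boat") == nil)  // true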
Yeah this is an annoying issue. I'm currently working around it using a global-scope function that acts as a factory, i.e.
func enumFromString(string: String) -> MyEnum? {
    switch string {
    case "One": return MyEnum(rawValue: 1)
    case "Two": return MyEnum(rawValue: 2)
    case "Three": return MyEnum(rawValue: 3)
    default: return nil
    }
}
This works for Swift 4 on Xcode 9.2 together with my EnumSequence:
enum Word: Int, EnumSequenceElement, CustomStringConvertible {
    case apple, cat, fun

    var description: String {
        switch self {
        case .apple:
            return "Apple"
        case .cat:
            return "Cat"
        case .fun:
            return "Fun"
        }
    }
}

let Words: [String: Word] = [
    "A": .apple,
    "C": .cat,
    "F": .fun
]

extension Word {
    var letter: String? {
        return Words.first(where: { (_, word) -> Bool in
            word == self
        })?.key
    }

    init?(_ letter: String) {
        if let word = Words[letter] {
            self = word
        } else {
            return nil
        }
    }
}

for word in EnumSequence<Word>() {
    if let letter = word.letter, let lhs = Word(letter), let rhs = Word(letter), lhs == rhs {
        print("\(letter) for \(word)")
    }
}
Output
A for Apple
C for Cat
F for Fun
Add this to your code:
extension MyEnum {
    init?(rawValue: Int) {
        switch rawValue {
        case 0: self = .Zero
        case 1: self = .One
        case 2: self = .Two
        default: return nil
        }
    }
}