In UIKit we could use an extension to set a hex color on almost anything, like in this tutorial. But when I try to do it in SwiftUI, it doesn't work; it looks like SwiftUI does not accept a UIColor as the parameter.
Text(text)
    .color(UIColor.init(hex: "FFF"))
Error message:
Cannot convert value of type 'UIColor' to expected argument type 'Color?'
I even tried to make an extension for Color, instead of UIColor, but I haven't had any luck.
My extension for Color:
import SwiftUI

extension Color {
    init(hex: String) {
        let scanner = Scanner(string: hex)
        scanner.scanLocation = 0

        var rgbValue: UInt64 = 0
        scanner.scanHexInt64(&rgbValue)

        let r = (rgbValue & 0xff0000) >> 16
        let g = (rgbValue & 0xff00) >> 8
        let b = rgbValue & 0xff

        self.init(
            red: CGFloat(r) / 0xff,
            green: CGFloat(g) / 0xff,
            blue: CGFloat(b) / 0xff, alpha: 1
        )
    }
}
Error message:
Incorrect argument labels in call (have 'red:green:blue:alpha:', expected '_:red:green:blue:opacity:')
You're almost there; you were using the wrong initialiser parameter labels:
import SwiftUI

extension Color {
    init(hex: String) {
        // Strip any non-alphanumeric characters, e.g. a leading "#"
        let hex = hex.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int: UInt64 = 0
        Scanner(string: hex).scanHexInt64(&int)

        let a, r, g, b: UInt64
        switch hex.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default: // Invalid input falls back to a near-transparent color
            (a, r, g, b) = (1, 1, 1, 0)
        }

        self.init(
            .sRGB,
            red: Double(r) / 255,
            green: Double(g) / 255,
            blue: Double(b) / 255,
            opacity: Double(a) / 255
        )
    }
}
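For reference, here is roughly how it could be used; the hex values and the Text view below are just illustrative examples, not part of the original question:

Text("Hello, world!")
    .foregroundColor(Color(hex: "FF6600"))   // 24-bit RGB

Color(hex: "#0A84FF")                        // leading "#" is stripped by the initialiser
Color(hex: "80FF0000")                       // 32-bit ARGB: roughly 50% opacity red
Color(hex: "FFF")                            // 12-bit shorthand for white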