I'm trying to use hex color values in Swift, rather than the handful of standard values that UIColor provides, but I can't figure out how to do it.

Example: how would I use #ffffff as a color?


Current answer

Swift 5.3 & SwiftUI: hex and CSS color name support via UIColor

The underlying code

Swift package

SwiftUI package

String examples:

Orange, Lime, Tomato, etc.
Clear, Transparent, nil, and an empty string all yield [UIColor clearColor]
abc
abc7
#abc7
00FFFF
#00FFFF
00FFFF77

The playground output is shown as a screenshot in the original answer.
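For reference, here is a minimal sketch of the idea behind that extension, assuming a hypothetical initializer name (cssOrHex) and a tiny illustrative name table; the linked package's real API handles many more formats (3-, 4-, and 8-digit hex, alpha, the full CSS name list):

import UIKit

// A minimal sketch of the idea only; not the package's actual API.
extension UIColor {
    convenience init(cssOrHex string: String) {
        // Tiny CSS-name table, for illustration only.
        let names: [String: UInt32] = ["orange": 0xFFA500, "lime": 0x00FF00, "tomato": 0xFF6347]

        var hex = string.trimmingCharacters(in: .whitespacesAndNewlines).lowercased()
        if hex.hasPrefix("#") { hex.removeFirst() }

        var rgb: UInt64 = 0
        if let named = names[hex] {
            rgb = UInt64(named)
        } else if hex.count != 6 || !Scanner(string: hex).scanHexInt64(&rgb) {
            // Anything unrecognised becomes clear, as the string examples above describe.
            self.init(white: 0, alpha: 0)
            return
        }

        self.init(red: CGFloat((rgb >> 16) & 0xFF) / 255,
                  green: CGFloat((rgb >> 8) & 0xFF) / 255,
                  blue: CGFloat(rgb & 0xFF) / 255,
                  alpha: 1)
    }
}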

Other answers

Swift 4: combining the answers from Sulthan and Luca Torella:

extension UIColor {
    convenience init(hexFromString: String, alpha: CGFloat = 1.0) {
        var cString = hexFromString.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
        var rgbValue: UInt32 = 10066329 // color #999999 if the string has the wrong format

        if cString.hasPrefix("#") {
            cString.remove(at: cString.startIndex)
        }

        if cString.count == 6 {
            Scanner(string: cString).scanHexInt32(&rgbValue)
        }

        self.init(
            red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
            alpha: alpha
        )
    }
}

Usage examples:

let myColor = UIColor(hexFromString: "4F9BF5")

let myColor = UIColor(hexFromString: "#4F9BF5")

let myColor = UIColor(hexFromString: "#4F9BF5", alpha: 0.5)

The latest Swift 3 version:

extension UIColor {
    convenience init(hexString: String) {
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt32()
        Scanner(string: hex).scanHexInt32(&int)
        let a, r, g, b: UInt32
        switch hex.characters.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}

Use this method in your class, or wherever you convert a hex color into a UIColor:

let color1 = UIColor(hexString: "#FF323232")

iOS 14, SwiftUI 2.0, Swift 5.1, Xcode 12 beta

extension Color {
    static func hexColour(hexValue: UInt32) -> Color {
        let red = Double((hexValue & 0xFF0000) >> 16) / 255.0
        let green = Double((hexValue & 0xFF00) >> 8) / 255.0
        let blue = Double(hexValue & 0xFF) / 255.0
        return Color(red: red, green: green, blue: blue)
    }
}

Call it with a hex number:

let red = Color.hexColour(hexValue: 0xFF0000)
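Inside a SwiftUI view it can be used like this (a minimal sketch; the view name is hypothetical):

import SwiftUI

struct HexColourDemo: View {
    var body: some View {
        Text("Tomato on seashell")
            .foregroundColor(Color.hexColour(hexValue: 0xFF6347))   // tomato
            .padding()
            .background(Color.hexColour(hexValue: 0xFFF5EE))        // seashell
    }
}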

For Swift 3:

extension String {
    var hexColor: UIColor {        
        let hex = trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt32()       
        Scanner(string: hex).scanHexInt32(&int)
        let a, r, g, b: UInt32
        switch hex.characters.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            return .clear
        }
        return UIColor(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}
extension UIColor {

    convenience init(r: CGFloat, g: CGFloat, b: CGFloat, a: CGFloat = 1) {
        self.init(red: r/255, green: g/255, blue: b/255, alpha: a)
    }

    convenience init(hex: Int, alpha: CGFloat = 1) {
        self.init(r: CGFloat((hex >> 16) & 0xff), g: CGFloat((hex >> 08) & 0xff), b: CGFloat((hex >> 00) & 0xff), a: alpha)
    }
}
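Usage of the two extensions, for example:

let titleColor = "#FF5733".hexColor              // String extension, 24-bit RGB
let shortColor = "F53".hexColor                  // 12-bit shorthand, expands to #FF5533
let intColor = UIColor(hex: 0xFF5733)            // Int-based initializer
let fadedColor = UIColor(hex: 0xFF5733, alpha: 0.5)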