I'm trying to use hex color values in Swift, instead of the few standard values that UIColor provides, but I can't figure out how to do it.
Example: how would I use #ffffff as a color?
Current answer
Swift 5
extension UIColor {
    /// Creates a UIColor from a hex string such as "#fff", "ff0000", or "80ff0000".
    ///
    /// - Parameter hexString: input hex string
    convenience init(hexString: String) {
        // Strip everything that is not a hex digit (e.g. a leading "#").
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int: UInt64 = 0
        Scanner(string: hex).scanHexInt64(&int)
        let a, r, g, b: UInt64
        switch hex.count {
        case 3: // RGB (12-bit), e.g. "fff"; each nibble is expanded (0xF * 17 = 255)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit), e.g. "ffffff"
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit), e.g. "80ffffff"
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default: // invalid input falls back to opaque black
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}
Call it with UIColor(hexString: "your hex string").
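For reference, a quick sketch of the three input shapes the switch above accepts (the variable names here are just illustrative):
let white = UIColor(hexString: "#fff")              // 3-digit shorthand
let red = UIColor(hexString: "ff0000")              // 6-digit RGB, "#" optional
let halfAlphaBlue = UIColor(hexString: "#800000ff") // 8-digit ARGB: alpha 0x80, blue 0xff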
Other answers
I combined some ideas from this answer and updated them for iOS 13 and Swift 5.
extension UIColor {
    convenience init(_ hex: String, alpha: CGFloat = 1.0) {
        var cString = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
        if cString.hasPrefix("#") { cString.removeFirst() }

        if cString.count != 6 {
            self.init("ff0000") // return red color for wrong hex input
            return
        }

        var rgbValue: UInt64 = 0
        Scanner(string: cString).scanHexInt64(&rgbValue)

        self.init(red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
                  green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
                  blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
                  alpha: alpha)
    }
}
Then you can use it like this:
UIColor("#ff0000") // with #
UIColor("ff0000") // without #
UIColor("ff0000", alpha: 0.5) // using optional alpha value
Simple color extension for Swift 5 / SwiftUI
Example:
let myColor = Color(hex: 0xF2C94C)
Code:
import Foundation
import SwiftUI
import UIKit // UIColor lives in UIKit

extension UIColor {
    convenience init(hex: Int) {
        let components = (
            R: CGFloat((hex >> 16) & 0xff) / 255,
            G: CGFloat((hex >> 08) & 0xff) / 255,
            B: CGFloat((hex >> 00) & 0xff) / 255
        )
        self.init(red: components.R, green: components.G, blue: components.B, alpha: 1)
    }
}

extension Color {
    public init(hex: Int) {
        self.init(UIColor(hex: hex))
    }
}
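For instance, a minimal sketch of the Color(hex:) initializer in a SwiftUI view (SwatchView is a hypothetical name, not part of the answer above):
struct SwatchView: View {
    var body: some View {
        Rectangle()
            .foregroundColor(Color(hex: 0xF2C94C)) // same value as the example above
            .frame(width: 120, height: 60)
    }
}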
Swift 4: combining the answers from Sulthan and Luca Torella:
extension UIColor {
    convenience init(hexFromString: String, alpha: CGFloat = 1.0) {
        var cString: String = hexFromString.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
        var rgbValue: UInt32 = 0x999999 // color #999999 if string has wrong format
        if cString.hasPrefix("#") {
            cString.remove(at: cString.startIndex)
        }
        if cString.count == 6 {
            Scanner(string: cString).scanHexInt32(&rgbValue)
        }
        self.init(
            red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
            alpha: alpha
        )
    }
}
Usage examples:
let myColor = UIColor(hexFromString: "4F9BF5")
let myColor = UIColor(hexFromString: "#4F9BF5")
let myColor = UIColor(hexFromString: "#4F9BF5", alpha: 0.5)
This is what I use. It works with 6- and 8-character color strings, with or without the # symbol. In release builds an invalid string falls back to black; in debug builds it trips an assertion and crashes.
extension UIColor {
    public convenience init(hex: String) {
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var a: CGFloat = 1

        let hexColor = hex.replacingOccurrences(of: "#", with: "")
        let scanner = Scanner(string: hexColor)
        var hexNumber: UInt64 = 0
        var valid = false

        if scanner.scanHexInt64(&hexNumber) {
            if hexColor.count == 8 { // RRGGBBAA
                r = CGFloat((hexNumber & 0xff000000) >> 24) / 255
                g = CGFloat((hexNumber & 0x00ff0000) >> 16) / 255
                b = CGFloat((hexNumber & 0x0000ff00) >> 8) / 255
                a = CGFloat(hexNumber & 0x000000ff) / 255
                valid = true
            }
            else if hexColor.count == 6 { // RRGGBB
                r = CGFloat((hexNumber & 0xff0000) >> 16) / 255
                g = CGFloat((hexNumber & 0x00ff00) >> 8) / 255
                b = CGFloat(hexNumber & 0x0000ff) / 255
                valid = true
            }
        }

        #if DEBUG
            assert(valid, "UIColor initialized with invalid hex string")
        #endif

        self.init(red: r, green: g, blue: b, alpha: a)
    }
}
Usage:
UIColor(hex: "#75CC83FF")
UIColor(hex: "75CC83FF")
UIColor(hex: "#75CC83")
UIColor(hex: "75CC83")
Hex with validation
Based on Eduardo's answer
Details
Xcode 10.2.1 (10E1001), Swift 4.2
Solution
import UIKit

extension UIColor {

    convenience init(r: UInt8, g: UInt8, b: UInt8, alpha: CGFloat = 1.0) {
        let divider: CGFloat = 255.0
        self.init(red: CGFloat(r)/divider, green: CGFloat(g)/divider, blue: CGFloat(b)/divider, alpha: alpha)
    }

    private convenience init(rgbWithoutValidation value: Int32, alpha: CGFloat = 1.0) {
        self.init(
            r: UInt8((value & 0xFF0000) >> 16),
            g: UInt8((value & 0x00FF00) >> 8),
            b: UInt8(value & 0x0000FF),
            alpha: alpha
        )
    }

    convenience init?(rgb: Int32, alpha: CGFloat = 1.0) {
        if rgb > 0xFFFFFF || rgb < 0 { return nil }
        self.init(rgbWithoutValidation: rgb, alpha: alpha)
    }

    convenience init?(hex: String, alpha: CGFloat = 1.0) {
        var charSet = CharacterSet.whitespacesAndNewlines
        charSet.insert("#")
        let _hex = hex.trimmingCharacters(in: charSet)
        guard _hex.range(of: "^[0-9A-Fa-f]{6}$", options: .regularExpression) != nil else { return nil }
        var rgb: UInt32 = 0
        Scanner(string: _hex).scanHexInt32(&rgb)
        self.init(rgbWithoutValidation: Int32(rgb), alpha: alpha)
    }
}
Usage
let alpha: CGFloat = 1.0
// Hex
print(UIColor(rgb: 0x4F9BF5) ?? "nil")
print(UIColor(rgb: 0x4F9BF5, alpha: alpha) ?? "nil")
print(UIColor(rgb: 5217269) ?? "nil")
print(UIColor(rgb: -5217269) ?? "nil") // = nil
print(UIColor(rgb: 0xFFFFFF1) ?? "nil") // = nil
// String
print(UIColor(hex: "4F9BF5") ?? "nil")
print(UIColor(hex: "4F9BF5", alpha: alpha) ?? "nil")
print(UIColor(hex: "#4F9BF5") ?? "nil")
print(UIColor(hex: "#4F9BF5", alpha: alpha) ?? "nil")
print(UIColor(hex: "#4F9BF56") ?? "nil") // = nil
print(UIColor(hex: "#blabla") ?? "nil") // = nil
// RGB
print(UIColor(r: 79, g: 155, b: 245))
print(UIColor(r: 79, g: 155, b: 245, alpha: alpha))
//print(UIColor(r: 792, g: 155, b: 245, alpha: alpha)) // Compiler will throw an error, r,g,b = [0...255]