I am trying to use hex color values in Swift, instead of the few standard ones that UIColor provides, but I have no idea how to do it.
Example: how would I use #ffffff as a color?
#ffffff actually represents three color components in hexadecimal notation: red ff, green ff, and blue ff. You can write hexadecimal literals in Swift using the 0x prefix, e.g. 0xFF.
To simplify the conversion, let's create an initializer that takes integer (0 - 255) values:
extension UIColor {
convenience init(red: Int, green: Int, blue: Int) {
assert(red >= 0 && red <= 255, "Invalid red component")
assert(green >= 0 && green <= 255, "Invalid green component")
assert(blue >= 0 && blue <= 255, "Invalid blue component")
self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: 1.0)
}
convenience init(rgb: Int) {
self.init(
red: (rgb >> 16) & 0xFF,
green: (rgb >> 8) & 0xFF,
blue: rgb & 0xFF
)
}
}
Usage:
let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF)
let color2 = UIColor(rgb: 0xFFFFFF)
How to get alpha?
Depending on your use case, you can simply use the native UIColor.withAlphaComponent method, e.g.
let semitransparentBlack = UIColor(rgb: 0x000000).withAlphaComponent(0.5)
Or you can add an additional (optional) parameter to the above methods:
convenience init(red: Int, green: Int, blue: Int, a: CGFloat = 1.0) {
self.init(
red: CGFloat(red) / 255.0,
green: CGFloat(green) / 255.0,
blue: CGFloat(blue) / 255.0,
alpha: a
)
}
convenience init(rgb: Int, a: CGFloat = 1.0) {
self.init(
red: (rgb >> 16) & 0xFF,
green: (rgb >> 8) & 0xFF,
blue: rgb & 0xFF,
a: a
)
}
(We cannot name the parameter alpha because of a name collision with the existing initializer.)
Called as:
let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF, a: 0.5)
let color2 = UIColor(rgb: 0xFFFFFF, a: 0.5)
To get the alpha as an integer (0-255), we can write:
convenience init(red: Int, green: Int, blue: Int, a: Int = 0xFF) {
self.init(
red: CGFloat(red) / 255.0,
green: CGFloat(green) / 255.0,
blue: CGFloat(blue) / 255.0,
alpha: CGFloat(a) / 255.0
)
}
// let's suppose alpha is the first component (ARGB)
convenience init(argb: Int) {
self.init(
red: (argb >> 16) & 0xFF,
green: (argb >> 8) & 0xFF,
blue: argb & 0xFF,
a: (argb >> 24) & 0xFF
)
}
Called as:
let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF, a: 0xFF)
let color2 = UIColor(argb: 0xFFFFFFFF)
Or a combination of the previous methods. There is absolutely no need to use strings.
This is a function that takes a hex string and returns a UIColor.
(You can enter hex strings in either format: #ffffff or ffffff.)
Usage:
var color1 = hexStringToUIColor("#d3d3d3")
Swift 5 (works in Swift 4 and later):
func hexStringToUIColor (hex:String) -> UIColor {
var cString:String = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
if (cString.hasPrefix("#")) {
cString.remove(at: cString.startIndex)
}
if ((cString.count) != 6) {
return UIColor.gray
}
var rgbValue:UInt64 = 0
Scanner(string: cString).scanHexInt64(&rgbValue)
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
Swift 3:
func hexStringToUIColor (hex:String) -> UIColor {
var cString:String = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
if (cString.hasPrefix("#")) {
cString.remove(at: cString.startIndex)
}
if ((cString.characters.count) != 6) {
return UIColor.gray
}
var rgbValue:UInt32 = 0
Scanner(string: cString).scanHexInt32(&rgbValue)
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
Swift 2:
func hexStringToUIColor (hex:String) -> UIColor {
var cString:String = hex.stringByTrimmingCharactersInSet(NSCharacterSet.whitespaceAndNewlineCharacterSet() as NSCharacterSet).uppercaseString
if (cString.hasPrefix("#")) {
cString = cString.substringFromIndex(cString.startIndex.advancedBy(1))
}
if ((cString.characters.count) != 6) {
return UIColor.grayColor()
}
var rgbValue:UInt32 = 0
NSScanner(string: cString).scanHexInt(&rgbValue)
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
Source: arshad/gist:de147c42d7b3063ef7bc
Edit: Updated the code. Thanks, Hlung, jaytrixz, Ahmad F, Kegham K, and Adam Waite!
Swift 5 (Swift 4, Swift 3) UIColor extension:
extension UIColor {
convenience init(hexString: String) {
let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
var int = UInt64()
Scanner(string: hex).scanHexInt64(&int)
let a, r, g, b: UInt64
switch hex.count {
case 3: // RGB (12-bit)
(a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
case 6: // RGB (24-bit)
(a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
case 8: // ARGB (32-bit)
(a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
default:
(a, r, g, b) = (255, 0, 0, 0)
}
self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
}
}
Usage:
let darkGrey = UIColor(hexString: "#757575")
Swift 2.x version:
extension UIColor {
convenience init(hexString: String) {
let hex = hexString.stringByTrimmingCharactersInSet(NSCharacterSet.alphanumericCharacterSet().invertedSet)
var int = UInt32()
NSScanner(string: hex).scanHexInt(&int)
let a, r, g, b: UInt32
switch hex.characters.count {
case 3: // RGB (12-bit)
(a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
case 6: // RGB (24-bit)
(a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
case 8: // ARGB (32-bit)
(a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
default:
(a, r, g, b) = (255, 0, 0, 0)
}
self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
}
}
For UIColor:
extension UIColor {
convenience init(hex: Int) {
let components = (
R: CGFloat((hex >> 16) & 0xff) / 255,
G: CGFloat((hex >> 08) & 0xff) / 255,
B: CGFloat((hex >> 00) & 0xff) / 255
)
self.init(red: components.R, green: components.G, blue: components.B, alpha: 1)
}
}
For CGColor:
extension CGColor {
class func colorWithHex(hex: Int) -> CGColorRef {
return UIColor(hex: hex).CGColor
}
}
Usage:
let purple = UIColor(hex: 0xAB47BC)
Swift 4: combining the answers of Sulthan and Luca Torella:
extension UIColor {
convenience init(hexFromString:String, alpha:CGFloat = 1.0) {
var cString:String = hexFromString.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
var rgbValue:UInt32 = 10066329 //color #999999 if string has wrong format
if (cString.hasPrefix("#")) {
cString.remove(at: cString.startIndex)
}
if ((cString.count) == 6) {
Scanner(string: cString).scanHexInt32(&rgbValue)
}
self.init(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: alpha
)
}
}
Usage examples:
let myColor = UIColor(hexFromString: "4F9BF5")
let myColor = UIColor(hexFromString: "#4F9BF5")
let myColor = UIColor(hexFromString: "#4F9BF5", alpha: 0.5)
I've merged a few ideas from this thread of answers and updated it for iOS 13 & Swift 5.
extension UIColor {
convenience init(_ hex: String, alpha: CGFloat = 1.0) {
var cString = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
if cString.hasPrefix("#") { cString.removeFirst() }
if cString.count != 6 {
self.init("ff0000") // return red color for wrong hex input
return
}
var rgbValue: UInt64 = 0
Scanner(string: cString).scanHexInt64(&rgbValue)
self.init(red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: alpha)
}
}
You can then use it like this:
UIColor("#ff0000") // with #
UIColor("ff0000") // without #
UIColor("ff0000", alpha: 0.5) // using optional alpha value
The simplest way to add a color programmatically is by using ColorLiteral.
Just type ColorLiteral in your code and Xcode will prompt you with a whole list of colors to choose from. The advantage is less code, and you can add HEX values or RGB. You will also get the recently used colors from the storyboard.
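For example (a minimal sketch; the literal below is white, #ffffff, and view is assumed to be a view controller's view):
view.backgroundColor = #colorLiteral(red: 1.0, green: 1.0, blue: 1.0, alpha: 1.0)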
With Swift 2.0 and Xcode 7.0.1 you can create this function:
// Creates a UIColor from a Hex string.
func colorWithHexString (hex:String) -> UIColor {
var cString:String = hex.stringByTrimmingCharactersInSet(NSCharacterSet.whitespaceAndNewlineCharacterSet()).uppercaseString
if (cString.hasPrefix("#")) {
cString = (cString as NSString).substringFromIndex(1)
}
if (cString.characters.count != 6) {
return UIColor.grayColor()
}
let rString = (cString as NSString).substringToIndex(2)
let gString = ((cString as NSString).substringFromIndex(2) as NSString).substringToIndex(2)
let bString = ((cString as NSString).substringFromIndex(4) as NSString).substringToIndex(2)
var r:CUnsignedInt = 0, g:CUnsignedInt = 0, b:CUnsignedInt = 0;
NSScanner(string: rString).scanHexInt(&r)
NSScanner(string: gString).scanHexInt(&g)
NSScanner(string: bString).scanHexInt(&b)
return UIColor(red: CGFloat(r) / 255.0, green: CGFloat(g) / 255.0, blue: CGFloat(b) / 255.0, alpha: CGFloat(1))
}
and then use it in this way:
let color1 = colorWithHexString("#1F437C")
Updated For Swift 4
func colorWithHexString (hex:String) -> UIColor {
var cString = hex.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines).uppercased()
if (cString.hasPrefix("#")) {
cString = (cString as NSString).substring(from: 1)
}
if (cString.count != 6) {
return UIColor.gray
}
let rString = (cString as NSString).substring(to: 2)
let gString = ((cString as NSString).substring(from: 2) as NSString).substring(to: 2)
let bString = ((cString as NSString).substring(from: 4) as NSString).substring(to: 2)
var r:CUnsignedInt = 0, g:CUnsignedInt = 0, b:CUnsignedInt = 0;
Scanner(string: rString).scanHexInt32(&r)
Scanner(string: gString).scanHexInt32(&g)
Scanner(string: bString).scanHexInt32(&b)
return UIColor(red: CGFloat(r) / 255.0, green: CGFloat(g) / 255.0, blue: CGFloat(b) / 255.0, alpha: CGFloat(1))
}
This version fixes the warning "'scanHexInt32' was deprecated in iOS 13.0".
The sample should work on Swift 2.2 and above (Swift 2.x, Swift 3.x, Swift 4.x, Swift 5.x):
extension UIColor {
// hex sample: 0xf43737
convenience init(_ hex: Int, alpha: Double = 1.0) {
self.init(red: CGFloat((hex >> 16) & 0xFF) / 255.0, green: CGFloat((hex >> 8) & 0xFF) / 255.0, blue: CGFloat((hex) & 0xFF) / 255.0, alpha: CGFloat(255 * alpha) / 255)
}
convenience init(_ hexString: String, alpha: Double = 1.0) {
let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
var int = UInt64()
Scanner(string: hex).scanHexInt64(&int)
let r, g, b: UInt64
switch hex.count {
case 3: // RGB (12-bit)
(r, g, b) = ((int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
case 6: // RGB (24-bit)
(r, g, b) = (int >> 16, int >> 8 & 0xFF, int & 0xFF)
default:
(r, g, b) = (1, 1, 0)
}
self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(255 * alpha) / 255)
}
convenience init(r: CGFloat, g: CGFloat, b: CGFloat, a: CGFloat = 1) {
self.init(red: (r / 255), green: (g / 255), blue: (b / 255), alpha: a)
}
}
Use them like this:
UIColor(0xF54A45)
UIColor(0xF54A45, alpha: 0.7)
UIColor("#f44")
UIColor("#f44", alpha: 0.7)
UIColor("#F54A45")
UIColor("#F54A45", alpha: 0.7)
UIColor("F54A45")
UIColor("F54A45", alpha: 0.7)
UIColor(r: 245.0, g: 73, b: 69)
UIColor(r: 245.0, g: 73, b: 69, a: 0.7)
This answer shows how to do it in Obj-C. The bridge to Swift is to use:
let rgbValue = 0xFFEEDD
let r = CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0
let g = CGFloat((rgbValue & 0xFF00) >> 8) / 255.0
let b = CGFloat(rgbValue & 0xFF) / 255.0
self.backgroundColor = UIColor(red:r, green: g, blue: b, alpha: 1.0)
Here's what I'm using. Works with 6 and 8 character color strings, with or without the # symbol. Defaults to black in release and crashes in debug when initialized with an invalid string.
extension UIColor {
public convenience init(hex: String) {
var r: CGFloat = 0
var g: CGFloat = 0
var b: CGFloat = 0
var a: CGFloat = 1
let hexColor = hex.replacingOccurrences(of: "#", with: "")
let scanner = Scanner(string: hexColor)
var hexNumber: UInt64 = 0
var valid = false
if scanner.scanHexInt64(&hexNumber) {
if hexColor.count == 8 {
r = CGFloat((hexNumber & 0xff000000) >> 24) / 255
g = CGFloat((hexNumber & 0x00ff0000) >> 16) / 255
b = CGFloat((hexNumber & 0x0000ff00) >> 8) / 255
a = CGFloat(hexNumber & 0x000000ff) / 255
valid = true
}
else if hexColor.count == 6 {
r = CGFloat((hexNumber & 0xff0000) >> 16) / 255
g = CGFloat((hexNumber & 0x00ff00) >> 8) / 255
b = CGFloat(hexNumber & 0x0000ff) / 255
valid = true
}
}
#if DEBUG
assert(valid, "UIColor initialized with invalid hex string")
#endif
self.init(red: r, green: g, blue: b, alpha: a)
}
}
Usage:
UIColor(hex: "#75CC83FF")
UIColor(hex: "75CC83FF")
UIColor(hex: "#75CC83")
UIColor(hex: "75CC83")
Another method (Swift 3.0):
Write an extension for UIColor:
// Convert a hexadecimal value to the corresponding color
extension UIColor
{
class func uicolorFromHex(_ rgbValue:UInt32, alpha : CGFloat)->UIColor
{
let red = CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0
let green = CGFloat((rgbValue & 0xFF00) >> 8) / 255.0
let blue = CGFloat(rgbValue & 0xFF) / 255.0
return UIColor(red:red, green:green, blue:blue, alpha: alpha)
}
}
You can then create a UIColor directly from a hex value like this:
let carrot = UIColor.uicolorFromHex(0xe67e22, alpha: 1)
Here's a Swift extension on UIColor that takes a hex string:
import UIKit
extension UIColor {
convenience init(hexString: String) {
// Trim leading '#' if needed
var cleanedHexString = hexString
if hexString.hasPrefix("#") {
// cleanedHexString = dropFirst(hexString) // Swift 1.2
cleanedHexString = String(hexString.characters.dropFirst()) // Swift 2
}
// String -> UInt32
var rgbValue: UInt32 = 0
NSScanner(string: cleanedHexString).scanHexInt(&rgbValue)
// UInt32 -> R,G,B
let red = CGFloat((rgbValue >> 16) & 0xff) / 255.0
let green = CGFloat((rgbValue >> 08) & 0xff) / 255.0
let blue = CGFloat((rgbValue >> 00) & 0xff) / 255.0
self.init(red: red, green: green, blue: blue, alpha: 1.0)
}
}
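A quick usage sketch (the hex value is just an example):
let red = UIColor(hexString: "#ff0000")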
Latest Swift 3 version:
extension UIColor {
convenience init(hexString: String) {
let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
var int = UInt32()
Scanner(string: hex).scanHexInt32(&int)
let a, r, g, b: UInt32
switch hex.characters.count {
case 3: // RGB (12-bit)
(a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
case 6: // RGB (24-bit)
(a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
case 8: // ARGB (32-bit)
(a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
default:
(a, r, g, b) = (255, 0, 0, 0)
}
self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
}
}
Use it in your class, or wherever you convert a hex color to a UIColor, like this:
let color1 = UIColor(hexString: "#FF323232")
func hexStringToUIColor (hex: String) -> UIColor {
var cString:String = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
if (cString.hasPrefix("#")) {
cString.remove(at: cString.startIndex)
}
if ((cString.characters.count) == 6) {
var rgbValue:UInt32 = 0
Scanner(string: cString).scanHexInt32(&rgbValue)
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}else if ((cString.characters.count) == 8) {
var rgbValue:UInt32 = 0
Scanner(string: cString).scanHexInt32(&rgbValue)
return UIColor(
red: CGFloat((rgbValue & 0x00FF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x0000FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x000000FF) / 255.0,
alpha: CGFloat((rgbValue & 0xFF000000) >> 24) / 255.0
)
}else{
return UIColor.gray
}
}
How to use:
var color: UIColor = hexStringToUIColor(hex: "#00ff00"); // Without transparency
var colorWithTransparency: UIColor = hexStringToUIColor(hex: "#dd00ff00"); // With transparency
Based on Eduardo's answer:
import UIKit
extension UIColor {
convenience init(r: UInt8, g: UInt8, b: UInt8, alpha: CGFloat = 1.0) {
let divider: CGFloat = 255.0
self.init(red: CGFloat(r)/divider, green: CGFloat(g)/divider, blue: CGFloat(b)/divider, alpha: alpha)
}
private convenience init(rgbWithoutValidation value: Int32, alpha: CGFloat = 1.0) {
self.init(
r: UInt8((value & 0xFF0000) >> 16),
g: UInt8((value & 0x00FF00) >> 8),
b: UInt8(value & 0x0000FF),
alpha: alpha
)
}
convenience init?(rgb: Int32, alpha: CGFloat = 1.0) {
if rgb > 0xFFFFFF || rgb < 0 { return nil }
self.init(rgbWithoutValidation: rgb, alpha: alpha)
}
convenience init?(hex: String, alpha: CGFloat = 1.0) {
var charSet = CharacterSet.whitespacesAndNewlines
charSet.insert("#")
let _hex = hex.trimmingCharacters(in: charSet)
guard _hex.range(of: "^[0-9A-Fa-f]{6}$", options: .regularExpression) != nil else { return nil }
var rgb: UInt32 = 0
Scanner(string: _hex).scanHexInt32(&rgb)
self.init(rgbWithoutValidation: Int32(rgb), alpha: alpha)
}
}
let alpha: CGFloat = 1.0
// Hex
print(UIColor(rgb: 0x4F9BF5) ?? "nil")
print(UIColor(rgb: 0x4F9BF5, alpha: alpha) ?? "nil")
print(UIColor(rgb: 5217269) ?? "nil")
print(UIColor(rgb: -5217269) ?? "nil") // = nil
print(UIColor(rgb: 0xFFFFFF1) ?? "nil") // = nil
// String
print(UIColor(hex: "4F9BF5") ?? "nil")
print(UIColor(hex: "4F9BF5", alpha: alpha) ?? "nil")
print(UIColor(hex: "#4F9BF5") ?? "nil")
print(UIColor(hex: "#4F9BF5", alpha: alpha) ?? "nil")
print(UIColor(hex: "#4F9BF56") ?? "nil") // = nil
print(UIColor(hex: "#blabla") ?? "nil") // = nil
// RGB
print(UIColor(r: 79, g: 155, b: 245))
print(UIColor(r: 79, g: 155, b: 245, alpha: alpha))
//print(UIColor(r: 792, g: 155, b: 245, alpha: alpha)) // Compiler will throw an error, r,g,b = [0...255]
Example:
let myColor = Color(hex:0xF2C94C)
Code:
import Foundation
import SwiftUI
extension UIColor {
convenience init(hex: Int) {
let components = (
R: CGFloat((hex >> 16) & 0xff) / 255,
G: CGFloat((hex >> 08) & 0xff) / 255,
B: CGFloat((hex >> 00) & 0xff) / 255
)
self.init(red: components.R, green: components.G, blue: components.B, alpha: 1)
}
}
extension Color {
public init(hex: Int) {
self.init(UIColor(hex: hex))
}
}
You can use it in Swift 5.
Swift 5:
import UIKit
extension UIColor {
static func hexStringToUIColor (hex:String) -> UIColor {
var cString:String = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
if (cString.hasPrefix("#")) {
cString.remove(at: cString.startIndex)
}
if ((cString.count) != 6) {
return UIColor.gray
}
var rgbValue: UInt64 = 0
Scanner(string: cString).scanHexInt64(&rgbValue)
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
}
Swift 2.0:
In viewDidLoad()
var viewColor:UIColor
viewColor = UIColor()
let colorInt:UInt
colorInt = 0x000000
viewColor = UIColorFromRGB(colorInt)
self.view.backgroundColor = viewColor
func UIColorFromRGB(rgbValue: UInt) -> UIColor {
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
extension UIColor{
/// Converting hex string to UIColor
///
/// - Parameter hexString: input hex string
convenience init(hexString: String) {
let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
var int = UInt64()
Scanner(string: hex).scanHexInt64(&int)
let a, r, g, b: UInt64
switch hex.count {
case 3:
(a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
case 6:
(a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
case 8:
(a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
default:
(a, r, g, b) = (255, 0, 0, 0)
}
self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
}
}
Call using UIColor(hexString: "your hex string")
iOS 14, SwiftUI 2.0, Swift 5.1, Xcode 12 beta:
extension Color {
static func hexColour(hexValue:UInt32)->Color
{
let red = Double((hexValue & 0xFF0000) >> 16) / 255.0
let green = Double((hexValue & 0xFF00) >> 8) / 255.0
let blue = Double(hexValue & 0xFF) / 255.0
return Color(red:red, green:green, blue:blue)
}
}
You call it with a hex number:
let red = Color.hexColour(hexValue: 0xFF0000)
There are 7 hex color formats: "#FF0000", "0xFF0000", "FF0000", "F00", "red", 0x00FF00, and 16711935.
NSColorParser.nsColor("#FF0000",1)//red nsColor
NSColorParser.nsColor("FF0",1)//red nsColor
NSColorParser.nsColor("0xFF0000",1)//red nsColor
NSColorParser.nsColor("#FF0000",1)//red nsColor
NSColorParser.nsColor("FF0000",1)//red nsColor
NSColorParser.nsColor(0xFF0000,1)//red nsColor
NSColorParser.nsColor(16711935,1)//red nsColor
CAUTION: This isn't a "one-file-solution", there are some dependencies, but hunting them down may be faster than researching this from scratch.
Permalink:
https://github.com/eonist/Element/wiki/Progress#supporting-7-hex-color-types
Swift 2.0
The code below was tested on Xcode 7.2:
import UIKit
extension UIColor{
public convenience init?(colorCodeInHex: String, alpha: Float = 1.0){
var filterColorCode:String = colorCodeInHex.stringByReplacingOccurrencesOfString("#", withString: "")
if filterColorCode.characters.count != 6 {
self.init(red: 0.0, green: 0.0, blue: 0.0, alpha: CGFloat(alpha))
return
}
filterColorCode = filterColorCode.stringByTrimmingCharactersInSet(NSCharacterSet.whitespaceAndNewlineCharacterSet()).uppercaseString
var range = Range(start: filterColorCode.startIndex.advancedBy(0), end: filterColorCode.startIndex.advancedBy(2))
let rString = filterColorCode.substringWithRange(range)
range = Range(start: filterColorCode.startIndex.advancedBy(2), end: filterColorCode.startIndex.advancedBy(4))
let gString = filterColorCode.substringWithRange(range)
range = Range(start: filterColorCode.startIndex.advancedBy(4), end: filterColorCode.startIndex.advancedBy(6))
let bString = filterColorCode.substringWithRange(range)
var r:CUnsignedInt = 0, g:CUnsignedInt = 0, b:CUnsignedInt = 0;
NSScanner(string: rString).scanHexInt(&r)
NSScanner(string: gString).scanHexInt(&g)
NSScanner(string: bString).scanHexInt(&b)
self.init(red: CGFloat(r) / 255.0, green: CGFloat(g) / 255.0, blue: CGFloat(b) / 255.0, alpha: CGFloat(alpha))
return
}
}
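A quick usage sketch (the initializer is failable, so the result is an optional UIColor):
let navy = UIColor(colorCodeInHex: "#1F437C")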
Swift 2.0:
Make an extension of UIColor.
extension UIColor {
convenience init(hexString:String) {
let hexString:NSString = hexString.stringByTrimmingCharactersInSet(NSCharacterSet.whitespaceAndNewlineCharacterSet())
let scanner = NSScanner(string: hexString as String)
if (hexString.hasPrefix("#")) {
scanner.scanLocation = 1
}
var color:UInt32 = 0
scanner.scanHexInt(&color)
let mask = 0x000000FF
let r = Int(color >> 16) & mask
let g = Int(color >> 8) & mask
let b = Int(color) & mask
let red = CGFloat(r) / 255.0
let green = CGFloat(g) / 255.0
let blue = CGFloat(b) / 255.0
self.init(red:red, green:green, blue:blue, alpha:1)
}
func toHexString() -> String {
var r:CGFloat = 0
var g:CGFloat = 0
var b:CGFloat = 0
var a:CGFloat = 0
getRed(&r, green: &g, blue: &b, alpha: &a)
let rgb:Int = (Int)(r*255)<<16 | (Int)(g*255)<<8 | (Int)(b*255)<<0
return NSString(format:"#%06x", rgb) as String
}
}
Usage:
//Hex to Color
let countPartColor = UIColor(hexString: "E43038")
//Color to Hex
let colorHexString = UIColor(red: 228.0/255.0, green: 48.0/255.0, blue: 56.0/255.0, alpha: 1.0).toHexString()
For Swift 3:
extension String {
var hexColor: UIColor {
let hex = trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
var int = UInt32()
Scanner(string: hex).scanHexInt32(&int)
let a, r, g, b: UInt32
switch hex.characters.count {
case 3: // RGB (12-bit)
(a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
case 6: // RGB (24-bit)
(a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
case 8: // ARGB (32-bit)
(a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
default:
return .clear
}
return UIColor(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
}
}
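A quick usage sketch:
let color = "#ff0000".hexColor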
You can use this extension on UIColor, which converts your string (hexadecimal or RGBA) to a UIColor and vice versa.
extension UIColor {
//Convert RGBA String to UIColor object
//"rgbaString" must be separated by space "0.5 0.6 0.7 1.0" 50% of Red 60% of Green 70% of Blue Alpha 100%
public convenience init?(rgbaString : String){
self.init(ciColor: CIColor(string: rgbaString))
}
//Convert UIColor to RGBA String
func toRGBAString()-> String {
var r: CGFloat = 0
var g: CGFloat = 0
var b: CGFloat = 0
var a: CGFloat = 0
self.getRed(&r, green: &g, blue: &b, alpha: &a)
return "\(r) \(g) \(b) \(a)"
}
//return UIColor from Hexadecimal Color string
public convenience init?(hexString: String) {
let r, g, b, a: CGFloat
if hexString.hasPrefix("#") {
let start = hexString.index(hexString.startIndex, offsetBy: 1)
let hexColor = hexString.substring(from: start)
if hexColor.characters.count == 8 {
let scanner = Scanner(string: hexColor)
var hexNumber: UInt64 = 0
if scanner.scanHexInt64(&hexNumber) {
r = CGFloat((hexNumber & 0xff000000) >> 24) / 255
g = CGFloat((hexNumber & 0x00ff0000) >> 16) / 255
b = CGFloat((hexNumber & 0x0000ff00) >> 8) / 255
a = CGFloat(hexNumber & 0x000000ff) / 255
self.init(red: r, green: g, blue: b, alpha: a)
return
}
}
}
return nil
}
// Convert UIColor to Hexadecimal String
func toHexString() -> String {
var r: CGFloat = 0
var g: CGFloat = 0
var b: CGFloat = 0
var a: CGFloat = 0
self.getRed(&r, green: &g, blue: &b, alpha: &a)
return String(
format: "%02X%02X%02X",
Int(r * 0xff),
Int(g * 0xff),
Int(b * 0xff))
}
}
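A quick usage sketch (both string initializers are failable, so the results are optional):
let fromRGBA = UIColor(rgbaString: "0.5 0.6 0.7 1.0")
let fromHex = UIColor(hexString: "#75CC83FF")
let hexString = UIColor.red.toHexString() // "FF0000"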
A UIColor extension that will help you greatly (version: Swift 4.0):
import UIKit
extension UIColor {
/// RGB color
convenience init(r: CGFloat, g: CGFloat, b: CGFloat) {
self.init(red: r/255.0 ,green: g/255.0 ,blue: b/255.0 ,alpha:1.0)
}
/// Solid color (useful for grays)
convenience init(gray: CGFloat) {
self.init(red: gray/255.0 ,green: gray/255.0 ,blue: gray/255.0 ,alpha:1.0)
}
/// Random color
class func randomCGColor() -> UIColor {
return UIColor(r: CGFloat(arc4random_uniform(256)), g: CGFloat(arc4random_uniform(256)), b: CGFloat(arc4random_uniform(256)))
}
/// Hex color from an Int
convenience init(hex:Int, alpha:CGFloat = 1.0) {
self.init(
red: CGFloat((hex & 0xFF0000) >> 16) / 255.0,
green: CGFloat((hex & 0x00FF00) >> 8) / 255.0,
blue: CGFloat((hex & 0x0000FF) >> 0) / 255.0,
alpha: alpha
)
}
/// Hex color from a String
convenience init(hexString: String){
var red: CGFloat = 0.0
var green: CGFloat = 0.0
var blue: CGFloat = 0.0
var alpha: CGFloat = 1.0
let scanner = Scanner(string: hexString)
var hexValue: CUnsignedLongLong = 0
if scanner.scanHexInt64(&hexValue) {
switch (hexString.count) {
case 3:
red = CGFloat((hexValue & 0xF00) >> 8) / 15.0
green = CGFloat((hexValue & 0x0F0) >> 4) / 15.0
blue = CGFloat(hexValue & 0x00F) / 15.0
case 4:
red = CGFloat((hexValue & 0xF000) >> 12) / 15.0
green = CGFloat((hexValue & 0x0F00) >> 8) / 15.0
blue = CGFloat((hexValue & 0x00F0) >> 4) / 15.0
alpha = CGFloat(hexValue & 0x000F) / 15.0
case 6:
red = CGFloat((hexValue & 0xFF0000) >> 16) / 255.0
green = CGFloat((hexValue & 0x00FF00) >> 8) / 255.0
blue = CGFloat(hexValue & 0x0000FF) / 255.0
case 8:
alpha = CGFloat((hexValue & 0xFF000000) >> 24) / 255.0
red = CGFloat((hexValue & 0x00FF0000) >> 16) / 255.0
green = CGFloat((hexValue & 0x0000FF00) >> 8) / 255.0
blue = CGFloat(hexValue & 0x000000FF) / 255.0
default:
print("Invalid RGB string, number of characters after '#' should be either 3, 4, 6 or 8")
}
} else {
print("Scan hex error")
}
self.init(red:red, green:green, blue:blue, alpha:alpha)
}}
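A quick usage sketch (note that this hexString variant does not strip a leading '#'):
let gray = UIColor(gray: 128)
let tint = UIColor(hex: 0xF54A45, alpha: 0.7)
let green = UIColor(hexString: "00FF00")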
I like @Luca's answer, as I think it's the most elegant.
However, I don't want my colours specified in ARGB; I'd rather use RGBA. I also needed to handle strings that specify one character per channel, e.g. "#FFFA".
This version adds error throwing and strips the '#' character if it's included in the string. Here is my modified version for Swift:
public enum ColourParsingError: Error
{
case invalidInput(String)
}
extension UIColor {
public convenience init(hexString: String) throws
{
let hexString = hexString.replacingOccurrences(of: "#", with: "")
let hex = hexString.trimmingCharacters(in:NSCharacterSet.alphanumerics.inverted)
var int = UInt32()
Scanner(string: hex).scanHexInt32(&int)
let a, r, g, b: UInt32
switch hex.count
{
case 3: // RGB (12-bit)
(r, g, b, a) = ((int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17, 255)
case 4: // RGBA (16-bit), CSS-style shorthand such as #F0FA
(r, g, b, a) = ((int >> 12) * 17, (int >> 8 & 0xF) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
case 6: // RGB (24-bit)
(r, g, b, a) = (int >> 16, int >> 8 & 0xFF, int & 0xFF, 255)
case 8: // RGBA (32-bit)
(r, g, b, a) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
default:
throw ColourParsingError.invalidInput("String is not a valid hex colour string: \(hexString)")
}
self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
}
}
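A quick usage sketch (the initializer throws, so call it with try):
let color = try? UIColor(hexString: "#FFFA") // RGBA shorthand; nil if parsing fails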