Add colors decoded from a hexadecimal string (#70)
parent 25fa624d7b
commit c8dd3088a9

@@ -9,6 +9,8 @@ disabled_rules:
line_length:
  - 80
  - 100
  - ignores_comments

function_body_length:
  - 50

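The hunk reads as raising the `line_length` limit from 80 to 100 and excluding comment lines from the measurement. As a sketch, the same configuration in SwiftLint's keyed form would be (assuming the rule's `warning` and `ignores_comments` options):

    line_length:
      warning: 100
      ignores_comments: true
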
@@ -46,3 +46,80 @@ extension Color: ExpressibleByIntegerLiteral {
        space = .sRGB
    }
}

extension Color {
    public init?(hex: String) {
        let cString = hex.utf8CString

        // - 1 for the trailing null terminator
        let hexSize = cString.count - 1

        // If the first character is a '#', skip it
        var offset = cString.first == 0x23 ? 1 : 0

        // We only support 6 hexadecimal characters
        if hexSize - offset != 6 {
            return nil
        }
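
        // For illustration: "#FF00FF" gives hexSize == 7 and offset == 1, so 7 - 1 == 6 passes,
        // while "FF00FF" gives hexSize == 6 and offset == 0 and passes as well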

        func nextByte() -> Int8? {
            // Take the first character as the high 4 bits,
            // then the second character as the low 4 bits
            if
                let high = cString[offset].hexDecoded(),
                let low = cString[offset + 1].hexDecoded() {
                // Unchecked addition is still safe here, as offset never exceeds 7
                offset = offset &+ 2
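
                // Worked example: for the pair "C8", high == 0xC (12) and low == 0x8 (8),
                // so (12 << 4) | 8 has the bit pattern 0b1100_1000, i.e. 200 as an unsigned byte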
                // Combine the two 4-bit halves into a full byte
                return (high << 4) | low
            }

            return nil
        }

        guard
            let red = nextByte(),
            let green = nextByte(),
            let blue = nextByte()
        else {
            return nil
        }

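        // `bitPattern` reinterprets each signed byte as unsigned:
        // e.g. the Int8 bit pattern 0b1100_1000 (-56) becomes UInt8 200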
        self.red = Double(UInt8(bitPattern: red)) / 255
        self.green = Double(UInt8(bitPattern: green)) / 255
        self.blue = Double(UInt8(bitPattern: blue)) / 255
        alpha = 1
        space = .sRGB
    }
}

private extension Int8 {
    func hexDecoded() -> Int8? {
        // If the character is between 0x30 and 0x39, it is a textual digit:
        // 0x30 is ASCII `0` and 0x39 is ASCII `9`
        if self >= 0x30 && self <= 0x39 {
            // The numeric value is found by subtracting ASCII `0`,
            // so ASCII `0` maps to binary 0 and ASCII `1` maps to binary 1
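            // For example, ASCII `7` is 0x37 and 0x37 &- 0x30 == 7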
            return self &- 0x30
        } else if self >= 0x41 && self <= 0x46 {
            // This branch runs when the integer is within the `A-F` uppercased ASCII range,
            // using the offset described below to find the correct representation
            return self &- Int8.uppercasedOffset
        } else if self >= 0x61 && self <= 0x66 {
            // This branch runs when the integer is within the `a-f` lowercased ASCII range,
            // using the offset described below to find the correct representation
            return self &- Int8.lowercasedOffset
        }

        return nil
    }

    // `a` in hexadecimal is equal to `10` in decimal
    // Subtracting ASCII `a` narrows a lowercased character down to base 10, offset by 10
    // Reducing the subtraction by 10 therefore represents the character correctly in base 10
    static let lowercasedOffset: Int8 = 0x61 &- 10

    // The same as lowercasedOffset, except for uppercased ASCII
    static let uppercasedOffset: Int8 = 0x41 &- 10
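
    // Worked example: ASCII `f` is 0x66 and 0x66 &- (0x61 &- 10) == 15,
    // which is the value of hexadecimal `f`; likewise `F` (0x46) &- (0x41 &- 10) == 15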
}

@@ -0,0 +1,21 @@
import Tokamak
import XCTest

final class ColorTests: XCTestCase {
    func testHexColors() {
        guard let color = Color(hex: "#FF00FF") else {
            XCTFail("Hexadecimal decoding failed")
            return
        }

        XCTAssertEqual(color.red, 1)
        XCTAssertEqual(color.green, 0)
        XCTAssertEqual(color.blue, 1)

        XCTAssertEqual(
            color,
            Color(hex: "FF00FF"),
            "The '#' before a hex code produced a different output than without it"
        )
    }
}
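
Not part of the commit, but a natural companion to the test above: malformed input makes the initializer return nil, and a hypothetical extra test case could pin that behavior down:

    func testInvalidHexColors() {
        // Wrong length, non-hex characters, and a lone '#' should all fail to decode
        XCTAssertNil(Color(hex: "#FF00F"))
        XCTAssertNil(Color(hex: "GGGGGG"))
        XCTAssertNil(Color(hex: "#"))
    }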