10
votes

I want to create an Integer-to-Hex function for all integer types.

For 1-byte Int8, it returns two hex digits, e.g. 0A

For 2-byte Int16, it returns four hex digits, e.g. 0A0B

For 8-byte Int64, it returns 16 hex digits, e.g. 0102030405060708

// Fixed-width hex dump of a platform-word Int: one loop pass per nibble
// (sizeof(Int)*2 of them), prepending each digit so the most significant
// nibble ends up first.
func hex(v: Int) -> String {
    var s = ""
    var i = v
    for _ in 0..<sizeof(Int)*2 {
        // Mask to the low nibble so %x emits exactly one digit.
        s = String(format: "%x", i & 0xF) + s
        i = i >> 4
    }
    return s
}

// Same nibble-by-nibble algorithm for Int64: always emits 16 hex digits.
func hex(v: Int64) -> String {
    var s = ""
    var i = v
    for _ in 0..<sizeof(Int64)*2 {
        s = String(format: "%x", i & 0xF) + s
        i = i >> 4
    }
    return s
}

// Int32 overload: emits 8 hex digits.
func hex(v: Int32) -> String {
    var s = ""
    var i = v
    for _ in 0..<sizeof(Int32)*2 {
        s = String(format: "%x", i & 0xF) + s
        i = i >> 4
    }
    return s
}

// Int16 overload: emits 4 hex digits.
func hex(v: Int16) -> String {
    var s = ""
    var i = v
    for _ in 0..<sizeof(Int16)*2 {
        s = String(format: "%x", i & 0xF) + s
        i = i >> 4
    }
    return s
}

// Int8 overload: emits 2 hex digits.
func hex(v: Int8) -> String {
    var s = ""
    var i = v
    for _ in 0..<sizeof(Int8)*2 {
        s = String(format: "%x", i & 0xF) + s
        i = i >> 4
    }
    return s
}

The above code works fine.

I then tried to create a generic version like this:

// NOTE: this is the version that fails to compile ("T is not convertible to
// Int"). As discussed in the answers below, IntegerType does not guarantee a
// `>>` operator, and `i & 0xF` has no concrete integer type that
// String(format:) can accept.
func hex<T: IntegerType>(v: T) -> String {
    var s = ""
    var i = v
    for _ in 0..<sizeof(T)*2 {
        s = String(format: "%x", i & 0xF) + s
        i = i >> 4
    }
    return s
}

When compiling this code, I got the error: T is not convertible to Int

What is the correct way to achieve this task?

5
Why don't you just use String(radix:)? Does the result that it gives seem so wrong? Added an answer suggesting this. Unfortunately your question does not include test data and desired results so it is difficult to guess precisely what you're after. - matt
You should accept an answer... - gsamaras

5 Answers

3
votes

A very simple solution is to convert the input value to IntMax with .toIntMax():

// Widen the generic value to IntMax (Int64) up front, so the loop body works
// on a concrete integer type that supports `&`, `>>=`, and String(format:).
func hex<T: IntegerType>(v: T) -> String {
    var s = ""
    var i = v.toIntMax()
    // sizeof(T), not sizeof(IntMax): pad to the *original* type's width.
    for _ in 0..<sizeof(T)*2 {
        s = String(format: "%x", i & 0xF) + s
        i >>= 4
    }
    return s
}

Note: This works with only 0...Int64.max values.


But, I would do:

// Single format call: "%0<width>x" zero-pads to sizeof(T)*2 hex digits.
// NOTE(review): per the caveat below, %x is a 32-bit conversion, so this is
// only reliable for values in 0...UInt32.max.
func hex<T: IntegerType>(v: T) -> String {
    return String(format:"%0\(sizeof(T) * 2)x", v.toIntMax())
}

Note: This works with only 0...UInt32.max values.


Added: This works with all available integer types/values.

// Nibble loop without `>>`: divides by 16 instead, so it needs no extra
// shift protocol and stays fully generic over IntegerType.
// NOTE(review): Swift's `/` truncates toward zero, so negative inputs lose
// their sign-extension nibbles (e.g. Int8(-1) would print "0F", not "FF") —
// the "works with all values" claim above deserves verification for negatives.
func hex<T:IntegerType>(var v:T) -> String {
    var s = ""
    for _ in 0..<sizeof(T) * 2 {
        s = String(format: "%X", (v & 0xf).toIntMax()) + s
        v /= 16
    }
    return s
}
  • .toIntMax() to cast T to concrete integer type.
  • / 16 instead of >> 4.
2
votes

It isn't clear from your question why you are not using the built-in initializer that already does this for you:

let i = // some kind of integer
var s = String(i, radix:16)

If you don't like the resulting format of s, it is surely a lot easier to uppercase it and pad it out with extra characters than to go through all the work you're doing here.

2
votes

The problem here is that while >> is defined for all the integer types, IntegerType does not guarantee its presence. IntegerType conforms to IntegerArithmeticType, which gives you +, - etc., and BitwiseOperationsType, which gives you &, | etc. But it doesn't look like >> is in either of them.

Bit of a bodge, but you could extend the integers with a new protocol, let's say Shiftable, and then require that:

// Workaround for `>>` missing from IntegerType: declare the shift operator in
// a new protocol that the concrete integer types already satisfy, then
// constrain the generic function to both protocols.
protocol Shiftable {
    func >>(lhs: Self, rhs: Self) -> Self
    // + other shifting operators
}

extension Int: Shiftable {
  // nothing actually needed here: Int already has >>, so conformance is free
}

extension Int16: Shiftable { } // etc

// still need IntegerType if you want to do other operations
// (or alternatively Shiftable could require IntegerType conformance)
func shiftIt<I: protocol<IntegerType, Shiftable>>(i: I) {
    println(i+1 >> 4)
}

shiftIt(5000)
shiftIt(5000 as Int16)

shiftIt(5000)
shiftIt(5000 as Int16)

edit: oops, it looks like there are similar troubles with String(format: ...); here is the best I could come up with:

edit2: as @rintaro points out, .toIntMax() is a way simpler solution to this, but it kinda takes the fun out of figuring out how to make it work totally generically :-)

// Fully generic digit lookup: avoids String(format:) entirely by mapping each
// nibble value (as type T) to its hex Character.
func hex<T: protocol<IntegerType,Shiftable>>(v: T) -> String {

    // In creating this dictionary, the IntegerLiterals should
    // be converted to type T, which means you can use a type
    // T to look them up.  Hopefully the optimizer will only
    // run this code once per version of this function...
    let hexVals: [T:Character] = [
        0:"0", 1:"1", 2:"2", 3:"3", 4:"4",
        5:"5", 6:"6", 7:"7", 8:"8", 9:"9",
        10:"A", 11:"B", 12:"C", 13:"D", 14:"E", 15:"F"
    ]

    var chars: [Character] = []
    var i = v
    for _ in 0..<sizeof(T)*2 {
        // The "?" fallback should be unreachable: i & 0xF is always 0...15.
        chars.append(hexVals[(i & 0xF)] ?? "?")
        i = i >> 4
    }
    // Digits were collected low-nibble-first, so reverse before joining.
    return String(lazy(chars).reverse())
}
0
votes

Thank you all for the input.

The first version of the generic functions I created was:

// First attempt, unsigned side: zero-pad to the type's width, and use the
// `ll` length modifier for 8-byte types (plain %x takes a 32-bit argument).
func hex<T: UnsignedIntegerType>(v: T) -> String {
    var fmt = "%0\(sizeof(T)*2)"
    fmt += (sizeof(T) > 4) ? "llx" : "x"
    return String(format: fmt, v.toUIntMax())
}

// First attempt, signed side: same format-string construction.
// NOTE(review): as the output below shows, %x sign-extends negative
// Int8/Int16 values to 32 bits, so those cases print too many digits.
func hex<T: SignedIntegerType>(v: T) -> String {
    var fmt = "%0\(sizeof(T)*2)"
    fmt += (sizeof(T) > 4) ? "llx" : "x"
    return String(format: fmt, v.toIntMax())
}

I used the following code to test the two functions

// Exercise both generic overloads at every width with the extreme (max/min)
// values of each signed and unsigned type.
println("=== 64-bit ===")
println(hex(UInt64.max))
println(hex(UInt64.min))
println(hex(Int64.max))
println(hex(Int64.min))

println("=== 32-bit ===")
println(hex(UInt32.max))
println(hex(UInt32.min))
println(hex(Int32.max))
println(hex(Int32.min))

println("=== 16-bit ===")
println(hex(UInt16.max))
println(hex(UInt16.min))
println(hex(Int16.max))
println(hex(Int16.min))

println("=== 8-bit ===")
println(hex(UInt8.max))
println(hex(UInt8.min))
println(hex(Int8.max))
println(hex(Int8.min))

The output for 16-bit and 8-bit negative integers is apparently wrong.

=== 64-bit ===
ffffffffffffffff
0000000000000000
7fffffffffffffff
8000000000000000
=== 32-bit ===
ffffffff
00000000
7fffffff
80000000
=== 16-bit ===
ffff
0000
7fff
ffff8000
=== 8-bit ===
ff
00
7f
ffffff80

This is caused by the %x specifier, which expects 32-bit integers only. It generates wrong output for negative Int8 and Int16.

// %x takes a 32-bit argument, so negative narrow integers sign-extend first:
String(format: "%x", Int16.min)   // outputs ffff8000
String(format: "%x", Int8.min)    // outputs ffffff80

The second approach is to use the bitwise operators instead:

// Second attempt, signed side: emit one nibble per format call so %x only
// ever sees values 0...15, sidestepping the 32-bit sign-extension problem.
func hex<T: SignedIntegerType>(v: T) -> String {
    var s = ""
    var i = v.toIntMax()
    for _ in 0..<sizeof(T)*2 {
        s = String(format: "%x", i & 0xF) + s
        i = i >> 4
    }
    return s
}

// Second attempt, unsigned side: identical loop over the UIntMax widening.
func hex<T: UnsignedIntegerType>(v: T) -> String {
    var s = ""
    var i = v.toUIntMax()
    for _ in 0..<sizeof(T)*2 {
        s = String(format: "%x", i & 0xF) + s
        i = i >> 4
    }
    return s
}

So far, they seem to work fine for all integers, negative and positive. The test code outputs:

=== 64-bit ===
ffffffffffffffff
0000000000000000
7fffffffffffffff
8000000000000000
=== 32-bit ===
ffffffff
00000000
7fffffff
80000000
=== 16-bit ===
ffff
0000
7fff
8000
=== 8-bit ===
ff
00
7f
80
0
votes

Another possible solution as a Swift 2 protocol extension method, using the print width modifier constants from <inttypes.h>:

// Swift 2: protocol extension that keys the format string on the value's byte
// size, using the <inttypes.h> print-width constants so each width gets the
// correct length modifier plus zero padding.
extension IntegerType where Self: CVarArgType {
    var hex : String {
        let format : String
        switch (sizeofValue(self)) {
        case 1:
            // 8-bit case splices the length modifier constant manually
            format = "%02" + __PRI_8_LENGTH_MODIFIER__ + "X"
        case 2:
            format = "%04" + PRIX16
        case 4:
            format = "%08" + PRIX32
        case 8:
            // 64-bit case likewise builds from the length-modifier constant
            format = "%016" + __PRI_64_LENGTH_MODIFIER__ + "X"
        default:
            fatalError("Unexpected integer size")
        }
        return String(format: format, self)
    }
}

This works correctly for the full range of all signed and unsigned integer types:

UInt8.max.hex // FF
Int8.max.hex  // 7F
Int8.min.hex  // 80

UInt16.max.hex // FFFF
Int16.max.hex  // 7FFF
Int16.min.hex  // 8000

UInt32.max.hex // FFFFFFFF
Int32.max.hex  // 7FFFFFFF
Int32.min.hex  // 80000000

UInt64.max.hex // FFFFFFFFFFFFFFFF
Int64.max.hex  // 7FFFFFFFFFFFFFFF
Int64.min.hex  // 8000000000000000

Update for Swift 3:

// Swift 3 rename of the Swift 2 version above: IntegerType -> Integer,
// CVarArgType -> CVarArg, sizeofValue(_:) -> MemoryLayout.size(ofValue:).
extension Integer where Self: CVarArg {
    var hex : String {
        let format : String
        switch MemoryLayout.size(ofValue: self) {
        case 1:
            format = "%02" + __PRI_8_LENGTH_MODIFIER__ + "X"
        case 2:
            format = "%04" + PRIX16
        case 4:
            format = "%08" + PRIX32
        case 8:
            format = "%016" + __PRI_64_LENGTH_MODIFIER__ + "X"
        default:
            fatalError("Unexpected integer size")
        }
        return String(format: format, self)
    }
}