 //
 //===----------------------------------------------------------------------===//

-public typealias BitFieldStorage = FixedWidthInteger & UnsignedInteger
+// Explore bit field refactor:
+// * requires variadic pack iteration
+// * requires no metadata-less variadic packs
+// - protocol BitField with (least|most) significant bit requirements
+// - FixedWidthInteger.subscript[(variadic T: BitField)] -> Storage
+
+extension FixedWidthInteger {
+  @inline(__always)
+  static func bitRangeWithinBounds(bits bitRange: Range<Int>) -> Bool {
+    bitRange.lowerBound >= 0 && bitRange.upperBound <= Self.bitWidth
+  }
+
+  subscript(bits bitRange: Range<Int>) -> Self {
+    @inline(__always) get {
+      precondition(Self.bitRangeWithinBounds(bits: bitRange))
+      let bitWidth = bitRange.upperBound - bitRange.lowerBound
+      let bitMask: Self = (1 << bitWidth) &- 1
+      return (self >> bitRange.lowerBound) & bitMask
+    }
+
+    @inline(__always) set {
+      precondition(Self.bitRangeWithinBounds(bits: bitRange))
+      let bitWidth = bitRange.upperBound - bitRange.lowerBound
+      let bitMask: Self = (1 << bitWidth) &- 1
+      self &= ~(bitMask << bitRange.lowerBound)
+      self |= (newValue & bitMask) << bitRange.lowerBound
+    }
+  }
+}
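+
+// Illustrative usage (values are arbitrary, chosen only to show the
+// behavior): read a 4-bit slice, then overwrite the low nibble in place.
+//
+//   var word: UInt8 = 0b1010_0110
+//   _ = word[bits: 4..<8]      // 0b1010
+//   word[bits: 0..<4] = 0b1111 // word == 0b1010_1111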
+
+extension FixedWidthInteger {
+  static func bitRangesCoalesced(bits bitRanges: [Range<Int>]) -> Bool {
+    let bitRanges = bitRanges.sorted { $0.lowerBound < $1.lowerBound }
+    var lowerBound = -1
+    for bitRange in bitRanges {
+      // Specifically ensure that the bit ranges don't overlap, e.g. the
+      // following ranges are not valid: 0..<1, 0..<2. This ensures ranges
+      // are coalesced before iterating, reducing the number of mask and
+      // shift operations needed.
+      guard lowerBound <= bitRange.lowerBound else { return false }
+      lowerBound = bitRange.upperBound
+    }
+    return true
+  }
+
+  subscript(bits bitRanges: [Range<Int>]) -> Self {
+    @inline(__always) get {
+      precondition(Self.bitRangesCoalesced(bits: bitRanges))
+
+      var currentShift = 0
+      var value: Self = 0
+      for bitRange in bitRanges {
+        let valueSlice = self[bits: bitRange]
+        value |= valueSlice << currentShift
+        let bitWidth = bitRange.upperBound - bitRange.lowerBound
+        currentShift += bitWidth
+      }
+      return value
+    }
+
+    @inline(__always) set {
+      precondition(Self.bitRangesCoalesced(bits: bitRanges))
+
+      var newValue = newValue
+      for bitRange in bitRanges {
+        self[bits: bitRange] = newValue
+        let bitWidth = bitRange.upperBound - bitRange.lowerBound
+        newValue >>= bitWidth
+      }
+    }
+  }
+}
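+
+// Illustrative usage (values are arbitrary): slices are packed from the
+// lowest range upward, so 0..<2 fills value bits 0..<2 and 4..<6 fills
+// value bits 2..<4.
+//
+//   let word: UInt8 = 0b0110_0011
+//   _ = word[bits: [0..<2, 4..<6]] // 0b1011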

 public protocol BitField {
-  associatedtype RawStorage: BitFieldStorage
+  associatedtype Storage: FixedWidthInteger & UnsignedInteger
+
+  static func insert(_ value: Storage, into storage: inout Storage)
+  static func extract(from storage: Storage) -> Storage
+}
+
+public protocol ContiguousBitField: BitField {
   static var bitRange: Range<Int> { get }
   static var bitWidth: Int { get }
   static var bitOffset: Int { get }
-  static var bitMask: RawStorage { get }
+  static var bitMask: Storage { get }
+}
+
+extension ContiguousBitField {
+  public static var bitWidth: Int {
+    Self.bitRange.upperBound - Self.bitRange.lowerBound
+  }
+  public static var bitOffset: Int { Self.bitRange.lowerBound }
+  public static var bitMask: Storage { (1 << Self.bitWidth) &- 1 }
+}
+
+extension ContiguousBitField {
+  // FIXME: value.bitWidth <= Self.bitWidth <= Storage.bitWidth
+  @inline(__always)
+  public static func insert(_ value: Storage, into storage: inout Storage) {
+    storage[bits: Self.bitRange] = value
+  }
+
+  @inline(__always)
+  public static func extract(from storage: Storage) -> Storage {
+    storage[bits: Self.bitRange]
+  }
 }
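+
+// Illustrative sketch: `Mode` is a hypothetical field occupying bits 8..<12
+// of a 32-bit register; `bitWidth`, `bitOffset`, `bitMask`, `insert`, and
+// `extract` all come from the extensions above.
+//
+//   enum Mode: ContiguousBitField {
+//     typealias Storage = UInt32
+//     static let bitRange = 8..<12
+//   }
+//
+//   var register: UInt32 = 0
+//   Mode.insert(0b1010, into: &register) // register == 0b1010 << 8
+//   _ = Mode.extract(from: register)     // 0b1010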

-extension BitField {
-  public static var bitWidth: Int { self.bitRange.count }
-  public static var bitOffset: Int { self.bitRange.lowerBound }
-  public static var bitMask: RawStorage { (1 << self.bitWidth) - 1 }
+public protocol DiscontiguousBitField: BitField {
+  /// - Precondition: Bit ranges must not overlap and must be sorted from
+  ///   lowest to highest bit index.
+  static var bitRanges: [Range<Int>] { get }
+}
+
+extension DiscontiguousBitField {
+  @inline(__always)
+  public static func insert(_ value: Storage, into storage: inout Storage) {
+    storage[bits: Self.bitRanges] = value
+  }
+
+  @inline(__always)
+  public static func extract(from storage: Storage) -> Storage {
+    storage[bits: Self.bitRanges]
+  }
 }
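+
+// Illustrative sketch: `Channel` is a hypothetical field split across bits
+// 0..<2 and 6..<8; the two slices round-trip as one logical 4-bit value.
+//
+//   enum Channel: DiscontiguousBitField {
+//     typealias Storage = UInt32
+//     static let bitRanges = [0..<2, 6..<8]
+//   }
+//
+//   var register: UInt32 = 0
+//   Channel.insert(0b1110, into: &register) // register == 0b1100_0010
+//   _ = Channel.extract(from: register)     // 0b1110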