// Storage word type; const-qualified when IsConst so a read-only view cannot write.
62 using value_type = std::conditional_t<IsConst, VT const, VT>;
63 using size_type = size_t;
// Flat index type used both for bit positions and for storage-word positions.
64 using index_type = size_t;
65 using pointer = value_type*;
// Number of bits per storage word (sizeof * 8; assumes 8-bit bytes).
67 static index_type
constexpr kValueSize =
sizeof(value_type) * 8;
// A typed 1 so that shifts (kOne << bit_pos) are performed at value_type width,
// not at plain-int width.
68 static index_type
constexpr kOne = 1;
// NOTE(review): these appear to be the members of a nested bit-position struct
// (its declaration is outside this chunk — confirm).
// Index of the storage word holding the bit.
71 index_type int_pos{0};
// Index of the bit within that word (0 <= bit_pos < kValueSize).
72 index_type bit_pos{0};
// Non-owning pointer to the backing storage words (set from a caller-provided
// span in the constructor); nullptr for a default-constructed, empty field.
76 value_type* bits_{
nullptr};
// Number of storage words available at bits_ (words, not bits).
77 size_type n_values_{0};
// The bit arithmetic (kOne << bit_pos, masks, atomic word updates) relies on
// unsigned semantics; signed storage would make shifts implementation-defined.
static_assert(!std::is_signed<VT>::value,
              "Must use an unsigned type as the underlying storage.");
// Split a flat bit index into (storage word, bit-within-word) coordinates.
// NOTE(review): fragment — the enclosing ToBitPos signature and the declaration
// of pos_v fall outside the visible portion of this chunk.
86 pos_v.int_pos = pos / kValueSize;
87 pos_v.bit_pos = pos % kValueSize;
// Default-constructed view is empty: bits_ == nullptr, n_values_ == 0.
BitFieldContainer() = default;
// Wrap an externally owned span of storage words; the view stores only the
// span's data pointer and word count and does not take ownership.
XGBOOST_DEVICE explicit BitFieldContainer(common::Span<value_type> bits)
    : bits_{bits.data()}, n_values_{bits.size()} {}
// Views are cheap handles — copy/move construction and assignment are all
// trivially defaulted.
BitFieldContainer(BitFieldContainer const& other) = default;
BitFieldContainer(BitFieldContainer&& other) = default;
BitFieldContainer& operator=(BitFieldContainer const& that) = default;
BitFieldContainer& operator=(BitFieldContainer&& that) = default;
// Number of storage words (value_type) needed to hold `size` bits, rounding
// up so a partially used trailing word is still counted.
// NOTE(review): the closing brace was dropped by the extraction of this chunk
// (original line numbering jumps 107 -> 109); restored here.
XGBOOST_DEVICE static size_t ComputeStorageSize(index_type size) {
  return common::DivRoundUp(size, kValueSize);
}
109#if defined(__CUDA_ARCH__)
// Device-side OR-assign: each thread merges one storage word from rhs.
// Intended to be called from a kernel with at least min_size threads.
110 __device__ BitFieldContainer& operator|=(BitFieldContainer
const& rhs) {
// Flat 1-D global thread index.
111 auto tid = blockIdx.x * blockDim.x + threadIdx.x;
// Only the overlapping prefix of the two fields is merged.
112 size_t min_size = min(
NumValues(), rhs.NumValues());
113 if (tid < min_size) {
114 Data()[tid] |= rhs.Data()[tid];
// NOTE(review): the closing braces and the `return *this;` of this operator
// fall outside the visible portion of this chunk.
// Host-side counterpart: sequential word-wise OR over the overlapping prefix.
119 BitFieldContainer& operator|=(BitFieldContainer
const& rhs) {
120 size_t min_size = std::min(
NumValues(), rhs.NumValues());
121 for (
size_t i = 0; i < min_size; ++i) {
122 Data()[i] |= rhs.Data()[i];
// NOTE(review): loop close and `return *this;` are outside this view.
128#if defined(__CUDA_ARCH__)
// Device-side AND-assign: each thread intersects one storage word with rhs.
// Intended to be called from a kernel with at least min_size threads.
129 __device__ BitFieldContainer& operator&=(BitFieldContainer
const& rhs) {
// Only the overlapping prefix of the two fields is intersected.
130 size_t min_size = min(
NumValues(), rhs.NumValues());
// Flat 1-D global thread index.
131 auto tid = blockIdx.x * blockDim.x + threadIdx.x;
132 if (tid < min_size) {
133 Data()[tid] &= rhs.Data()[tid];
// NOTE(review): the closing braces and the `return *this;` of this operator
// fall outside the visible portion of this chunk.
// Host-side counterpart: sequential word-wise AND over the overlapping prefix.
138 BitFieldContainer& operator&=(BitFieldContainer
const& rhs) {
139 size_t min_size = std::min(
NumValues(), rhs.NumValues());
140 for (
size_t i = 0; i < min_size; ++i) {
141 Data()[i] &= rhs.Data()[i];
// NOTE(review): loop close and `return *this;` are outside this view.
147#if defined(__CUDA_ARCH__)
// Device-side Set: turn on the bit at flat position `pos`.
148 __device__
auto Set(index_type pos)
noexcept(
true) {
// Translate the flat bit index into (word, bit) coordinates, adjusted for the
// field's bit ordering by Direction::Shift.
149 Pos pos_v = Direction::Shift(ToBitPos(pos));
150 value_type& value = Data()[pos_v.int_pos];
151 value_type set_bit = kOne << pos_v.bit_pos;
// Select the atomic-capable integer type matching the storage word width.
152 using Type =
typename dh::detail::AtomicDispatcher<
sizeof(value_type)>::Type;
// atomicOr so concurrent threads setting different bits of the same word do
// not lose each other's updates.
153 atomicOr(
reinterpret_cast<Type *
>(&value), set_bit);
// NOTE(review): the method's closing brace is outside this view.
// Device-side Clear: turn off the bit at flat position `pos`.
155 __device__
void Clear(index_type pos)
noexcept(
true) {
156 Pos pos_v = Direction::Shift(ToBitPos(pos));
157 value_type& value = Data()[pos_v.int_pos];
// Mask with all bits set except the target bit.
158 value_type clear_bit = ~(kOne << pos_v.bit_pos);
// Select the atomic-capable integer type matching the storage word width.
159 using Type =
typename dh::detail::AtomicDispatcher<
sizeof(value_type)>::Type;
// atomicAnd so concurrent threads clearing different bits of the same word do
// not lose each other's updates.
160 atomicAnd(
reinterpret_cast<Type *
>(&value), clear_bit);
// NOTE(review): the method's closing brace is outside this view.
// Host-side Set: single-threaded, non-atomic variant.
163 void Set(index_type pos)
noexcept(
true) {
164 Pos pos_v = Direction::Shift(ToBitPos(pos));
165 value_type& value = Data()[pos_v.int_pos];
166 value_type set_bit = kOne << pos_v.bit_pos;
// NOTE(review): the statement applying set_bit to value (presumably
// `value |= set_bit;`) and the closing brace fall outside this chunk.
// Host-side Clear: single-threaded, non-atomic variant.
169 void Clear(index_type pos)
noexcept(
true) {
170 Pos pos_v = Direction::Shift(ToBitPos(pos));
171 value_type& value = Data()[pos_v.int_pos];
// Mask with all bits set except the target bit.
172 value_type clear_bit = ~(kOne << pos_v.bit_pos);
// NOTE(review): the statement applying clear_bit to value (presumably
// `value &= clear_bit;`) and the closing brace fall outside this chunk.
// NOTE(review): fragment — this reads as the body of a Check(Pos) overload
// whose signature is outside the visible portion of this chunk.
// Adjust the raw (word, bit) coordinates for the field's bit ordering.
178 pos_v = Direction::Shift(pos_v);
180 value_type
const value = Data()[pos_v.int_pos];
181 value_type
const test_bit = kOne << pos_v.bit_pos;
// Non-zero exactly when the tested bit is set.
182 value_type result = test_bit & value;
183 return static_cast<bool>(result);
// Test whether the bit at flat position `pos` is set.
185 [[nodiscard]]
XGBOOST_DEVICE bool Check(index_type pos)
const noexcept(
true) {
186 Pos pos_v = ToBitPos(pos);
// NOTE(review): the delegating return (presumably `return Check(pos_v);`) and
// the closing brace fall outside this chunk.
// Raw pointer to the underlying storage words; nullptr for a
// default-constructed, empty field.
XGBOOST_DEVICE pointer Data() const noexcept(true) { return bits_; }
// Debug printer: emits the storage-word count, then each word rendered as a
// kValueSize-wide bitset.
203 inline friend std::ostream& operator<<(std::ostream& os,
204 BitFieldContainer<VT, Direction, IsConst> field) {
// NOTE(review): the start of the streaming expression (the line beginning
// `os <<`) is missing from this chunk.
206 <<
"storage size: " << field.NumValues() <<
"\n";
207 for (
typename common::Span<value_type>::index_type i = 0; i < field.NumValues(); ++i) {
208 std::bitset<BitFieldContainer<VT, Direction, IsConst>::kValueSize> bset(field.Data()[i]);
// NOTE(review): the loop body continuation, return, and closing braces fall
// outside the visible portion of this chunk.