// ---------------------------------------------------------------------------
// Public type aliases and compile-time traits.
// NOTE(review): this is an extracted fragment -- several alias lines between
// the embedded original line numbers are missing from this view.
// ---------------------------------------------------------------------------
234 using host_mirror_space =
235 typename ViewTraits<Key, Device, void, void>::host_mirror_space;
// The "declared" key/value types may carry const; the plain aliases strip
// const and the const_ aliases force it, so both flavors are available.
242 using key_type = std::remove_const_t<declared_key_type>;
243 using const_key_type = std::add_const_t<key_type>;
246 using declared_value_type = Value;
247 using value_type = std::remove_const_t<declared_value_type>;
248 using const_value_type = std::add_const_t<value_type>;
250 using device_type = Device;
251 using execution_space =
typename Device::execution_space;
// A void value type means this container is used as a set, not a map.
268 static constexpr bool is_set = std::is_void_v<value_type>;
269 static constexpr bool has_const_key =
270 std::is_same_v<const_key_type, declared_key_type>;
// Sets count as having const values (there is no value payload to mutate).
271 static constexpr bool has_const_value =
272 is_set || std::is_same_v<const_value_type, declared_value_type>;
// Insertable: keys are mutable (and, for maps, values are mutable as well).
274 static constexpr bool is_insertable_map =
275 !has_const_key && (is_set || !has_const_value);
// Modifiable: the key set is fixed, but values may still be written.
276 static constexpr bool is_modifiable_map = has_const_key && !has_const_value;
277 static constexpr bool is_const_map = has_const_key && has_const_value;
284 using histogram_type = Impl::UnorderedMapHistogram<const_map_type>;
// Sets store a dummy int payload; maps store the declared value type.
290 using impl_value_type = std::conditional_t<is_set, int, declared_value_type>;
// NOTE(review): the conditional_t branches of the view aliases below are
// truncated in this extracted fragment -- presumably they select const vs.
// non-const element views depending on the map traits; confirm against the
// full header.
292 using key_type_view = std::conditional_t<
296 using value_type_view = std::conditional_t<
297 is_insertable_map || is_modifiable_map,
301 using size_type_view = std::conditional_t<
305 using bitset_type = std::conditional_t<is_insertable_map, Bitset<Device>,
// Indices into the m_scalars view: one int flag per slot (see num_scalars).
308 enum { modified_idx = 0, erasable_idx = 1, failed_insert_idx = 2 };
309 enum { num_scalars = 3 };
315 using default_op_type =
// ---------------------------------------------------------------------------
// Constructor fragment (many interior lines missing from this view).
// Visible behavior: rejects construction of a non-insertable map at runtime,
// static_asserts on the allocation properties, labels every internal view
// via append_to_label, and initializes the hash-list / next-index views to
// invalid_index -- on the caller-provided execution space when one was
// passed in the allocation properties, otherwise via blocking deep_copy.
// ---------------------------------------------------------------------------
330 template <
class... P>
334 : m_bounded_insert(
true), m_hasher(
hasher), m_equal_to(equal_to) {
// Const-key maps cannot be filled after construction, so refuse early.
335 if (!is_insertable_map) {
336 Kokkos::Impl::throw_runtime_exception(
337 "Cannot construct a non-insertable (i.e. const key_type) "
343 static_assert(alloc_prop_t::initialize,
344 "Allocation property 'initialize' should be true.");
346 !alloc_prop_t::has_pointer,
347 "Allocation properties should not contain the 'pointer' property.");
// Supply a default "UnorderedMap" label when the caller gave none.
352 Impl::with_properties_if_unset(
arg_prop, std::string(
"UnorderedMap"));
359 Impl::get_property<Impl::LabelTag>(
prop_copy) +
" - size"));
361 m_available_indexes =
362 bitset_type(Kokkos::Impl::append_to_label(
prop_copy,
" - bitset"),
365 m_hash_lists = size_type_view(
369 m_next_index = size_type_view(
374 m_keys = key_type_view(Kokkos::Impl::append_to_label(
prop_copy,
" - keys"),
378 value_type_view(Kokkos::Impl::append_to_label(
prop_copy,
" - values"),
382 scalars_view(Kokkos::Impl::append_to_label(
prop_copy,
" - scalars"));
// Prefer the user-supplied execution space (async-capable deep_copy);
// the fallback overloads below are the synchronous variants.
390 if constexpr (alloc_prop_t::has_execution_space) {
391 const auto &space = Impl::get_property<Impl::ExecutionSpaceTag>(
arg_prop);
392 Kokkos::deep_copy(space, m_hash_lists, invalid_index);
393 Kokkos::deep_copy(space, m_next_index, invalid_index);
395 Kokkos::deep_copy(m_hash_lists, invalid_index);
396 Kokkos::deep_copy(m_next_index, invalid_index);
400 void reset_failed_insert_flag() { reset_flag(failed_insert_idx); }
402 histogram_type get_histogram() {
return histogram_type(*
this); }
// clear() body fragment: restore the map to its freshly-constructed empty
// state -- re-enable bounded insertion, wipe the occupancy bitset, reset the
// hash-list/next-index chains to invalid_index, default-construct all keys,
// and zero the scalar flags.
406 m_bounded_insert =
true;
410 m_available_indexes.clear();
412 Kokkos::deep_copy(m_hash_lists, invalid_index);
413 Kokkos::deep_copy(m_next_index, invalid_index);
// Fill the key view with a default-constructed key (no "empty" sentinel
// type exists, so the default value marks unused slots).
415 const key_type
tmp = key_type();
416 Kokkos::deep_copy(m_keys,
tmp);
418 Kokkos::deep_copy(m_scalars, 0);
// True when the backing views exist; for sets the values view is irrelevant
// and is skipped via the is_set short-circuit.
422 KOKKOS_INLINE_FUNCTION
constexpr bool is_allocated()
const {
423 return (m_keys.is_allocated() && (is_set || m_values.is_allocated()) &&
424 m_scalars.is_allocated());
// rehash() fragment: only insertable maps can be rehashed. Visible logic
// builds a temporary map with unbounded insertion, re-inserts via
// Impl::UnorderedMapRehash, then restores the caller's bounded_insert mode.
443 if (!is_insertable_map)
return false;
452 tmp.m_bounded_insert =
false;
453 Impl::UnorderedMapRehash<insertable_map_type>
f(
tmp, *
this);
456 tmp.m_bounded_insert = bounded_insert;
// size-sync fragment: cache the device-side occupancy count on the host
// and acknowledge the modification flag.
473 m_size() = m_available_indexes.count();
474 reset_flag(modified_idx);
// erasable(): non-insertable maps can never enter erase mode, so the flag
// is only consulted for insertable maps.
486 bool erasable()
const {
487 return is_insertable_map ? get_flag(erasable_idx) :
false;
// begin_erase() fragment: transition into erase mode. Fences the execution
// space before raising the erasable flag so in-flight device work settles.
491 bool result = !erasable();
492 if (is_insertable_map && result) {
493 execution_space().fence(
494 "Kokkos::UnorderedMap::begin_erase: fence before setting erasable "
496 set_flag(erasable_idx);
// end_erase() fragment: leave erase mode -- fence, run the
// Impl::UnorderedMapErase functor to reclaim erased slots, fence again,
// then lower the erasable flag.
502 bool result = erasable();
503 if (is_insertable_map && result) {
504 execution_space().fence(
505 "Kokkos::UnorderedMap::end_erase: fence before erasing");
506 Impl::UnorderedMapErase<declared_map_type> f(*
this);
508 execution_space().fence(
509 "Kokkos::UnorderedMap::end_erase: fence after erasing");
510 reset_flag(erasable_idx);
// Fragments of two small accessors (bodies not visible here) followed by the
// device-callable insert(). Large interior runs are missing from this view.
519 KOKKOS_FORCEINLINE_FUNCTION
532 KOKKOS_INLINE_FUNCTION
// insert(): thread-safe device-side insertion. The InsertOpType customizes
// what happens to the value when the key already exists; sets must use the
// default op (enforced by the static_assert below).
549 template <
typename InsertOpType = default_op_type>
550 KOKKOS_INLINE_FUNCTION insert_result
551 insert(key_type
const &
k, impl_value_type
const &
v = impl_value_type(),
553 if constexpr (is_set) {
554 static_assert(std::is_same_v<InsertOpType, default_op_type>,
555 "Insert Operations are not supported on sets.");
// Bail out for non-insertable maps, zero capacity, or while the map is in
// erase mode (erasable flag raised).
560 if (!is_insertable_map ||
capacity() == 0
u ||
561 m_scalars((
int)erasable_idx)) {
565 if (!m_scalars((
int)modified_idx)) {
566 m_scalars((
int)modified_idx) =
true;
588 : m_available_indexes.max_hint();
601#ifdef KOKKOS_ENABLE_SYCL
// Prefetch the candidate key slot; the ternary keeps the address valid
// even when curr is the invalid sentinel.
607 KOKKOS_IMPL_NONTEMPORAL_PREFETCH_LOAD(
608 &m_keys[
curr != invalid_index ?
curr : 0]);
// Walk the per-bucket linked list until the key is found or the list ends.
612 while (
curr != invalid_index && !m_equal_to(
614 Kokkos::atomic_load(&m_keys[
curr])
620 result.increment_list_position();
623#ifdef KOKKOS_ENABLE_SYCL
628 KOKKOS_IMPL_NONTEMPORAL_PREFETCH_LOAD(
629 &m_keys[
curr != invalid_index ?
curr : 0]);
// Key already present: release the speculatively claimed index. A failed
// bitset reset here indicates internal corruption, hence the printf.
634 if (
curr != invalid_index) {
639 if (!m_available_indexes.reset(
new_index)) {
640 Kokkos::printf(
"Unable to free existing\n");
645 if constexpr (!is_set) {
// Otherwise try to claim a fresh slot via the availability bitset, then
// publish the key/value (SYCL uses explicit atomic stores).
666 }
else if (m_available_indexes.set(
index_hint)) {
669 KOKKOS_IMPL_NONTEMPORAL_PREFETCH_STORE(&m_keys[
new_index]);
671#ifdef KOKKOS_ENABLE_SYCL
678 KOKKOS_IMPL_NONTEMPORAL_PREFETCH_STORE(&m_values[
new_index]);
679#ifdef KOKKOS_ENABLE_SYCL
680 Kokkos::atomic_store(&m_values[
new_index],
v);
686#ifndef KOKKOS_ENABLE_SYCL
// Link the new node into the bucket list with a CAS on the list head.
699 curr == atomic_compare_exchange(
// erase(): device-callable removal of key k. Only legal between
// begin_erase()/end_erase() (erasable flag set) on a non-empty insertable
// map. Interior lines are missing from this extracted view.
712 KOKKOS_INLINE_FUNCTION
713 bool erase(key_type
const &
k)
const {
716 if (is_insertable_map && 0
u <
capacity() && m_scalars((
int)erasable_idx)) {
// Record that the map changed so host-side size() can resynchronize.
717 if (!m_scalars((
int)modified_idx)) {
718 m_scalars((
int)modified_idx) =
true;
// Locate the key and mark its slot free; actual unlinking is deferred to
// end_erase().
721 size_type index =
find(k);
722 if (valid_at(index)) {
723 m_available_indexes.reset(index);
// find() fragment: hash the key to pick a bucket, then walk that bucket's
// linked list (prefetching each key slot) until the key matches or the
// invalid_index sentinel terminates the list.
738 KOKKOS_INLINE_FUNCTION
741 ? m_hash_lists(m_hasher(
k) % m_hash_lists.extent(0))
744 KOKKOS_IMPL_NONTEMPORAL_PREFETCH_LOAD(
745 &m_keys[
curr != invalid_index ?
curr : 0]);
746 while (
curr != invalid_index && !m_equal_to(m_keys[
curr],
k)) {
747 KOKKOS_IMPL_NONTEMPORAL_PREFETCH_LOAD(
748 &m_keys[
curr != invalid_index ?
curr : 0]);
759 KOKKOS_INLINE_FUNCTION
// value_at() fragment: SFINAE-disabled for sets (void value_type); returns
// by value for const-value maps and by reference otherwise.
770 template <
typename Dummy = value_type>
771 KOKKOS_FORCEINLINE_FUNCTION std::enable_if_t<
772 !std::is_void_v<Dummy>,
773 std::conditional_t<has_const_value, impl_value_type, impl_value_type &>>
785 KOKKOS_FORCEINLINE_FUNCTION
791 KOKKOS_FORCEINLINE_FUNCTION
792 bool valid_at(size_type
i)
const {
return m_available_indexes.test(
i); }
// Converting shallow-copy constructor fragment: accepts a map over
// compatible (per Impl::UnorderedMapCanAssign) key/value types on the same
// Device, aliasing the source's views rather than copying data.
794 template <
typename SKey,
typename SValue>
796 UnorderedMap<SKey, SValue, Device, Hasher, EqualTo>
const &src,
798 Impl::UnorderedMapCanAssign<declared_key_type, declared_value_type,
799 SKey, SValue>::value,
801 : m_bounded_insert(src.m_bounded_insert),
802 m_hasher(src.m_hasher),
803 m_equal_to(src.m_equal_to),
805 m_available_indexes(src.m_available_indexes),
806 m_hash_lists(src.m_hash_lists),
807 m_next_index(src.m_next_index),
809 m_values(src.m_values),
810 m_scalars(src.m_scalars) {}
// Converting shallow-assignment fragment: same compatibility constraint as
// the converting constructor; every member is view-assigned (aliasing), no
// element data is copied.
812 template <
typename SKey,
typename SValue>
814 Impl::UnorderedMapCanAssign<declared_key_type, declared_value_type, SKey,
817 operator=(UnorderedMap<SKey, SValue, Device, Hasher, EqualTo>
const &src) {
818 m_bounded_insert = src.m_bounded_insert;
819 m_hasher = src.m_hasher;
820 m_equal_to = src.m_equal_to;
822 m_available_indexes = src.m_available_indexes;
823 m_hash_lists = src.m_hash_lists;
824 m_next_index = src.m_next_index;
826 m_values = src.m_values;
827 m_scalars = src.m_scalars;
// Cross-device copy entry fragment (signature enable_if'd on matching
// non-const key/value types). The visible guard skips work when source and
// destination already share storage; body continuation is not in this view.
833 template <
typename SKey,
typename SValue,
typename SDevice>
834 std::enable_if_t<std::is_same_v<std::remove_const_t<SKey>, key_type> &&
835 std::is_same_v<std::remove_const_t<SValue>, value_type>>
837 UnorderedMap<SKey, SValue, SDevice, Hasher, EqualTo>
const &src) {
838 if (m_hash_lists.data() != src.m_hash_lists.data()) {
// Allocation fragment for cross-device copies: builds a temporary
// insertable map sized to match src -- uninitialized views (the subsequent
// deep copy overwrites everything) except m_scalars, which is
// zero-initialized by its default-allocating constructor.
846 template <
typename SKey,
typename SValue,
typename SDevice>
847 std::enable_if_t<std::is_same_v<std::remove_const_t<SKey>, key_type> &&
848 std::is_same_v<std::remove_const_t<SValue>, value_type>>
850 UnorderedMap<SKey, SValue, SDevice, Hasher, EqualTo>
const &src) {
851 insertable_map_type tmp;
853 tmp.m_bounded_insert = src.m_bounded_insert;
854 tmp.m_hasher = src.m_hasher;
855 tmp.m_equal_to = src.m_equal_to;
856 tmp.m_size() = src.m_size();
857 tmp.m_available_indexes = bitset_type(src.capacity());
858 tmp.m_hash_lists = size_type_view(
859 view_alloc(WithoutInitializing,
"UnorderedMap hash list"),
860 src.m_hash_lists.extent(0));
861 tmp.m_next_index = size_type_view(
862 view_alloc(WithoutInitializing,
"UnorderedMap next index"),
863 src.m_next_index.extent(0));
865 key_type_view(view_alloc(WithoutInitializing,
"UnorderedMap keys"),
866 src.m_keys.extent(0));
868 value_type_view(view_alloc(WithoutInitializing,
"UnorderedMap values"),
869 src.m_values.extent(0));
870 tmp.m_scalars = scalars_view(
"UnorderedMap scalars");
// deep_copy_view() fragment: element-wise copy from a map on a possibly
// different device. Post-deprecation builds require equal capacities
// (KOKKOS_EXPECTS); the deprecated allocating path only logs a warning.
877 template <
typename SKey,
typename SValue,
typename SDevice>
878 std::enable_if_t<std::is_same_v<std::remove_const_t<SKey>, key_type> &&
879 std::is_same_v<std::remove_const_t<SValue>, value_type>>
881 UnorderedMap<SKey, SValue, SDevice, Hasher, EqualTo>
const &src) {
882#ifndef KOKKOS_ENABLE_DEPRECATED_CODE_4
884 KOKKOS_EXPECTS(
capacity() == src.capacity());
888#ifdef KOKKOS_ENABLE_DEPRECATION_WARNINGS
889 Kokkos::Impl::log_warning(
890 "Warning: deep_copy_view() allocating views is deprecated. Must call "
891 "with UnorderedMaps of identical capacity, or use "
892 "create_copy_view().\n");
// Skip the copy entirely when both maps already alias the same storage.
897 if (m_hash_lists.data() != src.m_hash_lists.data()) {
898 Kokkos::deep_copy(m_available_indexes, src.m_available_indexes);
// All view copies are enqueued on one execution-space instance; the fence
// (message visible at the end) orders them before returning.
901 typename device_type::execution_space exec_space{};
903 Kokkos::deep_copy(exec_space, m_hash_lists, src.m_hash_lists);
904 Kokkos::deep_copy(exec_space, m_next_index, src.m_next_index);
905 Kokkos::deep_copy(exec_space, m_keys, src.m_keys);
907 Kokkos::deep_copy(exec_space, m_values, src.m_values);
909 Kokkos::deep_copy(exec_space, m_scalars, src.m_scalars);
912 "Kokkos::UnorderedMap::deep_copy_view: fence after copy to dst.");
918 bool modified()
const {
return get_flag(modified_idx); }
// set_flag(): write 1 into m_scalars(flag) on the device via a subview,
// then fence so the host-visible state is settled (fence message truncated
// in this fragment).
920 void set_flag(
int flag)
const {
921 auto scalar = Kokkos::subview(m_scalars, flag);
922 Kokkos::deep_copy(
typename device_type::execution_space{}, scalar,
923 static_cast<int>(
true));
925 "Kokkos::UnorderedMap::set_flag: fence after copying flag from "
// reset_flag(): same mechanism, writing 0.
929 void reset_flag(
int flag)
const {
930 auto scalar = Kokkos::subview(m_scalars, flag);
931 Kokkos::deep_copy(
typename device_type::execution_space{}, scalar,
932 static_cast<int>(
false));
934 "Kokkos::UnorderedMap::reset_flag: fence after copying flag from "
// get_flag(): copy the device-side flag value back to a host result
// (declaration of `result` is outside this fragment), fenced before use.
938 bool get_flag(
int flag)
const {
939 const auto scalar = Kokkos::subview(m_scalars, flag);
941 Kokkos::deep_copy(
typename device_type::execution_space{}, result, scalar);
943 "Kokkos::UnorderedMap::get_flag: fence after copy to return value in "
// calculate_capacity() fragment: pads the requested capacity (visible
// factor 7/6, i.e. ~17% headroom, rounded up in 128-slot chunks) --
// remainder of the expression is not in this view.
948 static uint32_t calculate_capacity(uint32_t capacity_hint) {
951 ? ((
static_cast<uint32_t
>(7ull * capacity_hint / 6u) + 127u) /
// -- Data members ---------------------------------------------------------
// When true, insert() refuses to grow past capacity (set by constructor,
// temporarily cleared during rehash).
958 bool m_bounded_insert;
959 hasher_type m_hasher;
960 equal_to_type m_equal_to;
// Host-resident scalar holding the cached entry count (synced from
// m_available_indexes.count()).
961 using shared_size_t = View<size_type, Kokkos::DefaultHostExecutionSpace>;
962 shared_size_t m_size;
// One bit per slot: set = slot holds a live entry.
963 bitset_type m_available_indexes;
// Per-bucket list heads (indexed by hash % extent).
964 size_type_view m_hash_lists;
// Next-pointer per slot, forming the bucket linked lists.
965 size_type_view m_next_index;
966 key_type_view m_keys;
967 value_type_view m_values;
// num_scalars int flags: modified / erasable / failed_insert.
968 scalars_view m_scalars;
// Friends: sibling UnorderedMap instantiations (for the converting
// copy/assign paths) and the Impl helper functors need direct access to the
// private views above.
970 template <
typename KKey,
typename VValue,
typename DDevice,
typename HHash,
972 friend class UnorderedMap;
974 template <
typename UMap>
975 friend struct Impl::UnorderedMapErase;
977 template <
typename UMap>
978 friend struct Impl::UnorderedMapHistogram;
980 template <
typename UMap>
981 friend struct Impl::UnorderedMapPrint;