add arrayDims() getter to all mappings
This avoids accessing the arrayDimsSize member and thus allows mappings that do not store their size in the mapping object itself (illustrated in the sketch after the file summary below).
bernhardmgruber committed Apr 23, 2021
1 parent 9170b45 commit c223329
Showing 13 changed files with 61 additions and 7 deletions.
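
To make the rationale concrete: once arrayDims() is part of the interface, a mapping may compute its extents on demand instead of holding an ArrayDims member. The following is a minimal, hypothetical sketch (StaticSizeMapping is not part of this commit or of LLAMA; it assumes only the concept members shown in Concepts.hpp below):

    #include <llama/llama.hpp> // assumed: LLAMA umbrella header, provides llama::sizeOf

    // Hypothetical mapping whose extents are compile-time constants.
    // It can satisfy the Mapping concept without storing its size at runtime.
    template<typename TArrayDims, typename TRecordDim, std::size_t... Extents>
    struct StaticSizeMapping
    {
        static_assert(sizeof...(Extents) == TArrayDims::rank);

        using ArrayDims = TArrayDims;
        using RecordDim = TRecordDim;
        static constexpr std::size_t blobCount = 1;

        constexpr auto arrayDims() const -> ArrayDims
        {
            ArrayDims ad{};
            std::size_t i = 0;
            for (std::size_t extent : {Extents...}) // recomputed, no member load
                ad[i++] = extent;
            return ad;
        }

        constexpr auto blobSize(std::size_t) const -> std::size_t
        {
            return (std::size_t{1} * ... * Extents) * llama::sizeOf<RecordDim>;
        }

        // blobNrAndOffset(...) would perform the usual AoS-style offset math
    };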
include/llama/Concepts.hpp (1 addition, 0 deletions)
@@ -15,6 +15,7 @@ namespace llama
     concept Mapping = requires(M m) {
         typename M::ArrayDims;
         typename M::RecordDim;
+        { m.arrayDims() } -> std::same_as<typename M::ArrayDims>;
         { M::blobCount } -> std::convertible_to<std::size_t>;
         Array<int, M::blobCount>{}; // validates constexpr-ness
         { m.blobSize(std::size_t{}) } -> std::same_as<std::size_t>;
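
As a caller-side illustration of the new requirement, any mapping modeling the concept can now be asked for its extents generically. A small hypothetical helper (not from the repository):

    // Hypothetical: counts the elements spanned by any conforming mapping,
    // relying only on the arrayDims() requirement added above.
    template<llama::Mapping M>
    constexpr auto totalExtent(const M& m) -> std::size_t
    {
        const auto ad = m.arrayDims(); // guaranteed by the concept
        std::size_t n = 1;
        for (std::size_t i = 0; i < M::ArrayDims::rank; i++)
            n *= ad[i];
        return n;
    }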
include/llama/DumpMapping.hpp (1 addition, 1 deletion)
@@ -120,7 +120,7 @@ namespace llama

     std::vector<FieldBox<Mapping::ArrayDims::rank>> infos;

-    for (auto adCoord : ArrayDimsIndexRange{mapping.arrayDimsSize})
+    for (auto adCoord : ArrayDimsIndexRange{mapping.arrayDims()})
     {
         forEachLeaf<RecordDim>(
             [&](auto coord)
include/llama/Proofs.hpp (1 addition, 1 deletion)
@@ -74,7 +74,7 @@ namespace llama
     {
         if (collision)
             return;
-        for (auto ad : llama::ArrayDimsIndexRange{m.arrayDimsSize})
+        for (auto ad : llama::ArrayDimsIndexRange{m.arrayDims()})
         {
             using Type
                 = llama::GetType<typename Mapping::RecordDim, decltype(coord)>;
include/llama/View.hpp (2 additions, 2 deletions)
@@ -938,12 +938,12 @@ namespace llama

     auto begin() -> iterator
     {
-        return {ArrayDimsIndexRange<ArrayDims::rank>{mapping.arrayDimsSize}.begin(), this};
+        return {ArrayDimsIndexRange<ArrayDims::rank>{mapping.arrayDims()}.begin(), this};
     }

     auto end() -> iterator
     {
-        return {ArrayDimsIndexRange<ArrayDims::rank>{mapping.arrayDimsSize}.end(), this};
+        return {ArrayDimsIndexRange<ArrayDims::rank>{mapping.arrayDims()}.end(), this};
     }

     Mapping mapping;
include/llama/mapping/AoS.hpp (6 additions, 0 deletions)
@@ -30,6 +30,11 @@ namespace llama::mapping
     {
     }

+    LLAMA_FN_HOST_ACC_INLINE constexpr auto arrayDims() const -> ArrayDims
+    {
+        return arrayDimsSize;
+    }
+
     LLAMA_FN_HOST_ACC_INLINE constexpr auto blobSize(std::size_t) const -> std::size_t
     {
         return LinearizeArrayDimsFunctor{}.size(arrayDimsSize) * sizeOf<RecordDim, AlignAndPad>;
@@ -43,6 +48,7 @@
         return {0, offset};
     }

+private:
     ArrayDims arrayDimsSize;
 };

include/llama/mapping/AoSoA.hpp (6 additions, 0 deletions)
@@ -47,6 +47,11 @@ namespace llama::mapping
     {
     }

+    LLAMA_FN_HOST_ACC_INLINE constexpr auto arrayDims() const -> ArrayDims
+    {
+        return arrayDimsSize;
+    }
+
     LLAMA_FN_HOST_ACC_INLINE constexpr auto blobSize(std::size_t) const -> std::size_t
     {
         return LinearizeArrayDimsFunctor{}.size(arrayDimsSize) * sizeOf<RecordDim>;
@@ -64,6 +69,7 @@
         return {0, offset};
     }

+private:
     ArrayDims arrayDimsSize;
 };

include/llama/mapping/Heatmap.hpp (5 additions, 0 deletions)
@@ -33,6 +33,11 @@ namespace llama::mapping
     Heatmap(Heatmap&&) noexcept = default;
     auto operator=(Heatmap&&) noexcept -> Heatmap& = default;

+    LLAMA_FN_HOST_ACC_INLINE constexpr auto arrayDims() const -> ArrayDims
+    {
+        return mapping.arrayDims();
+    }
+
     LLAMA_FN_HOST_ACC_INLINE constexpr auto blobSize(std::size_t i) const -> std::size_t
     {
         LLAMA_FORCE_INLINE_RECURSIVE
include/llama/mapping/One.hpp (9 additions, 0 deletions)
@@ -25,6 +25,15 @@ namespace llama::mapping
     {
     }

+    LLAMA_FN_HOST_ACC_INLINE constexpr auto arrayDims() const -> ArrayDims
+    {
+        // TODO: not sure if this is the right approach, since we take any ArrayDims in the ctor
+        ArrayDims ad;
+        for (auto i = 0; i < ArrayDims::rank; i++)
+            ad[i] = 1;
+        return ad;
+    }
+
     LLAMA_FN_HOST_ACC_INLINE constexpr auto blobSize(std::size_t) const -> std::size_t
     {
         return sizeOf<RecordDim>;
include/llama/mapping/SoA.hpp (6 additions, 0 deletions)
@@ -32,6 +32,11 @@ namespace llama::mapping
     {
     }

+    LLAMA_FN_HOST_ACC_INLINE constexpr auto arrayDims() const -> ArrayDims
+    {
+        return arrayDimsSize;
+    }
+
     LLAMA_FN_HOST_ACC_INLINE
     constexpr auto blobSize(std::size_t blobIndex) const -> std::size_t
     {
@@ -95,6 +100,7 @@
         }
     }

+private:
     ArrayDims arrayDimsSize;
 };

include/llama/mapping/Split.hpp (6 additions, 2 deletions)
@@ -80,10 +80,15 @@ namespace llama::mapping
     constexpr Split() = default;

     LLAMA_FN_HOST_ACC_INLINE
-    constexpr Split(ArrayDims size) : arrayDimsSize(size), mapping1(size), mapping2(size)
+    constexpr Split(ArrayDims size) : mapping1(size), mapping2(size)
     {
     }

+    LLAMA_FN_HOST_ACC_INLINE constexpr auto arrayDims() const -> ArrayDims
+    {
+        return mapping1.arrayDims();
+    }
+
     LLAMA_FN_HOST_ACC_INLINE constexpr auto blobSize(std::size_t i) const -> std::size_t
     {
         if constexpr (SeparateBlobs)
@@ -135,7 +140,6 @@
     }

 public:
-    ArrayDims arrayDimsSize = {};
     Mapping1 mapping1;
     Mapping2 mapping2;
 };
include/llama/mapping/Trace.hpp (5 additions, 0 deletions)
@@ -64,6 +64,11 @@ namespace llama::mapping
         }
     }

+    LLAMA_FN_HOST_ACC_INLINE constexpr auto arrayDims() const -> ArrayDims
+    {
+        return mapping.arrayDims();
+    }
+
     LLAMA_FN_HOST_ACC_INLINE constexpr auto blobSize(std::size_t i) const -> std::size_t
     {
         LLAMA_FORCE_INLINE_RECURSIVE
tests/iterator.cpp (1 addition, 1 deletion)
@@ -89,7 +89,7 @@ TEST_CASE("iterator.transform_reduce")
         vd(tag::Y{}) = ++i;
         vd(tag::Z{}) = ++i;
     }
-    // returned type is a llama::One<Particle>
+    // returned type is a llama::One<Position>
     auto [sumX, sumY, sumZ]
         = std::transform_reduce(begin(aosView), end(aosView), begin(soaView), llama::One<Position>{});

tests/proofs.cpp (12 additions, 0 deletions)
@@ -57,6 +57,11 @@ namespace
     {
     }

+    LLAMA_FN_HOST_ACC_INLINE constexpr auto arrayDims() const -> ArrayDims
+    {
+        return arrayDimsSize;
+    }
+
     constexpr auto blobSize(std::size_t) const -> std::size_t
     {
         return std::reduce(std::begin(arrayDimsSize), std::end(arrayDimsSize), std::size_t{1}, std::multiplies{})
@@ -69,6 +74,7 @@
         return {0, 0};
     }

+private:
     ArrayDims arrayDimsSize;
 };
 } // namespace
@@ -100,6 +106,11 @@ namespace
     {
     }

+    LLAMA_FN_HOST_ACC_INLINE constexpr auto arrayDims() const -> ArrayDims
+    {
+        return arrayDimsSize;
+    }
+
     constexpr auto blobSize(std::size_t) const -> std::size_t
     {
         return Modulus * llama::sizeOf<RecordDim>;
@@ -114,6 +125,7 @@
         return {blob, offset};
     }

+private:
     ArrayDims arrayDimsSize;
 };
 } // namespace
