Tpetra_Details_iallreduce.cpp
// @HEADER
// *****************************************************************************
// Tpetra: Templated Linear Algebra Services Package
//
// Copyright 2008 NTESS and the Tpetra contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#include "Tpetra_Details_iallreduce.hpp"

#ifdef HAVE_TPETRACORE_MPI
#include "Teuchos_DefaultMpiComm.hpp" // only needs to be in .cpp file
#endif // HAVE_TPETRACORE_MPI
#include "Teuchos_DefaultSerialComm.hpp" // only needs to be in .cpp file

namespace Tpetra {
namespace Details {

#ifdef HAVE_TPETRACORE_MPI
std::string getMpiErrorString(const int errCode) {
  // Space for storing the error string returned by MPI.
  // Leave room for null termination, since I don't know if MPI does this.
  char errString[MPI_MAX_ERROR_STRING + 1];
  int errStringLen = MPI_MAX_ERROR_STRING; // output argument
  (void)MPI_Error_string(errCode, errString, &errStringLen);
  // errStringLen on output is the number of characters written.
  // I'm not sure (the MPI 3.0 Standard doesn't say) if this
  // includes the '\0', so I'll make sure.  We reserved space for
  // the extra '\0' if needed.
  if (errString[errStringLen - 1] != '\0') {
    errString[errStringLen] = '\0';
  }
  return std::string(errString); // This copies the original string.
}
#endif // HAVE_TPETRACORE_MPI

namespace Impl {

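// Return a default-constructed CommRequest, i.e., a request with nothing
// outstanding to wait on (e.g., for the serial or zero-count cases).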
std::shared_ptr<CommRequest>
emptyCommRequest() {
  return std::shared_ptr<CommRequest>(new CommRequest());
}

#ifdef HAVE_TPETRACORE_MPI

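// Nonblocking collectives such as MPI_Iallreduce first appeared in MPI 3.0,
// hence the version guard around this wrapper.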
#if MPI_VERSION >= 3
MPI_Request
iallreduceRaw(const void* sendbuf,
              void* recvbuf,
              const int count,
              MPI_Datatype mpiDatatype,
              const Teuchos::EReductionType op,
              MPI_Comm comm) {
  MPI_Op rawOp = ::Teuchos::Details::getMpiOpForEReductionType(op);
  MPI_Request req = MPI_REQUEST_NULL;
  int err = MPI_SUCCESS;
  if (sendbuf == recvbuf) {
    // Fix for #850.  This only works if comm is an
    // intracommunicator.  Intercommunicators don't have an in-place
    // option for collectives.
    err = MPI_Iallreduce(MPI_IN_PLACE, recvbuf, count, mpiDatatype,
                         rawOp, comm, &req);
  } else {
    err = MPI_Iallreduce(sendbuf, recvbuf, count, mpiDatatype,
                         rawOp, comm, &req);
  }
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
                             "MPI_Iallreduce failed with the following error: "
                                 << getMpiErrorString(err));
  return req;
}
#endif // MPI >= 3

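// Blocking wrapper around MPI_Allreduce.  Unlike iallreduceRaw above, this
// does not require MPI 3.0, so it is available for any MPI version.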
void allreduceRaw(const void* sendbuf,
                  void* recvbuf,
                  const int count,
                  MPI_Datatype mpiDatatype,
                  const Teuchos::EReductionType op,
                  MPI_Comm comm) {
  MPI_Op rawOp = ::Teuchos::Details::getMpiOpForEReductionType(op);
  int err = MPI_SUCCESS;
  if (sendbuf == recvbuf) {
    err = MPI_Allreduce(MPI_IN_PLACE, recvbuf,
                        count, mpiDatatype, rawOp, comm);
  } else {
    // OpenMPI 1.6.5 insists on void*, not const void*, for sendbuf.
    err = MPI_Allreduce(const_cast<void*>(sendbuf), recvbuf,
                        count, mpiDatatype, rawOp, comm);
  }
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
                             "MPI_Allreduce failed with the following error: "
                                 << getMpiErrorString(err));
}

#endif // HAVE_TPETRACORE_MPI

} // namespace Impl

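// Convenience overload for a single int: wrap the scalar in rank-1 host
// Views and forward to the generic View-based iallreduce overload declared
// in Tpetra_Details_iallreduce.hpp.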
std::shared_ptr<CommRequest>
iallreduce(const int localValue,
           int& globalValue,
           const ::Teuchos::EReductionType op,
           const ::Teuchos::Comm<int>& comm) {
  // Input: needs to be an owning view containing localValue
  Kokkos::View<int*, Kokkos::HostSpace> localView(
      Kokkos::ViewAllocateWithoutInitializing("localValue"), 1);
  localView(0) = localValue;
  Kokkos::View<int*, Kokkos::HostSpace, Kokkos::MemoryTraits<Kokkos::Unmanaged>>
      globalView(&globalValue, 1);
  return ::Tpetra::Details::iallreduce<decltype(localView), decltype(globalView)>(
      localView, globalView, op, comm);
}

} // namespace Details
} // namespace Tpetra
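For reference, a minimal usage sketch of the single-int overload defined above. It is not part of this translation unit; the function name exampleGlobalMax is made up for illustration, and it assumes the CommRequest returned by iallreduce exposes a wait() member, as declared in Tpetra_Details_iallreduce.hpp.

#include "Tpetra_Details_iallreduce.hpp"
#include "Teuchos_Comm.hpp"

void exampleGlobalMax(const Teuchos::Comm<int>& comm) {
  const int localValue = comm.getRank() + 1; // any process-local value
  int globalValue = 0;                       // filled in when the request completes
  auto request = Tpetra::Details::iallreduce(localValue, globalValue,
                                             Teuchos::REDUCE_MAX, comm);
  // ... communication-independent local work can overlap here ...
  request->wait(); // globalValue now holds the maximum over all processes
}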