#ifndef DUNE_COMMON_PARALLEL_MPICOMMUNICATION_HH
#define DUNE_COMMON_PARALLEL_MPICOMMUNICATION_HH

// Uses the singleton pattern and template specialization to generate MPI operations.
template<typename Type, typename BinaryFunction, typename Enable = void>
class Generic_MPI_Op
{
public:
  static MPI_Op get()
  {
    if (!op) {
      op = std::make_unique<MPI_Op>();
      MPI_Op_create((void (*)(void*, void*, int*, MPI_Datatype*))&operation, true, op.get());
    }
    return *op;
  }
private:
  // Applies BinaryFunction element-wise; registered as the MPI user function.
  static void operation(Type* in, Type* inout, int* len, MPI_Datatype*)
  {
    BinaryFunction func;
    for (int i = 0; i < *len; ++i, ++in, ++inout)
      *inout = func(*in, *inout);
  }
  Generic_MPI_Op () {}
  Generic_MPI_Op (const Generic_MPI_Op& ) {}
  static std::unique_ptr<MPI_Op> op;
};

template<typename Type, typename BinaryFunction, typename Enable>
std::unique_ptr<MPI_Op> Generic_MPI_Op<Type,BinaryFunction, Enable>::op;

// For intrinsic types, standard functors map directly to built-in MPI operations.
#define ComposeMPIOp(func,op) \
  template<class T, class S> \
  class Generic_MPI_Op<T, func<S>, std::enable_if_t<MPITraits<S>::is_intrinsic> >{ \
  public: \
    static MPI_Op get(){ \
      return op; \
    } \
  private: \
    Generic_MPI_Op () {} \
    Generic_MPI_Op (const Generic_MPI_Op & ) {} \
  }
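How the two branches resolve in practice: for an intrinsic type combined with one of the composed functors (std::plus, std::multiplies, Min, Max), get() returns the built-in MPI operation; any other combination lazily creates and caches a custom MPI_Op. A minimal sketch, not part of the header, assuming Generic_MPI_Op is reachable as Dune::Generic_MPI_Op (as in this file) and that MPI is initialized:

#include <functional>
#include <mpi.h>
#include <dune/common/parallel/mpicommunication.hh>

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);
  {
    // Intrinsic type + std::plus: the ComposeMPIOp specialization applies,
    // so get() is simply MPI_SUM and no MPI_Op_create call happens.
    MPI_Op op = Dune::Generic_MPI_Op<double, std::plus<double>>::get();
    bool isSum = (op == MPI_SUM);   // expected: true
    (void)isSum;
  }
  MPI_Finalize();
  return 0;
}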
// Thrown in the Communication constructor if MPI has not been initialized yet:
DUNE_THROW(ParallelError,
           "You must call MPIHelper::instance(argc,argv) in your main() function before using the MPI Communication!");
// In rrecv(), receiving into statically sized data is rejected at compile
// time, since rrecv first probes for the dynamic message size:
static_assert(!mpi_data.static_size,
              "rrecv works only for non-static-sized types.");
// Gather/scatter family; the nonblocking variants return an MPIFuture
// that owns the buffers and the MPI_Request.

template<class TIN, class TOUT = std::vector<TIN>>
MPIFuture<TOUT, TIN> igather(TIN&& data_in, TOUT&& data_out, int root) const;

template<class TIN, class TOUT = TIN>
MPIFuture<TOUT, TIN> iscatter(TIN&& data_in, TOUT&& data_out, int root) const;

template<typename T, typename T1>
int allgather(const T* sbuf, int count, T1* rbuf) const;

template<class TIN, class TOUT = TIN>
MPIFuture<TOUT, TIN> iallgather(TIN&& data_in, TOUT&& data_out) const
{
  MPIFuture<TOUT, TIN> future(std::forward<TOUT>(data_out),
                              std::forward<TIN>(data_in));
  auto mpidata_in = future.get_send_mpidata();
  auto mpidata_out = future.get_mpidata();
  int outlen = mpidata_out.size() / size();   // receive count per rank
  MPI_Iallgather(mpidata_in.ptr(), mpidata_in.size(), mpidata_in.type(),
                 mpidata_out.ptr(), outlen, mpidata_out.type(),
                 communicator, &future.req_);
  return future;
}
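A blocking allgather usage sketch, assuming comm is a Dune::Communication<MPI_Comm>: every rank contributes one value, and afterwards every rank holds the values of all ranks:

#include <vector>

int myValue = comm.rank();
std::vector<int> all(comm.size());
comm.allgather(&myValue, 1, all.data());
// all[i] == i on every rank afterwards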
// Reduction family: BinaryFunction selects the MPI_Op via Generic_MPI_Op.
template<typename BinaryFunction, typename Type>
int allreduce(Type* inout, int len) const
{
  Type* out = new Type[len];
  int ret = allreduce<BinaryFunction>(inout, out, len);
  std::copy(out, out + len, inout);
  delete[] out;
  return ret;
}

template<typename BinaryFunction, typename Type>
Type allreduce(Type&& in) const
{
  Type lvalue_data = std::forward<Type>(in);
  auto data = getMPIData(lvalue_data);
  MPI_Allreduce(MPI_IN_PLACE, data.ptr(), data.size(), data.type(),
                Generic_MPI_Op<Type, BinaryFunction>::get(), communicator);
  return lvalue_data;
}

// Nonblocking variant with separate in/out buffers (body analogous to
// iallgather above, ending in MPI_Iallreduce(..., communicator, &future.req_)):
template<class BinaryFunction, class TIN, class TOUT = TIN>
MPIFuture<TOUT, TIN> iallreduce(TIN&& data_in, TOUT&& data_out) const;

// Nonblocking in-place variant:
template<class BinaryFunction, class T>
MPIFuture<T> iallreduce(T&& data) const
{
  MPIFuture<T> future(std::forward<T>(data));
  auto mpidata = future.get_mpidata();
  MPI_Iallreduce(MPI_IN_PLACE, mpidata.ptr(), mpidata.size(), mpidata.type(),
                 Generic_MPI_Op<T, BinaryFunction>::get(),
                 communicator, &future.req_);
  return future;
}

template<typename BinaryFunction, typename Type>
int allreduce(const Type* in, Type* out, int len) const
{
  return MPI_Allreduce(const_cast<Type*>(in), out, len,
                       MPITraits<Type>::getType(),
                       Generic_MPI_Op<Type, BinaryFunction>::get(), communicator);
}
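A usage sketch of the reduction family, assuming comm is a Dune::Communication<MPI_Comm> and <functional> is included; get() on the returned MPIFuture waits for completion and yields the data, as with std::future:

// Blocking in-place reduction: every rank receives the global sum.
double local = 0.5 * comm.rank();
double globalSum = comm.allreduce<std::plus<double>>(std::move(local));

// Nonblocking reduction: returns an MPIFuture immediately.
auto future = comm.iallreduce<std::plus<double>>(1.0);
// ... overlap communication with other work here ...
double nProcs = future.get();   // equals comm.size()
(void)globalSum; (void)nProcs;

The cross-reference index below lists the documented members and referenced entities together with their definition locations.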
A few common exception classes.
Implements a utility class that provides collective communication methods for sequential programs.
Interface class to translate objects to an MPI_Datatype, void* and size used for MPI calls.
#define ComposeMPIOp(func, op)
Definition mpicommunication.hh:77
Traits classes for mapping types onto MPI_Datatype.
Helper classes to provide unique types for standard functions.
size_type size() const
Get the number of elements in the list.
Definition arraylist.hh:472
#define DUNE_THROW(E, m)
Definition exceptions.hh:218
Dune namespace.
Definition alignedallocator.hh:13
auto getMPIData(T &t)
Definition mpidata.hh:43
A dynamically growing random access list.
Definition arraylist.hh:62
A traits class describing the mapping of types onto MPI_Datatypes.
Definition mpitraits.hh:41
The Min binary function object.
Definition binaryfunctions.hh:18
The Max binary function object.
Definition binaryfunctions.hh:34
Default exception if an error occurred in the parallel communication of the program.
Definition exceptions.hh:287
Collective communication interface and sequential default implementation.
Definition communication.hh:100
int size() const
Number of processes in the set; always greater than 0.
Definition communication.hh:126
The Generic_MPI_Op class.
Definition mpicommunication.hh:41
static MPI_Op get()
Definition mpicommunication.hh:44
int max(T *inout, int len) const
Compute the maximum of the argument over all processes and return the result in every process....
Definition mpicommunication.hh:259
int allgatherv(const T *in, int sendDataLen, T *out, int *recvDataLen, int *displ) const
Gathers data of variable length from all tasks and distributes it to all.
Definition mpicommunication.hh:398
T max(const T &in) const
Compute the maximum of the argument over all processes and return the result in every process....
Definition mpicommunication.hh:250
MPIFuture< T > ibroadcast(T &&data, int root) const
Distribute an array from the process with rank root to all other processes, nonblocking.
Definition mpicommunication.hh:288
MPIFuture< void > ibarrier() const
Nonblocking barrier.
Definition mpicommunication.hh:271
MPIFuture< const T > isend(const T &&data, int dest_rank, int tag) const
Sends the data to dest_rank, nonblocking.
Definition mpicommunication.hh:150
T recv(T &&data, int source_rank, int tag, MPI_Status *status=MPI_STATUS_IGNORE) const
Receives the data from source_rank.
Definition mpicommunication.hh:161
int barrier() const
Wait until all processes have arrived at this point in the program.
Definition mpicommunication.hh:265
int rank() const
Return the rank; it lies between 0 and size()-1.
Definition mpicommunication.hh:128
int scatterv(const T *sendData, int *sendDataLen, int *displ, T *recvData, int recvDataLen, int root) const
Scatter arrays of variable length from a root to all other tasks.
Definition mpicommunication.hh:359
MPIFuture< TOUT, TIN > iallgather(TIN &&data_in, TOUT &&data_out) const
Gathers data from all tasks and distributes it to all, nonblocking.
Definition mpicommunication.hh:383
Type allreduce(Type &&in) const
Definition mpicommunication.hh:417
int sum(T *inout, int len) const
Compute the sum of the argument over all processes and return the result in every process....
Definition mpicommunication.hh:210
int broadcast(T *inout, int len, int root) const
Distribute an array from the process with rank root to all other processes.
Definition mpicommunication.hh:281
MPIFuture< T > iallreduce(T &&data) const
Compute something over all processes, nonblocking.
Definition mpicommunication.hh:443
T sum(const T &in) const
Compute the sum of the argument over all processes and return the result in every process....
Definition mpicommunication.hh:201
int allreduce(const Type *in, Type *out, int len) const
Definition mpicommunication.hh:455
MPIFuture< TOUT, TIN > iallreduce(TIN &&data_in, TOUT &&data_out) const
Compute something over all processes, nonblocking.
Definition mpicommunication.hh:428
int size() const
Number of processes in the set; always greater than 0.
Definition mpicommunication.hh:134
int gather(const T *in, T *out, int len, int root) const
Gather arrays on root task.
Definition mpicommunication.hh:303
int allreduce(Type *inout, int len) const
Compute something over all processes for each component of an array and return the result in every process.
Definition mpicommunication.hh:407
T rrecv(T &&data, int source_rank, int tag, MPI_Status *status=MPI_STATUS_IGNORE) const
Definition mpicommunication.hh:182
int scatter(const T *sendData, T *recvData, int len, int root) const
Scatter an array from a root to all other tasks.
Definition mpicommunication.hh:336
MPIFuture< T > irecv(T &&data, int source_rank, int tag) const
Receives the data from source_rank, nonblocking.
Definition mpicommunication.hh:172
int prod(T *inout, int len) const
Compute the product of the argument over all processes and return the result in every process....
Definition mpicommunication.hh:226
MPIFuture< TOUT, TIN > igather(TIN &&data_in, TOUT &&data_out, int root) const
Gather arrays on root task, nonblocking.
Definition mpicommunication.hh:312
T min(const T &in) const
Compute the minimum of the argument over all processes and return the result in every process....
Definition mpicommunication.hh:233
Communication(const MPI_Comm &c=MPI_COMM_WORLD)
Instantiation using an MPI communicator.
Definition mpicommunication.hh:111
MPIFuture< TOUT, TIN > iscatter(TIN &&data_in, TOUT &&data_out, int root) const
Scatter an array from a root to all other tasks, nonblocking.
Definition mpicommunication.hh:345
int gatherv(const T *in, int sendDataLen, T *out, int *recvDataLen, int *displ, int root) const
Gather arrays of variable size on root task.
Definition mpicommunication.hh:326
int min(T *inout, int len) const
Compute the minimum of the argument over all processes and return the result in every process....
Definition mpicommunication.hh:242
int allgather(const T *sbuf, int count, T1 *rbuf) const
Gathers data from all tasks and distributes it to all.
Definition mpicommunication.hh:374
int send(const T &data, int dest_rank, int tag) const
Sends the data to dest_rank.
Definition mpicommunication.hh:141
T prod(const T &in) const
Compute the product of the argument over all processes and return the result in every process....
Definition mpicommunication.hh:217