|
template<typename T_SEND , typename T_RECV > |
int | allgather (T_SEND const *const sendbuf, int sendcount, T_RECV *const recvbuf, int recvcount, MPI_Comm MPI_PARAM(comm)) |
|
template<typename T_SEND , typename T_RECV > |
int | allgatherv (T_SEND const *const sendbuf, int sendcount, T_RECV *const recvbuf, int *recvcounts, int *displacements, MPI_Comm MPI_PARAM(comm)) |
|
template<typename T > |
void | allGather (T const myValue, array1d< T > &allValues, MPI_Comm MPI_PARAM(comm)) |
|
template<typename T > |
int | allGather (arrayView1d< T const > const &sendValues, array1d< T > &allValues, MPI_Comm MPI_PARAM(comm)) |
|
template<typename T > |
int | allReduce (T const *const sendbuf, T *const recvbuf, int const count, MPI_Op const MPI_PARAM(op), MPI_Comm const MPI_PARAM(comm)) |
|
template<typename T > |
int | reduce (T const *const sendbuf, T *const recvbuf, int const count, MPI_Op const MPI_PARAM(op), int root, MPI_Comm const MPI_PARAM(comm)) |
|
template<typename T > |
int | scan (T const *const sendbuf, T *const recvbuf, int count, MPI_Op MPI_PARAM(op), MPI_Comm MPI_PARAM(comm)) |
|
template<typename T > |
int | exscan (T const *const MPI_PARAM(sendbuf), T *const recvbuf, int count, MPI_Op MPI_PARAM(op), MPI_Comm MPI_PARAM(comm)) |
|
template<typename T > |
int | bcast (T *const MPI_PARAM(buffer), int MPI_PARAM(count), int MPI_PARAM(root), MPI_Comm MPI_PARAM(comm)) |
|
template<typename T > |
void | broadcast (T &MPI_PARAM(value), int MPI_PARAM(srcRank), MPI_Comm MPI_PARAM(comm)) |
|
template<typename TS , typename TR > |
int | gather (TS const *const sendbuf, int sendcount, TR *const recvbuf, int recvcount, int MPI_PARAM(root), MPI_Comm MPI_PARAM(comm)) |
|
template<typename TS , typename TR > |
int | gatherv (TS const *const sendbuf, int sendcount, TR *const recvbuf, const int *recvcounts, const int *MPI_PARAM(displs), int MPI_PARAM(root), MPI_Comm MPI_PARAM(comm)) |
|
template<typename T > |
int | iRecv (T *const buf, int count, int MPI_PARAM(source), int tag, MPI_Comm MPI_PARAM(comm), MPI_Request *MPI_PARAM(request)) |
|
template<typename T > |
int | iSend (T const *const buf, int count, int MPI_PARAM(dest), int tag, MPI_Comm MPI_PARAM(comm), MPI_Request *MPI_PARAM(request)) |
|
|
static std::map< int, std::pair< int, void * > > & | getTagToPointersMap () |
|
static int | nodeCommSize () |
| Compute the number of ranks allocated on the same node. More...
|
|
template<typename T_SEND , typename T_RECV > |
static int | allgather (T_SEND const *sendbuf, int sendcount, T_RECV *recvbuf, int recvcount, MPI_Comm comm) |
| Strongly typed wrapper around MPI_Allgather. More...
|
|
template<typename T_SEND , typename T_RECV > |
static int | allgatherv (T_SEND const *sendbuf, int sendcount, T_RECV *recvbuf, int *recvcounts, int *displacements, MPI_Comm comm) |
| Strongly typed wrapper around MPI_Allgatherv. More...
|
|
template<typename T > |
static void | allGather (T const myValue, array1d< T > &allValues, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience function for MPI_Allgather. More...
|
|
template<typename T > |
static int | allGather (arrayView1d< T const > const &sendbuf, array1d< T > &recvbuf, MPI_Comm comm=MPI_COMM_GEOS) |
|
template<typename T > |
static int | allReduce (T const *sendbuf, T *recvbuf, int count, MPI_Op op, MPI_Comm comm=MPI_COMM_GEOS) |
| Strongly typed wrapper around MPI_Allreduce. More...
|
|
template<typename T > |
static T | allReduce (T const &value, Reduction const op, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience wrapper for the MPI_Allreduce function. More...
|
|
template<typename T > |
static void | allReduce (Span< T const > src, Span< T > dst, Reduction const op, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience wrapper for the MPI_Allreduce function. Version for sequences. More...
|
|
template<typename T > |
static int | reduce (T const *sendbuf, T *recvbuf, int count, MPI_Op op, int root, MPI_Comm comm=MPI_COMM_GEOS) |
| Strongly typed wrapper around MPI_Reduce. More...
|
|
template<typename T > |
static T | reduce (T const &value, Reduction const op, int root, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience wrapper for the MPI_Reduce function. More...
|
|
template<typename T > |
static void | reduce (Span< T const > src, Span< T > dst, Reduction const op, int root, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience wrapper for the MPI_Reduce function. Version for sequences. More...
|
|
template<typename T > |
static int | scan (T const *sendbuf, T *recvbuf, int count, MPI_Op op, MPI_Comm comm) |
|
template<typename T > |
static int | exscan (T const *sendbuf, T *recvbuf, int count, MPI_Op op, MPI_Comm comm) |
|
template<typename T > |
static int | bcast (T *buffer, int count, int root, MPI_Comm comm) |
| Strongly typed wrapper around MPI_Bcast. More...
|
|
template<typename T > |
static void | broadcast (T &value, int srcRank=0, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience function for MPI_Broadcast. More...
|
|
template<typename TS , typename TR > |
static int | gather (TS const *const sendbuf, int sendcount, TR *const recvbuf, int recvcount, int root, MPI_Comm comm) |
| Strongly typed wrapper around MPI_Gather(). More...
|
|
template<typename TS , typename TR > |
static int | gatherv (TS const *const sendbuf, int sendcount, TR *const recvbuf, const int *recvcounts, const int *displs, int root, MPI_Comm comm) |
| Strongly typed wrapper around MPI_Gatherv. More...
|
|
static MPI_Op | getMpiOp (Reduction const op) |
| Returns an MPI_Op associated with our strongly typed Reduction enum. More...
|
|
template<typename T > |
static int | recv (array1d< T > &buf, int MPI_PARAM(source), int tag, MPI_Comm MPI_PARAM(comm), MPI_Status *MPI_PARAM(request)) |
|
template<typename T > |
static int | iSend (arrayView1d< T > const &buf, int MPI_PARAM(dest), int tag, MPI_Comm MPI_PARAM(comm), MPI_Request *MPI_PARAM(request)) |
|
template<typename T > |
static int | iRecv (T *const buf, int count, int source, int tag, MPI_Comm comm, MPI_Request *request) |
| Strongly typed wrapper around MPI_Irecv() More...
|
|
template<typename T > |
static int | iSend (T const *const buf, int count, int dest, int tag, MPI_Comm comm, MPI_Request *request) |
| Strongly typed wrapper around MPI_Isend() More...
|
|
template<typename U , typename T > |
static U | prefixSum (T const value, MPI_Comm comm=MPI_COMM_GEOS) |
| Compute exclusive prefix sum and full sum. More...
|
|
template<typename T > |
static T | sum (T const &value, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience function for a MPI_Allreduce using a MPI_SUM operation. More...
|
|
template<typename T > |
static void | sum (Span< T const > src, Span< T > dst, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience function for a MPI_Allreduce using a MPI_SUM operation. More...
|
|
template<typename T > |
static T | min (T const &value, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience function for a MPI_Allreduce using a MPI_MIN operation. More...
|
|
template<typename T > |
static void | min (Span< T const > src, Span< T > dst, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience function for a MPI_Allreduce using a MPI_MIN operation. More...
|
|
template<typename T > |
static T | max (T const &value, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience function for a MPI_Allreduce using a MPI_MAX operation. More...
|
|
template<typename T > |
static void | max (Span< T const > src, Span< T > dst, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience function for a MPI_Allreduce using a MPI_MAX operation. More...
|
|
template<typename T > |
static T | maxValLoc (T localValueLocation, MPI_Comm comm=MPI_COMM_GEOS) |
| Convenience function for MPI_Gather using a MPI_MAX operation on struct of value and location. More...
|
|
template<> |
void | broadcast (string &MPI_PARAM(value), int MPI_PARAM(srcRank), MPI_Comm MPI_PARAM(comm)) |
|
|
- Parameters
-
[in] | sendbuf | Pointer to the memory to read the sent data from. |
[out] | recvbuf | Pointer to the memory to write the received data in. |
[in] | count | The number of data entries that are being communicated. |
[in] | datatype | The MPI_Datatype that is being communicated. |
[in] | op | The collective MPI_Op to apply for the function. |
[in] | comm | The MPI_Comm communicator that the function will act on. |
Please see the standard MPI documentation for a detailed description of the parameters for each function that is being wrapped.
|
static void | barrier (MPI_Comm const &MPI_PARAM(comm)=MPI_COMM_GEOS) |
|
static int | cartCoords (MPI_Comm comm, int rank, int maxdims, int coords[]) |
|
static int | cartCreate (MPI_Comm comm_old, int ndims, const int dims[], const int periods[], int reorder, MPI_Comm *comm_cart) |
|
static int | cartRank (MPI_Comm comm, const int coords[]) |
|
static void | commFree (MPI_Comm &comm) |
|
static int | commRank (MPI_Comm const &MPI_PARAM(comm)=MPI_COMM_GEOS) |
|
static int | commSize (MPI_Comm const &MPI_PARAM(comm)=MPI_COMM_GEOS) |
|
static bool | commCompare (MPI_Comm const &comm1, MPI_Comm const &comm2) |
|
static bool | initialized () |
|
static int | init (int *argc, char ***argv) |
|
static void | finalize () |
|
static MPI_Comm | commDup (MPI_Comm const comm) |
|
static MPI_Comm | commSplit (MPI_Comm const comm, int color, int key) |
|
static int | test (MPI_Request *request, int *flag, MPI_Status *status) |
|
static int | testAny (int count, MPI_Request array_of_requests[], int *idx, int *flags, MPI_Status array_of_statuses[]) |
|
static int | testSome (int count, MPI_Request array_of_requests[], int *outcount, int array_of_indices[], MPI_Status array_of_statuses[]) |
|
static int | testAll (int count, MPI_Request array_of_requests[], int *flags, MPI_Status array_of_statuses[]) |
|
static int | check (MPI_Request *request, int *flag, MPI_Status *status) |
|
static int | checkAny (int count, MPI_Request array_of_requests[], int *idx, int *flag, MPI_Status array_of_statuses[]) |
|
static int | checkAll (int count, MPI_Request array_of_requests[], int *flag, MPI_Status array_of_statuses[]) |
|
static int | wait (MPI_Request *request, MPI_Status *status) |
|
static int | waitAny (int count, MPI_Request array_of_requests[], int *indx, MPI_Status array_of_statuses[]) |
|
static int | waitSome (int count, MPI_Request array_of_requests[], int *outcount, int array_of_indices[], MPI_Status array_of_statuses[]) |
|
static int | waitAll (int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[]) |
|
static double | wtime (void) |
|
static int | activeWaitAny (const int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[], std::function< MPI_Request(int) > func) |
|
static int | activeWaitSome (const int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[], std::function< MPI_Request(int) > func) |
|
static int | activeWaitSomeCompletePhase (const int participants, std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request(int) > > > const &phases) |
|
static int | activeWaitOrderedCompletePhase (const int participants, std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request(int) > > > const &phases) |
|
This struct is a wrapper for all mpi.h functions that are used in GEOSX, and provides a collection of convenience functions to make using the raw mpi functions simpler.
The static wrapper functions around the mpi.h functions are named by removing the "MPI_" prefix from the native MPI function name and converting the result to lowerCamelCase. For instance, the "commRank()" function calls "MPI_Comm_rank()". Since all wrapper functions are static, they should be referred to by their scoped name, for example "MpiWrapper::commRank()".
Definition at line 117 of file MpiWrapper.hpp.