#ifndef GEOS_COMMON_MPIWRAPPER_HPP_
#define GEOS_COMMON_MPIWRAPPER_HPP_

#if defined(GEOS_USE_MPI)
  #include <mpi.h>
#define MPI_PARAM( x ) x
#else
#define MPI_PARAM( x )

typedef int MPI_Comm;

#define MPI_COMM_NULL  ((MPI_Comm)0x04000000)
#define MPI_COMM_WORLD ((MPI_Comm)0x44000000)
#define MPI_COMM_SELF  ((MPI_Comm)0x40000000)
typedef int MPI_Datatype;
#define MPI_CHAR               ((MPI_Datatype)0x4c000101)
#define MPI_SIGNED_CHAR        ((MPI_Datatype)0x4c000118)
#define MPI_UNSIGNED_CHAR      ((MPI_Datatype)0x4c000102)
#define MPI_BYTE               ((MPI_Datatype)0x4c00010d)
#define MPI_WCHAR              ((MPI_Datatype)0x4c00040e)
#define MPI_SHORT              ((MPI_Datatype)0x4c000203)
#define MPI_UNSIGNED_SHORT     ((MPI_Datatype)0x4c000204)
#define MPI_INT                ((MPI_Datatype)0x4c000405)
#define MPI_UNSIGNED           ((MPI_Datatype)0x4c000406)
#define MPI_LONG               ((MPI_Datatype)0x4c000807)
#define MPI_UNSIGNED_LONG      ((MPI_Datatype)0x4c000808)
#define MPI_FLOAT              ((MPI_Datatype)0x4c00040a)
#define MPI_DOUBLE             ((MPI_Datatype)0x4c00080b)
#define MPI_LONG_DOUBLE        ((MPI_Datatype)0x4c00100c)
#define MPI_LONG_LONG_INT      ((MPI_Datatype)0x4c000809)
#define MPI_UNSIGNED_LONG_LONG ((MPI_Datatype)0x4c000819)
#define MPI_LONG_LONG          MPI_LONG_LONG_INT
typedef int MPI_Op;

#define MPI_MAX     (MPI_Op)(0x58000001)
#define MPI_MIN     (MPI_Op)(0x58000002)
#define MPI_SUM     (MPI_Op)(0x58000003)
#define MPI_PROD    (MPI_Op)(0x58000004)
#define MPI_LAND    (MPI_Op)(0x58000005)
#define MPI_BAND    (MPI_Op)(0x58000006)
#define MPI_LOR     (MPI_Op)(0x58000007)
#define MPI_BOR     (MPI_Op)(0x58000008)
#define MPI_LXOR    (MPI_Op)(0x58000009)
#define MPI_BXOR    (MPI_Op)(0x5800000a)
#define MPI_MINLOC  (MPI_Op)(0x5800000b)
#define MPI_MAXLOC  (MPI_Op)(0x5800000c)
#define MPI_REPLACE (MPI_Op)(0x5800000d)
#define MPI_NO_OP   (MPI_Op)(0x5800000e)
#define MPI_SUCCESS 0
#define MPI_UNDEFINED (-32766)
#define MPI_STATUS_IGNORE (MPI_Status *)1
#define MPI_STATUSES_IGNORE (MPI_Status *)1
#define MPI_REQUEST_NULL ((MPI_Request)0x2c000000)
typedef int MPI_Request;

typedef int MPI_Info;
#define MPI_INFO_NULL (MPI_Info)(0x60000000)

struct MPI_Status
{};

#endif

#if defined(NDEBUG)
#define MPI_CHECK_ERROR( error ) ((void) error)
#else
#define MPI_CHECK_ERROR( error ) GEOS_ERROR_IF_NE( error, MPI_SUCCESS );
#endif
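
// Illustration (not part of the original interface): MPI_PARAM() drops a parameter
// name in serial builds so a single signature serves both cases without triggering
// unused-parameter warnings. The helper below is hypothetical.
//
//   static int rankOf( MPI_Comm const & MPI_PARAM( comm ) )
//   {
//   #if defined(GEOS_USE_MPI)
//     int rank;
//     MPI_Comm_rank( comm, &rank );
//     return rank;
//   #else
//     return 0;   // serial build: the comm parameter has no name and is not used
//   #endif
//   }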
  template< typename FIRST, typename SECOND >
  struct PairType
  {
    FIRST first;
    SECOND second;
  };
  static void barrier( MPI_Comm const & MPI_PARAM( comm ) = MPI_COMM_GEOS );

  static int cartCoords( MPI_Comm comm, int rank, int maxdims, int coords[] );

  static int cartCreate( MPI_Comm comm_old, int ndims, const int dims[], const int periods[],
                         int reorder, MPI_Comm * comm_cart );

  static int cartRank( MPI_Comm comm, const int coords[] );

  static void commFree( MPI_Comm & comm );

  static int commRank( MPI_Comm const & MPI_PARAM( comm ) = MPI_COMM_GEOS );

  static int commSize( MPI_Comm const & MPI_PARAM( comm ) = MPI_COMM_GEOS );

  static bool commCompare( MPI_Comm const & comm1, MPI_Comm const & comm2 );

  static bool initialized();

  static int init( int * argc, char * * * argv );

  static MPI_Comm commDup( MPI_Comm const comm );

  static MPI_Comm commSplit( MPI_Comm const comm, int color, int key );
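
  /// Typical start-up sequence (illustrative sketch only; error handling omitted,
  /// MPI_COMM_GEOS is assumed to be set up by the application, and finalize() is
  /// declared elsewhere in this class):
  /// @code
  /// MpiWrapper::init( &argc, &argv );
  /// int const rank = MpiWrapper::commRank();   // defaults to MPI_COMM_GEOS
  /// int const size = MpiWrapper::commSize();
  /// MpiWrapper::barrier();                     // synchronize all ranks
  /// if( rank == 0 )
  /// {
  ///   std::cout << "running on " << size << " ranks" << std::endl;
  /// }
  /// MpiWrapper::finalize();
  /// @endcode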
  static int test( MPI_Request * request, int * flag, MPI_Status * status );

  static int testAny( int count, MPI_Request array_of_requests[], int * idx, int * flags, MPI_Status array_of_statuses[] );

  static int testSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );

  static int testAll( int count, MPI_Request array_of_requests[], int * flags, MPI_Status array_of_statuses[] );

  static int checkAny( int count, MPI_Request array_of_requests[], int * idx, int * flag, MPI_Status array_of_statuses[] );

  static int checkAll( int count, MPI_Request array_of_requests[], int * flag, MPI_Status array_of_statuses[] );

  static int wait( MPI_Request * request, MPI_Status * status );

  static int waitAny( int count, MPI_Request array_of_requests[], int * indx, MPI_Status array_of_statuses[] );

  static int waitSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );

  static int waitAll( int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[] );

  static double wtime( void );
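
  /// Non-blocking exchange sketch (illustrative only; iSend()/iRecv() are declared
  /// further down in this class, and neighborRank/tag are placeholders):
  /// @code
  /// MPI_Request requests[2] = { MPI_REQUEST_NULL, MPI_REQUEST_NULL };
  /// double sendValue = 1.0;
  /// double recvValue = 0.0;
  /// MpiWrapper::iSend( &sendValue, 1, neighborRank, tag, MPI_COMM_GEOS, &requests[0] );
  /// MpiWrapper::iRecv( &recvValue, 1, neighborRank, tag, MPI_COMM_GEOS, &requests[1] );
  /// MpiWrapper::waitAll( 2, requests, MPI_STATUSES_IGNORE );
  /// @endcode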
  static int activeWaitAny( const int count,
                            MPI_Request array_of_requests[],
                            MPI_Status array_of_statuses[],
                            std::function< MPI_Request ( int ) > func );

  static int activeWaitSome( const int count,
                             MPI_Request array_of_requests[],
                             MPI_Status array_of_statuses[],
                             std::function< MPI_Request ( int ) > func );

  static int activeWaitSomeCompletePhase( const int participants,
                                          stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );

  static int activeWaitOrderedCompletePhase( const int participants,
                                             stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );

#if !defined(GEOS_USE_MPI)
  static std::map< int, std::pair< int, void * > > & getTagToPointersMap()
  {
    static std::map< int, std::pair< int, void * > > tagToPointers;
    return tagToPointers;
  }
#endif

  template< typename T_SEND, typename T_RECV >
  static int allgather( T_SEND const * sendbuf,
                        int sendcount,
                        T_RECV * recvbuf,
                        int recvcount,
                        MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T_SEND, typename T_RECV >
  static int allgatherv( T_SEND const * sendbuf,
                         int sendcount,
                         T_RECV * recvbuf,
                         int * recvcounts,
                         int * displacements,
                         MPI_Comm comm = MPI_COMM_GEOS );
  template< typename T >
  static void allGather( T const myValue, array1d< T > & allValues, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static int allGather( arrayView1d< T const > const & sendValues, array1d< T > & allValues, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static int allGatherv( arrayView1d< T const > const & sendValues, array1d< T > & allValues, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static T allReduce( T const & value, Reduction const op, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
  static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm );

  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
  static void allReduce( SRC_CONTAINER_TYPE const & src,
                         DST_CONTAINER_TYPE & dst,
                         int const count,
                         Reduction const op,
                         MPI_Comm const comm );
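
  /// Container reduction sketch (illustrative only; Reduction::Sum is assumed to be
  /// one of the Reduction enumerators): reduce each entry of a local buffer across
  /// ranks into a destination buffer of the same size.
  /// @code
  /// stdVector< double > localSums{ 1.0, 2.0, 3.0 };   // same size on every rank
  /// stdVector< double > globalSums( localSums.size() );
  /// MpiWrapper::allReduce( localSums, globalSums, MpiWrapper::Reduction::Sum, MPI_COMM_GEOS );
  /// @endcode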
  template< typename FIRST, typename SECOND, PairReduction OP >
  static PairType< FIRST, SECOND > allReduce( PairType< FIRST, SECOND > pair, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename FIRST, typename SECOND, typename CONTAINER, PairReduction OP >
  static PairType< FIRST, SECOND > allReduce( CONTAINER const & pairs, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static int reduce( T const * sendbuf,
                     T * recvbuf,
                     int count,
                     MPI_Op op,
                     int root,
                     MPI_Comm comm = MPI_COMM_GEOS );
  template< typename T >

  template< typename T >

  template< typename T >
  static int scan( T const * sendbuf,
                   T * recvbuf,
                   int count,
                   MPI_Op op,
                   MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static int exscan( T const * sendbuf,
                     T * recvbuf,
                     int count,
                     MPI_Op op,
                     MPI_Comm comm = MPI_COMM_GEOS );
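
  /// Exclusive-scan sketch (illustrative only): compute this rank's global offset
  /// from its local element count.
  /// @code
  /// int const localCount = 42;   // e.g. number of locally owned elements
  /// int globalOffset = 0;
  /// MpiWrapper::exscan( &localCount, &globalOffset, 1, MPI_SUM );
  /// // globalOffset now holds the sum of localCount over all lower ranks; on rank 0
  /// // the MPI standard leaves it unspecified (prefixSum() below zeroes it explicitly).
  /// @endcode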
  template< typename T >

  template< typename T >

  template< typename TS, typename TR >
  static int gather( TS const * const sendbuf,
                     int sendcount,
                     TR * const recvbuf,
                     int recvcount,
                     int root,
                     MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T,
            typename DST_CONTAINER,
            typename = std::enable_if_t<
              std::is_trivially_copyable_v< T > &&
              std::is_same_v< decltype(std::declval< DST_CONTAINER >().size()), std::size_t > &&
              std::is_same_v< decltype(std::declval< DST_CONTAINER >().data()), T * > > >
  static int gather( T const & value,
                     DST_CONTAINER & destValuesBuffer,
                     int root,
                     MPI_Comm comm = MPI_COMM_GEOS );

  template< typename TS, typename TR >
  static int gatherv( TS const * const sendbuf,
                      int sendcount,
                      TR * const recvbuf,
                      const int * recvcounts,
                      const int * displs,
                      int root,
                      MPI_Comm comm = MPI_COMM_GEOS );
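
  /// Gather sketch (illustrative only): collect one integer per rank on root 0.
  /// @code
  /// int const myValue = MpiWrapper::commRank();
  /// stdVector< int > gathered;
  /// if( MpiWrapper::commRank() == 0 )
  /// {
  ///   gathered.resize( MpiWrapper::commSize() );
  /// }
  /// MpiWrapper::gather( &myValue, 1, gathered.data(), 1, 0 );
  /// @endcode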
  template< typename T >
  static int recv( array1d< T > & buf,
                   int MPI_PARAM( source ),
                   int tag,
                   MPI_Comm MPI_PARAM( comm ),
                   MPI_Request * MPI_PARAM( request ) );

  template< typename T >
  static int iSend( arrayView1d< T > const & buf,
                    int MPI_PARAM( dest ),
                    int tag,
                    MPI_Comm MPI_PARAM( comm ),
                    MPI_Request * MPI_PARAM( request ) );

  template< typename T >
  static int iRecv( T * const buf,
                    int count,
                    int source,
                    int tag,
                    MPI_Comm comm,
                    MPI_Request * request );

  template< typename T >
  static int send( T const * const buf,
                   int count,
                   int dest,
                   int tag,
                   MPI_Comm comm );

  template< typename T >
  static int iSend( T const * const buf,
                    int count,
                    int dest,
                    int tag,
                    MPI_Comm comm,
                    MPI_Request * request );

  template< typename U, typename T >
  static U prefixSum( T const value, MPI_Comm comm = MPI_COMM_GEOS );
  template< typename T >

  template< typename T >

  template< typename T >

  template< typename T >

  template< typename FIRST, typename SECOND >

  template< typename FIRST, typename SECOND, typename CONTAINER >

  template< typename T >

  template< typename T >

  template< typename FIRST, typename SECOND >

  template< typename FIRST, typename SECOND, typename CONTAINER >
  template< typename T >
  static int allReduce( T const * sendbuf,
                        T * recvbuf,
                        int count,
                        MPI_Op op,
                        MPI_Comm comm = MPI_COMM_GEOS );
struct ManagedResources
{
  std::set< MPI_Op > m_mpiOps;
  std::set< MPI_Datatype > m_mpiTypes;
};

ManagedResources & getManagedResources();
template< typename T, typename ENABLE = void >
struct MpiTypeImpl {};

#define ADD_MPI_TYPE_MAP( T, MPI_T ) \
  template<> struct MpiTypeImpl< T > { static MPI_Datatype get() { return MPI_T; } }

ADD_MPI_TYPE_MAP( float, MPI_FLOAT );
ADD_MPI_TYPE_MAP( double, MPI_DOUBLE );

ADD_MPI_TYPE_MAP( char, MPI_CHAR );
ADD_MPI_TYPE_MAP( signed char, MPI_SIGNED_CHAR );
ADD_MPI_TYPE_MAP( unsigned char, MPI_UNSIGNED_CHAR );

ADD_MPI_TYPE_MAP( int, MPI_INT );
ADD_MPI_TYPE_MAP( long int, MPI_LONG );
ADD_MPI_TYPE_MAP( long long int, MPI_LONG_LONG );

ADD_MPI_TYPE_MAP( unsigned int, MPI_UNSIGNED );
ADD_MPI_TYPE_MAP( unsigned long int, MPI_UNSIGNED_LONG );
ADD_MPI_TYPE_MAP( unsigned long long int, MPI_UNSIGNED_LONG_LONG );

#undef ADD_MPI_TYPE_MAP
template< typename T >
struct MpiTypeImpl< T, std::enable_if_t< std::is_enum< T >::value > >
{
  static MPI_Datatype get() { return MpiTypeImpl< std::underlying_type_t< T > >::get(); }
};

template< typename T >
MPI_Datatype getMpiType()
{
  return MpiTypeImpl< T >::get();
}
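
/// Sketch of the type mapping in use (illustrative only): getMpiType() maps a C++
/// type, or the underlying type of an enum, to the matching MPI_Datatype.
/// @code
/// MPI_Datatype const dt = internal::getMpiType< double >();   // MPI_DOUBLE
/// enum class Color : int { red, green };
/// MPI_Datatype const et = internal::getMpiType< Color >();    // resolves to MPI_INT
/// @endcode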
template< typename FIRST, typename SECOND >
MPI_Datatype getMpiPairType()
{
  static_assert( "no default implementation, please add a template specialization and add it in the \"testMpiWrapper\" unit test." );
  return {};
}

template<> MPI_Datatype getMpiPairType< int, int >();
template<> MPI_Datatype getMpiPairType< long int, int >();
template<> MPI_Datatype getMpiPairType< long int, long int >();
template<> MPI_Datatype getMpiPairType< long long int, long long int >();
template<> MPI_Datatype getMpiPairType< float, int >();
template<> MPI_Datatype getMpiPairType< double, int >();
template<> MPI_Datatype getMpiPairType< double, long int >();
template<> MPI_Datatype getMpiPairType< double, long long int >();
template<> MPI_Datatype getMpiPairType< double, double >();
template< typename FIRST, typename SECOND, MpiWrapper::PairReduction OP >
MPI_Op getMpiPairReductionOp()
{
  static auto const createOpHolder = [] () {
    using PAIR_T = MpiWrapper::PairType< FIRST, SECOND >;

    auto const customOpFunc =
      []( void * invec, void * inoutvec, int * len, MPI_Datatype * )
    {
      for( int i = 0; i < *len; ++i )
      {
        PAIR_T & in = static_cast< PAIR_T * >(invec)[i];
        PAIR_T & inout = static_cast< PAIR_T * >(inoutvec)[i];
        if constexpr( OP == MpiWrapper::PairReduction::Min )
        {
          if( std::tie( in.first, in.second ) < std::tie( inout.first, inout.second ) )
            inout = in;
        }
        else
        {
          if( std::tie( in.first, in.second ) > std::tie( inout.first, inout.second ) )
            inout = in;
        }
      }
    };

    MPI_Op mpiOp;
    MPI_Op_create( customOpFunc, 1, &mpiOp );
    internal::getManagedResources().m_mpiOps.emplace( mpiOp );
    return mpiOp;
  };
  static MPI_Op mpiOp{ createOpHolder() };
  return mpiOp;
}
GEOS_ERROR( "Unsupported reduction operation" );
template< typename T_SEND, typename T_RECV >
int MpiWrapper::allgather( T_SEND const * const sendbuf,
                           int sendcount,
                           T_RECV * const recvbuf,
                           int recvcount,
                           MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Allgather( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
                        recvbuf, recvcount, internal::getMpiType< T_RECV >(),
                        comm );
#else
  static_assert( std::is_same< T_SEND, T_RECV >::value,
                 "MpiWrapper::allgather() for serial run requires send and receive buffers are of the same type" );
  std::copy( sendbuf, sendbuf + sendcount, recvbuf );
  return 0;
#endif
}

template< typename T_SEND, typename T_RECV >
int MpiWrapper::allgatherv( T_SEND const * const sendbuf,
                            int sendcount,
                            T_RECV * const recvbuf,
                            int * recvcounts,
                            int * displacements,
                            MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Allgatherv( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
                         recvbuf, recvcounts, displacements, internal::getMpiType< T_RECV >(),
                         comm );
#else
  static_assert( std::is_same< T_SEND, T_RECV >::value,
                 "MpiWrapper::allgatherv() for serial run requires send and receive buffers are of the same type" );
  std::copy( sendbuf, sendbuf + sendcount, recvbuf );
  return 0;
#endif
}
template< typename T >
void MpiWrapper::allGather( T const myValue, array1d< T > & allValues, MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  int const mpiSize = commSize( comm );
  allValues.resize( mpiSize );

  MPI_Datatype const MPI_TYPE = internal::getMpiType< T >();

  MPI_Allgather( &myValue, 1, MPI_TYPE, allValues.data(), 1, MPI_TYPE, comm );
#else
  allValues.resize( 1 );
  allValues[0] = myValue;
#endif
}

template< typename T >
int MpiWrapper::allGather( arrayView1d< T const > const & sendValues,
                           array1d< T > & allValues,
                           MPI_Comm MPI_PARAM( comm ) )
{
  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
#ifdef GEOS_USE_MPI
  int const mpiSize = commSize( comm );
  allValues.resize( mpiSize * sendSize );
  return MPI_Allgather( sendValues.data(),
                        sendSize,
                        internal::getMpiType< T >(),
                        allValues.data(),
                        sendSize,
                        internal::getMpiType< T >(),
                        comm );
#else
  allValues.resize( sendSize );
  for( localIndex a = 0; a < sendSize; ++a )
  {
    allValues[a] = sendValues[a];
  }
  return 0;
#endif
}

template< typename T >
int MpiWrapper::allGatherv( arrayView1d< T const > const & sendValues,
                            array1d< T > & allValues,
                            MPI_Comm MPI_PARAM( comm ) )
{
  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
#ifdef GEOS_USE_MPI
  int const mpiSize = commSize( comm );
  array1d< int > counts;
  allGather( sendSize, counts, comm );
  array1d< int > displs( mpiSize + 1 );
  std::partial_sum( counts.begin(), counts.end(), displs.begin() + 1 );
  allValues.resize( displs.back() );
  return MPI_Allgatherv( sendValues.data(),
                         sendSize,
                         internal::getMpiType< T >(),
                         allValues.data(),
                         counts.data(),
                         displs.data(),
                         internal::getMpiType< T >(),
                         comm );
#else
  allValues.resize( sendSize );
  for( localIndex a = 0; a < sendSize; ++a )
  {
    allValues[a] = sendValues[a];
  }
  return 0;
#endif
}
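
/// Variable-size gather sketch (illustrative only; toViewConst() is assumed from
/// LvArray): each rank contributes a different number of values and receives the
/// concatenation of the contributions of all ranks.
/// @code
/// array1d< int > myValues( MpiWrapper::commRank() + 1 );   // a different size on each rank
/// array1d< int > allValues;
/// MpiWrapper::allGatherv( myValues.toViewConst(), allValues, MPI_COMM_GEOS );
/// @endcode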
template< typename T >
int MpiWrapper::allReduce( T const * const sendbuf,
                           T * const recvbuf,
                           int const count,
                           MPI_Op const MPI_PARAM( op ),
                           MPI_Comm const MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  MPI_Datatype const mpiType = internal::getMpiType< T >();
  return MPI_Allreduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, comm );
#else
  if( sendbuf != recvbuf )
  {
    memcpy( recvbuf, sendbuf, count * sizeof( T ) );
  }
  return 0;
#endif
}

template< typename T >
int MpiWrapper::reduce( T const * const sendbuf,
                        T * const recvbuf,
                        int const count,
                        MPI_Op const MPI_PARAM( op ),
                        int const MPI_PARAM( root ),
                        MPI_Comm const MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  MPI_Datatype const mpiType = internal::getMpiType< T >();
  return MPI_Reduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, root, comm );
#else
  if( sendbuf != recvbuf )
  {
    memcpy( recvbuf, sendbuf, count * sizeof( T ) );
  }
  return 0;
#endif
}
template< typename T >
int MpiWrapper::scan( T const * const sendbuf,
                      T * const recvbuf,
                      int count,
                      MPI_Op MPI_PARAM( op ),
                      MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Scan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
#else
  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
  return 0;
#endif
}

template< typename T >
int MpiWrapper::exscan( T const * const MPI_PARAM( sendbuf ),
                        T * const recvbuf,
                        int count,
                        MPI_Op MPI_PARAM( op ),
                        MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Exscan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
#else
  memset( recvbuf, 0, count * sizeof( T ) );
  return 0;
#endif
}
template< typename T >
int MpiWrapper::bcast( T * const MPI_PARAM( buffer ),
                       int MPI_PARAM( count ),
                       int MPI_PARAM( root ),
                       MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Bcast( buffer, count, internal::getMpiType< T >(), root, comm );
#else
  return 0;
#endif
}

template< typename T >
void MpiWrapper::broadcast( T & MPI_PARAM( value ),
                            int MPI_PARAM( srcRank ),
                            MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  MPI_Bcast( &value, 1, internal::getMpiType< T >(), srcRank, comm );
#endif
}
template<>
inline
void MpiWrapper::broadcast< string >( string & MPI_PARAM( value ),
                                      int MPI_PARAM( srcRank ),
                                      MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  int size = LvArray::integerConversion< int >( value.size() );
  broadcast( size, srcRank, comm );
  value.resize( size );
  MPI_Bcast( const_cast< char * >( value.data() ), size, internal::getMpiType< char >(), srcRank, comm );
#endif
}
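
/// Broadcast sketch (illustrative only; the file name is made up): send a scalar
/// and a string from rank 0 to every rank.
/// @code
/// int step = 0;
/// string fileName;
/// if( MpiWrapper::commRank() == 0 )
/// {
///   step = 10;
///   fileName = "restart_000010.hdf5";
/// }
/// MpiWrapper::broadcast( step );       // srcRank defaults to 0
/// MpiWrapper::broadcast( fileName );
/// @endcode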
template< typename TS, typename TR >
int MpiWrapper::gather( TS const * const sendbuf,
                        int sendcount,
                        TR * const recvbuf,
                        int recvcount,
                        int MPI_PARAM( root ),
                        MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Gather( sendbuf, sendcount, internal::getMpiType< TS >(),
                     recvbuf, recvcount, internal::getMpiType< TR >(),
                     root, comm );
#else
  static_assert( std::is_same< TS, TR >::value,
                 "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
  std::size_t const sendBufferSize = sendcount * sizeof( TS );
  std::size_t const recvBufferSize = recvcount * sizeof( TR );
  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
  memcpy( recvbuf, sendbuf, sendBufferSize );
  return 0;
#endif
}
template< typename T, typename DST_CONTAINER, typename >
int MpiWrapper::gather( T const & value,
                        DST_CONTAINER & destValuesBuffer,
                        int MPI_PARAM( root ),
                        MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  if( commRank() == 0 )
    GEOS_ERROR_IF_LT_MSG( destValuesBuffer.size(), std::size_t( commSize() ),
                          "Receive buffer is not large enough to contain the values to receive." );
  return MPI_Gather( &value, sizeof( T ), internal::getMpiType< uint8_t >(),
                     destValuesBuffer.data(), sizeof( T ), internal::getMpiType< uint8_t >(),
                     root, comm );
#else
  std::size_t const sendBufferSize = sizeof( T );
  memcpy( destValuesBuffer.data(), &value, sendBufferSize );
  return 0;
#endif
}
template< typename TS, typename TR >
int MpiWrapper::gatherv( TS const * const sendbuf,
                         int sendcount,
                         TR * const recvbuf,
                         const int * recvcounts,
                         const int * MPI_PARAM( displs ),
                         int MPI_PARAM( root ),
                         MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Gatherv( sendbuf, sendcount, internal::getMpiType< TS >(),
                      recvbuf, recvcounts, displs, internal::getMpiType< TR >(),
                      root, comm );
#else
  static_assert( std::is_same< TS, TR >::value,
                 "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
  std::size_t const sendBufferSize = sendcount * sizeof( TS );
  std::size_t const recvBufferSize = recvcounts[0] * sizeof( TR );
  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
  memcpy( recvbuf, sendbuf, sendBufferSize );
  return 0;
#endif
}
template< typename T >
int MpiWrapper::iRecv( T * const buf,
                       int count,
                       int MPI_PARAM( source ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( *request != MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  return MPI_Irecv( buf, count, internal::getMpiType< T >(), source, tag, comm, request );
#else
  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );

  if( iPointer == pointerMap.end() )
  {
    pointerMap.insert( {tag, {1, buf} } );
  }
  else
  {
    GEOS_ERROR_IF( iPointer->second.first != 0,
                   "Tag is assigned, but the pointer was not set by iSend." );
    memcpy( buf, iPointer->second.second, count * sizeof( T ) );
    pointerMap.erase( iPointer );
  }
  return 0;
#endif
}
template< typename T >
int MpiWrapper::recv( array1d< T > & buf,
                      int MPI_PARAM( source ),
                      int tag,
                      MPI_Comm MPI_PARAM( comm ),
                      MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  MPI_Status status;
  int count;
  MPI_Probe( source, tag, comm, &status );
  MPI_Get_count( &status, MPI_CHAR, &count );

  GEOS_ASSERT_EQ( count % sizeof( T ), 0 );
  buf.resize( count / sizeof( T ) );

  return MPI_Recv( reinterpret_cast< char * >( buf.data() ),
                   count, MPI_CHAR, source, tag, comm, MPI_STATUS_IGNORE );
#else
  return 0;
#endif
}

template< typename T >
int MpiWrapper::iSend( arrayView1d< T > const & buf,
                       int MPI_PARAM( dest ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( *request != MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  return MPI_Isend( reinterpret_cast< void const * >( buf.data() ),
                    buf.size() * sizeof( T ),
                    MPI_CHAR, dest, tag, comm, request );
#else
  return 0;
#endif
}
template< typename T >
int MpiWrapper::send( T const * const buf,
                      int count,
                      int dest,
                      int tag,
                      MPI_Comm comm )
{
#ifdef GEOS_USE_MPI
  return MPI_Send( buf, count, internal::getMpiType< T >(), dest, tag, comm );
#else
  return 0;
#endif
}

template< typename T >
int MpiWrapper::iSend( T const * const buf,
                       int count,
                       int MPI_PARAM( dest ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( *request != MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  return MPI_Isend( buf, count, internal::getMpiType< T >(), dest, tag, comm, request );
#else
  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );

  if( iPointer == pointerMap.end() )
  {
    pointerMap.insert( {tag, {0, const_cast< T * >( buf )} } );
  }
  else
  {
    GEOS_ERROR_IF( iPointer->second.first != 1,
                   "Tag is assigned, but the pointer was not set by iRecv." );
    memcpy( iPointer->second.second, buf, count * sizeof( T ) );
    pointerMap.erase( iPointer );
  }
  return 0;
#endif
}
template< typename U, typename T >
U MpiWrapper::prefixSum( T const value, MPI_Comm comm )
{
  U localResult;
#ifdef GEOS_USE_MPI
  U const convertedValue = value;
  int const error = MPI_Exscan( &convertedValue, &localResult, 1, internal::getMpiType< U >(), MPI_SUM, comm );
  MPI_CHECK_ERROR( error );
#endif
  if( commRank() == 0 )
  {
    localResult = 0;
  }
  return localResult;
}
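
/// Offset-numbering sketch (illustrative only; localIndex/globalIndex are the GEOS
/// index aliases): prefixSum() returns, on each rank, the sum of `value` over all
/// lower ranks (zero on rank 0), which is handy for assigning contiguous global
/// index ranges.
/// @code
/// localIndex const numLocalRows = 1000;
/// globalIndex const firstGlobalRow = MpiWrapper::prefixSum< globalIndex >( numLocalRows );
/// @endcode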
template< typename T >

template< typename T >
template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm )
{
  static_assert( std::is_trivially_copyable_v< typename SRC_CONTAINER_TYPE::value_type >,
                 "The type in the source container must be trivially copyable." );
  static_assert( std::is_trivially_copyable_v< typename DST_CONTAINER_TYPE::value_type >,
                 "The type in the destination container must be trivially copyable." );
  static_assert( std::is_same_v< typename SRC_CONTAINER_TYPE::value_type, typename DST_CONTAINER_TYPE::value_type >,
                 "Source and destination containers must have the same value type." );
  allReduce( src.data(), dst.data(), count, getMpiOp( op ), comm );
}

template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm )
{
  static_assert( std::is_trivially_copyable_v< typename SRC_CONTAINER_TYPE::value_type >,
                 "The type in the source container must be trivially copyable." );
  static_assert( std::is_trivially_copyable_v< typename DST_CONTAINER_TYPE::value_type >,
                 "The type in the destination container must be trivially copyable." );
  static_assert( std::is_same_v< typename SRC_CONTAINER_TYPE::value_type, typename DST_CONTAINER_TYPE::value_type >,
                 "Source and destination containers must have the same value type." );
  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
}
template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename T >
template< typename FIRST, typename SECOND, MpiWrapper::PairReduction const OP >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::allReduce( PairType< FIRST, SECOND > pair, MPI_Comm comm )
{
#ifdef GEOS_USE_MPI
  auto const type = internal::getMpiPairType< FIRST, SECOND >();
  auto const mpiOp = internal::getMpiPairReductionOp< FIRST, SECOND, OP >();
  MPI_Allreduce( MPI_IN_PLACE, &pair, 1, type, mpiOp, comm );
#endif
  return pair;
}
template< typename FIRST, typename SECOND, typename CONTAINER, MpiWrapper::PairReduction const OP >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::allReduce( CONTAINER const & pairs, MPI_Comm comm )
{
  using PAIR_T = PairType< FIRST, SECOND >;

  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMin = []( PAIR_T const & a, PAIR_T const & b ) {
    return ( std::tie( a.first, a.second ) < std::tie( b.first, b.second ) ) ? a : b;
  };
  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMax = []( PAIR_T const & a, PAIR_T const & b ) {
    return ( std::tie( a.first, a.second ) > std::tie( b.first, b.second ) ) ? a : b;
  };

  PAIR_T const defaultPair{
    OP == PairReduction::Min ? std::numeric_limits< FIRST >::max() : std::numeric_limits< FIRST >::lowest(),
    OP == PairReduction::Min ? std::numeric_limits< SECOND >::max() : std::numeric_limits< SECOND >::lowest()
  };

  PAIR_T pair = std::accumulate( pairs.begin(), pairs.end(), defaultPair,
                                 OP == PairReduction::Min ? getMin : getMax );

  return allReduce< FIRST, SECOND, OP >( pair, comm );
}
template< typename FIRST, typename SECOND >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, PairReduction::Min >( pair, comm ); }

template< typename FIRST, typename SECOND, typename CONTAINER >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( CONTAINER const & pairs, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Min >( pairs, comm ); }

template< typename FIRST, typename SECOND >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, PairReduction::Max >( pair, comm ); }

template< typename FIRST, typename SECOND, typename CONTAINER >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( CONTAINER const & pairs, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Max >( pairs, comm ); }
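
/// Pair ("value plus owner") reduction sketch (illustrative only; computeLocalValue()
/// is a hypothetical placeholder): find the global minimum of a per-rank value
/// together with the rank that owns it.
/// @code
/// MpiWrapper::PairType< double, int > const localMin{ computeLocalValue(), MpiWrapper::commRank() };
/// auto const globalMin =
///   MpiWrapper::allReduce< double, int, MpiWrapper::PairReduction::Min >( localMin, MPI_COMM_GEOS );
/// // globalMin.first is the smallest value over all ranks, globalMin.second the rank holding it.
/// @endcode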