#ifndef GEOS_COMMON_MPIWRAPPER_HPP_
#define GEOS_COMMON_MPIWRAPPER_HPP_

#if defined(GEOS_USE_MPI)
#define MPI_PARAM( x ) x
#define MPI_PARAM( x )

#define MPI_COMM_NULL ((MPI_Comm)0x04000000)
#define MPI_COMM_WORLD ((MPI_Comm)0x44000000)
#define MPI_COMM_SELF ((MPI_Comm)0x40000000)

typedef int MPI_Datatype;
#define MPI_CHAR ((MPI_Datatype)0x4c000101)
#define MPI_SIGNED_CHAR ((MPI_Datatype)0x4c000118)
#define MPI_UNSIGNED_CHAR ((MPI_Datatype)0x4c000102)
#define MPI_BYTE ((MPI_Datatype)0x4c00010d)
#define MPI_WCHAR ((MPI_Datatype)0x4c00040e)
#define MPI_SHORT ((MPI_Datatype)0x4c000203)
#define MPI_UNSIGNED_SHORT ((MPI_Datatype)0x4c000204)
#define MPI_INT ((MPI_Datatype)0x4c000405)
#define MPI_UNSIGNED ((MPI_Datatype)0x4c000406)
#define MPI_LONG ((MPI_Datatype)0x4c000807)
#define MPI_UNSIGNED_LONG ((MPI_Datatype)0x4c000808)
#define MPI_FLOAT ((MPI_Datatype)0x4c00040a)
#define MPI_DOUBLE ((MPI_Datatype)0x4c00080b)
#define MPI_LONG_DOUBLE ((MPI_Datatype)0x4c00100c)
#define MPI_LONG_LONG_INT ((MPI_Datatype)0x4c000809)
#define MPI_UNSIGNED_LONG_LONG ((MPI_Datatype)0x4c000819)
#define MPI_LONG_LONG MPI_LONG_LONG_INT

#define MPI_MAX (MPI_Op)(0x58000001)
#define MPI_MIN (MPI_Op)(0x58000002)
#define MPI_SUM (MPI_Op)(0x58000003)
#define MPI_PROD (MPI_Op)(0x58000004)
#define MPI_LAND (MPI_Op)(0x58000005)
#define MPI_BAND (MPI_Op)(0x58000006)
#define MPI_LOR (MPI_Op)(0x58000007)
#define MPI_BOR (MPI_Op)(0x58000008)
#define MPI_LXOR (MPI_Op)(0x58000009)
#define MPI_BXOR (MPI_Op)(0x5800000a)
#define MPI_MINLOC (MPI_Op)(0x5800000b)
#define MPI_MAXLOC (MPI_Op)(0x5800000c)
#define MPI_REPLACE (MPI_Op)(0x5800000d)
#define MPI_NO_OP (MPI_Op)(0x5800000e)

#define MPI_UNDEFINED (-32766)
#define MPI_STATUS_IGNORE (MPI_Status *)1
#define MPI_STATUSES_IGNORE (MPI_Status *)1
#define MPI_REQUEST_NULL ((MPI_Request)0x2c000000)
typedef int MPI_Request;

#define MPI_INFO_NULL (MPI_Info)(0x60000000)

#define MPI_CHECK_ERROR( error ) ((void) error)
#define MPI_CHECK_ERROR( error ) GEOS_ERROR_IF_NE( error, MPI_SUCCESS );
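// Usage sketch (editorial illustration, not part of the original header):
// MPI_CHECK_ERROR wraps the integer return code of a raw MPI call so that a
// non-MPI_SUCCESS result aborts in MPI builds and is ignored in serial builds.
// The buffer, count, and communicator below are hypothetical.
//
//   int const err = MPI_Bcast( data, count, MPI_INT, 0, MPI_COMM_GEOS );
//   MPI_CHECK_ERROR( err );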
template< typename FIRST, typename SECOND >

static void barrier( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );

static int cartCoords( MPI_Comm comm, int rank, int maxdims, int coords[] );

static int cartCreate( MPI_Comm comm_old, int ndims, const int dims[], const int periods[], int reorder, MPI_Comm * comm_cart );

static int cartRank( MPI_Comm comm, const int coords[] );

static void commFree( MPI_Comm & comm );

static int commRank( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );

static int commSize( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );

static bool commCompare( MPI_Comm const & comm1, MPI_Comm const & comm2 );

static bool initialized();

static int init( int * argc, char * * * argv );

static MPI_Comm commDup( MPI_Comm const comm );

static MPI_Comm commSplit( MPI_Comm const comm, int color, int key );
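// Usage sketch (editorial illustration, not part of the original header):
// splitting the global communicator into one sub-communicator per "color" and
// releasing it once it is no longer needed. The variable names are hypothetical.
//
//   int const rank  = MpiWrapper::commRank( MPI_COMM_GEOS );
//   int const color = rank % 2;                                  // even / odd ranks
//   MPI_Comm split  = MpiWrapper::commSplit( MPI_COMM_GEOS, color, rank );
//   // ... collective work restricted to `split` ...
//   MpiWrapper::commFree( split );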
static int test( MPI_Request * request, int * flag, MPI_Status * status );

static int testAny( int count, MPI_Request array_of_requests[], int * idx, int * flags, MPI_Status array_of_statuses[] );

static int testSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );

static int testAll( int count, MPI_Request array_of_requests[], int * flags, MPI_Status array_of_statuses[] );

static int checkAny( int count, MPI_Request array_of_requests[], int * idx, int * flag, MPI_Status array_of_statuses[] );

static int checkAll( int count, MPI_Request array_of_requests[], int * flag, MPI_Status array_of_statuses[] );

static int wait( MPI_Request * request, MPI_Status * status );

static int waitAny( int count, MPI_Request array_of_requests[], int * indx, MPI_Status array_of_statuses[] );

static int waitSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );

static int waitAll( int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[] );

static double wtime( void );
static int activeWaitAny( const int count,
                          MPI_Request array_of_requests[],
                          MPI_Status array_of_statuses[],
                          std::function< MPI_Request ( int ) > func );

static int activeWaitSome( const int count,
                           MPI_Request array_of_requests[],
                           MPI_Status array_of_statuses[],
                           std::function< MPI_Request ( int ) > func );

static int activeWaitSomeCompletePhase( const int participants,
                                        std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );

static int activeWaitOrderedCompletePhase( const int participants,
                                           std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
#if !defined(GEOS_USE_MPI)
static std::map< int, std::pair< int, void * > > & getTagToPointersMap()
{
  static std::map< int, std::pair< int, void * > > tagToPointers;
  return tagToPointers;
}
#endif
template< typename T_SEND, typename T_RECV >

template< typename T_SEND, typename T_RECV >

template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm );
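// Usage sketch (editorial illustration, not part of the original header):
// the strongly typed reductions hide the MPI_Datatype / MPI_Op bookkeeping.
// Variable names are hypothetical, and `Reduction::Sum` is assumed here to be
// one of the members of the Reduction enum used above.
//
//   double const worstError = MpiWrapper::max( localError );   // MPI_MAX over all ranks
//   long long const total   = MpiWrapper::sum( localCount );   // MPI_SUM over all ranks
//
//   std::vector< int > localCounts( 3 ), globalCounts( 3 );
//   MpiWrapper::allReduce( localCounts, globalCounts, 3, MpiWrapper::Reduction::Sum, MPI_COMM_GEOS );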
template< typename FIRST, typename SECOND, PairReduction OP >

template< typename FIRST, typename SECOND, typename CONTAINER, PairReduction OP >

template< typename T >
static int reduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, int root, MPI_Comm comm = MPI_COMM_GEOS );
template< typename T >

template< typename T >

template< typename T >
static int scan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );

template< typename T >
static int exscan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
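// Usage sketch (editorial illustration, not part of the original header):
// a classic use of exscan is turning per-rank element counts into global
// offsets; MPI_Exscan leaves the result undefined on rank 0, so it is zeroed
// explicitly. Variable names are hypothetical.
//
//   int const localCount = static_cast< int >( myElements.size() );
//   int offset = 0;
//   MpiWrapper::exscan( &localCount, &offset, 1, MPI_SUM, MPI_COMM_GEOS );
//   if( MpiWrapper::commRank() == 0 )
//     offset = 0;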
template< typename T >

template< typename T >

template< typename TS, typename TR >
static int gather( TS const * const sendbuf, int sendcount, TR * const recvbuf, int recvcount, int root, MPI_Comm comm = MPI_COMM_GEOS );
template< typename T, typename DST_CONTAINER,
          typename = std::enable_if_t< std::is_trivially_copyable_v< T > &&
                                       std::is_same_v< decltype(std::declval< DST_CONTAINER >().size()), std::size_t > &&
                                       std::is_same_v< decltype(std::declval< DST_CONTAINER >().data()), T * > > >
static int gather( T const & value, DST_CONTAINER & destValuesBuffer, int root, MPI_Comm comm = MPI_COMM_GEOS );
template< typename TS, typename TR >
static int gatherv( TS const * const sendbuf, int sendcount, TR * const recvbuf, const int * recvcounts, const int * displs, int root, MPI_Comm comm = MPI_COMM_GEOS );
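// Usage sketch (editorial illustration, not part of the original header):
// rank 0 collects one value per rank; the destination container only needs to
// be sized on the root. Variable and helper names are hypothetical.
//
//   double const localNorm = computeLocalNorm();     // hypothetical helper
//   std::vector< double > allNorms;
//   if( MpiWrapper::commRank() == 0 )
//     allNorms.resize( MpiWrapper::commSize() );
//   MpiWrapper::gather( localNorm, allNorms, 0 );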
template< typename T >
static int iRecv( T * const buf, int count, int MPI_PARAM( source ), int tag, MPI_Comm MPI_PARAM( comm ), MPI_Request * MPI_PARAM( request ) );

template< typename T >
                  int MPI_PARAM( dest ),
                  MPI_Comm MPI_PARAM( comm ),
                  MPI_Request * MPI_PARAM( request ) );

template< typename T >
                  MPI_Request * request );

template< typename T >
static int iSend( T const * const buf, int count, int dest, int tag, MPI_Comm comm, MPI_Request * request );
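// Usage sketch (editorial illustration, not part of the original header):
// a matched nonblocking exchange with a hypothetical neighbour rank, completed
// with waitAll. Requests start out as MPI_REQUEST_NULL, matching the "request
// still in use" check performed by the wrappers.
//
//   std::array< double, 8 > sendBuf{}, recvBuf{};
//   int const tag = 0;
//   MPI_Request requests[2] = { MPI_REQUEST_NULL, MPI_REQUEST_NULL };
//   MpiWrapper::iRecv( recvBuf.data(), 8, neighborRank, tag, MPI_COMM_GEOS, &requests[0] );
//   MpiWrapper::iSend( sendBuf.data(), 8, neighborRank, tag, MPI_COMM_GEOS, &requests[1] );
//   MpiWrapper::waitAll( 2, requests, MPI_STATUSES_IGNORE );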
template< typename U, typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename FIRST, typename SECOND >

template< typename FIRST, typename SECOND, typename CONTAINER >

template< typename T >

template< typename T >

template< typename FIRST, typename SECOND >

template< typename FIRST, typename SECOND, typename CONTAINER >

template< typename T >
static int allReduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
struct ManagedResources
{
  std::set< MPI_Op > m_mpiOps;
  std::set< MPI_Datatype > m_mpiTypes;
};

ManagedResources & getManagedResources();
template< typename T, typename ENABLE = void >
struct MpiTypeImpl {};

#define ADD_MPI_TYPE_MAP( T, MPI_T ) \
  template<> struct MpiTypeImpl< T > { static MPI_Datatype get() { return MPI_T; } }

ADD_MPI_TYPE_MAP( float, MPI_FLOAT );
ADD_MPI_TYPE_MAP( double, MPI_DOUBLE );

ADD_MPI_TYPE_MAP( char, MPI_CHAR );
ADD_MPI_TYPE_MAP( signed char, MPI_SIGNED_CHAR );
ADD_MPI_TYPE_MAP( unsigned char, MPI_UNSIGNED_CHAR );

ADD_MPI_TYPE_MAP( int, MPI_INT );
ADD_MPI_TYPE_MAP( long int, MPI_LONG );
ADD_MPI_TYPE_MAP( long long int, MPI_LONG_LONG );

ADD_MPI_TYPE_MAP( unsigned int, MPI_UNSIGNED );
ADD_MPI_TYPE_MAP( unsigned long int, MPI_UNSIGNED_LONG );
ADD_MPI_TYPE_MAP( unsigned long long int, MPI_UNSIGNED_LONG_LONG );

#undef ADD_MPI_TYPE_MAP
template< typename T >
struct MpiTypeImpl< T, std::enable_if_t< std::is_enum< T >::value > >
{
  static MPI_Datatype get() { return MpiTypeImpl< std::underlying_type_t< T > >::get(); }
};

template< typename T >
MPI_Datatype getMpiType()
{
  return MpiTypeImpl< T >::get();
}
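// Usage sketch (editorial illustration, not part of the original header):
// getMpiType< T >() resolves the MPI_Datatype for any mapped arithmetic or
// enum type at compile time; unmapped types fail to compile because the
// primary MpiTypeImpl template has no get() member.
//
//   MPI_Datatype const dt = internal::getMpiType< double >();   // MPI_DOUBLE
//   enum class Color : int { red, green };
//   MPI_Datatype const et = internal::getMpiType< Color >();    // underlying int -> MPI_INT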
template< typename FIRST, typename SECOND >
MPI_Datatype getMpiPairType()
{
  static_assert( "no default implementation, please add a template specialization and add it in the \"testMpiWrapper\" unit test." );
}

template<> MPI_Datatype getMpiPairType< int, int >();
template<> MPI_Datatype getMpiPairType< long int, int >();
template<> MPI_Datatype getMpiPairType< long int, long int >();
template<> MPI_Datatype getMpiPairType< long long int, long long int >();
template<> MPI_Datatype getMpiPairType< float, int >();
template<> MPI_Datatype getMpiPairType< double, int >();
template<> MPI_Datatype getMpiPairType< double, long int >();
template<> MPI_Datatype getMpiPairType< double, long long int >();
template<> MPI_Datatype getMpiPairType< double, double >();
template< typename FIRST, typename SECOND, MpiWrapper::PairReduction OP >
MPI_Op getMpiPairReductionOp()
  static auto const createOpHolder = [] () {
    using PAIR_T = MpiWrapper::PairType< FIRST, SECOND >;

    auto const customOpFunc =
      []( void * invec, void * inoutvec, int * len, MPI_Datatype * )
        for( int i = 0; i < *len; ++i )
          PAIR_T & in = static_cast< PAIR_T * >(invec)[i];
          PAIR_T & inout = static_cast< PAIR_T * >(inoutvec)[i];
          if( std::tie( in.first, in.second ) < std::tie( inout.first, inout.second ) )
          if( std::tie( in.first, in.second ) > std::tie( inout.first, inout.second ) )

    internal::getManagedResources().m_mpiOps.emplace( mpiOp );

  static MPI_Op mpiOp{ createOpHolder() };
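// Editorial sketch (not the original implementation, which is only partially
// reproduced above): the general pattern for a custom pair reduction is to
// wrap a comparison in an MPI user function and register it once with
// MPI_Op_create. The fixed std::pair< double, int > and the "min" policy
// below are illustrative choices, not the library's own code.
//
//   static void pairMin( void * in, void * inout, int * len, MPI_Datatype * )
//   {
//     auto const * a = static_cast< std::pair< double, int > * >( in );
//     auto * b = static_cast< std::pair< double, int > * >( inout );
//     for( int i = 0; i < *len; ++i )
//       if( a[i] < b[i] )                // lexicographic: value first, then payload
//         b[i] = a[i];
//   }
//
//   MPI_Op pairMinOp;
//   MPI_Op_create( pairMin, /* commutative = */ 1, &pairMinOp );   // register once, free with MPI_Op_free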
GEOS_ERROR( "Unsupported reduction operation" );
template< typename T_SEND, typename T_RECV >
int MpiWrapper::allgather( T_SEND const * const sendbuf,
                           int sendcount,
                           T_RECV * const recvbuf,
                           int recvcount,
                           MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Allgather( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
                        recvbuf, recvcount, internal::getMpiType< T_RECV >(),
                        comm );
#else
  static_assert( std::is_same< T_SEND, T_RECV >::value,
                 "MpiWrapper::allgather() for serial run requires send and receive buffers are of the same type" );
  std::copy( sendbuf, sendbuf + sendcount, recvbuf );
  return 0;
#endif
}
template< typename T_SEND, typename T_RECV >
int MpiWrapper::allgatherv( T_SEND const * const sendbuf,
                            int sendcount,
                            T_RECV * const recvbuf,
                            int * recvcounts,
                            int * displacements,
                            MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Allgatherv( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
                         recvbuf, recvcounts, displacements, internal::getMpiType< T_RECV >(),
                         comm );
#else
  static_assert( std::is_same< T_SEND, T_RECV >::value,
                 "MpiWrapper::allgatherv() for serial run requires send and receive buffers are of the same type" );
  std::copy( sendbuf, sendbuf + sendcount, recvbuf );
  return 0;
#endif
}
template< typename T >
void MpiWrapper::allGather( T const myValue, array1d< T > & allValues, MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  int const mpiSize = commSize( comm );
  allValues.resize( mpiSize );

  MPI_Datatype const MPI_TYPE = internal::getMpiType< T >();

  MPI_Allgather( &myValue, 1, MPI_TYPE, allValues.data(), 1, MPI_TYPE, comm );
#else
  allValues.resize( 1 );
  allValues[0] = myValue;
#endif
}
template< typename T >
int MpiWrapper::allGather( arrayView1d< T const > const & sendValues,
                           array1d< T > & allValues,
                           MPI_Comm MPI_PARAM( comm ) )
{
  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
#ifdef GEOS_USE_MPI
  int const mpiSize = commSize( comm );
  allValues.resize( mpiSize * sendSize );
  return MPI_Allgather( sendValues.data(), sendSize, internal::getMpiType< T >(),
                        allValues.data(), sendSize, internal::getMpiType< T >(),
                        comm );
#else
  allValues.resize( sendSize );
  for( localIndex a = 0; a < sendSize; ++a )
  {
    allValues[a] = sendValues[a];
  }
  return 0;
#endif
}
template< typename T >
int MpiWrapper::allReduce( T const * const sendbuf,
                           T * const recvbuf,
                           int const count,
                           MPI_Op const MPI_PARAM( op ),
                           MPI_Comm const MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  MPI_Datatype const mpiType = internal::getMpiType< T >();
  return MPI_Allreduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, comm );
#else
  if( sendbuf != recvbuf )
  {
    memcpy( recvbuf, sendbuf, count * sizeof( T ) );
  }
  return 0;
#endif
}
template< typename T >
int MpiWrapper::reduce( T const * const sendbuf,
                        T * const recvbuf,
                        int const count,
                        MPI_Op const MPI_PARAM( op ),
                        int const MPI_PARAM( root ),
                        MPI_Comm const MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  MPI_Datatype const mpiType = internal::getMpiType< T >();
  return MPI_Reduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, root, comm );
#else
  if( sendbuf != recvbuf )
  {
    memcpy( recvbuf, sendbuf, count * sizeof( T ) );
  }
  return 0;
#endif
}
template< typename T >
int MpiWrapper::scan( T const * const sendbuf,
                      T * const recvbuf,
                      int count,
                      MPI_Op MPI_PARAM( op ),
                      MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Scan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
#else
  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
  return 0;
#endif
}
template< typename T >
int MpiWrapper::exscan( T const * const MPI_PARAM( sendbuf ),
                        T * const recvbuf,
                        int count,
                        MPI_Op MPI_PARAM( op ),
                        MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Exscan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
#else
  memset( recvbuf, 0, count * sizeof( T ) );
  return 0;
#endif
}
template< typename T >
int MpiWrapper::bcast( T * const MPI_PARAM( buffer ), int MPI_PARAM( count ),
                       int MPI_PARAM( root ), MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Bcast( buffer, count, internal::getMpiType< T >(), root, comm );
#else
  return 0;
#endif
}
template< typename T >
void MpiWrapper::broadcast( T & MPI_PARAM( value ), int MPI_PARAM( srcRank ), MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  MPI_Bcast( &value, 1, internal::getMpiType< T >(), srcRank, comm );
#endif
}
template<>
void MpiWrapper::broadcast< string >( string & MPI_PARAM( value ),
                                      int MPI_PARAM( srcRank ),
                                      MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  int size = LvArray::integerConversion< int >( value.size() );
  broadcast( size, srcRank, comm );
  value.resize( size );
  MPI_Bcast( const_cast< char * >( value.data() ), size, internal::getMpiType< char >(), srcRank, comm );
#endif
}
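// Usage sketch (editorial illustration, not part of the original header):
// rank 0 reads an input deck and shares a scalar and the file contents; the
// string specialization above resizes the receiving string before broadcasting
// the raw characters. Variable and helper names are hypothetical.
//
//   int numSteps = 0;
//   string inputText;
//   if( MpiWrapper::commRank() == 0 )
//   {
//     numSteps  = parseNumSteps();        // hypothetical helpers
//     inputText = readInputFile();
//   }
//   MpiWrapper::broadcast( numSteps );    // srcRank defaults to 0
//   MpiWrapper::broadcast( inputText );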
template< typename TS, typename TR >
int MpiWrapper::gather( TS const * const sendbuf,
                        int sendcount,
                        TR * const recvbuf,
                        int recvcount,
                        int MPI_PARAM( root ),
                        MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Gather( sendbuf, sendcount, internal::getMpiType< TS >(),
                     recvbuf, recvcount, internal::getMpiType< TR >(),
                     root, comm );
#else
  static_assert( std::is_same< TS, TR >::value,
                 "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
  std::size_t const sendBufferSize = sendcount * sizeof( TS );
  std::size_t const recvBufferSize = recvcount * sizeof( TR );
  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
  memcpy( recvbuf, sendbuf, sendBufferSize );
  return 0;
#endif
}
template< typename T, typename DST_CONTAINER, typename >
int MpiWrapper::gather( T const & value,
                        DST_CONTAINER & destValuesBuffer,
                        int MPI_PARAM( root ),
                        MPI_Comm MPI_PARAM( comm ) )
{
  if( commRank() == 0 )
    GEOS_ERROR_IF_LT_MSG( destValuesBuffer.size(), static_cast< std::size_t >( commSize() ),
                          "Receive buffer is not large enough to contain the values to receive." );
#ifdef GEOS_USE_MPI
  return MPI_Gather( &value, sizeof( T ), internal::getMpiType< uint8_t >(),
                     destValuesBuffer.data(), sizeof( T ), internal::getMpiType< uint8_t >(),
                     root, comm );
#else
  std::size_t const sendBufferSize = sizeof( T );
  memcpy( destValuesBuffer.data(), &value, sendBufferSize );
  return 0;
#endif
}
template< typename TS, typename TR >
int MpiWrapper::gatherv( TS const * const sendbuf,
                         int sendcount,
                         TR * const recvbuf,
                         const int * recvcounts,
                         const int * MPI_PARAM( displs ),
                         int MPI_PARAM( root ),
                         MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Gatherv( sendbuf, sendcount, internal::getMpiType< TS >(),
                      recvbuf, recvcounts, displs, internal::getMpiType< TR >(),
                      root, comm );
#else
  static_assert( std::is_same< TS, TR >::value,
                 "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
  std::size_t const sendBufferSize = sendcount * sizeof( TS );
  std::size_t const recvBufferSize = recvcounts[0] * sizeof( TR );
  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
  memcpy( recvbuf, sendbuf, sendBufferSize );
  return 0;
#endif
}
template< typename T >
int MpiWrapper::iRecv( T * const buf,
                       int count,
                       int MPI_PARAM( source ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( *request != MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  return MPI_Irecv( buf, count, internal::getMpiType< T >(), source, tag, comm, request );
#else
  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );

  if( iPointer==pointerMap.end() )
  {
    pointerMap.insert( {tag, {1, buf} } );
  }
  else
  {
    GEOS_ERROR_IF( iPointer->second.first != 0,
                   "Tag does is assigned, but pointer was not set by iSend." );
    memcpy( buf, iPointer->second.second, count * sizeof( T ) );
    pointerMap.erase( iPointer );
  }
  return 0;
#endif
}
template< typename T >
int MpiWrapper::recv( array1d< T > & buf,
                      int MPI_PARAM( source ),
                      int tag,
                      MPI_Comm MPI_PARAM( comm ),
                      MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  MPI_Status status;
  int count;
  MPI_Probe( source, tag, comm, &status );
  MPI_Get_count( &status, MPI_CHAR, &count );

  buf.resize( count / sizeof( T ) );

  return MPI_Recv( reinterpret_cast< char * >( buf.data() ),
template< typename T >
int MpiWrapper::iSend( arrayView1d< T > const & buf,
                       int MPI_PARAM( dest ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( *request != MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  return MPI_Isend( reinterpret_cast< void const * >( buf.data() ),
                    buf.size() * sizeof( T ),
template< typename T >
int MpiWrapper::iSend( T const * const buf,
                       int count,
                       int MPI_PARAM( dest ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( *request != MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  return MPI_Isend( buf, count, internal::getMpiType< T >(), dest, tag, comm, request );
#else
  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );

  if( iPointer==pointerMap.end() )
  {
    pointerMap.insert( {tag, {0, const_cast< T * >(buf)} } );
  }
  else
  {
    GEOS_ERROR_IF( iPointer->second.first != 1,
                   "Tag does is assigned, but pointer was not set by iRecv." );
    memcpy( iPointer->second.second, buf, count * sizeof( T ) );
    pointerMap.erase( iPointer );
  }
  return 0;
#endif
}
template< typename U, typename T >
U MpiWrapper::prefixSum( T const value, MPI_Comm comm )
{
  U localResult;
#ifdef GEOS_USE_MPI
  U const convertedValue = value;
  int const error = MPI_Exscan( &convertedValue, &localResult, 1, internal::getMpiType< U >(), MPI_SUM, comm );
  MPI_CHECK_ERROR( error );

  if( commRank() == 0 )
    localResult = 0;
#else
  localResult = 0;
#endif
  return localResult;
}
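// Usage sketch (editorial illustration, not part of the original header):
// prefixSum gives each rank the sum of the values held by all lower ranks,
// which is the usual way to derive a global numbering offset; a wider
// accumulation type guards against overflow of the global count. The element
// container is hypothetical.
//
//   int const numLocalElems = static_cast< int >( elems.size() );
//   long long const firstGlobalIndex = MpiWrapper::prefixSum< long long >( numLocalElems );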
template< typename T >

template< typename T >
template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
                 "The type in the source container must be trivially copyable." );
                 "The type in the destination container must be trivially copyable." );
                 "Source and destination containers must have the same value type." );

template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
                 "The type in the source container must be trivially copyable." );
                 "The type in the destination container must be trivially copyable." );
                 "Source and destination containers must have the same value type." );

  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename T >

template< typename T >
template< typename FIRST, typename SECOND, MpiWrapper::PairReduction const OP >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::allReduce( PairType< FIRST, SECOND > pair, MPI_Comm const comm )
{
#ifdef GEOS_USE_MPI
  auto const type = internal::getMpiPairType< FIRST, SECOND >();
  auto const mpiOp = internal::getMpiPairReductionOp< FIRST, SECOND, OP >();
  MPI_Allreduce( MPI_IN_PLACE, &pair, 1, type, mpiOp, comm );
#endif
  return pair;
}
template< typename FIRST, typename SECOND, typename CONTAINER, MpiWrapper::PairReduction const OP >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::allReduce( CONTAINER const & pairs, MPI_Comm const comm )
{
  using PAIR_T = PairType< FIRST, SECOND >;

  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMin = []( PAIR_T const & a, PAIR_T const & b ) {
    return ( std::tie( a.first, a.second ) < std::tie( b.first, b.second ) ) ? a : b;
  };
  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMax = []( PAIR_T const & a, PAIR_T const & b ) {
    return ( std::tie( a.first, a.second ) > std::tie( b.first, b.second ) ) ? a : b;
  };

  PAIR_T const defaultPair{
    OP == PairReduction::Min ? std::numeric_limits< FIRST >::max() : std::numeric_limits< FIRST >::lowest(),
    OP == PairReduction::Min ? std::numeric_limits< SECOND >::max() : std::numeric_limits< SECOND >::lowest()
  };

  PAIR_T pair = std::accumulate( pairs.begin(), pairs.end(), defaultPair,
                                 OP == PairReduction::Min ? getMin : getMax );
  return allReduce< FIRST, SECOND, OP >( pair, comm );
}
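// Usage sketch (editorial illustration, not part of the original header):
// a "minloc"-style query that finds the smallest local value together with the
// rank that owns it, using the pair reduction defined above. The local
// residual is hypothetical.
//
//   MpiWrapper::PairType< double, int > const localMin{ localResidual, MpiWrapper::commRank() };
//   auto const globalMin =
//     MpiWrapper::allReduce< double, int, MpiWrapper::PairReduction::Min >( localMin, MPI_COMM_GEOS );
//   // globalMin.first  -> smallest residual over all ranks
//   // globalMin.second -> rank holding it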
template< typename FIRST, typename SECOND >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, PairReduction::Min >( pair, comm ); }

template< typename FIRST, typename SECOND, typename CONTAINER >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( CONTAINER const & pairs, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Min >( pairs, comm ); }

template< typename FIRST, typename SECOND >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, PairReduction::Max >( pair, comm ); }

template< typename FIRST, typename SECOND, typename CONTAINER >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( CONTAINER const & pairs, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Max >( pairs, comm ); }