/*
 * ------------------------------------------------------------------------------------------------------------
 * SPDX-License-Identifier: LGPL-2.1-only
 *
 * Copyright (c) 2016-2024 Lawrence Livermore National Security LLC
 * Copyright (c) 2018-2024 TotalEnergies
 * Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University
 * Copyright (c) 2023-2024 Chevron
 * Copyright (c) 2019- GEOS/GEOSX Contributors
 * All rights reserved
 *
 * See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details.
 * ------------------------------------------------------------------------------------------------------------
 */

/**
 * @file MpiWrapper.hpp
 */

#ifndef GEOS_COMMON_MPIWRAPPER_HPP_
#define GEOS_COMMON_MPIWRAPPER_HPP_

#include "common/DataTypes.hpp"
#include "common/Span.hpp"
#include "common/TypesHelpers.hpp"

#include <algorithm>
#include <cstring>
#include <functional>
#include <limits>
#include <map>
#include <numeric>
#include <set>
#include <tuple>
#include <type_traits>

#if defined(GEOS_USE_MPI)
  #include <mpi.h>
#define MPI_PARAM( x ) x
#else
#define MPI_PARAM( x )
typedef int MPI_Comm;

#define MPI_COMM_NULL ((MPI_Comm)0x04000000)
#define MPI_COMM_WORLD ((MPI_Comm)0x44000000)
#define MPI_COMM_SELF ((MPI_Comm)0x40000000)


typedef int MPI_Datatype;
#define MPI_CHAR ((MPI_Datatype)0x4c000101)
#define MPI_SIGNED_CHAR ((MPI_Datatype)0x4c000118)
#define MPI_UNSIGNED_CHAR ((MPI_Datatype)0x4c000102)
#define MPI_BYTE ((MPI_Datatype)0x4c00010d)
#define MPI_WCHAR ((MPI_Datatype)0x4c00040e)
#define MPI_SHORT ((MPI_Datatype)0x4c000203)
#define MPI_UNSIGNED_SHORT ((MPI_Datatype)0x4c000204)
#define MPI_INT ((MPI_Datatype)0x4c000405)
#define MPI_UNSIGNED ((MPI_Datatype)0x4c000406)
#define MPI_LONG ((MPI_Datatype)0x4c000807)
#define MPI_UNSIGNED_LONG ((MPI_Datatype)0x4c000808)
#define MPI_FLOAT ((MPI_Datatype)0x4c00040a)
#define MPI_DOUBLE ((MPI_Datatype)0x4c00080b)
#define MPI_LONG_DOUBLE ((MPI_Datatype)0x4c00100c)
#define MPI_LONG_LONG_INT ((MPI_Datatype)0x4c000809)
#define MPI_UNSIGNED_LONG_LONG ((MPI_Datatype)0x4c000819)
#define MPI_LONG_LONG MPI_LONG_LONG_INT

typedef int MPI_Op;

#define MPI_MAX (MPI_Op)(0x58000001)
#define MPI_MIN (MPI_Op)(0x58000002)
#define MPI_SUM (MPI_Op)(0x58000003)
#define MPI_PROD (MPI_Op)(0x58000004)
#define MPI_LAND (MPI_Op)(0x58000005)
#define MPI_BAND (MPI_Op)(0x58000006)
#define MPI_LOR (MPI_Op)(0x58000007)
#define MPI_BOR (MPI_Op)(0x58000008)
#define MPI_LXOR (MPI_Op)(0x58000009)
#define MPI_BXOR (MPI_Op)(0x5800000a)
#define MPI_MINLOC (MPI_Op)(0x5800000b)
#define MPI_MAXLOC (MPI_Op)(0x5800000c)
#define MPI_REPLACE (MPI_Op)(0x5800000d)
#define MPI_NO_OP (MPI_Op)(0x5800000e)

#define MPI_SUCCESS 0 /* Successful return code */
#define MPI_UNDEFINED (-32766)
#define MPI_STATUS_IGNORE (MPI_Status *)1
#define MPI_STATUSES_IGNORE (MPI_Status *)1
#define MPI_REQUEST_NULL ((MPI_Request)0x2c000000)
typedef int MPI_Request;

typedef int MPI_Info;
#define MPI_INFO_NULL (MPI_Info)(0x60000000)

struct MPI_Status
{
  int junk;
};

#endif

#if defined(NDEBUG)
#define MPI_CHECK_ERROR( error ) ((void) error)
#else
#define MPI_CHECK_ERROR( error ) GEOS_ERROR_IF_NE( error, MPI_SUCCESS );
#endif


namespace geos
{

/// Global MPI communicator used by GEOS.
#ifdef GEOS_USE_MPI
extern MPI_Comm MPI_COMM_GEOS;
#else
extern int MPI_COMM_GEOS;
#endif

/**
 * @brief Wrapper of the MPI functions used in GEOS, providing strongly typed
 *        interfaces and serial fallbacks when built without MPI.
 */
class MpiWrapper
{
public:

  /**
   * @brief Reduction enumeration for strongly typed reduction operations.
   */
  enum class Reduction
  {
    Max,
    Min,
    Sum,
    Prod,
    LogicalAnd,
    LogicalOr,
  };

  /**
   * @brief Reduction enumeration for pair reduction operations.
   */
  enum class PairReduction
  {
    Max, ///< Max pair first value
    Min, ///< Min pair first value
  };

  /**
   * @brief A structure holding a pair of values, used by the pair reduction operations.
   */
  template< typename FIRST, typename SECOND >
  struct PairType
  {
    FIRST first;
    SECOND second;
  };
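
  // Usage sketch (names are hypothetical): a MINLOC-style reduction that finds the
  // global minimum of a local value together with the rank that owns it:
  //   PairType< real64, int > const localPair{ localValue, MpiWrapper::commRank() };
  //   PairType< real64, int > const globalMin = MpiWrapper::min( localPair );
  //   // globalMin.first is the smallest value across ranks, globalMin.second its rank.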

  MpiWrapper() = delete;

  /**
   * \name FUNCTION GROUP for the direct wrapping of the standard MPI_xxx functions
   */
  ///@{
  static void barrier( MPI_Comm const & MPI_PARAM( comm ) = MPI_COMM_GEOS );

  static int cartCoords( MPI_Comm comm, int rank, int maxdims, int coords[] );

  static int cartCreate( MPI_Comm comm_old, int ndims, const int dims[], const int periods[],
                         int reorder, MPI_Comm * comm_cart );

  static int cartRank( MPI_Comm comm, const int coords[] );

  static void commFree( MPI_Comm & comm );

  static int commRank( MPI_Comm const & MPI_PARAM( comm ) = MPI_COMM_GEOS );

  static int commSize( MPI_Comm const & MPI_PARAM( comm ) = MPI_COMM_GEOS );

  static bool commCompare( MPI_Comm const & comm1, MPI_Comm const & comm2 );

  static bool initialized();

  static int init( int * argc, char * * * argv );

  /// Free MPI managed resources, then call MPI_Finalize().
  static void finalize();

  static MPI_Comm commDup( MPI_Comm const comm );

  static MPI_Comm commSplit( MPI_Comm const comm, int color, int key );

  static int test( MPI_Request * request, int * flag, MPI_Status * status );

  static int testAny( int count, MPI_Request array_of_requests[], int * idx, int * flags, MPI_Status array_of_statuses[] );

  static int testSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );

  static int testAll( int count, MPI_Request array_of_requests[], int * flags, MPI_Status array_of_statuses[] );

  static int check( MPI_Request * request, int * flag, MPI_Status * status );

  static int checkAny( int count, MPI_Request array_of_requests[], int * idx, int * flag, MPI_Status array_of_statuses[] );

  static int checkAll( int count, MPI_Request array_of_requests[], int * flag, MPI_Status array_of_statuses[] );

  static int wait( MPI_Request * request, MPI_Status * status );

  static int waitAny( int count, MPI_Request array_of_requests[], int * indx, MPI_Status array_of_statuses[] );

  static int waitSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );

  static int waitAll( int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[] );

  static double wtime( void );


  static int activeWaitAny( const int count,
                            MPI_Request array_of_requests[],
                            MPI_Status array_of_statuses[],
                            std::function< MPI_Request ( int ) > func );

  static int activeWaitSome( const int count,
                             MPI_Request array_of_requests[],
                             MPI_Status array_of_statuses[],
                             std::function< MPI_Request ( int ) > func );

  static int activeWaitSomeCompletePhase( const int participants,
                                          stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );

  static int activeWaitOrderedCompletePhase( const int participants,
                                             stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
  ///@}

#if !defined(GEOS_USE_MPI)
  static std::map< int, std::pair< int, void * > > & getTagToPointersMap()
  {
    static std::map< int, std::pair< int, void * > > tagToPointers;
    return tagToPointers;
  }
#endif

  /// Compute the number of ranks allocated on the same node.
  static int nodeCommSize();

  /**
   * @brief Strongly typed wrapper around MPI_Allgather.
   */
  template< typename T_SEND, typename T_RECV >
  static int allgather( T_SEND const * sendbuf,
                        int sendcount,
                        T_RECV * recvbuf,
                        int recvcount,
                        MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Strongly typed wrapper around MPI_Allgatherv.
   */
  template< typename T_SEND, typename T_RECV >
  static int allgatherv( T_SEND const * sendbuf,
                         int sendcount,
                         T_RECV * recvbuf,
                         int * recvcounts,
                         int * displacements,
                         MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Convenience function for MPI_Allgather.
   */
  template< typename T >
  static void allGather( T const myValue, array1d< T > & allValues, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static int allGather( arrayView1d< T const > const & sendbuf,
                        array1d< T > & recvbuf,
                        MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static int allGatherv( arrayView1d< T const > const & sendbuf,
                         array1d< T > & recvbuf,
                         MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Convenience wrapper for the MPI_Allreduce function.
   */
  template< typename T >
  static T allReduce( T const & value, Reduction const op, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static void allReduce( Span< T const > src, Span< T > dst, Reduction const op, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
  static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm = MPI_COMM_GEOS );

  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
  static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm );

  template< typename FIRST, typename SECOND, PairReduction OP >
  static PairType< FIRST, SECOND > allReduce( PairType< FIRST, SECOND > const & localPair,
                                              MPI_Comm comm = MPI_COMM_GEOS );

  template< typename FIRST, typename SECOND, typename CONTAINER, PairReduction OP >
  static PairType< FIRST, SECOND > allReduce( CONTAINER const & pairs,
                                              MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Strongly typed wrapper around MPI_Reduce.
   */
  template< typename T >
  static int reduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, int root, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static T reduce( T const & value, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static void reduce( Span< T const > src, Span< T > dst, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOS );


  template< typename T >
  static int scan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static int exscan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Strongly typed wrapper around MPI_Bcast.
   */
  template< typename T >
  static int bcast( T * buffer, int count, int root, MPI_Comm comm = MPI_COMM_GEOS );


  /**
   * @brief Convenience function for MPI_Broadcast.
   */
  template< typename T >
  static void broadcast( T & value, int srcRank = 0, MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Strongly typed wrapper around MPI_Gather().
   */
  template< typename TS, typename TR >
  static int gather( TS const * const sendbuf,
                     int sendcount,
                     TR * const recvbuf,
                     int recvcount,
                     int root,
                     MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T, typename DST_CONTAINER,
            typename = std::enable_if_t<
              std::is_trivially_copyable_v< T > &&
              std::is_same_v< decltype(std::declval< DST_CONTAINER >().size()), std::size_t > &&
              std::is_same_v< decltype(std::declval< DST_CONTAINER >().data()), T * > > >
  static int gather( T const & value,
                     DST_CONTAINER & destValuesBuffer,
                     int root,
                     MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Strongly typed wrapper around MPI_Gatherv.
   */
  template< typename TS, typename TR >
  static int gatherv( TS const * const sendbuf,
                      int sendcount,
                      TR * const recvbuf,
                      const int * recvcounts,
                      const int * displs,
                      int root,
                      MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Returns an MPI_Op associated with our strongly typed Reduction enum.
   */
  static MPI_Op getMpiOp( Reduction const op );

  template< typename T >
  static int recv( array1d< T > & buf,
                   int MPI_PARAM( source ),
                   int tag,
                   MPI_Comm MPI_PARAM( comm ),
                   MPI_Status * MPI_PARAM( request ) );

  template< typename T >
  static int iSend( arrayView1d< T > const & buf,
                    int MPI_PARAM( dest ),
                    int tag,
                    MPI_Comm MPI_PARAM( comm ),
                    MPI_Request * MPI_PARAM( request ) );

  /**
   * @brief Strongly typed wrapper around MPI_Irecv().
   */
  template< typename T >
  static int iRecv( T * const buf,
                    int count,
                    int source,
                    int tag,
                    MPI_Comm comm,
                    MPI_Request * request );

  /**
   * @brief Strongly typed wrapper around MPI_Send().
   */
  template< typename T >
  static int send( T const * const buf,
                   int count,
                   int dest,
                   int tag,
                   MPI_Comm comm );

  /**
   * @brief Strongly typed wrapper around MPI_Isend().
   */
  template< typename T >
  static int iSend( T const * const buf,
                    int count,
                    int dest,
                    int tag,
                    MPI_Comm comm,
                    MPI_Request * request );

  /**
   * @brief Compute the exclusive prefix sum of value across ranks.
   */
  template< typename U, typename T >
  static U prefixSum( T const value, MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Convenience function for an MPI_Allreduce using a MPI_SUM operation.
   */
  template< typename T >
  static T sum( T const & value, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static void sum( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Convenience function for an MPI_Allreduce using a MPI_MIN operation.
   */
  template< typename T >
  static T min( T const & value, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static void min( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename FIRST, typename SECOND >
  static PairType< FIRST, SECOND > min( PairType< FIRST, SECOND > const & pair, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename FIRST, typename SECOND, typename CONTAINER >
  static PairType< FIRST, SECOND > min( CONTAINER const & pairs, MPI_Comm comm = MPI_COMM_GEOS );

  /**
   * @brief Convenience function for an MPI_Allreduce using a MPI_MAX operation.
   */
  template< typename T >
  static T max( T const & value, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename T >
  static void max( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename FIRST, typename SECOND >
  static PairType< FIRST, SECOND > max( PairType< FIRST, SECOND > const & pair, MPI_Comm comm = MPI_COMM_GEOS );

  template< typename FIRST, typename SECOND, typename CONTAINER >
  static PairType< FIRST, SECOND > max( CONTAINER const & pairs, MPI_Comm comm = MPI_COMM_GEOS );

private:

  template< typename T >
  static int allReduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
};

namespace internal
{

/// MPI resources (operations and types) created by the wrapper, freed by MpiWrapper::finalize().
struct ManagedResources
{
  // The list of managed MPI_Op instances
  std::set< MPI_Op > m_mpiOps;

  // The list of managed MPI_Type instances
  std::set< MPI_Datatype > m_mpiTypes;

  /**
   * @brief Free the managed MPI resources.
   */
  void finalize();
};

/**
 * @brief Get the singleton instance holding the managed MPI resources.
 */
ManagedResources & getManagedResources();

template< typename T, typename ENABLE = void >
struct MpiTypeImpl {};

#define ADD_MPI_TYPE_MAP( T, MPI_T ) \
  template<> struct MpiTypeImpl< T > { static MPI_Datatype get() { return MPI_T; } }

ADD_MPI_TYPE_MAP( float, MPI_FLOAT );
ADD_MPI_TYPE_MAP( double, MPI_DOUBLE );

ADD_MPI_TYPE_MAP( char, MPI_CHAR );
ADD_MPI_TYPE_MAP( signed char, MPI_SIGNED_CHAR );
ADD_MPI_TYPE_MAP( unsigned char, MPI_UNSIGNED_CHAR );

ADD_MPI_TYPE_MAP( int, MPI_INT );
ADD_MPI_TYPE_MAP( long int, MPI_LONG );
ADD_MPI_TYPE_MAP( long long int, MPI_LONG_LONG );

ADD_MPI_TYPE_MAP( unsigned int, MPI_UNSIGNED );
ADD_MPI_TYPE_MAP( unsigned long int, MPI_UNSIGNED_LONG );
ADD_MPI_TYPE_MAP( unsigned long long int, MPI_UNSIGNED_LONG_LONG );

#undef ADD_MPI_TYPE_MAP

template< typename T >
struct MpiTypeImpl< T, std::enable_if_t< std::is_enum< T >::value > >
{
  static MPI_Datatype get() { return MpiTypeImpl< std::underlying_type_t< T > >::get(); }
};

template< typename T >
MPI_Datatype getMpiType()
{
  return MpiTypeImpl< T >::get();
}

template< typename FIRST, typename SECOND >
MPI_Datatype getMpiPairType()
{
  // Dependent-false condition, so the assertion fires only if this primary template is instantiated.
  static_assert( sizeof( FIRST ) == 0,
                 "no default implementation, please add a template specialization and add it in the \"testMpiWrapper\" unit test." );
  return {};
}
template<> MPI_Datatype getMpiPairType< int, int >();
template<> MPI_Datatype getMpiPairType< long int, int >();
template<> MPI_Datatype getMpiPairType< long int, long int >();
template<> MPI_Datatype getMpiPairType< long long int, long long int >();
template<> MPI_Datatype getMpiPairType< float, int >();
template<> MPI_Datatype getMpiPairType< double, int >();
template<> MPI_Datatype getMpiPairType< double, long int >();
template<> MPI_Datatype getMpiPairType< double, long long int >();
template<> MPI_Datatype getMpiPairType< double, double >();

// It is advised to always use this custom operator for pairs as MPI_MAXLOC is not a true lexicographical comparator.
template< typename FIRST, typename SECOND, MpiWrapper::PairReduction OP >
MPI_Op getMpiPairReductionOp()
{
  static auto const createOpHolder = [] () {
    using PAIR_T = MpiWrapper::PairType< FIRST, SECOND >;

    auto const customOpFunc =
      []( void * invec, void * inoutvec, int * len, MPI_Datatype * )
    {
      for( int i = 0; i < *len; ++i )
      {
        PAIR_T & in = static_cast< PAIR_T * >(invec)[i];
        PAIR_T & inout = static_cast< PAIR_T * >(inoutvec)[i];
        if constexpr ( OP == MpiWrapper::PairReduction::Min )
        {
          if( std::tie( in.first, in.second ) < std::tie( inout.first, inout.second ) )
            inout = in;
        }
        else
        {
          if( std::tie( in.first, in.second ) > std::tie( inout.first, inout.second ) )
            inout = in;
        }
      }
    };

    MPI_Op mpiOp;
    GEOS_ERROR_IF_NE( MPI_Op_create( customOpFunc, 1, &mpiOp ), MPI_SUCCESS );
    // Resource registered to be destroyed at MpiWrapper::finalize().
    internal::getManagedResources().m_mpiOps.emplace( mpiOp );
    return mpiOp;
  };
  // Static storage to ensure the MPI operation is created only once and reused for all calls to this function.
  static MPI_Op mpiOp{ createOpHolder() };
  return mpiOp;
}

} /* namespace internal */

inline MPI_Op MpiWrapper::getMpiOp( Reduction const op )
{
  switch( op )
  {
    case Reduction::Sum:
    {
      return MPI_SUM;
    }
    case Reduction::Min:
    {
      return MPI_MIN;
    }
    case Reduction::Max:
    {
      return MPI_MAX;
    }
    case Reduction::Prod:
    {
      return MPI_PROD;
    }
    case Reduction::LogicalAnd:
    {
      return MPI_LAND;
    }
    case Reduction::LogicalOr:
    {
      return MPI_LOR;
    }
    default:
      GEOS_ERROR( "Unsupported reduction operation" );
      return MPI_NO_OP;
  }
}

template< typename T_SEND, typename T_RECV >
int MpiWrapper::allgather( T_SEND const * const sendbuf,
                           int sendcount,
                           T_RECV * const recvbuf,
                           int recvcount,
                           MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Allgather( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
                        recvbuf, recvcount, internal::getMpiType< T_RECV >(),
                        comm );
#else
  static_assert( std::is_same< T_SEND, T_RECV >::value,
                 "MpiWrapper::allgather() for serial run requires send and receive buffers are of the same type" );
  GEOS_ERROR_IF_NE_MSG( sendcount, recvcount, "sendcount is not equal to recvcount." );
  std::copy( sendbuf, sendbuf + sendcount, recvbuf );
  return 0;
#endif
}
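
// Usage sketch (assuming an initialized MPI environment): gather one int per rank
// into a buffer replicated on every rank; `ranks` is a hypothetical local variable.
//   stdVector< int > ranks( MpiWrapper::commSize() );
//   int const myRank = MpiWrapper::commRank();
//   MpiWrapper::allgather( &myRank, 1, ranks.data(), 1 );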

template< typename T_SEND, typename T_RECV >
int MpiWrapper::allgatherv( T_SEND const * const sendbuf,
                            int sendcount,
                            T_RECV * const recvbuf,
                            int * recvcounts,
                            int * displacements,
                            MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Allgatherv( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
                         recvbuf, recvcounts, displacements, internal::getMpiType< T_RECV >(),
                         comm );
#else
  static_assert( std::is_same< T_SEND, T_RECV >::value,
                 "MpiWrapper::allgatherv() for serial run requires send and receive buffers are of the same type" );
  GEOS_ERROR_IF_NE_MSG( sendcount, recvcounts[0], "sendcount is not equal to recvcounts[0]." );
  std::copy( sendbuf, sendbuf + sendcount, recvbuf );
  return 0;
#endif
}

template< typename T >
void MpiWrapper::allGather( T const myValue, array1d< T > & allValues, MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  int const mpiSize = commSize( comm );
  allValues.resize( mpiSize );

  MPI_Datatype const MPI_TYPE = internal::getMpiType< T >();

  MPI_Allgather( &myValue, 1, MPI_TYPE, allValues.data(), 1, MPI_TYPE, comm );

#else
  allValues.resize( 1 );
  allValues[0] = myValue;
#endif
}

template< typename T >
int MpiWrapper::allGather( arrayView1d< T const > const & sendValues,
                           array1d< T > & allValues,
                           MPI_Comm MPI_PARAM( comm ) )
{
  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
#ifdef GEOS_USE_MPI
  int const mpiSize = commSize( comm );
  allValues.resize( mpiSize * sendSize );
  return MPI_Allgather( sendValues.data(),
                        sendSize,
                        internal::getMpiType< T >(),
                        allValues.data(),
                        sendSize,
                        internal::getMpiType< T >(),
                        comm );

#else
  allValues.resize( sendSize );
  for( localIndex a=0; a<sendSize; ++a )
  {
    allValues[a] = sendValues[a];
  }
  return 0;
#endif
}

template< typename T >
int MpiWrapper::allGatherv( arrayView1d< T const > const & sendValues,
                            array1d< T > & allValues,
                            MPI_Comm MPI_PARAM( comm ) )
{
  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
#ifdef GEOS_USE_MPI
  int const mpiSize = commSize( comm );
  array1d< int > counts;
  allGather( sendSize, counts, comm );
  array1d< int > displs( mpiSize + 1 );
  std::partial_sum( counts.begin(), counts.end(), displs.begin() + 1 );
  allValues.resize( displs.back() );
  return MPI_Allgatherv( sendValues.data(),
                         sendSize,
                         internal::getMpiType< T >(),
                         allValues.data(),
                         counts.data(),
                         displs.data(),
                         internal::getMpiType< T >(),
                         comm );

#else
  allValues.resize( sendSize );
  for( localIndex a=0; a<sendSize; ++a )
  {
    allValues[a] = sendValues[a];
  }
  return 0;
#endif
}
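
// Usage sketch: concatenate variable-length per-rank arrays; the counts and
// displacements are computed internally. `myData` is a hypothetical array1d< globalIndex >.
//   array1d< globalIndex > allData;
//   MpiWrapper::allGatherv( myData.toViewConst(), allData );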

template< typename T >
int MpiWrapper::allReduce( T const * const sendbuf,
                           T * const recvbuf,
                           int const count,
                           MPI_Op const MPI_PARAM( op ),
                           MPI_Comm const MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  MPI_Datatype const mpiType = internal::getMpiType< T >();
  return MPI_Allreduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, comm );
#else
  if( sendbuf != recvbuf )
  {
    memcpy( recvbuf, sendbuf, count * sizeof( T ) );
  }
  return 0;
#endif
}

template< typename T >
int MpiWrapper::reduce( T const * const sendbuf,
                        T * const recvbuf,
                        int const count,
                        MPI_Op const MPI_PARAM( op ),
                        int root,
                        MPI_Comm const MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  MPI_Datatype const mpiType = internal::getMpiType< T >();
  return MPI_Reduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, root, comm );
#else
  if( sendbuf != recvbuf )
  {
    memcpy( recvbuf, sendbuf, count * sizeof( T ) );
  }
  return 0;
#endif
}

template< typename T >
int MpiWrapper::scan( T const * const sendbuf,
                      T * const recvbuf,
                      int count,
                      MPI_Op MPI_PARAM( op ),
                      MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Scan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
#else
  memcpy( recvbuf, sendbuf, count*sizeof(T) );
  return 0;
#endif
}

template< typename T >
int MpiWrapper::exscan( T const * const MPI_PARAM( sendbuf ),
                        T * const recvbuf,
                        int count,
                        MPI_Op MPI_PARAM( op ),
                        MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Exscan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
#else
  memset( recvbuf, 0, count*sizeof(T) );
  return 0;
#endif
}

template< typename T >
int MpiWrapper::bcast( T * const MPI_PARAM( buffer ),
                       int MPI_PARAM( count ),
                       int MPI_PARAM( root ),
                       MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Bcast( buffer, count, internal::getMpiType< T >(), root, comm );
#else
  return 0;
#endif
}

template< typename T >
void MpiWrapper::broadcast( T & MPI_PARAM( value ), int MPI_PARAM( srcRank ), MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  MPI_Bcast( &value, 1, internal::getMpiType< T >(), srcRank, comm );
#endif
}

template<>
inline
void MpiWrapper::broadcast< string >( string & MPI_PARAM( value ),
                                      int MPI_PARAM( srcRank ),
                                      MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  int size = LvArray::integerConversion< int >( value.size() );
  broadcast( size, srcRank, comm );
  value.resize( size );
  MPI_Bcast( const_cast< char * >( value.data() ), size, internal::getMpiType< char >(), srcRank, comm );
#endif
}
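
// Usage sketch: broadcast a string chosen on rank 0 to all ranks, resizing the
// receiving strings first; `fileName` is a hypothetical variable.
//   string fileName = ( MpiWrapper::commRank() == 0 ) ? "input.xml" : "";
//   MpiWrapper::broadcast( fileName, 0 );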

template< typename TS, typename TR >
int MpiWrapper::gather( TS const * const sendbuf,
                        int sendcount,
                        TR * const recvbuf,
                        int recvcount,
                        int MPI_PARAM( root ),
                        MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Gather( sendbuf, sendcount, internal::getMpiType< TS >(),
                     recvbuf, recvcount, internal::getMpiType< TR >(),
                     root, comm );
#else
  static_assert( std::is_same< TS, TR >::value,
                 "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
  std::size_t const sendBufferSize = sendcount * sizeof(TS);
  std::size_t const recvBufferSize = recvcount * sizeof(TR);
  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
  memcpy( recvbuf, sendbuf, sendBufferSize );
  return 0;
#endif
}

template< typename T, typename DST_CONTAINER, typename >
int MpiWrapper::gather( T const & value,
                        DST_CONTAINER & destValuesBuffer,
                        int root,
                        MPI_Comm MPI_PARAM( comm ) )
{
  if( commRank() == root )
    GEOS_ERROR_IF_LT_MSG( destValuesBuffer.size(), size_t( commSize() ),
                          "Receive buffer is not large enough to contain the values to receive." );
#ifdef GEOS_USE_MPI
  return MPI_Gather( &value, sizeof( T ), internal::getMpiType< uint8_t >(),
                     destValuesBuffer.data(), sizeof( T ), internal::getMpiType< uint8_t >(),
                     root, comm );
#else
  memcpy( destValuesBuffer.data(), &value, sizeof( T ) );
  return 0;
#endif
}

template< typename TS, typename TR >
int MpiWrapper::gatherv( TS const * const sendbuf,
                         int sendcount,
                         TR * const recvbuf,
                         const int * recvcounts,
                         const int * MPI_PARAM( displs ),
                         int MPI_PARAM( root ),
                         MPI_Comm MPI_PARAM( comm ) )
{
#ifdef GEOS_USE_MPI
  return MPI_Gatherv( sendbuf, sendcount, internal::getMpiType< TS >(),
                      recvbuf, recvcounts, displs, internal::getMpiType< TR >(),
                      root, comm );
#else
  static_assert( std::is_same< TS, TR >::value,
                 "MpiWrapper::gatherv() for serial run requires send and receive buffers are of the same type" );
  std::size_t const sendBufferSize = sendcount * sizeof(TS);
  std::size_t const recvBufferSize = recvcounts[0] * sizeof(TR);
  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
  memcpy( recvbuf, sendbuf, sendBufferSize );
  return 0;
#endif
}

template< typename T >
int MpiWrapper::iRecv( T * const buf,
                       int count,
                       int MPI_PARAM( source ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  return MPI_Irecv( buf, count, internal::getMpiType< T >(), source, tag, comm, request );
#else
  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );

  if( iPointer==pointerMap.end() )
  {
    pointerMap.insert( {tag, {1, buf} } );
  }
  else
  {
    GEOS_ERROR_IF( iPointer->second.first != 0,
                   "Tag is already assigned, but the pointer was not set by iSend." );
    memcpy( buf, iPointer->second.second, count*sizeof(T) );
    pointerMap.erase( iPointer );
  }
  return 0;
#endif
}

template< typename T >
int MpiWrapper::recv( array1d< T > & buf,
                      int MPI_PARAM( source ),
                      int tag,
                      MPI_Comm MPI_PARAM( comm ),
                      MPI_Status * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  MPI_Status status;
  int count;
  MPI_Probe( source, tag, comm, &status );
  MPI_Get_count( &status, MPI_CHAR, &count );

  GEOS_ASSERT_EQ( count % sizeof( T ), 0 );
  buf.resize( count / sizeof( T ) );

  return MPI_Recv( reinterpret_cast< char * >( buf.data() ),
                   count,
                   MPI_CHAR,
                   source,
                   tag,
                   comm,
                   request );
#else
  GEOS_ERROR( "Not implemented!" );
  return MPI_SUCCESS;
#endif
}

template< typename T >
int MpiWrapper::iSend( arrayView1d< T > const & buf,
                       int MPI_PARAM( dest ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  return MPI_Isend( reinterpret_cast< void const * >( buf.data() ),
                    buf.size() * sizeof( T ),
                    MPI_CHAR,
                    dest,
                    tag,
                    comm,
                    request );
#else
  GEOS_ERROR( "Not implemented." );
  return MPI_SUCCESS;
#endif
}

template< typename T >
int MpiWrapper::send( T const * const buf,
                      int count,
                      int dest,
                      int tag,
                      MPI_Comm comm )
{
#ifdef GEOS_USE_MPI
  return MPI_Send( buf, count, internal::getMpiType< T >(), dest, tag, comm );
#else
  GEOS_ERROR( "Not implemented without MPI" );
  return MPI_SUCCESS;
#endif
}

template< typename T >
int MpiWrapper::iSend( T const * const buf,
                       int count,
                       int MPI_PARAM( dest ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  return MPI_Isend( buf, count, internal::getMpiType< T >(), dest, tag, comm, request );
#else
  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );

  if( iPointer==pointerMap.end() )
  {
    pointerMap.insert( {tag, {0, const_cast< T * >(buf)} } );
  }
  else
  {
    GEOS_ERROR_IF( iPointer->second.first != 1,
                   "Tag is already assigned, but the pointer was not set by iRecv." );
    memcpy( iPointer->second.second, buf, count*sizeof(T) );
    pointerMap.erase( iPointer );
  }
  return 0;
#endif
}
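
// Usage sketch: a matched nonblocking exchange; in serial builds the tag-to-pointer
// map above emulates the transfer. `sendBuf`, `recvBuf`, `n`, `neighbor` and `tag`
// are hypothetical.
//   MPI_Request requests[2] = { MPI_REQUEST_NULL, MPI_REQUEST_NULL };
//   MpiWrapper::iSend( sendBuf.data(), n, neighbor, tag, MPI_COMM_GEOS, &requests[0] );
//   MpiWrapper::iRecv( recvBuf.data(), n, neighbor, tag, MPI_COMM_GEOS, &requests[1] );
//   MpiWrapper::waitAll( 2, requests, MPI_STATUSES_IGNORE );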

template< typename U, typename T >
U MpiWrapper::prefixSum( T const value, MPI_Comm comm )
{
  U localResult;

#ifdef GEOS_USE_MPI
  U const convertedValue = value;
  int const error = MPI_Exscan( &convertedValue, &localResult, 1, internal::getMpiType< U >(), MPI_SUM, comm );
  MPI_CHECK_ERROR( error );
#endif
  if( commRank() == 0 )
  {
    localResult = 0;
  }

  return localResult;
}
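
// Usage sketch: each rank owns `numLocalElems` (hypothetical) elements; the exclusive
// prefix sum yields the global index of this rank's first element.
//   globalIndex const firstGlobalIndex = MpiWrapper::prefixSum< globalIndex >( numLocalElems );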

template< typename T >
T MpiWrapper::allReduce( T const & value, Reduction const op, MPI_Comm const comm )
{
  T result;
  allReduce( &value, &result, 1, getMpiOp( op ), comm );
  return result;
}
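
// Usage sketch: global sum of a locally computed scalar; `localResidual` is hypothetical.
//   real64 const globalResidual = MpiWrapper::allReduce( localResidual, MpiWrapper::Reduction::Sum );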

template< typename T >
void MpiWrapper::allReduce( Span< T const > const src, Span< T > const dst, Reduction const op, MPI_Comm const comm )
{
  GEOS_ASSERT_EQ( src.size(), dst.size() );
  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
}

template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm )
{
  static_assert( std::is_trivially_copyable< typename get_value_type< SRC_CONTAINER_TYPE >::type >::value,
                 "The type in the source container must be trivially copyable." );
  static_assert( std::is_trivially_copyable< typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
                 "The type in the destination container must be trivially copyable." );
  static_assert( std::is_same< typename get_value_type< SRC_CONTAINER_TYPE >::type,
                               typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
                 "Source and destination containers must have the same value type." );
  GEOS_ASSERT_GE( src.size(), count );
  GEOS_ASSERT_GE( dst.size(), count );
  allReduce( src.data(), dst.data(), count, getMpiOp( op ), comm );
}

template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm )
{
  static_assert( std::is_trivially_copyable< typename get_value_type< SRC_CONTAINER_TYPE >::type >::value,
                 "The type in the source container must be trivially copyable." );
  static_assert( std::is_trivially_copyable< typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
                 "The type in the destination container must be trivially copyable." );
  static_assert( std::is_same< typename get_value_type< SRC_CONTAINER_TYPE >::type,
                               typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
                 "Source and destination containers must have the same value type." );
  GEOS_ASSERT_EQ( src.size(), dst.size() );
  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
}

template< typename T >
T MpiWrapper::sum( T const & value, MPI_Comm comm )
{
  return MpiWrapper::allReduce( value, Reduction::Sum, comm );
}

template< typename T >
void MpiWrapper::sum( Span< T const > src, Span< T > dst, MPI_Comm comm )
{
  MpiWrapper::allReduce( src, dst, Reduction::Sum, comm );
}

template< typename T >
T MpiWrapper::min( T const & value, MPI_Comm comm )
{
  return MpiWrapper::allReduce( value, Reduction::Min, comm );
}

template< typename T >
void MpiWrapper::min( Span< T const > src, Span< T > dst, MPI_Comm comm )
{
  MpiWrapper::allReduce( src, dst, Reduction::Min, comm );
}

template< typename T >
T MpiWrapper::max( T const & value, MPI_Comm comm )
{
  return MpiWrapper::allReduce( value, Reduction::Max, comm );
}

template< typename T >
void MpiWrapper::max( Span< T const > src, Span< T > dst, MPI_Comm comm )
{
  MpiWrapper::allReduce( src, dst, Reduction::Max, comm );
}

template< typename T >
T MpiWrapper::reduce( T const & value, Reduction const op, int root, MPI_Comm const comm )
{
  T result;
  reduce( &value, &result, 1, getMpiOp( op ), root, comm );
  return result;
}

template< typename T >
void MpiWrapper::reduce( Span< T const > const src, Span< T > const dst, Reduction const op, int root, MPI_Comm const comm )
{
  GEOS_ASSERT_EQ( src.size(), dst.size() );
  reduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), root, comm );
}

template< typename FIRST, typename SECOND, MpiWrapper::PairReduction const OP >
MpiWrapper::PairType< FIRST, SECOND >
MpiWrapper::allReduce( PairType< FIRST, SECOND > const & localPair, MPI_Comm comm )
{
#ifdef GEOS_USE_MPI
  auto const type = internal::getMpiPairType< FIRST, SECOND >();
  auto const mpiOp = internal::getMpiPairReductionOp< FIRST, SECOND, OP >();
  PairType< FIRST, SECOND > pair{ localPair.first, localPair.second };
  MPI_Allreduce( MPI_IN_PLACE, &pair, 1, type, mpiOp, comm );
  return pair;
#else
  return localPair;
#endif
}

template< typename FIRST, typename SECOND, typename CONTAINER, MpiWrapper::PairReduction const OP >
MpiWrapper::PairType< FIRST, SECOND >
MpiWrapper::allReduce( CONTAINER const & pairs, MPI_Comm const comm )
{
  using PAIR_T = PairType< FIRST, SECOND >;
  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMin = []( PAIR_T const & a, PAIR_T const & b ) {
    return ( std::tie( a.first, a.second ) < std::tie( b.first, b.second ) ) ? a : b;
  };
  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMax = []( PAIR_T const & a, PAIR_T const & b ) {
    return ( std::tie( a.first, a.second ) > std::tie( b.first, b.second ) ) ? a : b;
  };
  PAIR_T const defaultPair{
    OP == PairReduction::Min ? std::numeric_limits< FIRST >::max() : std::numeric_limits< FIRST >::lowest(),
    OP == PairReduction::Min ? std::numeric_limits< SECOND >::max() : std::numeric_limits< SECOND >::lowest()
  };
  // Based on the operation, pair will be the minimum / maximum element (or defaultPair if pairs is empty).
  PAIR_T pair = std::accumulate( pairs.begin(), pairs.end(), defaultPair,
                                 OP == PairReduction::Min ? getMin : getMax );
  return allReduce< FIRST, SECOND, OP >( pair, comm );
}

template< typename FIRST, typename SECOND >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, PairReduction::Min >( pair, comm ); }

template< typename FIRST, typename SECOND, typename CONTAINER >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( CONTAINER const & pairs, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Min >( pairs, comm ); }

template< typename FIRST, typename SECOND >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, PairReduction::Max >( pair, comm ); }

template< typename FIRST, typename SECOND, typename CONTAINER >
MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( CONTAINER const & pairs, MPI_Comm comm )
{ return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Max >( pairs, comm ); }

} /* namespace geos */

#endif /* GEOS_COMMON_MPIWRAPPER_HPP_ */