GEOS
MpiWrapper.hpp
1 /*
2  * ------------------------------------------------------------------------------------------------------------
3  * SPDX-License-Identifier: LGPL-2.1-only
4  *
5  * Copyright (c) 2016-2024 Lawrence Livermore National Security LLC
6  * Copyright (c) 2018-2024 TotalEnergies
7  * Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University
8  * Copyright (c) 2023-2024 Chevron
9  * Copyright (c) 2019- GEOS/GEOSX Contributors
10  * All rights reserved
11  *
12  * See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details.
13  * ------------------------------------------------------------------------------------------------------------
14  */
15 
20 #ifndef GEOS_COMMON_MPIWRAPPER_HPP_
21 #define GEOS_COMMON_MPIWRAPPER_HPP_
22 
23 #include "common/DataTypes.hpp"
24 #include "common/Span.hpp"
25 #include "common/TypesHelpers.hpp"
26 
27 #include <numeric>
28 
29 #if defined(GEOS_USE_MPI)
30  #include <mpi.h>
31 #define MPI_PARAM( x ) x
32 #else
33 #define MPI_PARAM( x )
34 typedef int MPI_Comm;
35 
36 #define MPI_COMM_NULL ((MPI_Comm)0x04000000)
37 #define MPI_COMM_WORLD ((MPI_Comm)0x44000000)
38 #define MPI_COMM_SELF ((MPI_Comm)0x40000000)
39 
40 
41 typedef int MPI_Datatype;
42 #define MPI_CHAR ((MPI_Datatype)0x4c000101)
43 #define MPI_SIGNED_CHAR ((MPI_Datatype)0x4c000118)
44 #define MPI_UNSIGNED_CHAR ((MPI_Datatype)0x4c000102)
45 #define MPI_BYTE ((MPI_Datatype)0x4c00010d)
46 #define MPI_C_BOOL ((MPI_Datatype)0x4c00013f)
47 #define MPI_WCHAR ((MPI_Datatype)0x4c00040e)
48 #define MPI_SHORT ((MPI_Datatype)0x4c000203)
49 #define MPI_UNSIGNED_SHORT ((MPI_Datatype)0x4c000204)
50 #define MPI_INT ((MPI_Datatype)0x4c000405)
51 #define MPI_UNSIGNED ((MPI_Datatype)0x4c000406)
52 #define MPI_LONG ((MPI_Datatype)0x4c000807)
53 #define MPI_UNSIGNED_LONG ((MPI_Datatype)0x4c000808)
54 #define MPI_FLOAT ((MPI_Datatype)0x4c00040a)
55 #define MPI_DOUBLE ((MPI_Datatype)0x4c00080b)
56 #define MPI_LONG_DOUBLE ((MPI_Datatype)0x4c00100c)
57 #define MPI_LONG_LONG_INT ((MPI_Datatype)0x4c000809)
58 #define MPI_UNSIGNED_LONG_LONG ((MPI_Datatype)0x4c000819)
59 #define MPI_LONG_LONG MPI_LONG_LONG_INT
60 
61 typedef int MPI_Op;
62 
63 #define MPI_MAX (MPI_Op)(0x58000001)
64 #define MPI_MIN (MPI_Op)(0x58000002)
65 #define MPI_SUM (MPI_Op)(0x58000003)
66 #define MPI_PROD (MPI_Op)(0x58000004)
67 #define MPI_LAND (MPI_Op)(0x58000005)
68 #define MPI_BAND (MPI_Op)(0x58000006)
69 #define MPI_LOR (MPI_Op)(0x58000007)
70 #define MPI_BOR (MPI_Op)(0x58000008)
71 #define MPI_LXOR (MPI_Op)(0x58000009)
72 #define MPI_BXOR (MPI_Op)(0x5800000a)
73 #define MPI_MINLOC (MPI_Op)(0x5800000b)
74 #define MPI_MAXLOC (MPI_Op)(0x5800000c)
75 #define MPI_REPLACE (MPI_Op)(0x5800000d)
76 #define MPI_NO_OP (MPI_Op)(0x5800000e)
77 
78 #define MPI_SUCCESS 0 /* Successful return code */
79 #define MPI_UNDEFINED (-32766)
80 #define MPI_STATUS_IGNORE (MPI_Status *)1
81 #define MPI_STATUSES_IGNORE (MPI_Status *)1
82 #define MPI_REQUEST_NULL ((MPI_Request)0x2c000000)
83 typedef int MPI_Request;
84 
85 typedef int MPI_Info;
86 #define MPI_INFO_NULL (MPI_Info)(0x60000000)
87 
88 struct MPI_Status
89 {
90  int junk;
91 };
92 
93 #endif
94 
95 #if defined(NDEBUG)
96 #define MPI_CHECK_ERROR( error ) ((void) error)
97 #else
98 #define MPI_CHECK_ERROR( error ) GEOS_ERROR_IF_NE( error, MPI_SUCCESS );
99 #endif
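// Illustrative sketch (not part of the original header): MPI_CHECK_ERROR wraps the return code of a
// raw MPI call, raising a hard error on failure in debug builds and compiling to a no-op under NDEBUG.
// A hypothetical caller would use it as:
//
//   int const mpiError = MPI_Barrier( MPI_COMM_WORLD );
//   MPI_CHECK_ERROR( mpiError );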
100 
101 
102 namespace geos
103 {
104 
106 #ifdef GEOS_USE_MPI
107 extern MPI_Comm MPI_COMM_GEOS;
108 #else
109 extern int MPI_COMM_GEOS;
110 #endif
111 
121 struct MpiWrapper
122 {
123 public:
124 
129  enum class Reduction
130  {
131  Max,
132  Min,
133  Sum,
134  Prod,
135  LogicalAnd,
136  LogicalOr,
137  };
138 
143  enum class PairReduction
144  {
145  Max,
146  Min,
147  };
148 
154  template< typename FIRST, typename SECOND >
155  struct PairType
156  {
157  FIRST first;
158  SECOND second;
159  };
160 
161  MpiWrapper() = delete;
162 
176 
177  static void barrier( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );
178 
179  static int cartCoords( MPI_Comm comm, int rank, int maxdims, int coords[] );
180 
181  static int cartCreate( MPI_Comm comm_old, int ndims, const int dims[], const int periods[],
182  int reorder, MPI_Comm * comm_cart );
183 
184  static int cartRank( MPI_Comm comm, const int coords[] );
185 
186  static void commFree( MPI_Comm & comm );
187 
188  static int commRank( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );
189 
190  static int commSize( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );
191 
192  static bool commCompare( MPI_Comm const & comm1, MPI_Comm const & comm2 );
193 
194  static bool initialized();
195 
196  static int init( int * argc, char * * * argv );
197 
202  static void finalize();
203 
204  static MPI_Comm commDup( MPI_Comm const comm );
205 
206  static MPI_Comm commSplit( MPI_Comm const comm, int color, int key );
207 
208  static int test( MPI_Request * request, int * flag, MPI_Status * status );
209 
210  static int testAny( int count, MPI_Request array_of_requests[], int * idx, int * flags, MPI_Status array_of_statuses[] );
211 
212  static int testSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );
213 
214  static int testAll( int count, MPI_Request array_of_requests[], int * flags, MPI_Status array_of_statuses[] );
215 
222  static int check( MPI_Request * request, int * flag, MPI_Status * status );
223 
236  static int checkAny( int count, MPI_Request array_of_requests[], int * idx, int * flag, MPI_Status array_of_statuses[] );
237 
247  static int checkAll( int count, MPI_Request array_of_requests[], int * flag, MPI_Status array_of_statuses[] );
248 
249  static int wait( MPI_Request * request, MPI_Status * status );
250 
251  static int waitAny( int count, MPI_Request array_of_requests[], int * indx, MPI_Status array_of_statuses[] );
252 
253  static int waitSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );
254 
255  static int waitAll( int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[] );
256 
257  static double wtime( void );
258 
259 
269  static int activeWaitAny( const int count,
270  MPI_Request array_of_requests[],
271  MPI_Status array_of_statuses[],
272  std::function< MPI_Request ( int ) > func );
273 
283  static int activeWaitSome( const int count,
284  MPI_Request array_of_requests[],
285  MPI_Status array_of_statuses[],
286  std::function< MPI_Request ( int ) > func );
287 
300  static int activeWaitSomeCompletePhase( const int participants,
301  stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
302 
316  static int activeWaitOrderedCompletePhase( const int participants,
317  stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
319 
320 #if !defined(GEOS_USE_MPI)
321  static std::map< int, std::pair< int, void * > > & getTagToPointersMap()
322  {
323  static std::map< int, std::pair< int, void * > > tagToPointers;
324  return tagToPointers;
325  }
326 #endif
327 
332  static int nodeCommSize();
333 
345  template< typename T_SEND, typename T_RECV >
346  static int allgather( T_SEND const * sendbuf,
347  int sendcount,
348  T_RECV * recvbuf,
349  int recvcount,
350  MPI_Comm comm = MPI_COMM_GEOS );
351 
364  template< typename T_SEND, typename T_RECV >
365  static int allgatherv( T_SEND const * sendbuf,
366  int sendcount,
367  T_RECV * recvbuf,
368  int * recvcounts,
369  int * displacements,
370  MPI_Comm comm = MPI_COMM_GEOS );
371 
378  template< typename T >
379  static void allGather( T const myValue, array1d< T > & allValues, MPI_Comm comm = MPI_COMM_GEOS );
380 
381  template< typename T >
382  static int allGather( arrayView1d< T const > const & sendbuf,
383  array1d< T > & recvbuf,
384  MPI_Comm comm = MPI_COMM_GEOS );
385 
386  template< typename T >
387  static int allGatherv( arrayView1d< T const > const & sendbuf,
388  array1d< T > & recvbuf,
389  MPI_Comm comm = MPI_COMM_GEOS );
390 
399  template< typename T >
400  static T allReduce( T const & value, Reduction const op, MPI_Comm comm = MPI_COMM_GEOS );
401 
410  template< typename T >
411  static void allReduce( Span< T const > src, Span< T > dst, Reduction const op, MPI_Comm comm = MPI_COMM_GEOS );
412 
421  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
422  static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm = MPI_COMM_GEOS );
423 
433  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
434  static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm );
435 
445  template< typename FIRST, typename SECOND, PairReduction OP >
446  static PairType< FIRST, SECOND > allReduce( PairType< FIRST, SECOND > const & localPair,
447  MPI_Comm comm = MPI_COMM_GEOS );
448 
459  template< typename FIRST, typename SECOND, typename CONTAINER, PairReduction OP >
460  static PairType< FIRST, SECOND > allReduce( CONTAINER const & pairs,
461  MPI_Comm comm = MPI_COMM_GEOS );
462 
472  template< typename T >
473  static int reduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, int root, MPI_Comm comm = MPI_COMM_GEOS );
474 
483  template< typename T >
484  static T reduce( T const & value, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOS );
485 
494  template< typename T >
495  static void reduce( Span< T const > src, Span< T > dst, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOS );
496 
497 
498  template< typename T >
499  static int scan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
500 
501  template< typename T >
502  static int exscan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
503 
512  template< typename T >
513  static int bcast( T * buffer, int count, int root, MPI_Comm comm = MPI_COMM_GEOS );
514 
515 
522  template< typename T >
523  static void broadcast( T & value, int srcRank = 0, MPI_Comm comm = MPI_COMM_GEOS );
524 
537  template< typename TS, typename TR >
538  static int gather( TS const * const sendbuf,
539  int sendcount,
540  TR * const recvbuf,
541  int recvcount,
542  int root,
543  MPI_Comm comm = MPI_COMM_GEOS );
544 
556  template< typename T, typename DST_CONTAINER,
557  typename = std::enable_if_t<
558  std::is_trivially_copyable_v< T > &&
559  std::is_same_v< decltype(std::declval< DST_CONTAINER >().size()), std::size_t > &&
560  std::is_same_v< decltype(std::declval< DST_CONTAINER >().data()), T * > > >
561  static int gather( T const & value,
562  DST_CONTAINER & destValuesBuffer,
563  int root,
564  MPI_Comm comm = MPI_COMM_GEOS );
565 
580  template< typename TS, typename TR >
581  static int gatherv( TS const * const sendbuf,
582  int sendcount,
583  TR * const recvbuf,
584  const int * recvcounts,
585  const int * displs,
586  int root,
587  MPI_Comm comm = MPI_COMM_GEOS );
588 
589 
602  template< typename TS, typename TR >
603  static int scatter( TS const * const sendbuf,
604  int sendcount,
605  TR * const recvbuf,
606  int recvcount,
607  int root,
608  MPI_Comm comm = MPI_COMM_GEOS );
609 
624  template< typename TS, typename TR >
625  static int scatterv( TS const * const sendbuf,
626  const int * sendcounts,
627  const int * displs,
628  TR * const recvbuf,
629  int recvcount,
630  int root,
631  MPI_Comm comm = MPI_COMM_GEOS );
632 
633 
634 
640  static MPI_Op getMpiOp( Reduction const op );
641 
642  template< typename T >
643  static int recv( array1d< T > & buf,
644  int MPI_PARAM( source ),
645  int tag,
646  MPI_Comm MPI_PARAM( comm ),
647  MPI_Status * MPI_PARAM( request ) );
648 
649  template< typename T >
650  static int iSend( arrayView1d< T > const & buf,
651  int MPI_PARAM( dest ),
652  int tag,
653  MPI_Comm MPI_PARAM( comm ),
654  MPI_Request * MPI_PARAM( request ) );
655 
666  template< typename T >
667  static int iRecv( T * const buf,
668  int count,
669  int source,
670  int tag,
671  MPI_Comm comm,
672  MPI_Request * request );
673 
683  template< typename T >
684  static int send( T const * const buf,
685  int count,
686  int dest,
687  int tag,
688  MPI_Comm comm );
689 
700  template< typename T >
701  static int iSend( T const * const buf,
702  int count,
703  int dest,
704  int tag,
705  MPI_Comm comm,
706  MPI_Request * request );
707 
715  template< typename U, typename T >
716  static U prefixSum( T const value, MPI_Comm comm = MPI_COMM_GEOS );
717 
723  template< typename T >
724  static T sum( T const & value, MPI_Comm comm = MPI_COMM_GEOS );
725 
732  template< typename T >
733  static void sum( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );
734 
740  template< typename T >
741  static T min( T const & value, MPI_Comm comm = MPI_COMM_GEOS );
742 
749  template< typename T >
750  static void min( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );
751 
758  template< typename FIRST, typename SECOND >
759  static PairType< FIRST, SECOND > min( PairType< FIRST, SECOND > const & pair, MPI_Comm comm = MPI_COMM_GEOS );
760 
767  template< typename FIRST, typename SECOND, typename CONTAINER >
768  static PairType< FIRST, SECOND > min( CONTAINER const & pairs, MPI_Comm comm = MPI_COMM_GEOS );
769 
775  template< typename T >
776  static T max( T const & value, MPI_Comm comm = MPI_COMM_GEOS );
777 
784  template< typename T >
785  static void max( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );
786 
793  template< typename FIRST, typename SECOND >
794  static PairType< FIRST, SECOND > max( PairType< FIRST, SECOND > const & pair, MPI_Comm comm = MPI_COMM_GEOS );
795 
802  template< typename FIRST, typename SECOND, typename CONTAINER >
803  static PairType< FIRST, SECOND > max( CONTAINER const & pairs, MPI_Comm comm = MPI_COMM_GEOS );
804 
805 private:
806 
816  template< typename T >
817  static int allReduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
818 };
819 
820 namespace internal
821 {
822 
824 struct ManagedResources
825 {
826  // The list of managed MPI_Op instances
827  std::set< MPI_Op > m_mpiOps;
828 
829  // The list of managed MPI_Type instances
830  std::set< MPI_Datatype > m_mpiTypes;
831 
836  void finalize();
837 };
838 
843 ManagedResources & getManagedResources();
844 
845 template< typename T, typename ENABLE = void >
846 struct MpiTypeImpl {};
847 
848 #define ADD_MPI_TYPE_MAP( T, MPI_T ) \
849  template<> struct MpiTypeImpl< T > { static MPI_Datatype get() { return MPI_T; } }
850 
851 ADD_MPI_TYPE_MAP( float, MPI_FLOAT );
852 ADD_MPI_TYPE_MAP( double, MPI_DOUBLE );
853 
854 ADD_MPI_TYPE_MAP( char, MPI_CHAR );
855 ADD_MPI_TYPE_MAP( signed char, MPI_SIGNED_CHAR );
856 ADD_MPI_TYPE_MAP( unsigned char, MPI_UNSIGNED_CHAR );
857 
858 ADD_MPI_TYPE_MAP( int, MPI_INT );
859 ADD_MPI_TYPE_MAP( long int, MPI_LONG );
860 ADD_MPI_TYPE_MAP( long long int, MPI_LONG_LONG );
861 
862 ADD_MPI_TYPE_MAP( unsigned int, MPI_UNSIGNED );
863 ADD_MPI_TYPE_MAP( unsigned long int, MPI_UNSIGNED_LONG );
864 ADD_MPI_TYPE_MAP( unsigned long long int, MPI_UNSIGNED_LONG_LONG );
865 
866 #undef ADD_MPI_TYPE_MAP
867 
868 template< typename T >
869 struct MpiTypeImpl< T, std::enable_if_t< std::is_enum< T >::value > >
870 {
871  static MPI_Datatype get() { return MpiTypeImpl< std::underlying_type_t< T > >::get(); }
872 };
873 
874 template<>
875 struct MpiTypeImpl< bool * >
876 {
877  static MPI_Datatype get()
878  {
879  // Return the appropriate MPI_Datatype for bool*
880  return MPI_C_BOOL;
881  }
882 };
883 
884 
885 template< typename T >
886 MPI_Datatype getMpiType()
887 {
888  return MpiTypeImpl< T >::get();
889 }
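// Illustrative sketch (not part of the original header): getMpiType resolves a C++ type to the matching
// MPI datatype through the MpiTypeImpl specializations above; enum types dispatch through their
// underlying integer type. For example:
//
//   MPI_Datatype const realType = internal::getMpiType< double >();          // yields MPI_DOUBLE
//   MPI_Datatype const countType = internal::getMpiType< long long int >();  // yields MPI_LONG_LONG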
890 
891 template< typename FIRST, typename SECOND >
892 MPI_Datatype getMpiPairType()
893 {
894  static_assert( "no default implementation, please add a template specialization and add it in the \"testMpiWrapper\" unit test." );
895  return {};
896 }
897 template<> MPI_Datatype getMpiPairType< int, int >();
898 template<> MPI_Datatype getMpiPairType< long int, int >();
899 template<> MPI_Datatype getMpiPairType< long int, long int >();
900 template<> MPI_Datatype getMpiPairType< long long int, long long int >();
901 template<> MPI_Datatype getMpiPairType< float, int >();
902 template<> MPI_Datatype getMpiPairType< double, int >();
903 template<> MPI_Datatype getMpiPairType< double, long int >();
904 template<> MPI_Datatype getMpiPairType< double, long long int >();
905 template<> MPI_Datatype getMpiPairType< double, double >();
906 
907 // It is advised to always use this custom operator for pairs as MPI_MAXLOC is not a true lexicographical comparator.
908 template< typename FIRST, typename SECOND, MpiWrapper::PairReduction OP >
909 MPI_Op getMpiPairReductionOp()
910 {
911  static auto const createOpHolder = [] () {
912  using PAIR_T = MpiWrapper::PairType< FIRST, SECOND >;
913 
914  auto const customOpFunc =
915  []( void * invec, void * inoutvec, int * len, MPI_Datatype * )
916  {
917  for( int i = 0; i < *len; ++i )
918  {
919  PAIR_T & in = static_cast< PAIR_T * >(invec)[i];
920  PAIR_T & inout = static_cast< PAIR_T * >(inoutvec)[i];
921  if constexpr ( OP == MpiWrapper::PairReduction::Min )
922  {
923  if( std::tie( in.first, in.second ) < std::tie( inout.first, inout.second ) )
924  inout = in;
925  }
926  else
927  {
928  if( std::tie( in.first, in.second ) > std::tie( inout.first, inout.second ) )
929  inout = in;
930  }
931  }
932  };
933 
934  MPI_Op mpiOp;
935  GEOS_ERROR_IF_NE( MPI_Op_create( customOpFunc, 1, &mpiOp ), MPI_SUCCESS );
936  // Resource registered to be destroyed at MpiWrapper::finalize().
937  internal::getManagedResources().m_mpiOps.emplace( mpiOp );
938  return mpiOp;
939  };
940  // Static storage to ensure the MPI operation is created only once and reused for all calls to this function.
941  static MPI_Op mpiOp{ createOpHolder() };
942  return mpiOp;
943 }
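// Illustrative sketch (not part of the original header): a typical use of the pair reduction is to
// locate the rank owning a global extremum. `localValue` is a hypothetical per-rank quantity.
//
//   MpiWrapper::PairType< double, int > const localPair{ localValue, MpiWrapper::commRank() };
//   auto const globalMin = MpiWrapper::min( localPair );   // lexicographic min across all ranks
//   int const owningRank = globalMin.second;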
944 
945 }
946 
947 inline MPI_Op MpiWrapper::getMpiOp( Reduction const op )
948 {
949  switch( op )
950  {
951  case Reduction::Sum:
952  {
953  return MPI_SUM;
954  }
955  case Reduction::Min:
956  {
957  return MPI_MIN;
958  }
959  case Reduction::Max:
960  {
961  return MPI_MAX;
962  }
963  case Reduction::Prod:
964  {
965  return MPI_PROD;
966  }
967  case Reduction::LogicalAnd:
968  {
969  return MPI_LAND;
970  }
971  case Reduction::LogicalOr:
972  {
973  return MPI_LOR;
974  }
975  default:
976  GEOS_ERROR( "Unsupported reduction operation" );
977  return MPI_NO_OP;
978  }
979 }
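// Illustrative sketch (not part of the original header): callers pass the strongly typed Reduction
// enum rather than a raw MPI_Op; getMpiOp() performs the translation internally. `localNorm` is a
// hypothetical per-rank value.
//
//   double const globalNorm = MpiWrapper::allReduce( localNorm, MpiWrapper::Reduction::Sum );
//   double const globalMax  = MpiWrapper::max( localNorm );   // convenience form of Reduction::Max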
980 
981 template< typename T_SEND, typename T_RECV >
982 int MpiWrapper::allgather( T_SEND const * const sendbuf,
983  int sendcount,
984  T_RECV * const recvbuf,
985  int recvcount,
986  MPI_Comm MPI_PARAM( comm ) )
987 {
988 #ifdef GEOS_USE_MPI
989  return MPI_Allgather( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
990  recvbuf, recvcount, internal::getMpiType< T_RECV >(),
991  comm );
992 #else
993  static_assert( std::is_same< T_SEND, T_RECV >::value,
994  "MpiWrapper::allgather() for serial run requires send and receive buffers are of the same type" );
995  GEOS_ERROR_IF_NE_MSG( sendcount, recvcount, "sendcount is not equal to recvcount." );
996  std::copy( sendbuf, sendbuf + sendcount, recvbuf );
997  return 0;
998 #endif
999 }
1000 
1001 template< typename T_SEND, typename T_RECV >
1002 int MpiWrapper::allgatherv( T_SEND const * const sendbuf,
1003  int sendcount,
1004  T_RECV * const recvbuf,
1005  int * recvcounts,
1006  int * displacements,
1007  MPI_Comm MPI_PARAM( comm ) )
1008 {
1009 #ifdef GEOS_USE_MPI
1010  return MPI_Allgatherv( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
1011  recvbuf, recvcounts, displacements, internal::getMpiType< T_RECV >(),
1012  comm );
1013 #else
1014  static_assert( std::is_same< T_SEND, T_RECV >::value,
1015  "MpiWrapper::allgatherv() for serial run requires send and receive buffers are of the same type" );
1016  GEOS_ERROR_IF_NE_MSG( sendcount, recvcounts[0], "sendcount is not equal to recvcounts[0]." );
1017  std::copy( sendbuf, sendbuf + sendcount, recvbuf );
1018  return 0;
1019 #endif
1020 }
1021 
1022 
1023 template< typename T >
1024 void MpiWrapper::allGather( T const myValue, array1d< T > & allValues, MPI_Comm MPI_PARAM( comm ) )
1025 {
1026 #ifdef GEOS_USE_MPI
1027  int const mpiSize = commSize( comm );
1028  allValues.resize( mpiSize );
1029 
1030  MPI_Datatype const MPI_TYPE = internal::getMpiType< T >();
1031 
1032  MPI_Allgather( &myValue, 1, MPI_TYPE, allValues.data(), 1, MPI_TYPE, comm );
1033 
1034 #else
1035  allValues.resize( 1 );
1036  allValues[0] = myValue;
1037 #endif
1038 }
1039 
1040 template< typename T >
1041 int MpiWrapper::allGather( arrayView1d< T const > const & sendValues,
1042  array1d< T > & allValues,
1043  MPI_Comm MPI_PARAM( comm ) )
1044 {
1045  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
1046 #ifdef GEOS_USE_MPI
1047  int const mpiSize = commSize( comm );
1048  allValues.resize( mpiSize * sendSize );
1049  return MPI_Allgather( sendValues.data(),
1050  sendSize,
1051  internal::getMpiType< T >(),
1052  allValues.data(),
1053  sendSize,
1054  internal::getMpiType< T >(),
1055  comm );
1056 
1057 #else
1058  allValues.resize( sendSize );
1059  for( localIndex a=0; a<sendSize; ++a )
1060  {
1061  allValues[a] = sendValues[a];
1062  }
1063  return 0;
1064 #endif
1065 }
1066 
1067 template< typename T >
1068 int MpiWrapper::allGatherv( arrayView1d< T const > const & sendValues,
1069  array1d< T > & allValues,
1070  MPI_Comm MPI_PARAM( comm ) )
1071 {
1072  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
1073 #ifdef GEOS_USE_MPI
1074  int const mpiSize = commSize( comm );
1075  array1d< int > counts;
1076  allGather( sendSize, counts, comm );
1077  array1d< int > displs( mpiSize + 1 );
1078  std::partial_sum( counts.begin(), counts.end(), displs.begin() + 1 );
1079  allValues.resize( displs.back() );
1080  return MPI_Allgatherv( sendValues.data(),
1081  sendSize,
1082  internal::getMpiType< T >(),
1083  allValues.data(),
1084  counts.data(),
1085  displs.data(),
1086  internal::getMpiType< T >(),
1087  comm );
1088 
1089 #else
1090  allValues.resize( sendSize );
1091  for( localIndex a=0; a<sendSize; ++a )
1092  {
1093  allValues[a] = sendValues[a];
1094  }
1095  return 0;
1096 #endif
1097 }
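// Illustrative sketch (not part of the original header): allGatherv sizes the receive array from the
// per-rank counts it gathers internally, so callers only provide their local contribution.
// `myLocalIds` is a hypothetical per-rank array1d< globalIndex >.
//
//   array1d< globalIndex > allIds;
//   MpiWrapper::allGatherv( myLocalIds.toViewConst(), allIds );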
1098 
1099 template< typename T >
1100 int MpiWrapper::allReduce( T const * const sendbuf,
1101  T * const recvbuf,
1102  int const count,
1103  MPI_Op const MPI_PARAM( op ),
1104  MPI_Comm const MPI_PARAM( comm ) )
1105 {
1106 #ifdef GEOS_USE_MPI
1107  MPI_Datatype const mpiType = internal::getMpiType< T >();
1108  return MPI_Allreduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, comm );
1109 #else
1110  if( sendbuf != recvbuf )
1111  {
1112  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
1113  }
1114  return 0;
1115 #endif
1116 }
1117 
1118 template< typename T >
1119 int MpiWrapper::reduce( T const * const sendbuf,
1120  T * const recvbuf,
1121  int const count,
1122  MPI_Op const MPI_PARAM( op ),
1123  int root,
1124  MPI_Comm const MPI_PARAM( comm ) )
1125 {
1126 #ifdef GEOS_USE_MPI
1127  MPI_Datatype const mpiType = internal::getMpiType< T >();
1128  return MPI_Reduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, root, comm );
1129 #else
1130  if( sendbuf != recvbuf )
1131  {
1132  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
1133  }
1134  return 0;
1135 #endif
1136 }
1137 
1138 template< typename T >
1139 int MpiWrapper::scan( T const * const sendbuf,
1140  T * const recvbuf,
1141  int count,
1142  MPI_Op MPI_PARAM( op ),
1143  MPI_Comm MPI_PARAM( comm ) )
1144 {
1145 #ifdef GEOS_USE_MPI
1146  return MPI_Scan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
1147 #else
1148  memcpy( recvbuf, sendbuf, count*sizeof(T) );
1149  return 0;
1150 #endif
1151 }
1152 
1153 template< typename T >
1154 int MpiWrapper::exscan( T const * const MPI_PARAM( sendbuf ),
1155  T * const recvbuf,
1156  int count,
1157  MPI_Op MPI_PARAM( op ),
1158  MPI_Comm MPI_PARAM( comm ) )
1159 {
1160 #ifdef GEOS_USE_MPI
1161  return MPI_Exscan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
1162 #else
1163  memset( recvbuf, 0, count*sizeof(T) );
1164  return 0;
1165 #endif
1166 }
1167 
1168 template< typename T >
1169 int MpiWrapper::bcast( T * const MPI_PARAM( buffer ),
1170  int MPI_PARAM( count ),
1171  int MPI_PARAM( root ),
1172  MPI_Comm MPI_PARAM( comm ) )
1173 {
1174 #ifdef GEOS_USE_MPI
1175  return MPI_Bcast( buffer, count, internal::getMpiType< T >(), root, comm );
1176 #else
1177  return 0;
1178 #endif
1179 
1180 }
1181 
1182 template< typename T >
1183 void MpiWrapper::broadcast( T & MPI_PARAM( value ), int MPI_PARAM( srcRank ), MPI_Comm MPI_PARAM( comm ) )
1184 {
1185 #ifdef GEOS_USE_MPI
1186  MPI_Bcast( &value, 1, internal::getMpiType< T >(), srcRank, comm );
1187 #endif
1188 }
1189 
1190 template<>
1191 inline
1192 void MpiWrapper::broadcast< string >( string & MPI_PARAM( value ),
1193  int MPI_PARAM( srcRank ),
1194  MPI_Comm MPI_PARAM( comm ) )
1195 {
1196 #ifdef GEOS_USE_MPI
1197  int size = LvArray::integerConversion< int >( value.size() );
1198  broadcast( size, srcRank, comm );
1199  value.resize( size );
1200  MPI_Bcast( const_cast< char * >( value.data() ), size, internal::getMpiType< char >(), srcRank, comm );
1201 #endif
1202 }
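// Illustrative sketch (not part of the original header): the string specialization first broadcasts
// the length, resizes on every rank, then broadcasts the characters. A hypothetical use, where the
// file is read on rank 0 only (`readInputFile` and `path` are hypothetical):
//
//   string contents = ( MpiWrapper::commRank() == 0 ) ? readInputFile( path ) : "";
//   MpiWrapper::broadcast( contents, 0 );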
1203 
1204 template< typename TS, typename TR >
1205 int MpiWrapper::gather( TS const * const sendbuf,
1206  int sendcount,
1207  TR * const recvbuf,
1208  int recvcount,
1209  int MPI_PARAM( root ),
1210  MPI_Comm MPI_PARAM( comm ) )
1211 {
1212 #ifdef GEOS_USE_MPI
1213  return MPI_Gather( sendbuf, sendcount, internal::getMpiType< TS >(),
1214  recvbuf, recvcount, internal::getMpiType< TR >(),
1215  root, comm );
1216 #else
1217  static_assert( std::is_same< TS, TR >::value,
1218  "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
1219  std::size_t const sendBufferSize = sendcount * sizeof(TS);
1220  std::size_t const recvBufferSize = recvcount * sizeof(TR);
1221  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
1222  memcpy( recvbuf, sendbuf, sendBufferSize );
1223  return 0;
1224 #endif
1225 }
1226 
1227 template< typename T, typename DST_CONTAINER, typename >
1228 int MpiWrapper::gather( T const & value,
1229  DST_CONTAINER & destValuesBuffer,
1230  int root,
1231  MPI_Comm MPI_PARAM( comm ) )
1232 {
1233  if( commRank() == 0 )
1234  GEOS_ERROR_IF_LT_MSG( destValuesBuffer.size(), size_t( commSize() ),
1235  "Receive buffer is not large enough to contain the values to receive." );
1236 #ifdef GEOS_USE_MPI
1237  return MPI_Gather( &value, sizeof( T ), internal::getMpiType< uint8_t >(),
1238  destValuesBuffer.data(), sizeof( T ), internal::getMpiType< uint8_t >(),
1239  root, comm );
1240 #else
1241  memcpy( destValuesBuffer.data(), &value, sizeof( T ) );
1242  return 0;
1243 #endif
1244 }
1245 
1246 template< typename TS, typename TR >
1247 int MpiWrapper::gatherv( TS const * const sendbuf,
1248  int sendcount,
1249  TR * const recvbuf,
1250  const int * recvcounts,
1251  const int * MPI_PARAM( displs ),
1252  int MPI_PARAM( root ),
1253  MPI_Comm MPI_PARAM( comm ) )
1254 {
1255 #ifdef GEOS_USE_MPI
1256  return MPI_Gatherv( sendbuf, sendcount, internal::getMpiType< TS >(),
1257  recvbuf, recvcounts, displs, internal::getMpiType< TR >(),
1258  root, comm );
1259 #else
1260  static_assert( std::is_same< TS, TR >::value,
1261  "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
1262  std::size_t const sendBufferSize = sendcount * sizeof(TS);
1263  std::size_t const recvBufferSize = recvcounts[0] * sizeof(TR);
1264  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
1265  memcpy( recvbuf, sendbuf, sendBufferSize );
1266  return 0;
1267 #endif
1268 }
1269 
1270 
1271 template< typename TS, typename TR >
1272 int MpiWrapper::scatter( TS const * const sendbuf,
1273  int sendcount,
1274  TR * const recvbuf,
1275  int recvcount,
1276  int MPI_PARAM( root ),
1277  MPI_Comm MPI_PARAM( comm ))
1278 {
1279 #ifdef GEOS_USE_MPI
1280  return MPI_Scatter( sendbuf, sendcount, internal::getMpiType< TS >(),
1281  recvbuf, recvcount, internal::getMpiType< TR >(),
1282  root, comm );
1283 #else
1284  static_assert( std::is_same< TS, TR >::value,
1285  "MpiWrapper::scatter() for serial run requires send and receive buffers are of the same type" );
1286  std::size_t const sendBufferSize = sendcount * sizeof(TS);
1287  std::size_t const recvBufferSize = recvcount * sizeof(TR);
1288  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
1289  memcpy( recvbuf, sendbuf, sendBufferSize );
1290  return 0;
1291 #endif
1292 }
1293 
1294 template< typename TS, typename TR >
1295 int MpiWrapper::scatterv( TS const * const sendbuf,
1296  const int * sendcounts,
1297  const int * MPI_PARAM( displs ),
1298  TR * const recvbuf,
1299  int recvcount,
1300  int MPI_PARAM( root ),
1301  MPI_Comm MPI_PARAM( comm ))
1302 {
1303 #ifdef GEOS_USE_MPI
1304  return MPI_Scatterv( sendbuf, sendcounts, displs, internal::getMpiType< TS >(),
1305  recvbuf, recvcount, internal::getMpiType< TR >(),
1306  root, comm );
1307 #else
1308  static_assert( std::is_same< TS, TR >::value,
1309  "MpiWrapper::scatterv() for serial run requires send and receive buffers are of the same type" );
1310  std::size_t const sendBufferSize = sendcounts[0] * sizeof(TS);
1311  std::size_t const recvBufferSize = recvcount * sizeof(TR);
1312  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
1313  memcpy( recvbuf, sendbuf, sendBufferSize );
1314  return 0;
1315 #endif
1316 }
1317 
1318 
1319 
1320 template< typename T >
1321 int MpiWrapper::iRecv( T * const buf,
1322  int count,
1323  int MPI_PARAM( source ),
1324  int tag,
1325  MPI_Comm MPI_PARAM( comm ),
1326  MPI_Request * MPI_PARAM( request ) )
1327 {
1328 #ifdef GEOS_USE_MPI
1329  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
1330  "Attempting to use an MPI_Request that is still in use." );
1331  return MPI_Irecv( buf, count, internal::getMpiType< T >(), source, tag, comm, request );
1332 #else
1333  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
1334  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );
1335 
1336  if( iPointer==pointerMap.end() )
1337  {
1338  pointerMap.insert( {tag, {1, buf} } );
1339  }
1340  else
1341  {
1342  GEOS_ERROR_IF( iPointer->second.first != 0,
1343  "Tag does is assigned, but pointer was not set by iSend." );
1344  memcpy( buf, iPointer->second.second, count*sizeof(T) );
1345  pointerMap.erase( iPointer );
1346  }
1347  return 0;
1348 #endif
1349 }
1350 
1351 template< typename T >
1352 int MpiWrapper::recv( array1d< T > & buf,
1353  int MPI_PARAM( source ),
1354  int tag,
1355  MPI_Comm MPI_PARAM( comm ),
1356  MPI_Status * MPI_PARAM( request ) )
1357 {
1358 #ifdef GEOS_USE_MPI
1359  MPI_Status status;
1360  int count;
1361  MPI_Probe( source, tag, comm, &status );
1362  MPI_Get_count( &status, MPI_CHAR, &count );
1363 
1364  GEOS_ASSERT_EQ( count % sizeof( T ), 0 );
1365  buf.resize( count / sizeof( T ) );
1366 
1367  return MPI_Recv( reinterpret_cast< char * >( buf.data() ),
1368  count,
1369  MPI_CHAR,
1370  source,
1371  tag,
1372  comm,
1373  request );
1374 #else
1375  GEOS_ERROR( "Not implemented!" );
1376  return MPI_SUCCESS;
1377 #endif
1378 }
1379 
1380 template< typename T >
1381 int MpiWrapper::iSend( arrayView1d< T > const & buf,
1382  int MPI_PARAM( dest ),
1383  int tag,
1384  MPI_Comm MPI_PARAM( comm ),
1385  MPI_Request * MPI_PARAM( request ) )
1386 {
1387 #ifdef GEOS_USE_MPI
1388  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
1389  "Attempting to use an MPI_Request that is still in use." );
1390  return MPI_Isend( reinterpret_cast< void const * >( buf.data() ),
1391  buf.size() * sizeof( T ),
1392  MPI_CHAR,
1393  dest,
1394  tag,
1395  comm,
1396  request );
1397 #else
1398  GEOS_ERROR( "Not implemented." );
1399  return MPI_SUCCESS;
1400 #endif
1401 }
1402 
1403 template< typename T >
1404 int MpiWrapper::send( T const * const buf,
1405  int count,
1406  int dest,
1407  int tag,
1408  MPI_Comm comm )
1409 {
1410 #ifdef GEOS_USE_MPI
1411  return MPI_Send( buf, count, internal::getMpiType< T >(), dest, tag, comm );
1412 #else
1413  GEOS_ERROR( "Not implemented without MPI" );
1414 #endif
1415 }
1416 
1417 template< typename T >
1418 int MpiWrapper::iSend( T const * const buf,
1419  int count,
1420  int MPI_PARAM( dest ),
1421  int tag,
1422  MPI_Comm MPI_PARAM( comm ),
1423  MPI_Request * MPI_PARAM( request ) )
1424 {
1425 #ifdef GEOS_USE_MPI
1426  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
1427  "Attempting to use an MPI_Request that is still in use." );
1428  return MPI_Isend( buf, count, internal::getMpiType< T >(), dest, tag, comm, request );
1429 #else
1430  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
1431  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );
1432 
1433  if( iPointer==pointerMap.end() )
1434  {
1435  pointerMap.insert( {tag, {0, const_cast< T * >(buf)}
1436  } );
1437  }
1438  else
1439  {
1440  GEOS_ERROR_IF( iPointer->second.first != 1,
1441  "Tag does is assigned, but pointer was not set by iRecv." );
1442  memcpy( iPointer->second.second, buf, count*sizeof(T) );
1443  pointerMap.erase( iPointer );
1444  }
1445  return 0;
1446 #endif
1447 }
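// Illustrative sketch (not part of the original header): requests passed to iSend/iRecv must start
// out as MPI_REQUEST_NULL, since both functions refuse a request that is still in use. `sendBuf`,
// `recvBuf`, `n`, `neighbor` and `tag` are hypothetical.
//
//   MPI_Request requests[2] = { MPI_REQUEST_NULL, MPI_REQUEST_NULL };
//   MpiWrapper::iSend( sendBuf, n, neighbor, tag, MPI_COMM_GEOS, &requests[0] );
//   MpiWrapper::iRecv( recvBuf, n, neighbor, tag, MPI_COMM_GEOS, &requests[1] );
//   MpiWrapper::waitAll( 2, requests, MPI_STATUSES_IGNORE );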
1448 
1449 template< typename U, typename T >
1450 U MpiWrapper::prefixSum( T const value, MPI_Comm comm )
1451 {
1452  U localResult;
1453 
1454 #ifdef GEOS_USE_MPI
1455  U const convertedValue = value;
1456  int const error = MPI_Exscan( &convertedValue, &localResult, 1, internal::getMpiType< U >(), MPI_SUM, comm );
1457  MPI_CHECK_ERROR( error );
1458 #endif
1459  if( commRank() == 0 )
1460  {
1461  localResult = 0;
1462  }
1463 
1464  return localResult;
1465 }
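// Illustrative sketch (not part of the original header): prefixSum returns this rank's exclusive
// offset, which is a common way to assign contiguous global index ranges. `numLocalElems` is a
// hypothetical local count.
//
//   globalIndex const firstGlobalIndex = MpiWrapper::prefixSum< globalIndex >( numLocalElems );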
1466 
1467 
1468 template< typename T >
1469 T MpiWrapper::allReduce( T const & value, Reduction const op, MPI_Comm const comm )
1470 {
1471  T result;
1472  allReduce( &value, &result, 1, getMpiOp( op ), comm );
1473  return result;
1474 }
1475 
1476 template< typename T >
1477 void MpiWrapper::allReduce( Span< T const > const src, Span< T > const dst, Reduction const op, MPI_Comm const comm )
1478 {
1479  GEOS_ASSERT_EQ( src.size(), dst.size() );
1480  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
1481 }
1482 
1483 template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
1484 void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm )
1485 {
1486  static_assert( std::is_trivially_copyable< typename get_value_type< SRC_CONTAINER_TYPE >::type >::value,
1487  "The type in the source container must be trivially copyable." );
1488  static_assert( std::is_trivially_copyable< typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
1489  "The type in the destination container must be trivially copyable." );
1490  static_assert( std::is_same< typename get_value_type< SRC_CONTAINER_TYPE >::type,
1492  "Source and destination containers must have the same value type." );
1493  GEOS_ASSERT_GE( src.size(), count );
1494  GEOS_ASSERT_GE( dst.size(), count );
1495  allReduce( src.data(), dst.data(), count, getMpiOp( op ), comm );
1496 }
1497 
1498 template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
1499 void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm )
1500 {
1501  static_assert( std::is_trivially_copyable< typename get_value_type< SRC_CONTAINER_TYPE >::type >::value,
1502  "The type in the source container must be trivially copyable." );
1503  static_assert( std::is_trivially_copyable< typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
1504  "The type in the destination container must be trivially copyable." );
1505  static_assert( std::is_same< typename get_value_type< SRC_CONTAINER_TYPE >::type,
1507  "Source and destination containers must have the same value type." );
1508  GEOS_ASSERT_EQ( src.size(), dst.size() );
1509  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
1510 }
1511 
1512 template< typename T >
1513 T MpiWrapper::sum( T const & value, MPI_Comm comm )
1514 {
1515  return MpiWrapper::allReduce( value, Reduction::Sum, comm );
1516 }
1517 
1518 template< typename T >
1519 void MpiWrapper::sum( Span< T const > src, Span< T > dst, MPI_Comm comm )
1520 {
1521  MpiWrapper::allReduce( src, dst, Reduction::Sum, comm );
1522 }
1523 
1524 template< typename T >
1525 T MpiWrapper::min( T const & value, MPI_Comm comm )
1526 {
1527  return MpiWrapper::allReduce( value, Reduction::Min, comm );
1528 }
1529 
1530 template< typename T >
1531 void MpiWrapper::min( Span< T const > src, Span< T > dst, MPI_Comm comm )
1532 {
1533  MpiWrapper::allReduce( src, dst, Reduction::Min, comm );
1534 }
1535 
1536 template< typename T >
1537 T MpiWrapper::max( T const & value, MPI_Comm comm )
1538 {
1539  return MpiWrapper::allReduce( value, Reduction::Max, comm );
1540 }
1541 
1542 template< typename T >
1543 void MpiWrapper::max( Span< T const > src, Span< T > dst, MPI_Comm comm )
1544 {
1545  MpiWrapper::allReduce( src, dst, Reduction::Max, comm );
1546 }
1547 
1548 
1549 template< typename T >
1550 T MpiWrapper::reduce( T const & value, Reduction const op, int root, MPI_Comm const comm )
1551 {
1552  T result;
1553  reduce( &value, &result, 1, getMpiOp( op ), root, comm );
1554  return result;
1555 }
1556 
1557 template< typename T >
1558 void MpiWrapper::reduce( Span< T const > const src, Span< T > const dst, Reduction const op, int root, MPI_Comm const comm )
1559 {
1560  GEOS_ASSERT_EQ( src.size(), dst.size() );
1561  reduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), root, comm );
1562 }
1563 
1564 template< typename FIRST, typename SECOND, MpiWrapper::PairReduction const OP >
1565 MpiWrapper::PairType< FIRST, SECOND >
1566 MpiWrapper::allReduce( PairType< FIRST, SECOND > const & localPair, MPI_Comm comm )
1567 {
1568 #ifdef GEOS_USE_MPI
1569  auto const type = internal::getMpiPairType< FIRST, SECOND >();
1570  auto const mpiOp = internal::getMpiPairReductionOp< FIRST, SECOND, OP >();
1571  PairType< FIRST, SECOND > pair{ localPair.first, localPair.second };
1572  MPI_Allreduce( MPI_IN_PLACE, &pair, 1, type, mpiOp, comm );
1573  return pair;
1574 #else
1575  return localPair;
1576 #endif
1577 }
1578 
1579 template< typename FIRST, typename SECOND, typename CONTAINER, MpiWrapper::PairReduction const OP >
1580 MpiWrapper::PairType< FIRST, SECOND >
1581 MpiWrapper::allReduce( CONTAINER const & pairs, MPI_Comm const comm )
1582 {
1583  using PAIR_T = PairType< FIRST, SECOND >;
1584  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMin = []( PAIR_T const & a, PAIR_T const & b ) {
1585  return ( std::tie( a.first, a.second ) < std::tie( b.first, b.second ) ) ? a : b;
1586  };
1587  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMax = []( PAIR_T const & a, PAIR_T const & b ) {
1588  return ( std::tie( a.first, a.second ) > std::tie( b.first, b.second ) ) ? a : b;
1589  };
1590  PAIR_T const defaultPair{
1591  OP == PairReduction::Min ? std::numeric_limits< FIRST >::max() : std::numeric_limits< FIRST >::lowest(),
1592  OP == PairReduction::Min ? std::numeric_limits< SECOND >::max() : std::numeric_limits< SECOND >::lowest()
1593  };
1594  // based on the operation, pair will be the minimum / maximum element (or defaultPair if pairs is empty)
1595  PAIR_T pair = std::accumulate( pairs.begin(), pairs.end(), defaultPair,
1596  OP == PairReduction::Min ? getMin : getMax );
1597  return allReduce< FIRST, SECOND, OP >( pair, comm );
1598 }
1599 
1600 template< typename FIRST, typename SECOND >
1601 MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
1602 { return allReduce< FIRST, SECOND, PairReduction::Min >( pair, comm ); }
1603 
1604 template< typename FIRST, typename SECOND, typename CONTAINER >
1605 MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( CONTAINER const & pairs, MPI_Comm comm )
1606 { return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Min >( pairs, comm ); }
1607 
1608 template< typename FIRST, typename SECOND >
1609 MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
1610 { return allReduce< FIRST, SECOND, PairReduction::Max >( pair, comm ); }
1611 
1612 template< typename FIRST, typename SECOND, typename CONTAINER >
1613 MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( CONTAINER const & pairs, MPI_Comm comm )
1614 { return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Max >( pairs, comm ); }
1615 
1616 } /* namespace geos */
1617 
1618 #endif /* GEOS_COMMON_MPIWRAPPER_HPP_ */
#define GEOS_ERROR(msg)
Raise a hard error and terminate the program.
Definition: Logger.hpp:157
#define GEOS_ERROR_IF(EXP, msg)
Conditionally raise a hard error and terminate the program.
Definition: Logger.hpp:142
#define GEOS_ERROR_IF_NE(lhs, rhs)
Raise a hard error if two values are not equal.
Definition: Logger.hpp:259
#define GEOS_ASSERT_GE(lhs, rhs)
Assert that one value compares greater than or equal to the other in debug builds.
Definition: Logger.hpp:455
#define GEOS_ERROR_IF_LT_MSG(lhs, rhs, msg)
Raise a hard error if one value compares less than the other.
Definition: Logger.hpp:339
#define GEOS_ERROR_IF_NE_MSG(lhs, rhs, msg)
Raise a hard error if two values are not equal.
Definition: Logger.hpp:243
#define GEOS_ASSERT_EQ(lhs, rhs)
Assert that two values compare equal in debug builds.
Definition: Logger.hpp:410
Lightweight non-owning wrapper over a contiguous range of elements.
Definition: Span.hpp:42
constexpr T * data() const noexcept
Definition: Span.hpp:131
constexpr size_type size() const noexcept
Definition: Span.hpp:107
ArrayView< T, 1 > arrayView1d
Alias for 1D array view.
Definition: DataTypes.hpp:179
int MPI_COMM_GEOS
Global MPI communicator used by GEOSX.
GEOS_LOCALINDEX_TYPE localIndex
Local index type (for indexing objects within an MPI partition).
Definition: DataTypes.hpp:84
std::size_t size_t
Unsigned size type.
Definition: DataTypes.hpp:78
Array< T, 1 > array1d
Alias for 1D array.
Definition: DataTypes.hpp:175
internal::StdVectorWrapper< T, Allocator, USE_STD_CONTAINER_BOUNDS_CHECKING > stdVector
static MPI_Op getMpiOp(Reduction const op)
Returns an MPI_Op associated with our strongly typed Reduction enum.
Definition: MpiWrapper.hpp:947
static int checkAll(int count, MPI_Request array_of_requests[], int *flag, MPI_Status array_of_statuses[])
static int allgatherv(T_SEND const *sendbuf, int sendcount, T_RECV *recvbuf, int *recvcounts, int *displacements, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Allgatherv.
static int check(MPI_Request *request, int *flag, MPI_Status *status)
static int activeWaitAny(const int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[], std::function< MPI_Request(int) > func)
@ Max
Max pair first value.
@ Min
Min pair first value.
static int activeWaitSomeCompletePhase(const int participants, stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request(int) > > > const &phases)
static int iSend(T const *const buf, int count, int dest, int tag, MPI_Comm comm, MPI_Request *request)
Strongly typed wrapper around MPI_Isend()
static int bcast(T *buffer, int count, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Bcast.
static void allGather(T const myValue, array1d< T > &allValues, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for MPI_Allgather.
static T max(T const &value, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for a MPI_Allreduce using a MPI_MAX operation.
static int allgather(T_SEND const *sendbuf, int sendcount, T_RECV *recvbuf, int recvcount, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Allgather.
static int activeWaitSome(const int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[], std::function< MPI_Request(int) > func)
static int send(T const *const buf, int count, int dest, int tag, MPI_Comm comm)
Strongly typed wrapper around MPI_Send()
static U prefixSum(T const value, MPI_Comm comm=MPI_COMM_GEOS)
Compute exclusive prefix sum and full sum.
static int scatter(TS const *const sendbuf, int sendcount, TR *const recvbuf, int recvcount, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Scatter.
static int gather(T const &value, DST_CONTAINER &destValuesBuffer, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Gather().
static T allReduce(T const &value, Reduction const op, MPI_Comm comm=MPI_COMM_GEOS)
Convenience wrapper for the MPI_Allreduce function.
static int checkAny(int count, MPI_Request array_of_requests[], int *idx, int *flag, MPI_Status array_of_statuses[])
static int gather(TS const *const sendbuf, int sendcount, TR *const recvbuf, int recvcount, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Gather().
static int scatterv(TS const *const sendbuf, const int *sendcounts, const int *displs, TR *const recvbuf, int recvcount, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Scatterv.
static int iRecv(T *const buf, int count, int source, int tag, MPI_Comm comm, MPI_Request *request)
Strongly typed wrapper around MPI_Irecv()
static T sum(T const &value, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for a MPI_Allreduce using a MPI_SUM operation.
static int activeWaitOrderedCompletePhase(const int participants, stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request(int) > > > const &phases)
static T min(T const &value, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for a MPI_Allreduce using a MPI_MIN operation.
static int nodeCommSize()
Compute the number of ranks allocated on the same node.
static int gatherv(TS const *const sendbuf, int sendcount, TR *const recvbuf, const int *recvcounts, const int *displs, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Gatherv.
static void finalize()
Free MPI managed resources, then call MPI_Finalize(). Please note that once called,...
static void broadcast(T &value, int srcRank=0, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for MPI_Broadcast.
static int reduce(T const *sendbuf, T *recvbuf, int count, MPI_Op op, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Reduce.
Trait to retrieve the value_type or ValueType of a type T.