GEOS
MpiWrapper.hpp
1 /*
2  * ------------------------------------------------------------------------------------------------------------
3  * SPDX-License-Identifier: LGPL-2.1-only
4  *
5  * Copyright (c) 2016-2024 Lawrence Livermore National Security LLC
6  * Copyright (c) 2018-2024 TotalEnergies
7  * Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University
8  * Copyright (c) 2023-2024 Chevron
9  * Copyright (c) 2019- GEOS/GEOSX Contributors
10  * All rights reserved
11  *
12  * See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details.
13  * ------------------------------------------------------------------------------------------------------------
14  */
15 
20 #ifndef GEOS_COMMON_MPIWRAPPER_HPP_
21 #define GEOS_COMMON_MPIWRAPPER_HPP_
22 
23 #include "common/DataTypes.hpp"
24 #include "common/Span.hpp"
25 #include "common/TypesHelpers.hpp"
26 
27 #include <numeric>
28 
29 #if defined(GEOS_USE_MPI)
30  #include <mpi.h>
31 #define MPI_PARAM( x ) x
32 #else
33 #define MPI_PARAM( x )
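// Serial stand-ins: when GEOS is built without MPI, the typedefs and macros below provide just enough of the MPI symbols used in this header for single-rank execution.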
34 typedef int MPI_Comm;
35 
36 #define MPI_COMM_NULL ((MPI_Comm)0x04000000)
37 #define MPI_COMM_WORLD ((MPI_Comm)0x44000000)
38 #define MPI_COMM_SELF ((MPI_Comm)0x40000000)
39 
40 
41 typedef int MPI_Datatype;
42 #define MPI_CHAR ((MPI_Datatype)0x4c000101)
43 #define MPI_SIGNED_CHAR ((MPI_Datatype)0x4c000118)
44 #define MPI_UNSIGNED_CHAR ((MPI_Datatype)0x4c000102)
45 #define MPI_BYTE ((MPI_Datatype)0x4c00010d)
46 #define MPI_WCHAR ((MPI_Datatype)0x4c00040e)
47 #define MPI_SHORT ((MPI_Datatype)0x4c000203)
48 #define MPI_UNSIGNED_SHORT ((MPI_Datatype)0x4c000204)
49 #define MPI_INT ((MPI_Datatype)0x4c000405)
50 #define MPI_UNSIGNED ((MPI_Datatype)0x4c000406)
51 #define MPI_LONG ((MPI_Datatype)0x4c000807)
52 #define MPI_UNSIGNED_LONG ((MPI_Datatype)0x4c000808)
53 #define MPI_FLOAT ((MPI_Datatype)0x4c00040a)
54 #define MPI_DOUBLE ((MPI_Datatype)0x4c00080b)
55 #define MPI_LONG_DOUBLE ((MPI_Datatype)0x4c00100c)
56 #define MPI_LONG_LONG_INT ((MPI_Datatype)0x4c000809)
57 #define MPI_UNSIGNED_LONG_LONG ((MPI_Datatype)0x4c000819)
58 #define MPI_LONG_LONG MPI_LONG_LONG_INT
59 
60 typedef int MPI_Op;
61 
62 #define MPI_MAX (MPI_Op)(0x58000001)
63 #define MPI_MIN (MPI_Op)(0x58000002)
64 #define MPI_SUM (MPI_Op)(0x58000003)
65 #define MPI_PROD (MPI_Op)(0x58000004)
66 #define MPI_LAND (MPI_Op)(0x58000005)
67 #define MPI_BAND (MPI_Op)(0x58000006)
68 #define MPI_LOR (MPI_Op)(0x58000007)
69 #define MPI_BOR (MPI_Op)(0x58000008)
70 #define MPI_LXOR (MPI_Op)(0x58000009)
71 #define MPI_BXOR (MPI_Op)(0x5800000a)
72 #define MPI_MINLOC (MPI_Op)(0x5800000b)
73 #define MPI_MAXLOC (MPI_Op)(0x5800000c)
74 #define MPI_REPLACE (MPI_Op)(0x5800000d)
75 #define MPI_NO_OP (MPI_Op)(0x5800000e)
76 
77 #define MPI_SUCCESS 0 /* Successful return code */
78 #define MPI_UNDEFINED (-32766)
79 #define MPI_STATUS_IGNORE (MPI_Status *)1
80 #define MPI_STATUSES_IGNORE (MPI_Status *)1
81 #define MPI_REQUEST_NULL ((MPI_Request)0x2c000000)
82 typedef int MPI_Request;
83 
84 typedef int MPI_Info;
85 #define MPI_INFO_NULL (MPI_Info)(0x60000000)
86 
87 struct MPI_Status
88 {
89  int junk;
90 };
91 
92 #endif
93 
94 #if defined(NDEBUG)
95 #define MPI_CHECK_ERROR( error ) ((void) error)
96 #else
97 #define MPI_CHECK_ERROR( error ) GEOS_ERROR_IF_NE( error, MPI_SUCCESS );
98 #endif
99 
100 
101 namespace geos
102 {
103 
104 /// Global MPI communicator used by GEOSX.
105 #ifdef GEOS_USE_MPI
106 extern MPI_Comm MPI_COMM_GEOS;
107 #else
108 extern int MPI_COMM_GEOS;
109 #endif
110 
120 struct MpiWrapper
121 {
122 public:
123 
128  enum class Reduction
129  {
130  Max,
131  Min,
132  Sum,
133  Prod,
134  LogicalAnd,
135  LogicalOr,
136  };
137 
142  enum class PairReduction
143  {
144  Max,
145  Min,
146  };
147 
153  template< typename FIRST, typename SECOND >
154  struct PairType
155  {
156  FIRST first;
157  SECOND second;
158  };
159 
160  MpiWrapper() = delete;
161 
175 
176  static void barrier( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );
177 
178  static int cartCoords( MPI_Comm comm, int rank, int maxdims, int coords[] );
179 
180  static int cartCreate( MPI_Comm comm_old, int ndims, const int dims[], const int periods[],
181  int reorder, MPI_Comm * comm_cart );
182 
183  static int cartRank( MPI_Comm comm, const int coords[] );
184 
185  static void commFree( MPI_Comm & comm );
186 
187  static int commRank( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );
188 
189  static int commSize( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );
190 
191  static bool commCompare( MPI_Comm const & comm1, MPI_Comm const & comm2 );
192 
193  static bool initialized();
194 
195  static int init( int * argc, char * * * argv );
196 
201  static void finalize();
202 
203  static MPI_Comm commDup( MPI_Comm const comm );
204 
205  static MPI_Comm commSplit( MPI_Comm const comm, int color, int key );
206 
207  static int test( MPI_Request * request, int * flag, MPI_Status * status );
208 
209  static int testAny( int count, MPI_Request array_of_requests[], int * idx, int * flags, MPI_Status array_of_statuses[] );
210 
211  static int testSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );
212 
213  static int testAll( int count, MPI_Request array_of_requests[], int * flags, MPI_Status array_of_statuses[] );
214 
221  static int check( MPI_Request * request, int * flag, MPI_Status * status );
222 
235  static int checkAny( int count, MPI_Request array_of_requests[], int * idx, int * flag, MPI_Status array_of_statuses[] );
236 
246  static int checkAll( int count, MPI_Request array_of_requests[], int * flag, MPI_Status array_of_statuses[] );
247 
248  static int wait( MPI_Request * request, MPI_Status * status );
249 
250  static int waitAny( int count, MPI_Request array_of_requests[], int * indx, MPI_Status array_of_statuses[] );
251 
252  static int waitSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );
253 
254  static int waitAll( int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[] );
255 
256  static double wtime( void );
257 
258 
268  static int activeWaitAny( const int count,
269  MPI_Request array_of_requests[],
270  MPI_Status array_of_statuses[],
271  std::function< MPI_Request ( int ) > func );
272 
282  static int activeWaitSome( const int count,
283  MPI_Request array_of_requests[],
284  MPI_Status array_of_statuses[],
285  std::function< MPI_Request ( int ) > func );
286 
299  static int activeWaitSomeCompletePhase( const int participants,
300  stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
301 
315  static int activeWaitOrderedCompletePhase( const int participants,
316  stdVector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
318 
319 #if !defined(GEOS_USE_MPI)
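 // Serial fallback bookkeeping: iSend/iRecv emulate point-to-point messaging by stashing the buffer pointer under its tag here until the matching call copies the data and erases the entry.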
320  static std::map< int, std::pair< int, void * > > & getTagToPointersMap()
321  {
322  static std::map< int, std::pair< int, void * > > tagToPointers;
323  return tagToPointers;
324  }
325 #endif
326 
331  static int nodeCommSize();
332 
344  template< typename T_SEND, typename T_RECV >
345  static int allgather( T_SEND const * sendbuf,
346  int sendcount,
347  T_RECV * recvbuf,
348  int recvcount,
349  MPI_Comm comm = MPI_COMM_GEOS );
350 
363  template< typename T_SEND, typename T_RECV >
364  static int allgatherv( T_SEND const * sendbuf,
365  int sendcount,
366  T_RECV * recvbuf,
367  int * recvcounts,
368  int * displacements,
369  MPI_Comm comm = MPI_COMM_GEOS );
370 
377  template< typename T >
378  static void allGather( T const myValue, array1d< T > & allValues, MPI_Comm comm = MPI_COMM_GEOS );
379 
380  template< typename T >
381  static int allGather( arrayView1d< T const > const & sendbuf,
382  array1d< T > & recvbuf,
383  MPI_Comm comm = MPI_COMM_GEOS );
384 
385  template< typename T >
386  static int allGatherv( arrayView1d< T const > const & sendbuf,
387  array1d< T > & recvbuf,
388  MPI_Comm comm = MPI_COMM_GEOS );
389 
398  template< typename T >
399  static T allReduce( T const & value, Reduction const op, MPI_Comm comm = MPI_COMM_GEOS );
400 
409  template< typename T >
410  static void allReduce( Span< T const > src, Span< T > dst, Reduction const op, MPI_Comm comm = MPI_COMM_GEOS );
411 
420  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
421  static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm = MPI_COMM_GEOS );
422 
432  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
433  static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm );
434 
444  template< typename FIRST, typename SECOND, PairReduction OP >
445  static PairType< FIRST, SECOND > allReduce( PairType< FIRST, SECOND > const & pair,
446  MPI_Comm comm = MPI_COMM_GEOS );
447 
458  template< typename FIRST, typename SECOND, typename CONTAINER, PairReduction OP >
459  static PairType< FIRST, SECOND > allReduce( CONTAINER const & pairs,
460  MPI_Comm comm = MPI_COMM_GEOS );
461 
471  template< typename T >
472  static int reduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, int root, MPI_Comm comm = MPI_COMM_GEOS );
473 
482  template< typename T >
483  static T reduce( T const & value, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOS );
484 
493  template< typename T >
494  static void reduce( Span< T const > src, Span< T > dst, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOS );
495 
496 
497  template< typename T >
498  static int scan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
499 
500  template< typename T >
501  static int exscan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
502 
511  template< typename T >
512  static int bcast( T * buffer, int count, int root, MPI_Comm comm = MPI_COMM_GEOS );
513 
514 
521  template< typename T >
522  static void broadcast( T & value, int srcRank = 0, MPI_Comm comm = MPI_COMM_GEOS );
523 
536  template< typename TS, typename TR >
537  static int gather( TS const * const sendbuf,
538  int sendcount,
539  TR * const recvbuf,
540  int recvcount,
541  int root,
542  MPI_Comm comm = MPI_COMM_GEOS );
543 
555  template< typename T, typename DST_CONTAINER,
556  typename = std::enable_if_t<
557  std::is_trivially_copyable_v< T > &&
558  std::is_same_v< decltype(std::declval< DST_CONTAINER >().size()), std::size_t > &&
559  std::is_same_v< decltype(std::declval< DST_CONTAINER >().data()), T * > > >
560  static int gather( T const & value,
561  DST_CONTAINER & destValuesBuffer,
562  int root,
563  MPI_Comm comm = MPI_COMM_GEOS );
564 
579  template< typename TS, typename TR >
580  static int gatherv( TS const * const sendbuf,
581  int sendcount,
582  TR * const recvbuf,
583  const int * recvcounts,
584  const int * displs,
585  int root,
586  MPI_Comm comm = MPI_COMM_GEOS );
587 
588 
601  template< typename TS, typename TR >
602  static int scatter( TS const * const sendbuf,
603  int sendcount,
604  TR * const recvbuf,
605  int recvcount,
606  int root,
607  MPI_Comm comm = MPI_COMM_GEOS );
608 
623  template< typename TS, typename TR >
624  static int scatterv( TS const * const sendbuf,
625  const int * sendcounts,
626  const int * displs,
627  TR * const recvbuf,
628  int recvcount,
629  int root,
630  MPI_Comm comm = MPI_COMM_GEOS );
631 
632 
633 
639  static MPI_Op getMpiOp( Reduction const op );
640 
641  template< typename T >
642  static int recv( array1d< T > & buf,
643  int MPI_PARAM( source ),
644  int tag,
645  MPI_Comm MPI_PARAM( comm ),
646  MPI_Status * MPI_PARAM( request ) );
647 
648  template< typename T >
649  static int iSend( arrayView1d< T > const & buf,
650  int MPI_PARAM( dest ),
651  int tag,
652  MPI_Comm MPI_PARAM( comm ),
653  MPI_Request * MPI_PARAM( request ) );
654 
665  template< typename T >
666  static int iRecv( T * const buf,
667  int count,
668  int source,
669  int tag,
670  MPI_Comm comm,
671  MPI_Request * request );
672 
682  template< typename T >
683  static int send( T const * const buf,
684  int count,
685  int dest,
686  int tag,
687  MPI_Comm comm );
688 
699  template< typename T >
700  static int iSend( T const * const buf,
701  int count,
702  int dest,
703  int tag,
704  MPI_Comm comm,
705  MPI_Request * request );
706 
714  template< typename U, typename T >
715  static U prefixSum( T const value, MPI_Comm comm = MPI_COMM_GEOS );
716 
722  template< typename T >
723  static T sum( T const & value, MPI_Comm comm = MPI_COMM_GEOS );
724 
731  template< typename T >
732  static void sum( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );
733 
739  template< typename T >
740  static T min( T const & value, MPI_Comm comm = MPI_COMM_GEOS );
741 
748  template< typename T >
749  static void min( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );
750 
757  template< typename FIRST, typename SECOND >
758  static PairType< FIRST, SECOND > min( PairType< FIRST, SECOND > const & pair, MPI_Comm comm = MPI_COMM_GEOS );
759 
766  template< typename FIRST, typename SECOND, typename CONTAINER >
767  static PairType< FIRST, SECOND > min( CONTAINER const & pairs, MPI_Comm comm = MPI_COMM_GEOS );
768 
774  template< typename T >
775  static T max( T const & value, MPI_Comm comm = MPI_COMM_GEOS );
776 
783  template< typename T >
784  static void max( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );
785 
792  template< typename FIRST, typename SECOND >
793  static PairType< FIRST, SECOND > max( PairType< FIRST, SECOND > const & pair, MPI_Comm comm = MPI_COMM_GEOS );
794 
801  template< typename FIRST, typename SECOND, typename CONTAINER >
802  static PairType< FIRST, SECOND > max( CONTAINER const & pairs, MPI_Comm comm = MPI_COMM_GEOS );
803 
804 private:
805 
815  template< typename T >
816  static int allReduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
817 };
818 
819 namespace internal
820 {
821 
823 struct ManagedResources
824 {
825  // The list of managed MPI_Op instances
826  std::set< MPI_Op > m_mpiOps;
827 
828  // The list of managed MPI_Type instances
829  std::set< MPI_Datatype > m_mpiTypes;
830 
835  void finalize();
836 };
837 
842 ManagedResources & getManagedResources();
843 
844 template< typename T, typename ENABLE = void >
845 struct MpiTypeImpl {};
846 
847 #define ADD_MPI_TYPE_MAP( T, MPI_T ) \
848  template<> struct MpiTypeImpl< T > { static MPI_Datatype get() { return MPI_T; } }
849 
850 ADD_MPI_TYPE_MAP( float, MPI_FLOAT );
851 ADD_MPI_TYPE_MAP( double, MPI_DOUBLE );
852 
853 ADD_MPI_TYPE_MAP( char, MPI_CHAR );
854 ADD_MPI_TYPE_MAP( signed char, MPI_SIGNED_CHAR );
855 ADD_MPI_TYPE_MAP( unsigned char, MPI_UNSIGNED_CHAR );
856 
857 ADD_MPI_TYPE_MAP( int, MPI_INT );
858 ADD_MPI_TYPE_MAP( long int, MPI_LONG );
859 ADD_MPI_TYPE_MAP( long long int, MPI_LONG_LONG );
860 
861 ADD_MPI_TYPE_MAP( unsigned int, MPI_UNSIGNED );
862 ADD_MPI_TYPE_MAP( unsigned long int, MPI_UNSIGNED_LONG );
863 ADD_MPI_TYPE_MAP( unsigned long long int, MPI_UNSIGNED_LONG_LONG );
864 
865 #undef ADD_MPI_TYPE_MAP
866 
867 template< typename T >
868 struct MpiTypeImpl< T, std::enable_if_t< std::is_enum< T >::value > >
869 {
870  static MPI_Datatype get() { return MpiTypeImpl< std::underlying_type_t< T > >::get(); }
871 };
872 
873 template<>
874 struct MpiTypeImpl< bool * >
875 {
876  static MPI_Datatype get()
877  {
878  // Return the appropriate MPI_Datatype for bool*
879  return MPI_BYTE;
880  }
881 };
882 
883 
884 template< typename T >
885 MPI_Datatype getMpiType()
886 {
887  return MpiTypeImpl< T >::get();
888 }
889 
890 template< typename FIRST, typename SECOND >
891 MPI_Datatype getMpiPairType()
892 {
893  static_assert( "no default implementation, please add a template specialization and add it in the \"testMpiWrapper\" unit test." );
894  return {};
895 }
896 template<> MPI_Datatype getMpiPairType< int, int >();
897 template<> MPI_Datatype getMpiPairType< long int, int >();
898 template<> MPI_Datatype getMpiPairType< long int, long int >();
899 template<> MPI_Datatype getMpiPairType< long long int, long long int >();
900 template<> MPI_Datatype getMpiPairType< float, int >();
901 template<> MPI_Datatype getMpiPairType< double, int >();
902 template<> MPI_Datatype getMpiPairType< double, long int >();
903 template<> MPI_Datatype getMpiPairType< double, long long int >();
904 template<> MPI_Datatype getMpiPairType< double, double >();
905 
906 // It is advised to always use this custom operator for pairs as MPI_MAXLOC is not a true lexicographical comparator.
907 template< typename FIRST, typename SECOND, MpiWrapper::PairReduction OP >
908 MPI_Op getMpiPairReductionOp()
909 {
910  static auto const createOpHolder = [] () {
911  using PAIR_T = MpiWrapper::PairType< FIRST, SECOND >;
912 
913  auto const customOpFunc =
914  []( void * invec, void * inoutvec, int * len, MPI_Datatype * )
915  {
916  for( int i = 0; i < *len; ++i )
917  {
918  PAIR_T & in = static_cast< PAIR_T * >(invec)[i];
919  PAIR_T & inout = static_cast< PAIR_T * >(inoutvec)[i];
920  if constexpr ( OP == MpiWrapper::PairReduction::Min )
921  {
922  if( std::tie( in.first, in.second ) < std::tie( inout.first, inout.second ) )
923  inout = in;
924  }
925  else
926  {
927  if( std::tie( in.first, in.second ) > std::tie( inout.first, inout.second ) )
928  inout = in;
929  }
930  }
931  };
932 
933  MPI_Op mpiOp;
934  GEOS_ERROR_IF_NE( MPI_Op_create( customOpFunc, 1, &mpiOp ), MPI_SUCCESS );
935  // Resource registered to be destroyed at MpiWrapper::finalize().
936  internal::getManagedResources().m_mpiOps.emplace( mpiOp );
937  return mpiOp;
938  };
939  // Static storage to ensure the MPI operation is created only once and reused for all calls to this function.
940  static MPI_Op mpiOp{ createOpHolder() };
941  return mpiOp;
942 }
943 
944 }
945 
946 inline MPI_Op MpiWrapper::getMpiOp( Reduction const op )
947 {
948  switch( op )
949  {
950  case Reduction::Sum:
951  {
952  return MPI_SUM;
953  }
954  case Reduction::Min:
955  {
956  return MPI_MIN;
957  }
958  case Reduction::Max:
959  {
960  return MPI_MAX;
961  }
962  case Reduction::Prod:
963  {
964  return MPI_PROD;
965  }
966  case Reduction::LogicalAnd:
967  {
968  return MPI_LAND;
969  }
970  case Reduction::LogicalOr:
971  {
972  return MPI_LOR;
973  }
974  default:
975  GEOS_ERROR( "Unsupported reduction operation" );
976  return MPI_NO_OP;
977  }
978 }
979 
980 template< typename T_SEND, typename T_RECV >
981 int MpiWrapper::allgather( T_SEND const * const sendbuf,
982  int sendcount,
983  T_RECV * const recvbuf,
984  int recvcount,
985  MPI_Comm MPI_PARAM( comm ) )
986 {
987 #ifdef GEOS_USE_MPI
988  return MPI_Allgather( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
989  recvbuf, recvcount, internal::getMpiType< T_RECV >(),
990  comm );
991 #else
992  static_assert( std::is_same< T_SEND, T_RECV >::value,
993  "MpiWrapper::allgather() for serial run requires send and receive buffers are of the same type" );
994  GEOS_ERROR_IF_NE_MSG( sendcount, recvcount, "sendcount is not equal to recvcount." );
995  std::copy( sendbuf, sendbuf + sendcount, recvbuf )
996  return 0;
997 #endif
998 }
999 
1000 template< typename T_SEND, typename T_RECV >
1001 int MpiWrapper::allgatherv( T_SEND const * const sendbuf,
1002  int sendcount,
1003  T_RECV * const recvbuf,
1004  int * recvcounts,
1005  int * displacements,
1006  MPI_Comm MPI_PARAM( comm ) )
1007 {
1008 #ifdef GEOS_USE_MPI
1009  return MPI_Allgatherv( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
1010  recvbuf, recvcounts, displacements, internal::getMpiType< T_RECV >(),
1011  comm );
1012 #else
1013  static_assert( std::is_same< T_SEND, T_RECV >::value,
1014  "MpiWrapper::allgatherv() for serial run requires send and receive buffers are of the same type" );
1015  GEOS_ERROR_IF_NE_MSG( sendcount, recvcount, "sendcount is not equal to recvcount." );
1016  std::copy( sendbuf, sendbuf + sendcount, recvbuf )
1017  return 0;
1018 #endif
1019 }
1020 
1021 
1022 template< typename T >
1023 void MpiWrapper::allGather( T const myValue, array1d< T > & allValues, MPI_Comm MPI_PARAM( comm ) )
1024 {
1025 #ifdef GEOS_USE_MPI
1026  int const mpiSize = commSize( comm );
1027  allValues.resize( mpiSize );
1028 
1029  MPI_Datatype const MPI_TYPE = internal::getMpiType< T >();
1030 
1031  MPI_Allgather( &myValue, 1, MPI_TYPE, allValues.data(), 1, MPI_TYPE, comm );
1032 
1033 #else
1034  allValues.resize( 1 );
1035  allValues[0] = myValue;
1036 #endif
1037 }
1038 
1039 template< typename T >
1040 int MpiWrapper::allGather( arrayView1d< T const > const & sendValues,
1041  array1d< T > & allValues,
1042  MPI_Comm MPI_PARAM( comm ) )
1043 {
1044  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
1045 #ifdef GEOS_USE_MPI
1046  int const mpiSize = commSize( comm );
1047  allValues.resize( mpiSize * sendSize );
1048  return MPI_Allgather( sendValues.data(),
1049  sendSize,
1050  internal::getMpiType< T >(),
1051  allValues.data(),
1052  sendSize,
1053  internal::getMpiType< T >(),
1054  comm );
1055 
1056 #else
1057  allValues.resize( sendSize );
1058  for( localIndex a=0; a<sendSize; ++a )
1059  {
1060  allValues[a] = sendValues[a];
1061  }
1062  return 0;
1063 #endif
1064 }
1065 
1066 template< typename T >
1067 int MpiWrapper::allGatherv( arrayView1d< T const > const & sendValues,
1068  array1d< T > & allValues,
1069  MPI_Comm MPI_PARAM( comm ) )
1070 {
1071  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
1072 #ifdef GEOS_USE_MPI
1073  int const mpiSize = commSize( comm );
1074  array1d< int > counts;
1075  allGather( sendSize, counts, comm );
1076  array1d< int > displs( mpiSize + 1 );
1077  std::partial_sum( counts.begin(), counts.end(), displs.begin() + 1 );
1078  allValues.resize( displs.back() );
1079  return MPI_Allgatherv( sendValues.data(),
1080  sendSize,
1081  internal::getMpiType< T >(),
1082  allValues.data(),
1083  counts.data(),
1084  displs.data(),
1085  internal::getMpiType< T >(),
1086  comm );
1087 
1088 #else
1089  allValues.resize( sendSize );
1090  for( localIndex a=0; a<sendSize; ++a )
1091  {
1092  allValues[a] = sendValues[a];
1093  }
1094  return 0;
1095 #endif
1096 }
1097 
1098 template< typename T >
1099 int MpiWrapper::allReduce( T const * const sendbuf,
1100  T * const recvbuf,
1101  int const count,
1102  MPI_Op const MPI_PARAM( op ),
1103  MPI_Comm const MPI_PARAM( comm ) )
1104 {
1105 #ifdef GEOS_USE_MPI
1106  MPI_Datatype const mpiType = internal::getMpiType< T >();
1107  return MPI_Allreduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, comm );
1108 #else
1109  if( sendbuf != recvbuf )
1110  {
1111  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
1112  }
1113  return 0;
1114 #endif
1115 }
1116 
1117 template< typename T >
1118 int MpiWrapper::reduce( T const * const sendbuf,
1119  T * const recvbuf,
1120  int const count,
1121  MPI_Op const MPI_PARAM( op ),
1122  int root,
1123  MPI_Comm const MPI_PARAM( comm ) )
1124 {
1125 #ifdef GEOS_USE_MPI
1126  MPI_Datatype const mpiType = internal::getMpiType< T >();
1127  return MPI_Reduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, root, comm );
1128 #else
1129  if( sendbuf != recvbuf )
1130  {
1131  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
1132  }
1133  return 0;
1134 #endif
1135 }
1136 
1137 template< typename T >
1138 int MpiWrapper::scan( T const * const sendbuf,
1139  T * const recvbuf,
1140  int count,
1141  MPI_Op MPI_PARAM( op ),
1142  MPI_Comm MPI_PARAM( comm ) )
1143 {
1144 #ifdef GEOS_USE_MPI
1145  return MPI_Scan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
1146 #else
1147  memcpy( recvbuf, sendbuf, count*sizeof(T) );
1148  return 0;
1149 #endif
1150 }
1151 
1152 template< typename T >
1153 int MpiWrapper::exscan( T const * const MPI_PARAM( sendbuf ),
1154  T * const recvbuf,
1155  int count,
1156  MPI_Op MPI_PARAM( op ),
1157  MPI_Comm MPI_PARAM( comm ) )
1158 {
1159 #ifdef GEOS_USE_MPI
1160  return MPI_Exscan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
1161 #else
1162  memset( recvbuf, 0, count*sizeof(T) );
1163  return 0;
1164 #endif
1165 }
1166 
1167 template< typename T >
1168 int MpiWrapper::bcast( T * const MPI_PARAM( buffer ),
1169  int MPI_PARAM( count ),
1170  int MPI_PARAM( root ),
1171  MPI_Comm MPI_PARAM( comm ) )
1172 {
1173 #ifdef GEOS_USE_MPI
1174  return MPI_Bcast( buffer, count, internal::getMpiType< T >(), root, comm );
1175 #else
1176  return 0;
1177 #endif
1178 
1179 }
1180 
1181 template< typename T >
1182 void MpiWrapper::broadcast( T & MPI_PARAM( value ), int MPI_PARAM( srcRank ), MPI_Comm MPI_PARAM( comm ) )
1183 {
1184 #ifdef GEOS_USE_MPI
1185  MPI_Bcast( &value, 1, internal::getMpiType< T >(), srcRank, comm );
1186 #endif
1187 }
1188 
1189 template<>
1190 inline
1191 void MpiWrapper::broadcast< string >( string & MPI_PARAM( value ),
1192  int MPI_PARAM( srcRank ),
1193  MPI_Comm MPI_PARAM( comm ) )
1194 {
1195 #ifdef GEOS_USE_MPI
1196  int size = LvArray::integerConversion< int >( value.size() );
1197  broadcast( size, srcRank, comm );
1198  value.resize( size );
1199  MPI_Bcast( const_cast< char * >( value.data() ), size, internal::getMpiType< char >(), srcRank, comm );
1200 #endif
1201 }
1202 
1203 template< typename TS, typename TR >
1204 int MpiWrapper::gather( TS const * const sendbuf,
1205  int sendcount,
1206  TR * const recvbuf,
1207  int recvcount,
1208  int MPI_PARAM( root ),
1209  MPI_Comm MPI_PARAM( comm ) )
1210 {
1211 #ifdef GEOS_USE_MPI
1212  return MPI_Gather( sendbuf, sendcount, internal::getMpiType< TS >(),
1213  recvbuf, recvcount, internal::getMpiType< TR >(),
1214  root, comm );
1215 #else
1216  static_assert( std::is_same< TS, TR >::value,
1217  "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
1218  std::size_t const sendBufferSize = sendcount * sizeof(TS);
1219  std::size_t const recvBufferSize = recvcount * sizeof(TR);
1220  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
1221  memcpy( recvbuf, sendbuf, sendBufferSize );
1222  return 0;
1223 #endif
1224 }
1225 
1226 template< typename T, typename DST_CONTAINER, typename >
1227 int MpiWrapper::gather( T const & value,
1228  DST_CONTAINER & destValuesBuffer,
1229  int root,
1230  MPI_Comm MPI_PARAM( comm ) )
1231 {
1232  if( commRank() == 0 )
1233  GEOS_ERROR_IF_LT_MSG( destValuesBuffer.size(), size_t( commSize() ),
1234  "Receive buffer is not large enough to contain the values to receive." );
1235 #ifdef GEOS_USE_MPI
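  // Gather raw bytes: any trivially copyable T is sent as sizeof( T ) MPI bytes, so no per-type MPI datatype is needed.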
1236  return MPI_Gather( &value, sizeof( T ), internal::getMpiType< uint8_t >(),
1237  destValuesBuffer.data(), sizeof( T ), internal::getMpiType< uint8_t >(),
1238  root, comm );
1239 #else
1240  memcpy( destValuesBuffer.data(), &value, sizeof( T ) );
1241  return 0;
1242 #endif
1243 }
1244 
1245 template< typename TS, typename TR >
1246 int MpiWrapper::gatherv( TS const * const sendbuf,
1247  int sendcount,
1248  TR * const recvbuf,
1249  const int * recvcounts,
1250  const int * MPI_PARAM( displs ),
1251  int MPI_PARAM( root ),
1252  MPI_Comm MPI_PARAM( comm ) )
1253 {
1254 #ifdef GEOS_USE_MPI
1255  return MPI_Gatherv( sendbuf, sendcount, internal::getMpiType< TS >(),
1256  recvbuf, recvcounts, displs, internal::getMpiType< TR >(),
1257  root, comm );
1258 #else
1259  static_assert( std::is_same< TS, TR >::value,
1260  "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
1261  std::size_t const sendBufferSize = sendcount * sizeof(TS);
1262  std::size_t const recvBufferSize = recvcounts[0] * sizeof(TR);
1263  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
1264  memcpy( recvbuf, sendbuf, sendBufferSize );
1265  return 0;
1266 #endif
1267 }
1268 
1269 
1270 template< typename TS, typename TR >
1271 int MpiWrapper::scatter( TS const * const sendbuf,
1272  int sendcount,
1273  TR * const recvbuf,
1274  int recvcount,
1275  int MPI_PARAM( root ),
1276  MPI_Comm MPI_PARAM( comm ))
1277 {
1278 #ifdef GEOS_USE_MPI
1279  return MPI_Scatter( sendbuf, sendcount, internal::getMpiType< TS >(),
1280  recvbuf, recvcount, internal::getMpiType< TR >(),
1281  root, comm );
1282 #else
1283  static_assert( std::is_same< TS, TR >::value,
1284  "MpiWrapper::scatter() for serial run requires send and receive buffers are of the same type" );
1285  std::size_t const sendBufferSize = sendcount * sizeof(TS);
1286  std::size_t const recvBufferSize = recvcount * sizeof(TR);
1287  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
1288  memcpy( recvbuf, sendbuf, sendBufferSize );
1289  return 0;
1290 #endif
1291 }
1292 
1293 template< typename TS, typename TR >
1294 int MpiWrapper::scatterv( TS const * const sendbuf,
1295  const int * sendcounts,
1296  const int * MPI_PARAM( displs ),
1297  TR * const recvbuf,
1298  int recvcount,
1299  int MPI_PARAM( root ),
1300  MPI_Comm MPI_PARAM( comm ))
1301 {
1302 #ifdef GEOS_USE_MPI
1303  return MPI_Scatterv( sendbuf, sendcounts, displs, internal::getMpiType< TS >(),
1304  recvbuf, recvcount, internal::getMpiType< TR >(),
1305  root, comm );
1306 #else
1307  static_assert( std::is_same< TS, TR >::value,
1308  "MpiWrapper::scatterv() for serial run requires send and receive buffers are of the same type" );
1309  std::size_t const sendBufferSize = sendcounts * sizeof(TS);
1310  std::size_t const recvBufferSize = recvcount * sizeof(TR);
1311  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
1312  memcpy( recvbuf, sendbuf, sendBufferSize );
1313  return 0;
1314 #endif
1315 }
1316 
1317 
1318 
1319 template< typename T >
1320 int MpiWrapper::iRecv( T * const buf,
1321  int count,
1322  int MPI_PARAM( source ),
1323  int tag,
1324  MPI_Comm MPI_PARAM( comm ),
1325  MPI_Request * MPI_PARAM( request ) )
1326 {
1327 #ifdef GEOS_USE_MPI
1328  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
1329  "Attempting to use an MPI_Request that is still in use." );
1330  return MPI_Irecv( buf, count, internal::getMpiType< T >(), source, tag, comm, request );
1331 #else
1332  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
1333  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );
1334 
1335  if( iPointer==pointerMap.end() )
1336  {
1337  pointerMap.insert( {tag, {1, buf} } );
1338  }
1339  else
1340  {
1341  GEOS_ERROR_IF( iPointer->second.first != 0,
1342  "Tag does is assigned, but pointer was not set by iSend." );
1343  memcpy( buf, iPointer->second.second, count*sizeof(T) );
1344  pointerMap.erase( iPointer );
1345  }
1346  return 0;
1347 #endif
1348 }
1349 
1350 template< typename T >
1351 int MpiWrapper::recv( array1d< T > & buf,
1352  int MPI_PARAM( source ),
1353  int tag,
1354  MPI_Comm MPI_PARAM( comm ),
1355  MPI_Status * MPI_PARAM( request ) )
1356 {
1357 #ifdef GEOS_USE_MPI
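  // Probe for the incoming message, size the receive array from its byte count, then do the blocking receive as raw chars.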
1358  MPI_Status status;
1359  int count;
1360  MPI_Probe( source, tag, comm, &status );
1361  MPI_Get_count( &status, MPI_CHAR, &count );
1362 
1363  GEOS_ASSERT_EQ( count % sizeof( T ), 0 );
1364  buf.resize( count / sizeof( T ) );
1365 
1366  return MPI_Recv( reinterpret_cast< char * >( buf.data() ),
1367  count,
1368  MPI_CHAR,
1369  source,
1370  tag,
1371  comm,
1372  request );
1373 #else
1374  GEOS_ERROR( "Not implemented!" );
1375  return MPI_SUCCESS;
1376 #endif
1377 }
1378 
1379 template< typename T >
1380 int MpiWrapper::iSend( arrayView1d< T > const & buf,
1381  int MPI_PARAM( dest ),
1382  int tag,
1383  MPI_Comm MPI_PARAM( comm ),
1384  MPI_Request * MPI_PARAM( request ) )
1385 {
1386 #ifdef GEOS_USE_MPI
1387  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
1388  "Attempting to use an MPI_Request that is still in use." );
1389  return MPI_Isend( reinterpret_cast< void const * >( buf.data() ),
1390  buf.size() * sizeof( T ),
1391  MPI_CHAR,
1392  dest,
1393  tag,
1394  comm,
1395  request );
1396 #else
1397  GEOS_ERROR( "Not implemented." );
1398  return MPI_SUCCESS;
1399 #endif
1400 }
1401 
1402 template< typename T >
1403 int MpiWrapper::send( T const * const buf,
1404  int count,
1405  int dest,
1406  int tag,
1407  MPI_Comm comm )
1408 {
1409 #ifdef GEOS_USE_MPI
1410  return MPI_Send( buf, count, internal::getMpiType< T >(), dest, tag, comm );
1411 #else
1412  GEOS_ERROR( "Not implemented without MPI" );
1413 #endif
1414 }
1415 
1416 template< typename T >
1417 int MpiWrapper::iSend( T const * const buf,
1418  int count,
1419  int MPI_PARAM( dest ),
1420  int tag,
1421  MPI_Comm MPI_PARAM( comm ),
1422  MPI_Request * MPI_PARAM( request ) )
1423 {
1424 #ifdef GEOS_USE_MPI
1425  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
1426  "Attempting to use an MPI_Request that is still in use." );
1427  return MPI_Isend( buf, count, internal::getMpiType< T >(), dest, tag, comm, request );
1428 #else
1429  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
1430  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );
1431 
1432  if( iPointer==pointerMap.end() )
1433  {
1434  pointerMap.insert( { tag, { 0, const_cast< T * >( buf ) } } );
1436  }
1437  else
1438  {
1439  GEOS_ERROR_IF( iPointer->second.first != 1,
1440  "Tag does is assigned, but pointer was not set by iRecv." );
1441  memcpy( iPointer->second.second, buf, count*sizeof(T) );
1442  pointerMap.erase( iPointer );
1443  }
1444  return 0;
1445 #endif
1446 }
1447 
1448 template< typename U, typename T >
1449 U MpiWrapper::prefixSum( T const value, MPI_Comm comm )
1450 {
1451  U localResult;
1452 
1453 #ifdef GEOS_USE_MPI
1454  U const convertedValue = value;
1455  int const error = MPI_Exscan( &convertedValue, &localResult, 1, internal::getMpiType< U >(), MPI_SUM, comm );
1456  MPI_CHECK_ERROR( error );
1457 #endif
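  // MPI_Exscan leaves the receive value undefined on rank 0, so the exclusive prefix sum is explicitly zeroed there.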
1458  if( commRank() == 0 )
1459  {
1460  localResult = 0;
1461  }
1462 
1463  return localResult;
1464 }
1465 
1466 
1467 template< typename T >
1468 T MpiWrapper::allReduce( T const & value, Reduction const op, MPI_Comm const comm )
1469 {
1470  T result;
1471  allReduce( &value, &result, 1, getMpiOp( op ), comm );
1472  return result;
1473 }
1474 
1475 template< typename T >
1476 void MpiWrapper::allReduce( Span< T const > const src, Span< T > const dst, Reduction const op, MPI_Comm const comm )
1477 {
1478  GEOS_ASSERT_EQ( src.size(), dst.size() );
1479  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
1480 }
1481 
1482 template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
1483 void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm )
1484 {
1485  static_assert( std::is_trivially_copyable< typename get_value_type< SRC_CONTAINER_TYPE >::type >::value,
1486  "The type in the source container must be trivially copyable." );
1487  static_assert( std::is_trivially_copyable< typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
1488  "The type in the destination container must be trivially copyable." );
1489  static_assert( std::is_same< typename get_value_type< SRC_CONTAINER_TYPE >::type,
1491  "Source and destination containers must have the same value type." );
1492  GEOS_ASSERT_GE( src.size(), count );
1493  GEOS_ASSERT_GE( dst.size(), count );
1494  allReduce( src.data(), dst.data(), count, getMpiOp( op ), comm );
1495 }
1496 
1497 template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
1498 void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm )
1499 {
1500  static_assert( std::is_trivially_copyable< typename get_value_type< SRC_CONTAINER_TYPE >::type >::value,
1501  "The type in the source container must be trivially copyable." );
1502  static_assert( std::is_trivially_copyable< typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
1503  "The type in the destination container must be trivially copyable." );
1504  static_assert( std::is_same< typename get_value_type< SRC_CONTAINER_TYPE >::type,
1506  "Source and destination containers must have the same value type." );
1507  GEOS_ASSERT_EQ( src.size(), dst.size() );
1508  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
1509 }
1510 
1511 template< typename T >
1512 T MpiWrapper::sum( T const & value, MPI_Comm comm )
1513 {
1514  return MpiWrapper::allReduce( value, Reduction::Sum, comm );
1515 }
1516 
1517 template< typename T >
1518 void MpiWrapper::sum( Span< T const > src, Span< T > dst, MPI_Comm comm )
1519 {
1520  MpiWrapper::allReduce( src, dst, Reduction::Sum, comm );
1521 }
1522 
1523 template< typename T >
1524 T MpiWrapper::min( T const & value, MPI_Comm comm )
1525 {
1526  return MpiWrapper::allReduce( value, Reduction::Min, comm );
1527 }
1528 
1529 template< typename T >
1530 void MpiWrapper::min( Span< T const > src, Span< T > dst, MPI_Comm comm )
1531 {
1532  MpiWrapper::allReduce( src, dst, Reduction::Min, comm );
1533 }
1534 
1535 template< typename T >
1536 T MpiWrapper::max( T const & value, MPI_Comm comm )
1537 {
1538  return MpiWrapper::allReduce( value, Reduction::Max, comm );
1539 }
1540 
1541 template< typename T >
1542 void MpiWrapper::max( Span< T const > src, Span< T > dst, MPI_Comm comm )
1543 {
1544  MpiWrapper::allReduce( src, dst, Reduction::Max, comm );
1545 }
1546 
1547 
1548 template< typename T >
1549 T MpiWrapper::reduce( T const & value, Reduction const op, int root, MPI_Comm const comm )
1550 {
1551  T result;
1552  reduce( &value, &result, 1, getMpiOp( op ), root, comm );
1553  return result;
1554 }
1555 
1556 template< typename T >
1557 void MpiWrapper::reduce( Span< T const > const src, Span< T > const dst, Reduction const op, int root, MPI_Comm const comm )
1558 {
1559  GEOS_ASSERT_EQ( src.size(), dst.size() );
1560  reduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), root, comm );
1561 }
1562 
1563 template< typename FIRST, typename SECOND, MpiWrapper::PairReduction const OP >
1564 MpiWrapper::PairType< FIRST, SECOND >
1565 MpiWrapper::allReduce( PairType< FIRST, SECOND > const & localPair, MPI_Comm comm )
1566 {
1567 #ifdef GEOS_USE_MPI
1568  auto const type = internal::getMpiPairType< FIRST, SECOND >();
1569  auto const mpiOp = internal::getMpiPairReductionOp< FIRST, SECOND, OP >();
1570  PairType< FIRST, SECOND > pair{ localPair.first, localPair.second };
1571  MPI_Allreduce( MPI_IN_PLACE, &pair, 1, type, mpiOp, comm );
1572  return pair;
1573 #else
1574  return localPair;
1575 #endif
1576 }
1577 
1578 template< typename FIRST, typename SECOND, typename CONTAINER, MpiWrapper::PairReduction const OP >
1579 MpiWrapper::PairType< FIRST, SECOND >
1580 MpiWrapper::allReduce( CONTAINER const & pairs, MPI_Comm const comm )
1581 {
1582  using PAIR_T = PairType< FIRST, SECOND >;
1583  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMin = []( PAIR_T const & a, PAIR_T const & b ) {
1584  return ( std::tie( a.first, a.second ) < std::tie( b.first, b.second ) ) ? a : b;
1585  };
1586  std::function< PAIR_T( PAIR_T, PAIR_T ) > const getMax = []( PAIR_T const & a, PAIR_T const & b ) {
1587  return ( std::tie( a.first, a.second ) > std::tie( b.first, b.second ) ) ? a : b;
1588  };
1589  PAIR_T const defaultPair{
1590  OP == PairReduction::Min ? std::numeric_limits< FIRST >::max() : std::numeric_limits< FIRST >::lowest(),
1591  OP == PairReduction::Min ? std::numeric_limits< SECOND >::max() : std::numeric_limits< SECOND >::lowest()
1592  };
1593  // based on the operation, pair will be the minimum / maximum element (or defaultPair if pairs is empty)
1594  PAIR_T pair = std::accumulate( pairs.begin(), pairs.end(), defaultPair,
1595  OP == PairReduction::Min ? getMin : getMax );
1596  return allReduce< FIRST, SECOND, OP >( pair, comm );
1597 }
1598 
1599 template< typename FIRST, typename SECOND >
1600 MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
1601 { return allReduce< FIRST, SECOND, PairReduction::Min >( pair, comm ); }
1602 
1603 template< typename FIRST, typename SECOND, typename CONTAINER >
1604 MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::min( CONTAINER const & pairs, MPI_Comm comm )
1605 { return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Min >( pairs, comm ); }
1606 
1607 template< typename FIRST, typename SECOND >
1608 MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( PairType< FIRST, SECOND > const & pair, MPI_Comm comm )
1609 { return allReduce< FIRST, SECOND, PairReduction::Max >( pair, comm ); }
1610 
1611 template< typename FIRST, typename SECOND, typename CONTAINER >
1612 MpiWrapper::PairType< FIRST, SECOND > MpiWrapper::max( CONTAINER const & pairs, MPI_Comm comm )
1613 { return allReduce< FIRST, SECOND, CONTAINER, PairReduction::Max >( pairs, comm ); }
1614 
1615 } /* namespace geos */
1616 
1617 #endif /* GEOS_COMMON_MPIWRAPPER_HPP_ */
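
Below is a minimal, illustrative usage sketch of the reduction, pair-reduction, and broadcast helpers declared in this header. It is not part of MpiWrapper.hpp; the function and variable names (computeStep, localResidual, iterationCount) and the constant 10 are placeholders, and the sketch assumes the header is on the include path of a GEOS build.

#include "common/MpiWrapper.hpp"

using namespace geos;

void computeStep( double const localResidual )
{
  // Global sum and max of a per-rank scalar over the default communicator MPI_COMM_GEOS.
  double const totalResidual = MpiWrapper::sum( localResidual );
  double const worstResidual = MpiWrapper::max( localResidual );

  // Lexicographic pair reduction: the smallest residual and the rank that owns it.
  MpiWrapper::PairType< double, int > const minLoc =
    MpiWrapper::min( MpiWrapper::PairType< double, int >{ localResidual, MpiWrapper::commRank() } );

  // Broadcast a value chosen on rank 0 to every rank.
  int iterationCount = ( MpiWrapper::commRank() == 0 ) ? 10 : 0;
  MpiWrapper::broadcast( iterationCount );

  // Results are unused here; a real caller would feed them into convergence checks or logging.
  (void)totalResidual; (void)worstResidual; (void)minLoc;
}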