GEOS
MpiWrapper.hpp
Go to the documentation of this file.
1 /*
2  * ------------------------------------------------------------------------------------------------------------
3  * SPDX-License-Identifier: LGPL-2.1-only
4  *
5  * Copyright (c) 2016-2024 Lawrence Livermore National Security LLC
6  * Copyright (c) 2018-2024 TotalEnergies
7  * Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University
8  * Copyright (c) 2023-2024 Chevron
9  * Copyright (c) 2019- GEOS/GEOSX Contributors
10  * All rights reserved
11  *
12  * See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details.
13  * ------------------------------------------------------------------------------------------------------------
14  */
15 
20 #ifndef GEOS_COMMON_MPIWRAPPER_HPP_
21 #define GEOS_COMMON_MPIWRAPPER_HPP_
22 
23 #include "common/DataTypes.hpp"
24 #include "common/Span.hpp"
25 #include "common/TypesHelpers.hpp"
26 
#if defined(GEOS_USE_MPI)
  #include <mpi.h>
/// Expands to its argument in MPI builds; parameters wrapped in it are named only when MPI is on.
#define MPI_PARAM( x ) x
#else
/// In serial builds the wrapped parameter is left unnamed (it is unused) to avoid warnings.
#define MPI_PARAM( x )

// Minimal stand-ins for the MPI types and constants used by this header so that
// serial (non-MPI) builds still compile.  The numeric values mirror MPICH's
// handle encodings; only identity matters here, they are never dereferenced.
typedef int MPI_Comm;

#define MPI_COMM_NULL ((MPI_Comm)0x04000000)
#define MPI_COMM_WORLD ((MPI_Comm)0x44000000)
#define MPI_COMM_SELF ((MPI_Comm)0x40000000)


// Datatype handles for the fundamental types mapped in internal::MpiTypeImpl below.
typedef int MPI_Datatype;
#define MPI_CHAR ((MPI_Datatype)0x4c000101)
#define MPI_SIGNED_CHAR ((MPI_Datatype)0x4c000118)
#define MPI_UNSIGNED_CHAR ((MPI_Datatype)0x4c000102)
#define MPI_BYTE ((MPI_Datatype)0x4c00010d)
#define MPI_WCHAR ((MPI_Datatype)0x4c00040e)
#define MPI_SHORT ((MPI_Datatype)0x4c000203)
#define MPI_UNSIGNED_SHORT ((MPI_Datatype)0x4c000204)
#define MPI_INT ((MPI_Datatype)0x4c000405)
#define MPI_UNSIGNED ((MPI_Datatype)0x4c000406)
#define MPI_LONG ((MPI_Datatype)0x4c000807)
#define MPI_UNSIGNED_LONG ((MPI_Datatype)0x4c000808)
#define MPI_FLOAT ((MPI_Datatype)0x4c00040a)
#define MPI_DOUBLE ((MPI_Datatype)0x4c00080b)
#define MPI_LONG_DOUBLE ((MPI_Datatype)0x4c00100c)
#define MPI_LONG_LONG_INT ((MPI_Datatype)0x4c000809)
#define MPI_UNSIGNED_LONG_LONG ((MPI_Datatype)0x4c000819)
#define MPI_LONG_LONG MPI_LONG_LONG_INT

// Reduction-operation handles consumed by MpiWrapper::getMpiOp().
typedef int MPI_Op;

#define MPI_MAX (MPI_Op)(0x58000001)
#define MPI_MIN (MPI_Op)(0x58000002)
#define MPI_SUM (MPI_Op)(0x58000003)
#define MPI_PROD (MPI_Op)(0x58000004)
#define MPI_LAND (MPI_Op)(0x58000005)
#define MPI_BAND (MPI_Op)(0x58000006)
#define MPI_LOR (MPI_Op)(0x58000007)
#define MPI_BOR (MPI_Op)(0x58000008)
#define MPI_LXOR (MPI_Op)(0x58000009)
#define MPI_BXOR (MPI_Op)(0x5800000a)
#define MPI_MINLOC (MPI_Op)(0x5800000b)
#define MPI_MAXLOC (MPI_Op)(0x5800000c)
#define MPI_REPLACE (MPI_Op)(0x5800000d)
#define MPI_NO_OP (MPI_Op)(0x5800000e)

#define MPI_SUCCESS 0 /* Successful return code */
#define MPI_UNDEFINED (-32766)
#define MPI_STATUS_IGNORE (MPI_Status *)1
#define MPI_STATUSES_IGNORE (MPI_Status *)1
#define MPI_REQUEST_NULL ((MPI_Request)0x2c000000)
typedef int MPI_Request;

typedef int MPI_Info;
#define MPI_INFO_NULL (MPI_Info)(0x60000000)

/// Dummy status type for serial builds; never inspected, so it carries a placeholder member.
struct MPI_Status
{
  int junk;
};

#endif

// In release (NDEBUG) builds the MPI return code is discarded; in debug builds
// any code other than MPI_SUCCESS is a hard error.
#if defined(NDEBUG)
#define MPI_CHECK_ERROR( error ) ((void) error)
#else
#define MPI_CHECK_ERROR( error ) GEOS_ERROR_IF_NE( error, MPI_SUCCESS );
#endif
97 
98 
99 namespace geos
100 {
101 
103 #ifdef GEOS_USE_MPI
104 extern MPI_Comm MPI_COMM_GEOS;
105 #else
106 extern int MPI_COMM_GEOS;
107 #endif
108 
119 {
120 public:
121 
  /// Strongly typed reduction operations; translated to an MPI_Op by getMpiOp().
  enum class Reduction
  {
    Max,        ///< Maximum (MPI_MAX)
    Min,        ///< Minimum (MPI_MIN)
    Sum,        ///< Sum (MPI_SUM)
    Prod,       ///< Product (MPI_PROD)
    LogicalAnd, ///< Logical and (MPI_LAND)
    LogicalOr,  ///< Logical or (MPI_LOR)
  };
135 
136  MpiWrapper() = delete;
137 
151 
152  static void barrier( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );
153 
154  static int cartCoords( MPI_Comm comm, int rank, int maxdims, int coords[] );
155 
156  static int cartCreate( MPI_Comm comm_old, int ndims, const int dims[], const int periods[],
157  int reorder, MPI_Comm * comm_cart );
158 
159  static int cartRank( MPI_Comm comm, const int coords[] );
160 
161  static void commFree( MPI_Comm & comm );
162 
163  static int commRank( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );
164 
165  static int commSize( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOS );
166 
167  static bool commCompare( MPI_Comm const & comm1, MPI_Comm const & comm2 );
168 
169  static bool initialized();
170 
171  static int init( int * argc, char * * * argv );
172 
173  static void finalize();
174 
175  static MPI_Comm commDup( MPI_Comm const comm );
176 
177  static MPI_Comm commSplit( MPI_Comm const comm, int color, int key );
178 
179  static int test( MPI_Request * request, int * flag, MPI_Status * status );
180 
181  static int testAny( int count, MPI_Request array_of_requests[], int * idx, int * flags, MPI_Status array_of_statuses[] );
182 
183  static int testSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );
184 
185  static int testAll( int count, MPI_Request array_of_requests[], int * flags, MPI_Status array_of_statuses[] );
186 
193  static int check( MPI_Request * request, int * flag, MPI_Status * status );
194 
207  static int checkAny( int count, MPI_Request array_of_requests[], int * idx, int * flag, MPI_Status array_of_statuses[] );
208 
218  static int checkAll( int count, MPI_Request array_of_requests[], int * flag, MPI_Status array_of_statuses[] );
219 
220  static int wait( MPI_Request * request, MPI_Status * status );
221 
222  static int waitAny( int count, MPI_Request array_of_requests[], int * indx, MPI_Status array_of_statuses[] );
223 
224  static int waitSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );
225 
226  static int waitAll( int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[] );
227 
228  static double wtime( void );
229 
230 
240  static int activeWaitAny( const int count,
241  MPI_Request array_of_requests[],
242  MPI_Status array_of_statuses[],
243  std::function< MPI_Request ( int ) > func );
244 
254  static int activeWaitSome( const int count,
255  MPI_Request array_of_requests[],
256  MPI_Status array_of_statuses[],
257  std::function< MPI_Request ( int ) > func );
258 
271  static int activeWaitSomeCompletePhase( const int participants,
272  std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
273 
287  static int activeWaitOrderedCompletePhase( const int participants,
288  std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
290 
#if !defined(GEOS_USE_MPI)
  /// Serial-build bookkeeping: maps a message tag to a (direction flag, buffer pointer)
  /// pair so matching iSend/iRecv calls can exchange data via memcpy.
  /// The flag is 0 when the entry was created by iSend, 1 when created by iRecv.
  static std::map< int, std::pair< int, void * > > & getTagToPointersMap()
  {
    // Function-local static: a single shared map for the whole process.
    static std::map< int, std::pair< int, void * > > tagToPointers;
    return tagToPointers;
  }
#endif
298 
303  static int nodeCommSize();
304 
316  template< typename T_SEND, typename T_RECV >
317  static int allgather( T_SEND const * sendbuf,
318  int sendcount,
319  T_RECV * recvbuf,
320  int recvcount,
321  MPI_Comm comm = MPI_COMM_GEOS );
322 
335  template< typename T_SEND, typename T_RECV >
336  static int allgatherv( T_SEND const * sendbuf,
337  int sendcount,
338  T_RECV * recvbuf,
339  int * recvcounts,
340  int * displacements,
341  MPI_Comm comm = MPI_COMM_GEOS );
342 
349  template< typename T >
350  static void allGather( T const myValue, array1d< T > & allValues, MPI_Comm comm = MPI_COMM_GEOS );
351 
352  template< typename T >
353  static int allGather( arrayView1d< T const > const & sendbuf,
354  array1d< T > & recvbuf,
355  MPI_Comm comm = MPI_COMM_GEOS );
356 
365  template< typename T >
366  static T allReduce( T const & value, Reduction const op, MPI_Comm comm = MPI_COMM_GEOS );
367 
376  template< typename T >
377  static void allReduce( Span< T const > src, Span< T > dst, Reduction const op, MPI_Comm comm = MPI_COMM_GEOS );
378 
387  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
388  static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm = MPI_COMM_GEOS );
389 
399  template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
400  static void allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm );
401 
402 
412  template< typename T >
413  static int reduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, int root, MPI_Comm comm = MPI_COMM_GEOS );
414 
423  template< typename T >
424  static T reduce( T const & value, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOS );
425 
434  template< typename T >
435  static void reduce( Span< T const > src, Span< T > dst, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOS );
436 
437 
438  template< typename T >
439  static int scan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
440 
441  template< typename T >
442  static int exscan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
443 
452  template< typename T >
453  static int bcast( T * buffer, int count, int root, MPI_Comm comm = MPI_COMM_GEOS );
454 
455 
462  template< typename T >
463  static void broadcast( T & value, int srcRank = 0, MPI_Comm comm = MPI_COMM_GEOS );
464 
477  template< typename TS, typename TR >
478  static int gather( TS const * const sendbuf,
479  int sendcount,
480  TR * const recvbuf,
481  int recvcount,
482  int root,
483  MPI_Comm comm = MPI_COMM_GEOS );
484 
496  template< typename T, typename DST_CONTAINER,
497  typename = std::enable_if_t<
498  std::is_trivially_copyable_v< T > &&
499  std::is_same_v< decltype(std::declval< DST_CONTAINER >().size()), std::size_t > &&
500  std::is_same_v< decltype(std::declval< DST_CONTAINER >().data()), T * > > >
501  static int gather( T const & value,
502  DST_CONTAINER & destValuesBuffer,
503  int root,
504  MPI_Comm comm = MPI_COMM_GEOS );
505 
520  template< typename TS, typename TR >
521  static int gatherv( TS const * const sendbuf,
522  int sendcount,
523  TR * const recvbuf,
524  const int * recvcounts,
525  const int * displs,
526  int root,
527  MPI_Comm comm = MPI_COMM_GEOS );
528 
534  static MPI_Op getMpiOp( Reduction const op );
535 
536  template< typename T >
537  static int recv( array1d< T > & buf,
538  int MPI_PARAM( source ),
539  int tag,
540  MPI_Comm MPI_PARAM( comm ),
541  MPI_Status * MPI_PARAM( request ) );
542 
543  template< typename T >
544  static int iSend( arrayView1d< T > const & buf,
545  int MPI_PARAM( dest ),
546  int tag,
547  MPI_Comm MPI_PARAM( comm ),
548  MPI_Request * MPI_PARAM( request ) );
549 
560  template< typename T >
561  static int iRecv( T * const buf,
562  int count,
563  int source,
564  int tag,
565  MPI_Comm comm,
566  MPI_Request * request );
567 
578  template< typename T >
579  static int iSend( T const * const buf,
580  int count,
581  int dest,
582  int tag,
583  MPI_Comm comm,
584  MPI_Request * request );
585 
593  template< typename U, typename T >
594  static U prefixSum( T const value, MPI_Comm comm = MPI_COMM_GEOS );
595 
601  template< typename T >
602  static T sum( T const & value, MPI_Comm comm = MPI_COMM_GEOS );
603 
610  template< typename T >
611  static void sum( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );
612 
618  template< typename T >
619  static T min( T const & value, MPI_Comm comm = MPI_COMM_GEOS );
620 
627  template< typename T >
628  static void min( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );
629 
635  template< typename T >
636  static T max( T const & value, MPI_Comm comm = MPI_COMM_GEOS );
637 
644  template< typename T >
645  static void max( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOS );
646 
647 
654  template< typename T > static T maxValLoc( T localValueLocation, MPI_Comm comm = MPI_COMM_GEOS );
655 
656 private:
657 
667  template< typename T >
668  static int allReduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOS );
669 };
670 
namespace internal
{

/// Primary template mapping a C++ type to its MPI_Datatype; only the
/// specializations below (and the enum partial specialization) are usable.
template< typename T, typename ENABLE = void >
struct MpiTypeImpl {};

/// Generates one MpiTypeImpl specialization per (C++ type, MPI datatype) pair.
#define ADD_MPI_TYPE_MAP( T, MPI_T ) \
  template<> struct MpiTypeImpl< T > { static MPI_Datatype get() { return MPI_T; } }

ADD_MPI_TYPE_MAP( float, MPI_FLOAT );
ADD_MPI_TYPE_MAP( double, MPI_DOUBLE );

ADD_MPI_TYPE_MAP( char, MPI_CHAR );
ADD_MPI_TYPE_MAP( signed char, MPI_SIGNED_CHAR );
ADD_MPI_TYPE_MAP( unsigned char, MPI_UNSIGNED_CHAR );

ADD_MPI_TYPE_MAP( int, MPI_INT );
ADD_MPI_TYPE_MAP( long int, MPI_LONG );
ADD_MPI_TYPE_MAP( long long int, MPI_LONG_LONG );

ADD_MPI_TYPE_MAP( unsigned int, MPI_UNSIGNED );
ADD_MPI_TYPE_MAP( unsigned long int, MPI_UNSIGNED_LONG );
ADD_MPI_TYPE_MAP( unsigned long long int, MPI_UNSIGNED_LONG_LONG );

#undef ADD_MPI_TYPE_MAP

/// Enum types map to the MPI datatype of their underlying integer type.
template< typename T >
struct MpiTypeImpl< T, std::enable_if_t< std::is_enum< T >::value > >
{
  static MPI_Datatype get() { return MpiTypeImpl< std::underlying_type_t< T > >::get(); }
};

/// @return the MPI_Datatype corresponding to T; fails to compile for unmapped types.
template< typename T >
MPI_Datatype getMpiType()
{
  return MpiTypeImpl< T >::get();
}

}
710 
711 inline MPI_Op MpiWrapper::getMpiOp( Reduction const op )
712 {
713  switch( op )
714  {
715  case Reduction::Sum:
716  {
717  return MPI_SUM;
718  }
719  case Reduction::Min:
720  {
721  return MPI_MIN;
722  }
723  case Reduction::Max:
724  {
725  return MPI_MAX;
726  }
727  case Reduction::Prod:
728  {
729  return MPI_PROD;
730  }
732  {
733  return MPI_LAND;
734  }
736  {
737  return MPI_LOR;
738  }
739  default:
740  GEOS_ERROR( "Unsupported reduction operation" );
741  return MPI_NO_OP;
742  }
743 }
744 
745 template< typename T_SEND, typename T_RECV >
746 int MpiWrapper::allgather( T_SEND const * const sendbuf,
747  int sendcount,
748  T_RECV * const recvbuf,
749  int recvcount,
750  MPI_Comm MPI_PARAM( comm ) )
751 {
752 #ifdef GEOS_USE_MPI
753  return MPI_Allgather( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
754  recvbuf, recvcount, internal::getMpiType< T_RECV >(),
755  comm );
756 #else
757  static_assert( std::is_same< T_SEND, T_RECV >::value,
758  "MpiWrapper::allgather() for serial run requires send and receive buffers are of the same type" );
759  GEOS_ERROR_IF_NE_MSG( sendcount, recvcount, "sendcount is not equal to recvcount." );
760  std::copy( sendbuf, sendbuf + sendcount, recvbuf )
761  return 0;
762 #endif
763 }
764 
765 template< typename T_SEND, typename T_RECV >
766 int MpiWrapper::allgatherv( T_SEND const * const sendbuf,
767  int sendcount,
768  T_RECV * const recvbuf,
769  int * recvcounts,
770  int * displacements,
771  MPI_Comm MPI_PARAM( comm ) )
772 {
773 #ifdef GEOS_USE_MPI
774  return MPI_Allgatherv( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
775  recvbuf, recvcounts, displacements, internal::getMpiType< T_RECV >(),
776  comm );
777 #else
778  static_assert( std::is_same< T_SEND, T_RECV >::value,
779  "MpiWrapper::allgatherv() for serial run requires send and receive buffers are of the same type" );
780  GEOS_ERROR_IF_NE_MSG( sendcount, recvcount, "sendcount is not equal to recvcount." );
781  std::copy( sendbuf, sendbuf + sendcount, recvbuf )
782  return 0;
783 #endif
784 }
785 
786 
787 template< typename T >
788 void MpiWrapper::allGather( T const myValue, array1d< T > & allValues, MPI_Comm MPI_PARAM( comm ) )
789 {
790 #ifdef GEOS_USE_MPI
791  int const mpiSize = commSize( comm );
792  allValues.resize( mpiSize );
793 
794  MPI_Datatype const MPI_TYPE = internal::getMpiType< T >();
795 
796  MPI_Allgather( &myValue, 1, MPI_TYPE, allValues.data(), 1, MPI_TYPE, comm );
797 
798 #else
799  allValues.resize( 1 );
800  allValues[0] = myValue;
801 #endif
802 }
803 
804 template< typename T >
805 int MpiWrapper::allGather( arrayView1d< T const > const & sendValues,
806  array1d< T > & allValues,
807  MPI_Comm MPI_PARAM( comm ) )
808 {
809  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
810 #ifdef GEOS_USE_MPI
811  int const mpiSize = commSize( comm );
812  allValues.resize( mpiSize * sendSize );
813  return MPI_Allgather( sendValues.data(),
814  sendSize,
815  internal::getMpiType< T >(),
816  allValues.data(),
817  sendSize,
818  internal::getMpiType< T >(),
819  comm );
820 
821 #else
822  allValues.resize( sendSize );
823  for( localIndex a=0; a<sendSize; ++a )
824  {
825  allValues[a] = sendValues[a];
826  }
827  return 0;
828 #endif
829 }
830 
831 template< typename T >
832 int MpiWrapper::allReduce( T const * const sendbuf,
833  T * const recvbuf,
834  int const count,
835  MPI_Op const MPI_PARAM( op ),
836  MPI_Comm const MPI_PARAM( comm ) )
837 {
838 #ifdef GEOS_USE_MPI
839  MPI_Datatype const mpiType = internal::getMpiType< T >();
840  return MPI_Allreduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, comm );
841 #else
842  if( sendbuf != recvbuf )
843  {
844  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
845  }
846  return 0;
847 #endif
848 }
849 
850 template< typename T >
851 int MpiWrapper::reduce( T const * const sendbuf,
852  T * const recvbuf,
853  int const count,
854  MPI_Op const MPI_PARAM( op ),
855  int root,
856  MPI_Comm const MPI_PARAM( comm ) )
857 {
858 #ifdef GEOS_USE_MPI
859  MPI_Datatype const mpiType = internal::getMpiType< T >();
860  return MPI_Reduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, root, comm );
861 #else
862  if( sendbuf != recvbuf )
863  {
864  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
865  }
866  return 0;
867 #endif
868 }
869 
870 template< typename T >
871 int MpiWrapper::scan( T const * const sendbuf,
872  T * const recvbuf,
873  int count,
874  MPI_Op MPI_PARAM( op ),
875  MPI_Comm MPI_PARAM( comm ) )
876 {
877 #ifdef GEOS_USE_MPI
878  return MPI_Scan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
879 #else
880  memcpy( recvbuf, sendbuf, count*sizeof(T) );
881  return 0;
882 #endif
883 }
884 
885 template< typename T >
886 int MpiWrapper::exscan( T const * const MPI_PARAM( sendbuf ),
887  T * const recvbuf,
888  int count,
889  MPI_Op MPI_PARAM( op ),
890  MPI_Comm MPI_PARAM( comm ) )
891 {
892 #ifdef GEOS_USE_MPI
893  return MPI_Exscan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
894 #else
895  memset( recvbuf, 0, count*sizeof(T) );
896  return 0;
897 #endif
898 }
899 
900 template< typename T >
901 int MpiWrapper::bcast( T * const MPI_PARAM( buffer ),
902  int MPI_PARAM( count ),
903  int MPI_PARAM( root ),
904  MPI_Comm MPI_PARAM( comm ) )
905 {
906 #ifdef GEOS_USE_MPI
907  return MPI_Bcast( buffer, count, internal::getMpiType< T >(), root, comm );
908 #else
909  return 0;
910 #endif
911 
912 }
913 
914 template< typename T >
915 void MpiWrapper::broadcast( T & MPI_PARAM( value ), int MPI_PARAM( srcRank ), MPI_Comm MPI_PARAM( comm ) )
916 {
917 #ifdef GEOS_USE_MPI
918  MPI_Bcast( &value, 1, internal::getMpiType< T >(), srcRank, comm );
919 #endif
920 }
921 
922 template<>
923 inline
924 void MpiWrapper::broadcast< string >( string & MPI_PARAM( value ),
925  int MPI_PARAM( srcRank ),
926  MPI_Comm MPI_PARAM( comm ) )
927 {
928 #ifdef GEOS_USE_MPI
929  int size = LvArray::integerConversion< int >( value.size() );
930  broadcast( size, srcRank, comm );
931  value.resize( size );
932  MPI_Bcast( const_cast< char * >( value.data() ), size, internal::getMpiType< char >(), srcRank, comm );
933 #endif
934 }
935 
936 template< typename TS, typename TR >
937 int MpiWrapper::gather( TS const * const sendbuf,
938  int sendcount,
939  TR * const recvbuf,
940  int recvcount,
941  int MPI_PARAM( root ),
942  MPI_Comm MPI_PARAM( comm ) )
943 {
944 #ifdef GEOS_USE_MPI
945  return MPI_Gather( sendbuf, sendcount, internal::getMpiType< TS >(),
946  recvbuf, recvcount, internal::getMpiType< TR >(),
947  root, comm );
948 #else
949  static_assert( std::is_same< TS, TR >::value,
950  "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
951  std::size_t const sendBufferSize = sendcount * sizeof(TS);
952  std::size_t const recvBufferSize = recvcount * sizeof(TR);
953  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
954  memcpy( recvbuf, sendbuf, sendBufferSize );
955  return 0;
956 #endif
957 }
958 
959 template< typename T, typename DST_CONTAINER, typename >
960 int MpiWrapper::gather( T const & value,
961  DST_CONTAINER & destValuesBuffer,
962  int root,
963  MPI_Comm MPI_PARAM( comm ) )
964 {
965  if( commRank() == 0 )
966  GEOS_ERROR_IF_LT_MSG( destValuesBuffer.size(), size_t( commSize() ),
967  "Receive buffer is not large enough to contain the values to receive." );
968 #ifdef GEOS_USE_MPI
969  return MPI_Gather( &value, sizeof( T ), internal::getMpiType< uint8_t >(),
970  destValuesBuffer.data(), sizeof( T ), internal::getMpiType< uint8_t >(),
971  root, comm );
972 #else
973  memcpy( destValuesBuffer.data(), &value, sendBufferSize );
974  return 0;
975 #endif
976 }
977 
978 template< typename TS, typename TR >
979 int MpiWrapper::gatherv( TS const * const sendbuf,
980  int sendcount,
981  TR * const recvbuf,
982  const int * recvcounts,
983  const int * MPI_PARAM( displs ),
984  int MPI_PARAM( root ),
985  MPI_Comm MPI_PARAM( comm ) )
986 {
987 #ifdef GEOS_USE_MPI
988  return MPI_Gatherv( sendbuf, sendcount, internal::getMpiType< TS >(),
989  recvbuf, recvcounts, displs, internal::getMpiType< TR >(),
990  root, comm );
991 #else
992  static_assert( std::is_same< TS, TR >::value,
993  "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
994  std::size_t const sendBufferSize = sendcount * sizeof(TS);
995  std::size_t const recvBufferSize = recvcounts[0] * sizeof(TR);
996  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
997  memcpy( recvbuf, sendbuf, sendBufferSize );
998  return 0;
999 #endif
1000 }
1001 
1002 template< typename T >
1003 int MpiWrapper::iRecv( T * const buf,
1004  int count,
1005  int MPI_PARAM( source ),
1006  int tag,
1007  MPI_Comm MPI_PARAM( comm ),
1008  MPI_Request * MPI_PARAM( request ) )
1009 {
1010 #ifdef GEOS_USE_MPI
1011  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
1012  "Attempting to use an MPI_Request that is still in use." );
1013  return MPI_Irecv( buf, count, internal::getMpiType< T >(), source, tag, comm, request );
1014 #else
1015  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
1016  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );
1017 
1018  if( iPointer==pointerMap.end() )
1019  {
1020  pointerMap.insert( {tag, {1, buf} } );
1021  }
1022  else
1023  {
1024  GEOS_ERROR_IF( iPointer->second.first != 0,
1025  "Tag does is assigned, but pointer was not set by iSend." );
1026  memcpy( buf, iPointer->second.second, count*sizeof(T) );
1027  pointerMap.erase( iPointer );
1028  }
1029  return 0;
1030 #endif
1031 }
1032 
/// Blocking receive of an array1d whose length is determined by probing the
/// incoming message.  The data is transferred as raw MPI_CHAR bytes.
/// @note The final parameter is named `request` but is an MPI_Status* passed
///       straight to MPI_Recv — presumably a historical misnomer; confirm
///       before renaming in the declaration.
template< typename T >
int MpiWrapper::recv( array1d< T > & buf,
                      int MPI_PARAM( source ),
                      int tag,
                      MPI_Comm MPI_PARAM( comm ),
                      MPI_Status * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  MPI_Status status;
  int count;
  // Probe first so the buffer can be sized from the actual message length.
  MPI_Probe( source, tag, comm, &status );
  MPI_Get_count( &status, MPI_CHAR, &count );

  // The byte count must be a whole number of T's.
  GEOS_ASSERT_EQ( count % sizeof( T ), 0 );
  buf.resize( count / sizeof( T ) );

  return MPI_Recv( reinterpret_cast< char * >( buf.data() ),
                   count,
                   MPI_CHAR,
                   source,
                   tag,
                   comm,
                   request );
#else
  GEOS_ERROR( "Not implemented!" );
  return MPI_SUCCESS;
#endif
}
1061 
/// Non-blocking send of an arrayView1d, transferred as raw MPI_CHAR bytes so
/// the receiver can size its buffer via MPI_Probe/MPI_Get_count (see recv()).
/// The request must be MPI_REQUEST_NULL on entry (i.e. not still in flight).
template< typename T >
int MpiWrapper::iSend( arrayView1d< T > const & buf,
                       int MPI_PARAM( dest ),
                       int tag,
                       MPI_Comm MPI_PARAM( comm ),
                       MPI_Request * MPI_PARAM( request ) )
{
#ifdef GEOS_USE_MPI
  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
                 "Attempting to use an MPI_Request that is still in use." );
  // Count is in bytes: buf.size() * sizeof(T) with datatype MPI_CHAR.
  return MPI_Isend( reinterpret_cast< void const * >( buf.data() ),
                    buf.size() * sizeof( T ),
                    MPI_CHAR,
                    dest,
                    tag,
                    comm,
                    request );
#else
  GEOS_ERROR( "Not implemented." );
  return MPI_SUCCESS;
#endif
}
1084 
1085 template< typename T >
1086 int MpiWrapper::iSend( T const * const buf,
1087  int count,
1088  int MPI_PARAM( dest ),
1089  int tag,
1090  MPI_Comm MPI_PARAM( comm ),
1091  MPI_Request * MPI_PARAM( request ) )
1092 {
1093 #ifdef GEOS_USE_MPI
1094  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
1095  "Attempting to use an MPI_Request that is still in use." );
1096  return MPI_Isend( buf, count, internal::getMpiType< T >(), dest, tag, comm, request );
1097 #else
1098  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
1099  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );
1100 
1101  if( iPointer==pointerMap.end() )
1102  {
1103  pointerMap.insert( {tag, {0, const_cast< T * >(buf)}
1104  } );
1105  }
1106  else
1107  {
1108  GEOS_ERROR_IF( iPointer->second.first != 1,
1109  "Tag does is assigned, but pointer was not set by iRecv." );
1110  memcpy( iPointer->second.second, buf, count*sizeof(T) );
1111  pointerMap.erase( iPointer );
1112  }
1113  return 0;
1114 #endif
1115 }
1116 
1117 template< typename U, typename T >
1118 U MpiWrapper::prefixSum( T const value, MPI_Comm comm )
1119 {
1120  U localResult;
1121 
1122 #ifdef GEOS_USE_MPI
1123  U const convertedValue = value;
1124  int const error = MPI_Exscan( &convertedValue, &localResult, 1, internal::getMpiType< U >(), MPI_SUM, comm );
1125  MPI_CHECK_ERROR( error );
1126 #endif
1127  if( commRank() == 0 )
1128  {
1129  localResult = 0;
1130  }
1131 
1132  return localResult;
1133 }
1134 
1135 
1136 template< typename T >
1137 T MpiWrapper::allReduce( T const & value, Reduction const op, MPI_Comm const comm )
1138 {
1139  T result;
1140  allReduce( &value, &result, 1, getMpiOp( op ), comm );
1141  return result;
1142 }
1143 
1144 template< typename T >
1145 void MpiWrapper::allReduce( Span< T const > const src, Span< T > const dst, Reduction const op, MPI_Comm const comm )
1146 {
1147  GEOS_ASSERT_EQ( src.size(), dst.size() );
1148  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
1149 }
1150 
1151 template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
1152 void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, int const count, Reduction const op, MPI_Comm const comm )
1153 {
1154  static_assert( std::is_trivially_copyable< typename get_value_type< SRC_CONTAINER_TYPE >::type >::value,
1155  "The type in the source container must be trivially copyable." );
1156  static_assert( std::is_trivially_copyable< typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
1157  "The type in the destination container must be trivially copyable." );
1158  static_assert( std::is_same< typename get_value_type< SRC_CONTAINER_TYPE >::type,
1160  "Source and destination containers must have the same value type." );
1161  GEOS_ASSERT_GE( src.size(), count );
1162  GEOS_ASSERT_GE( dst.size(), count );
1163  allReduce( src.data(), dst.data(), count, getMpiOp( op ), comm );
1164 }
1165 
1166 template< typename SRC_CONTAINER_TYPE, typename DST_CONTAINER_TYPE >
1167 void MpiWrapper::allReduce( SRC_CONTAINER_TYPE const & src, DST_CONTAINER_TYPE & dst, Reduction const op, MPI_Comm const comm )
1168 {
1169  static_assert( std::is_trivially_copyable< typename get_value_type< SRC_CONTAINER_TYPE >::type >::value,
1170  "The type in the source container must be trivially copyable." );
1171  static_assert( std::is_trivially_copyable< typename get_value_type< DST_CONTAINER_TYPE >::type >::value,
1172  "The type in the destination container must be trivially copyable." );
1173  static_assert( std::is_same< typename get_value_type< SRC_CONTAINER_TYPE >::type,
1175  "Source and destination containers must have the same value type." );
1176  GEOS_ASSERT_EQ( src.size(), dst.size() );
1177  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
1178 }
1179 
1180 template< typename T >
1181 T MpiWrapper::sum( T const & value, MPI_Comm comm )
1182 {
1183  return MpiWrapper::allReduce( value, Reduction::Sum, comm );
1184 }
1185 
1186 template< typename T >
1187 void MpiWrapper::sum( Span< T const > src, Span< T > dst, MPI_Comm comm )
1188 {
1189  MpiWrapper::allReduce( src, dst, Reduction::Sum, comm );
1190 }
1191 
1192 template< typename T >
1193 T MpiWrapper::min( T const & value, MPI_Comm comm )
1194 {
1195  return MpiWrapper::allReduce( value, Reduction::Min, comm );
1196 }
1197 
1198 template< typename T >
1199 void MpiWrapper::min( Span< T const > src, Span< T > dst, MPI_Comm comm )
1200 {
1201  MpiWrapper::allReduce( src, dst, Reduction::Min, comm );
1202 }
1203 
1204 template< typename T >
1205 T MpiWrapper::max( T const & value, MPI_Comm comm )
1206 {
1207  return MpiWrapper::allReduce( value, Reduction::Max, comm );
1208 }
1209 
1210 template< typename T >
1211 void MpiWrapper::max( Span< T const > src, Span< T > dst, MPI_Comm comm )
1212 {
1213  MpiWrapper::allReduce( src, dst, Reduction::Max, comm );
1214 }
1215 
1216 
1217 template< typename T >
1218 T MpiWrapper::reduce( T const & value, Reduction const op, int root, MPI_Comm const comm )
1219 {
1220  T result;
1221  reduce( &value, &result, 1, getMpiOp( op ), root, comm );
1222  return result;
1223 }
1224 
1225 template< typename T >
1226 void MpiWrapper::reduce( Span< T const > const src, Span< T > const dst, Reduction const op, int root, MPI_Comm const comm )
1227 {
1228  GEOS_ASSERT_EQ( src.size(), dst.size() );
1229  reduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), root, comm );
1230 }
1231 
1232 // Mpi helper function to return struct containing the max value and location across ranks
1233 template< typename T >
1234 T MpiWrapper::maxValLoc( T localValueLocation, MPI_Comm comm )
1235 {
1236  // Ensure T is trivially copyable
1237  static_assert( std::is_trivially_copyable< T >::value, "maxValLoc requires a trivially copyable type" );
1238 
1239  // T to have only 2 data members named value and location
1240  static_assert( (sizeof(T::value)+sizeof(T::location)) == sizeof(T) );
1241 
1242  // Ensure T has value and location members are scalars
1243  static_assert( std::is_scalar_v< decltype(T::value) > || std::is_scalar_v< decltype(T::location) >, "members of struct should be scalar" );
1244  static_assert( !std::is_pointer_v< decltype(T::value) > && !std::is_pointer_v< decltype(T::location) >, "members of struct should not be pointers" );
1245 
1246  // receive "buffer"
1247  int const numProcs = commSize( comm );
1248  std::vector< T > recvValLoc( numProcs );
1249 
1250  MPI_Allgather( &localValueLocation, sizeof(T), MPI_BYTE, recvValLoc.data(), sizeof(T), MPI_BYTE, comm );
1251 
1252  T maxValLoc= *std::max_element( recvValLoc.begin(),
1253  recvValLoc.end(),
1254  []( auto & lhs, auto & rhs ) -> bool {return lhs.value < rhs.value; } );
1255 
1256  return maxValLoc;
1257 }
1258 } /* namespace geos */
1259 
1260 #endif /* GEOS_COMMON_MPIWRAPPER_HPP_ */
#define GEOS_ERROR(msg)
Raise a hard error and terminate the program.
Definition: Logger.hpp:157
#define GEOS_ERROR_IF(EXP, msg)
Conditionally raise a hard error and terminate the program.
Definition: Logger.hpp:142
#define GEOS_ASSERT_GE(lhs, rhs)
Assert that one value compares greater than or equal to the other in debug builds.
Definition: Logger.hpp:455
#define GEOS_ERROR_IF_LT_MSG(lhs, rhs, msg)
Raise a hard error if one value compares less than the other.
Definition: Logger.hpp:339
#define GEOS_ERROR_IF_NE_MSG(lhs, rhs, msg)
Raise a hard error if two values are not equal.
Definition: Logger.hpp:243
#define GEOS_ASSERT_EQ(lhs, rhs)
Assert that two values compare equal in debug builds.
Definition: Logger.hpp:410
Lightweight non-owning wrapper over a contiguous range of elements.
Definition: Span.hpp:42
constexpr T * data() const noexcept
Definition: Span.hpp:131
constexpr size_type size() const noexcept
Definition: Span.hpp:107
ArrayView< T, 1 > arrayView1d
Alias for 1D array view.
Definition: DataTypes.hpp:180
int MPI_COMM_GEOS
Global MPI communicator used by GEOSX.
GEOS_LOCALINDEX_TYPE localIndex
Local index type (for indexing objects within an MPI partition).
Definition: DataTypes.hpp:85
std::size_t size_t
Unsigned size type.
Definition: DataTypes.hpp:79
Array< T, 1 > array1d
Alias for 1D array.
Definition: DataTypes.hpp:176
static MPI_Op getMpiOp(Reduction const op)
Returns an MPI_Op associated with our strongly typed Reduction enum.
Definition: MpiWrapper.hpp:711
static int activeWaitSomeCompletePhase(const int participants, std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request(int) > > > const &phases)
static int checkAll(int count, MPI_Request array_of_requests[], int *flag, MPI_Status array_of_statuses[])
static int allgatherv(T_SEND const *sendbuf, int sendcount, T_RECV *recvbuf, int *recvcounts, int *displacements, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Allgatherv.
static int activeWaitOrderedCompletePhase(const int participants, std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request(int) > > > const &phases)
static int check(MPI_Request *request, int *flag, MPI_Status *status)
static int activeWaitAny(const int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[], std::function< MPI_Request(int) > func)
static int iSend(T const *const buf, int count, int dest, int tag, MPI_Comm comm, MPI_Request *request)
Strongly typed wrapper around MPI_Isend()
static int bcast(T *buffer, int count, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Bcast.
static void allGather(T const myValue, array1d< T > &allValues, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for MPI_Allgather.
static T max(T const &value, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for a MPI_Allreduce using a MPI_MAX operation.
static int allgather(T_SEND const *sendbuf, int sendcount, T_RECV *recvbuf, int recvcount, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Allgather.
static int activeWaitSome(const int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[], std::function< MPI_Request(int) > func)
static U prefixSum(T const value, MPI_Comm comm=MPI_COMM_GEOS)
Compute exclusive prefix sum and full sum.
static int gather(T const &value, DST_CONTAINER &destValuesBuffer, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Gather().
static T allReduce(T const &value, Reduction const op, MPI_Comm comm=MPI_COMM_GEOS)
Convenience wrapper for the MPI_Allreduce function.
static T maxValLoc(T localValueLocation, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function that finds the maximum value and its location across ranks via MPI_Allgather.
static int checkAny(int count, MPI_Request array_of_requests[], int *idx, int *flag, MPI_Status array_of_statuses[])
static int gather(TS const *const sendbuf, int sendcount, TR *const recvbuf, int recvcount, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Gather().
static int iRecv(T *const buf, int count, int source, int tag, MPI_Comm comm, MPI_Request *request)
Strongly typed wrapper around MPI_Irecv()
static T sum(T const &value, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for a MPI_Allreduce using a MPI_SUM operation.
static T min(T const &value, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for a MPI_Allreduce using a MPI_MIN operation.
static int nodeCommSize()
Compute the number of ranks allocated on the same node.
static int gatherv(TS const *const sendbuf, int sendcount, TR *const recvbuf, const int *recvcounts, const int *displs, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Gatherv.
static void broadcast(T &value, int srcRank=0, MPI_Comm comm=MPI_COMM_GEOS)
Convenience function for MPI_Broadcast.
static int reduce(T const *sendbuf, T *recvbuf, int count, MPI_Op op, int root, MPI_Comm comm=MPI_COMM_GEOS)
Strongly typed wrapper around MPI_Reduce.
Trait to retrieve the value_type or ValueType of a type T.