GEOSX
MpiWrapper.hpp
1 /*
2  * ------------------------------------------------------------------------------------------------------------
3  * SPDX-License-Identifier: LGPL-2.1-only
4  *
5  * Copyright (c) 2018-2020 Lawrence Livermore National Security LLC
6  * Copyright (c) 2018-2020 The Board of Trustees of the Leland Stanford Junior University
7  * Copyright (c) 2018-2020 TotalEnergies
8  * Copyright (c) 2019- GEOSX Contributors
9  * All rights reserved
10  *
11  * See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details.
12  * ------------------------------------------------------------------------------------------------------------
13  */
18 #ifndef GEOS_COMMON_MPIWRAPPER_HPP_
19 #define GEOS_COMMON_MPIWRAPPER_HPP_
20 
21 #include "common/DataTypes.hpp"
22 #include "common/Span.hpp"
23 
24 #if defined(GEOSX_USE_MPI)
25  #include <mpi.h>
26 #define MPI_PARAM( x ) x
27 #else
28 #define MPI_PARAM( x )
29 typedef int MPI_Comm;
30 
31 #define MPI_COMM_NULL ((MPI_Comm)0x04000000)
32 #define MPI_COMM_WORLD ((MPI_Comm)0x44000000)
33 #define MPI_COMM_SELF ((MPI_Comm)0x40000000)
34 
35 
36 typedef int MPI_Datatype;
37 #define MPI_CHAR ((MPI_Datatype)0x4c000101)
38 #define MPI_SIGNED_CHAR ((MPI_Datatype)0x4c000118)
39 #define MPI_UNSIGNED_CHAR ((MPI_Datatype)0x4c000102)
40 #define MPI_BYTE ((MPI_Datatype)0x4c00010d)
41 #define MPI_WCHAR ((MPI_Datatype)0x4c00040e)
42 #define MPI_SHORT ((MPI_Datatype)0x4c000203)
43 #define MPI_UNSIGNED_SHORT ((MPI_Datatype)0x4c000204)
44 #define MPI_INT ((MPI_Datatype)0x4c000405)
45 #define MPI_UNSIGNED ((MPI_Datatype)0x4c000406)
46 #define MPI_LONG ((MPI_Datatype)0x4c000807)
47 #define MPI_UNSIGNED_LONG ((MPI_Datatype)0x4c000808)
48 #define MPI_FLOAT ((MPI_Datatype)0x4c00040a)
49 #define MPI_DOUBLE ((MPI_Datatype)0x4c00080b)
50 #define MPI_LONG_DOUBLE ((MPI_Datatype)0x4c00100c)
51 #define MPI_LONG_LONG_INT ((MPI_Datatype)0x4c000809)
52 #define MPI_UNSIGNED_LONG_LONG ((MPI_Datatype)0x4c000819)
53 #define MPI_LONG_LONG MPI_LONG_LONG_INT
54 
55 typedef int MPI_Op;
56 
57 #define MPI_MAX (MPI_Op)(0x58000001)
58 #define MPI_MIN (MPI_Op)(0x58000002)
59 #define MPI_SUM (MPI_Op)(0x58000003)
60 #define MPI_PROD (MPI_Op)(0x58000004)
61 #define MPI_LAND (MPI_Op)(0x58000005)
62 #define MPI_BAND (MPI_Op)(0x58000006)
63 #define MPI_LOR (MPI_Op)(0x58000007)
64 #define MPI_BOR (MPI_Op)(0x58000008)
65 #define MPI_LXOR (MPI_Op)(0x58000009)
66 #define MPI_BXOR (MPI_Op)(0x5800000a)
67 #define MPI_MINLOC (MPI_Op)(0x5800000b)
68 #define MPI_MAXLOC (MPI_Op)(0x5800000c)
69 #define MPI_REPLACE (MPI_Op)(0x5800000d)
70 #define MPI_NO_OP (MPI_Op)(0x5800000e)
71 
72 #define MPI_SUCCESS 0 /* Successful return code */
73 #define MPI_UNDEFINED (-32766)
74 #define MPI_STATUS_IGNORE (MPI_Status *)1
75 #define MPI_STATUSES_IGNORE (MPI_Status *)1
76 #define MPI_REQUEST_NULL ((MPI_Request)0x2c000000)
77 typedef int MPI_Request;
78 
79 typedef int MPI_Info;
80 #define MPI_INFO_NULL (MPI_Info)(0x60000000)
81 
82 struct MPI_Status
83 {
84  int junk;
85 };
86 
87 #endif
88 
89 #if defined(NDEBUG)
90 #define MPI_CHECK_ERROR( error ) ((void) error)
91 #else
92 #define MPI_CHECK_ERROR( error ) GEOS_ERROR_IF_NE( error, MPI_SUCCESS );
93 #endif
94 
95 
96 namespace geos
97 {
98 
108 struct MpiWrapper
109 {
110 public:
111 
116  enum class Reduction
117  {
118  Max,
119  Min,
120  Sum,
121  Prod,
122  };
123 
124  MpiWrapper() = delete;
125 
139 
140  static void barrier( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOSX );
141 
142  static int cartCoords( MPI_Comm comm, int rank, int maxdims, int coords[] );
143 
144  static int cartCreate( MPI_Comm comm_old, int ndims, const int dims[], const int periods[],
145  int reorder, MPI_Comm * comm_cart );
146 
147  static int cartRank( MPI_Comm comm, const int coords[] );
148 
149  static void commFree( MPI_Comm & comm );
150 
151  static int commRank( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOSX );
152 
153  static int commSize( MPI_Comm const & MPI_PARAM( comm )=MPI_COMM_GEOSX );
154 
155  static bool commCompare( MPI_Comm const & comm1, MPI_Comm const & comm2 );
156 
157  static bool initialized();
158 
159  static int init( int * argc, char * * * argv );
160 
161  static void finalize();
162 
163  static MPI_Comm commDup( MPI_Comm const comm );
164 
165  static MPI_Comm commSplit( MPI_Comm const comm, int color, int key );
166 
167  static int test( MPI_Request * request, int * flag, MPI_Status * status );
168 
169  static int testAny( int count, MPI_Request array_of_requests[], int * idx, int * flags, MPI_Status array_of_statuses[] );
170 
171  static int testSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );
172 
173  static int testAll( int count, MPI_Request array_of_requests[], int * flags, MPI_Status array_of_statuses[] );
174 
181  static int check( MPI_Request * request, int * flag, MPI_Status * status );
182 
195  static int checkAny( int count, MPI_Request array_of_requests[], int * idx, int * flag, MPI_Status array_of_statuses[] );
196 
206  static int checkAll( int count, MPI_Request array_of_requests[], int * flag, MPI_Status array_of_statuses[] );
207 
208  static int wait( MPI_Request * request, MPI_Status * status );
209 
210  static int waitAny( int count, MPI_Request array_of_requests[], int * indx, MPI_Status array_of_statuses[] );
211 
212  static int waitSome( int count, MPI_Request array_of_requests[], int * outcount, int array_of_indices[], MPI_Status array_of_statuses[] );
213 
214  static int waitAll( int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[] );
215 
216  static double wtime( void );
217 
218 
228  static int activeWaitAny( const int count,
229  MPI_Request array_of_requests[],
230  MPI_Status array_of_statuses[],
231  std::function< MPI_Request ( int ) > func );
232 
242  static int activeWaitSome( const int count,
243  MPI_Request array_of_requests[],
244  MPI_Status array_of_statuses[],
245  std::function< MPI_Request ( int ) > func );
246 
259  static int activeWaitSomeCompletePhase( const int participants,
260  std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
261 
275  static int activeWaitOrderedCompletePhase( const int participants,
276  std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request ( int ) > > > const & phases );
278 
279 #if !defined(GEOSX_USE_MPI)
280  static std::map< int, std::pair< int, void * > > & getTagToPointersMap()
281  {
282  static std::map< int, std::pair< int, void * > > tagToPointers;
283  return tagToPointers;
284  }
285 #endif
286 
291  static int nodeCommSize();
292 
304  template< typename T_SEND, typename T_RECV >
305  static int allgather( T_SEND const * sendbuf,
306  int sendcount,
307  T_RECV * recvbuf,
308  int recvcount,
309  MPI_Comm comm );
310 
323  template< typename T_SEND, typename T_RECV >
324  static int allgatherv( T_SEND const * sendbuf,
325  int sendcount,
326  T_RECV * recvbuf,
327  int * recvcounts,
328  int * displacements,
329  MPI_Comm comm );
330 
337  template< typename T >
338  static void allGather( T const myValue, array1d< T > & allValues, MPI_Comm comm = MPI_COMM_GEOSX );
339 
340  template< typename T >
341  static int allGather( arrayView1d< T const > const & sendbuf,
342  array1d< T > & recvbuf,
343  MPI_Comm comm = MPI_COMM_GEOSX );
344 
354  template< typename T >
355  static int allReduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm = MPI_COMM_GEOSX );
356 
365  template< typename T >
366  static T allReduce( T const & value, Reduction const op, MPI_Comm comm = MPI_COMM_GEOSX );
367 
376  template< typename T >
377  static void allReduce( Span< T const > src, Span< T > dst, Reduction const op, MPI_Comm comm = MPI_COMM_GEOSX );
378 
379 
389  template< typename T >
390  static int reduce( T const * sendbuf, T * recvbuf, int count, MPI_Op op, int root, MPI_Comm comm = MPI_COMM_GEOSX );
391 
400  template< typename T >
401  static T reduce( T const & value, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOSX );
402 
411  template< typename T >
412  static void reduce( Span< T const > src, Span< T > dst, Reduction const op, int root, MPI_Comm comm = MPI_COMM_GEOSX );
413 
414 
415  template< typename T >
416  static int scan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm );
417 
418  template< typename T >
419  static int exscan( T const * sendbuf, T * recvbuf, int count, MPI_Op op, MPI_Comm comm );
420 
429  template< typename T >
430  static int bcast( T * buffer, int count, int root, MPI_Comm comm );
431 
432 
439  template< typename T >
440  static void broadcast( T & value, int srcRank = 0, MPI_Comm comm = MPI_COMM_GEOSX );
441 
454  template< typename TS, typename TR >
455  static int gather( TS const * const sendbuf,
456  int sendcount,
457  TR * const recvbuf,
458  int recvcount,
459  int root,
460  MPI_Comm comm );
461 
476  template< typename TS, typename TR >
477  static int gatherv( TS const * const sendbuf,
478  int sendcount,
479  TR * const recvbuf,
480  const int * recvcounts,
481  const int * displs,
482  int root,
483  MPI_Comm comm );
484 
490  static MPI_Op getMpiOp( Reduction const op );
491 
492  template< typename T >
493  static int recv( array1d< T > & buf,
494  int MPI_PARAM( source ),
495  int tag,
496  MPI_Comm MPI_PARAM( comm ),
497  MPI_Status * MPI_PARAM( request ) );
498 
499  template< typename T >
500  static int iSend( arrayView1d< T > const & buf,
501  int MPI_PARAM( dest ),
502  int tag,
503  MPI_Comm MPI_PARAM( comm ),
504  MPI_Request * MPI_PARAM( request ) );
505 
516  template< typename T >
517  static int iRecv( T * const buf,
518  int count,
519  int source,
520  int tag,
521  MPI_Comm comm,
522  MPI_Request * request );
523 
534  template< typename T >
535  static int iSend( T const * const buf,
536  int count,
537  int dest,
538  int tag,
539  MPI_Comm comm,
540  MPI_Request * request );
541 
549  template< typename U, typename T >
550  static U prefixSum( T const value, MPI_Comm comm = MPI_COMM_GEOSX );
551 
557  template< typename T >
558  static T sum( T const & value, MPI_Comm comm = MPI_COMM_GEOSX );
559 
566  template< typename T >
567  static void sum( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOSX );
568 
574  template< typename T >
575  static T min( T const & value, MPI_Comm comm = MPI_COMM_GEOSX );
576 
583  template< typename T >
584  static void min( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOSX );
585 
591  template< typename T >
592  static T max( T const & value, MPI_Comm comm = MPI_COMM_GEOSX );
593 
600  template< typename T >
601  static void max( Span< T const > src, Span< T > dst, MPI_Comm comm = MPI_COMM_GEOSX );
602 };
603 
604 namespace internal
605 {
606 
607 template< typename T, typename ENABLE = void >
608 struct MpiTypeImpl {};
609 
610 #define ADD_MPI_TYPE_MAP( T, MPI_T ) \
611  template<> struct MpiTypeImpl< T > { static MPI_Datatype get() { return MPI_T; } }
612 
613 ADD_MPI_TYPE_MAP( float, MPI_FLOAT );
614 ADD_MPI_TYPE_MAP( double, MPI_DOUBLE );
615 
616 ADD_MPI_TYPE_MAP( char, MPI_CHAR );
617 ADD_MPI_TYPE_MAP( signed char, MPI_SIGNED_CHAR );
618 ADD_MPI_TYPE_MAP( unsigned char, MPI_UNSIGNED_CHAR );
619 
620 ADD_MPI_TYPE_MAP( int, MPI_INT );
621 ADD_MPI_TYPE_MAP( long int, MPI_LONG );
622 ADD_MPI_TYPE_MAP( long long int, MPI_LONG_LONG );
623 
624 ADD_MPI_TYPE_MAP( unsigned int, MPI_UNSIGNED );
625 ADD_MPI_TYPE_MAP( unsigned long int, MPI_UNSIGNED_LONG );
626 ADD_MPI_TYPE_MAP( unsigned long long int, MPI_UNSIGNED_LONG_LONG );
627 
628 #undef ADD_MPI_TYPE_MAP
629 
630 template< typename T >
631 struct MpiTypeImpl< T, std::enable_if_t< std::is_enum< T >::value > >
632 {
633  static MPI_Datatype get() { return MpiTypeImpl< std::underlying_type_t< T > >::get(); }
634 };
635 
636 template< typename T >
637 MPI_Datatype getMpiType()
638 {
639  return MpiTypeImpl< T >::get();
640 }
641 
642 }
643 
644 inline MPI_Op MpiWrapper::getMpiOp( Reduction const op )
645 {
646  switch( op )
647  {
648  case Reduction::Sum:
649  {
650  return MPI_SUM;
651  }
652  case Reduction::Min:
653  {
654  return MPI_MIN;
655  }
656  case Reduction::Max:
657  {
658  return MPI_MAX;
659  }
660  case Reduction::Prod:
661  {
662  return MPI_PROD;
663  }
664  default:
665  GEOS_ERROR( "Unsupported reduction operation" );
666  return MPI_NO_OP;
667  }
668 }
669 
670 template< typename T_SEND, typename T_RECV >
671 int MpiWrapper::allgather( T_SEND const * const sendbuf,
672  int sendcount,
673  T_RECV * const recvbuf,
674  int recvcount,
675  MPI_Comm MPI_PARAM( comm ) )
676 {
677 #ifdef GEOSX_USE_MPI
678  return MPI_Allgather( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
679  recvbuf, recvcount, internal::getMpiType< T_RECV >(),
680  comm );
681 #else
682  static_assert( std::is_same< T_SEND, T_RECV >::value,
683  "MpiWrapper::allgather() for serial run requires send and receive buffers are of the same type" );
684  GEOS_ERROR_IF_NE_MSG( sendcount, recvcount, "sendcount is not equal to recvcount." );
685  std::copy( sendbuf, sendbuf + sendcount, recvbuf );
686  return 0;
687 #endif
688 }
689 
690 template< typename T_SEND, typename T_RECV >
691 int MpiWrapper::allgatherv( T_SEND const * const sendbuf,
692  int sendcount,
693  T_RECV * const recvbuf,
694  int * recvcounts,
695  int * displacements,
696  MPI_Comm MPI_PARAM( comm ) )
697 {
698 #ifdef GEOSX_USE_MPI
699  return MPI_Allgatherv( sendbuf, sendcount, internal::getMpiType< T_SEND >(),
700  recvbuf, recvcounts, displacements, internal::getMpiType< T_RECV >(),
701  comm );
702 #else
703  static_assert( std::is_same< T_SEND, T_RECV >::value,
704  "MpiWrapper::allgatherv() for serial run requires send and receive buffers are of the same type" );
705  GEOS_ERROR_IF_NE_MSG( sendcount, recvcount, "sendcount is not equal to recvcount." );
706  std::copy( sendbuf, sendbuf + sendcount, recvbuf )
707  return 0;
708 #endif
709 }
710 
711 
712 template< typename T >
713 void MpiWrapper::allGather( T const myValue, array1d< T > & allValues, MPI_Comm MPI_PARAM( comm ) )
714 {
715 #ifdef GEOSX_USE_MPI
716  int const mpiSize = commSize( comm );
717  allValues.resize( mpiSize );
718 
719  MPI_Datatype const MPI_TYPE = internal::getMpiType< T >();
720 
721  MPI_Allgather( &myValue, 1, MPI_TYPE, allValues.data(), 1, MPI_TYPE, comm );
722 
723 #else
724  allValues.resize( 1 );
725  allValues[0] = myValue;
726 #endif
727 }
728 
729 template< typename T >
730 int MpiWrapper::allGather( arrayView1d< T const > const & sendValues,
731  array1d< T > & allValues,
732  MPI_Comm MPI_PARAM( comm ) )
733 {
734  int const sendSize = LvArray::integerConversion< int >( sendValues.size() );
735 #ifdef GEOSX_USE_MPI
736  int const mpiSize = commSize( comm );
737  allValues.resize( mpiSize * sendSize );
738  return MPI_Allgather( sendValues.data(),
739  sendSize,
740  internal::getMpiType< T >(),
741  allValues.data(),
742  sendSize,
743  internal::getMpiType< T >(),
744  comm );
745 
746 #else
747  allValues.resize( sendSize );
748  for( localIndex a=0; a<sendSize; ++a )
749  {
750  allValues[a] = sendValues[a];
751  }
752  return 0;
753 #endif
754 }
755 
756 template< typename T >
757 int MpiWrapper::allReduce( T const * const sendbuf,
758  T * const recvbuf,
759  int const count,
760  MPI_Op const MPI_PARAM( op ),
761  MPI_Comm const MPI_PARAM( comm ) )
762 {
763 #ifdef GEOSX_USE_MPI
764  MPI_Datatype const mpiType = internal::getMpiType< T >();
765  return MPI_Allreduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, comm );
766 #else
767  if( sendbuf != recvbuf )
768  {
769  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
770  }
771  return 0;
772 #endif
773 }
774 
775 template< typename T >
776 int MpiWrapper::reduce( T const * const sendbuf,
777  T * const recvbuf,
778  int const count,
779  MPI_Op const MPI_PARAM( op ),
780  int root,
781  MPI_Comm const MPI_PARAM( comm ) )
782 {
783 #ifdef GEOSX_USE_MPI
784  MPI_Datatype const mpiType = internal::getMpiType< T >();
785  return MPI_Reduce( sendbuf == recvbuf ? MPI_IN_PLACE : sendbuf, recvbuf, count, mpiType, op, root, comm );
786 #else
787  if( sendbuf != recvbuf )
788  {
789  memcpy( recvbuf, sendbuf, count * sizeof( T ) );
790  }
791  return 0;
792 #endif
793 }
794 
795 template< typename T >
796 int MpiWrapper::scan( T const * const sendbuf,
797  T * const recvbuf,
798  int count,
799  MPI_Op MPI_PARAM( op ),
800  MPI_Comm MPI_PARAM( comm ) )
801 {
802 #ifdef GEOSX_USE_MPI
803  return MPI_Scan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
804 #else
805  memcpy( recvbuf, sendbuf, count*sizeof(T) );
806  return 0;
807 #endif
808 }
809 
810 template< typename T >
811 int MpiWrapper::exscan( T const * const MPI_PARAM( sendbuf ),
812  T * const recvbuf,
813  int count,
814  MPI_Op MPI_PARAM( op ),
815  MPI_Comm MPI_PARAM( comm ) )
816 {
817 #ifdef GEOSX_USE_MPI
818  return MPI_Exscan( sendbuf, recvbuf, count, internal::getMpiType< T >(), op, comm );
819 #else
820  memset( recvbuf, 0, count*sizeof(T) );
821  return 0;
822 #endif
823 }
824 
825 template< typename T >
826 int MpiWrapper::bcast( T * const MPI_PARAM( buffer ),
827  int MPI_PARAM( count ),
828  int MPI_PARAM( root ),
829  MPI_Comm MPI_PARAM( comm ) )
830 {
831 #ifdef GEOSX_USE_MPI
832  return MPI_Bcast( buffer, count, internal::getMpiType< T >(), root, comm );
833 #else
834  return 0;
835 #endif
836 
837 }
838 
839 template< typename T >
840 void MpiWrapper::broadcast( T & MPI_PARAM( value ), int MPI_PARAM( srcRank ), MPI_Comm MPI_PARAM( comm ) )
841 {
842 #ifdef GEOSX_USE_MPI
843  MPI_Bcast( &value, 1, internal::getMpiType< T >(), srcRank, comm );
844 #endif
845 }
846 
847 template<>
848 inline
849 void MpiWrapper::broadcast< string >( string & MPI_PARAM( value ),
850  int MPI_PARAM( srcRank ),
851  MPI_Comm MPI_PARAM( comm ) )
852 {
853 #ifdef GEOSX_USE_MPI
854  int size = LvArray::integerConversion< int >( value.size() );
855  broadcast( size, srcRank, comm );
856  value.resize( size );
857  MPI_Bcast( const_cast< char * >( value.data() ), size, internal::getMpiType< char >(), srcRank, comm );
858 #endif
859 }
860 
861 template< typename TS, typename TR >
862 int MpiWrapper::gather( TS const * const sendbuf,
863  int sendcount,
864  TR * const recvbuf,
865  int recvcount,
866  int MPI_PARAM( root ),
867  MPI_Comm MPI_PARAM( comm ) )
868 {
869 #ifdef GEOSX_USE_MPI
870  return MPI_Gather( sendbuf, sendcount, internal::getMpiType< TS >(),
871  recvbuf, recvcount, internal::getMpiType< TR >(),
872  root, comm );
873 #else
874  static_assert( std::is_same< TS, TR >::value,
875  "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
876  std::size_t const sendBufferSize = sendcount * sizeof(TS);
877  std::size_t const recvBufferSize = recvcount * sizeof(TR);
878  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
879  memcpy( recvbuf, sendbuf, sendBufferSize );
880  return 0;
881 #endif
882 }
883 
884 template< typename TS, typename TR >
885 int MpiWrapper::gatherv( TS const * const sendbuf,
886  int sendcount,
887  TR * const recvbuf,
888  const int * recvcounts,
889  const int * MPI_PARAM( displs ),
890  int MPI_PARAM( root ),
891  MPI_Comm MPI_PARAM( comm ) )
892 {
893 #ifdef GEOSX_USE_MPI
894  return MPI_Gatherv( sendbuf, sendcount, internal::getMpiType< TS >(),
895  recvbuf, recvcounts, displs, internal::getMpiType< TR >(),
896  root, comm );
897 #else
898  static_assert( std::is_same< TS, TR >::value,
899  "MpiWrapper::gather() for serial run requires send and receive buffers are of the same type" );
900  std::size_t const sendBufferSize = sendcount * sizeof(TS);
901  std::size_t const recvBufferSize = recvcounts[0] * sizeof(TR);
902  GEOS_ERROR_IF_NE_MSG( sendBufferSize, recvBufferSize, "size of send buffer and receive buffer are not equal" );
903  memcpy( recvbuf, sendbuf, sendBufferSize );
904  return 0;
905 #endif
906 }
907 
908 template< typename T >
909 int MpiWrapper::iRecv( T * const buf,
910  int count,
911  int MPI_PARAM( source ),
912  int tag,
913  MPI_Comm MPI_PARAM( comm ),
914  MPI_Request * MPI_PARAM( request ) )
915 {
916 #ifdef GEOSX_USE_MPI
917  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
918  "Attempting to use an MPI_Request that is still in use." );
919  return MPI_Irecv( buf, count, internal::getMpiType< T >(), source, tag, comm, request );
920 #else
921  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
922  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );
923 
924  if( iPointer==pointerMap.end() )
925  {
926  pointerMap.insert( {tag, {1, buf} } );
927  }
928  else
929  {
930  GEOS_ERROR_IF( iPointer->second.first != 0,
931  "Tag does is assigned, but pointer was not set by iSend." );
932  memcpy( buf, iPointer->second.second, count*sizeof(T) );
933  pointerMap.erase( iPointer );
934  }
935  return 0;
936 #endif
937 }
938 
939 template< typename T >
940 int MpiWrapper::recv( array1d< T > & buf,
941  int MPI_PARAM( source ),
942  int tag,
943  MPI_Comm MPI_PARAM( comm ),
944  MPI_Status * MPI_PARAM( request ) )
945 {
946 #ifdef GEOSX_USE_MPI
947  MPI_Status status;
948  int count;
949  MPI_Probe( source, tag, comm, &status );
950  MPI_Get_count( &status, MPI_CHAR, &count );
951 
952  GEOS_ASSERT_EQ( count % sizeof( T ), 0 );
953  buf.resize( count / sizeof( T ) );
954 
955  return MPI_Recv( reinterpret_cast< char * >( buf.data() ),
956  count,
957  MPI_CHAR,
958  source,
959  tag,
960  comm,
961  request );
962 #else
963  GEOS_ERROR( "Not implemented!" );
964  return MPI_SUCCESS;
965 #endif
966 }
967 
968 template< typename T >
969 int MpiWrapper::iSend( arrayView1d< T > const & buf,
970  int MPI_PARAM( dest ),
971  int tag,
972  MPI_Comm MPI_PARAM( comm ),
973  MPI_Request * MPI_PARAM( request ) )
974 {
975 #ifdef GEOSX_USE_MPI
976  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
977  "Attempting to use an MPI_Request that is still in use." );
978  return MPI_Isend( reinterpret_cast< void const * >( buf.data() ),
979  buf.size() * sizeof( T ),
980  MPI_CHAR,
981  dest,
982  tag,
983  comm,
984  request );
985 #else
986  GEOS_ERROR( "Not implemented." );
987  return MPI_SUCCESS;
988 #endif
989 }
990 
991 template< typename T >
992 int MpiWrapper::iSend( T const * const buf,
993  int count,
994  int MPI_PARAM( dest ),
995  int tag,
996  MPI_Comm MPI_PARAM( comm ),
997  MPI_Request * MPI_PARAM( request ) )
998 {
999 #ifdef GEOSX_USE_MPI
1000  GEOS_ERROR_IF( (*request)!=MPI_REQUEST_NULL,
1001  "Attempting to use an MPI_Request that is still in use." );
1002  return MPI_Isend( buf, count, internal::getMpiType< T >(), dest, tag, comm, request );
1003 #else
1004  std::map< int, std::pair< int, void * > > & pointerMap = getTagToPointersMap();
1005  std::map< int, std::pair< int, void * > >::iterator iPointer = pointerMap.find( tag );
1006 
1007  if( iPointer==pointerMap.end() )
1008  {
1009  pointerMap.insert( {tag, {0, const_cast< T * >(buf)}
1010  } );
1011  }
1012  else
1013  {
1014  GEOS_ERROR_IF( iPointer->second.first != 1,
1015  "Tag does is assigned, but pointer was not set by iRecv." );
1016  memcpy( iPointer->second.second, buf, count*sizeof(T) );
1017  pointerMap.erase( iPointer );
1018  }
1019  return 0;
1020 #endif
1021 }
1022 
1023 template< typename U, typename T >
1024 U MpiWrapper::prefixSum( T const value, MPI_Comm comm )
1025 {
1026  U localResult;
1027 
1028 #ifdef GEOSX_USE_MPI
1029  U const convertedValue = value;
1030  int const error = MPI_Exscan( &convertedValue, &localResult, 1, internal::getMpiType< U >(), MPI_SUM, comm );
1031  MPI_CHECK_ERROR( error );
1032 #endif
1033  if( commRank( comm ) == 0 )
1034  {
1035  localResult = 0;
1036  }
1037 
1038  return localResult;
1039 }
1040 
1041 
1042 template< typename T >
1043 T MpiWrapper::allReduce( T const & value, Reduction const op, MPI_Comm const comm )
1044 {
1045  T result;
1046  allReduce( &value, &result, 1, getMpiOp( op ), comm );
1047  return result;
1048 }
1049 
1050 template< typename T >
1051 void MpiWrapper::allReduce( Span< T const > const src, Span< T > const dst, Reduction const op, MPI_Comm const comm )
1052 {
1053  GEOS_ASSERT_EQ( src.size(), dst.size() );
1054  allReduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), comm );
1055 }
1056 
1057 template< typename T >
1058 T MpiWrapper::sum( T const & value, MPI_Comm comm )
1059 {
1060  return MpiWrapper::allReduce( value, Reduction::Sum, comm );
1061 }
1062 
1063 template< typename T >
1064 void MpiWrapper::sum( Span< T const > src, Span< T > dst, MPI_Comm comm )
1065 {
1066  MpiWrapper::allReduce( src, dst, Reduction::Sum, comm );
1067 }
1068 
1069 template< typename T >
1070 T MpiWrapper::min( T const & value, MPI_Comm comm )
1071 {
1072  return MpiWrapper::allReduce( value, Reduction::Min, comm );
1073 }
1074 
1075 template< typename T >
1076 void MpiWrapper::min( Span< T const > src, Span< T > dst, MPI_Comm comm )
1077 {
1078  MpiWrapper::allReduce( src, dst, Reduction::Min, comm );
1079 }
1080 
1081 template< typename T >
1082 T MpiWrapper::max( T const & value, MPI_Comm comm )
1083 {
1084  return MpiWrapper::allReduce( value, Reduction::Max, comm );
1085 }
1086 
1087 template< typename T >
1088 void MpiWrapper::max( Span< T const > src, Span< T > dst, MPI_Comm comm )
1089 {
1090  MpiWrapper::allReduce( src, dst, Reduction::Max, comm );
1091 }
1092 
1093 
1094 template< typename T >
1095 T MpiWrapper::reduce( T const & value, Reduction const op, int root, MPI_Comm const comm )
1096 {
1097  T result;
1098  reduce( &value, &result, 1, getMpiOp( op ), root, comm );
1099  return result;
1100 }
1101 
1102 template< typename T >
1103 void MpiWrapper::reduce( Span< T const > const src, Span< T > const dst, Reduction const op, int root, MPI_Comm const comm )
1104 {
1105  GEOS_ASSERT_EQ( src.size(), dst.size() );
1106  reduce( src.data(), dst.data(), LvArray::integerConversion< int >( src.size() ), getMpiOp( op ), root, comm );
1107 }
1108 
1109 
1110 } /* namespace geos */
1111 
1112 #endif /* GEOS_COMMON_MPIWRAPPER_HPP_ */
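
Usage sketch (point-to-point wrappers). The listing above declares strongly typed wrappers around MPI_Isend/MPI_Irecv and the wait functions. The fragment below is a minimal, hypothetical illustration of how they fit together; it is not part of the header. It assumes MpiWrapper::init has already been called and that MPI_COMM_GEOSX is valid; the function name exchangeWithNeighbor, the tag value, and the neighbour pattern are invented for the example. Note that requests must start out as MPI_REQUEST_NULL, since iSend/iRecv refuse to reuse a request that is still in flight.

#include "common/MpiWrapper.hpp"

int exchangeWithNeighbor()
{
  using namespace geos;

  int const rank = MpiWrapper::commRank();
  int const size = MpiWrapper::commSize();

  int const sendValue = rank;
  int recvValue = -1;

  // Requests handed to iSend/iRecv must be MPI_REQUEST_NULL.
  MPI_Request requests[2] = { MPI_REQUEST_NULL, MPI_REQUEST_NULL };
  MPI_Status statuses[2];

  int const dest = ( rank + 1 ) % size;
  int const source = ( rank + size - 1 ) % size;
  int const tag = 14;

  MpiWrapper::iSend( &sendValue, 1, dest, tag, MPI_COMM_GEOSX, &requests[0] );
  MpiWrapper::iRecv( &recvValue, 1, source, tag, MPI_COMM_GEOSX, &requests[1] );
  MpiWrapper::waitAll( 2, requests, statuses );

  return recvValue;   // rank number received from the previous rank
}

Both transfers are posted before waitAll, so the exchange also works when a rank sends to itself; in a serial (non-MPI) build the matching iSend/iRecv pair is resolved through the tag-to-pointer map shown in the listing.
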
#define GEOS_ERROR(msg)
Raise a hard error and terminate the program.
Definition: Logger.hpp:122
#define GEOS_ERROR_IF(EXP, msg)
Conditionally raise a hard error and terminate the program.
Definition: Logger.hpp:107
#define GEOS_ERROR_IF_NE_MSG(lhs, rhs, msg)
Raise a hard error if two values are not equal.
Definition: Logger.hpp:208
#define GEOS_ASSERT_EQ(lhs, rhs)
Assert that two values compare equal in debug builds.
Definition: Logger.hpp:375
Lightweight non-owning wrapper over a contiguous range of elements.
Definition: Span.hpp:41
constexpr T * data() const noexcept
Definition: Span.hpp:130
constexpr size_type size() const noexcept
Definition: Span.hpp:106
ArrayView< T, 1 > arrayView1d
Alias for 1D array view.
Definition: DataTypes.hpp:220
int MPI_COMM_GEOSX
Global MPI communicator used by GEOSX.
std::size_t size_t
Unsigned size type.
Definition: DataTypes.hpp:119
GEOSX_LOCALINDEX_TYPE localIndex
Local index type (for indexing objects within an MPI partition).
Definition: DataTypes.hpp:125
Array< T, 1 > array1d
Alias for 1D array.
Definition: DataTypes.hpp:216
static int allgatherv(T_SEND const *sendbuf, int sendcount, T_RECV *recvbuf, int *recvcounts, int *displacements, MPI_Comm comm)
Strongly typed wrapper around MPI_Allgatherv.
static int bcast(T *buffer, int count, int root, MPI_Comm comm)
Strongly typed wrapper around MPI_Bcast.
static MPI_Op getMpiOp(Reduction const op)
Returns an MPI_Op associated with our strongly typed Reduction enum.
Definition: MpiWrapper.hpp:644
static int activeWaitSomeCompletePhase(const int participants, std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request(int) > > > const &phases)
static int checkAll(int count, MPI_Request array_of_requests[], int *flag, MPI_Status array_of_statuses[])
static int activeWaitOrderedCompletePhase(const int participants, std::vector< std::tuple< MPI_Request *, MPI_Status *, std::function< MPI_Request(int) > > > const &phases)
static int gather(TS const *const sendbuf, int sendcount, TR *const recvbuf, int recvcount, int root, MPI_Comm comm)
Strongly typed wrapper around MPI_Gather().
static int gatherv(TS const *const sendbuf, int sendcount, TR *const recvbuf, const int *recvcounts, const int *displs, int root, MPI_Comm comm)
Strongly typed wrapper around MPI_Gatherv.
static int check(MPI_Request *request, int *flag, MPI_Status *status)
static int activeWaitAny(const int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[], std::function< MPI_Request(int) > func)
static int reduce(T const *sendbuf, T *recvbuf, int count, MPI_Op op, int root, MPI_Comm comm=MPI_COMM_GEOSX)
Strongly typed wrapper around MPI_Reduce.
static void allGather(T const myValue, array1d< T > &allValues, MPI_Comm comm=MPI_COMM_GEOSX)
Convenience function for MPI_Allgather.
static int iSend(T const *const buf, int count, int dest, int tag, MPI_Comm comm, MPI_Request *request)
Strongly typed wrapper around MPI_Isend()
static int allgather(T_SEND const *sendbuf, int sendcount, T_RECV *recvbuf, int recvcount, MPI_Comm comm)
Strongly typed wrapper around MPI_Allgather.
static T sum(T const &value, MPI_Comm comm=MPI_COMM_GEOSX)
Convenience function for a MPI_Allreduce using a MPI_SUM operation.
static int activeWaitSome(const int count, MPI_Request array_of_requests[], MPI_Status array_of_statuses[], std::function< MPI_Request(int) > func)
static T max(T const &value, MPI_Comm comm=MPI_COMM_GEOSX)
Convenience function for a MPI_Allreduce using a MPI_MAX operation.
static T min(T const &value, MPI_Comm comm=MPI_COMM_GEOSX)
Convenience function for a MPI_Allreduce using a MPI_MIN operation.
static int checkAny(int count, MPI_Request array_of_requests[], int *idx, int *flag, MPI_Status array_of_statuses[])
static int iRecv(T *const buf, int count, int source, int tag, MPI_Comm comm, MPI_Request *request)
Strongly typed wrapper around MPI_Irecv()
static void broadcast(T &value, int srcRank=0, MPI_Comm comm=MPI_COMM_GEOSX)
Convenience function for MPI_Bcast.
static int allReduce(T const *sendbuf, T *recvbuf, int count, MPI_Op op, MPI_Comm comm=MPI_COMM_GEOSX)
Strongly typed wrapper around MPI_Allreduce.
static U prefixSum(T const value, MPI_Comm comm=MPI_COMM_GEOSX)
Compute exclusive prefix sum and full sum.
static int nodeCommSize()
Compute the number of ranks allocated on the same node.
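
Usage sketch (collective convenience wrappers). The brief descriptions above cover the reduction, prefix-sum, broadcast, and gather helpers; the fragment below is a minimal, hypothetical sketch of typical calls, not code taken from GEOSX itself. It assumes MPI (or the serial stubs) is initialized and uses the default MPI_COMM_GEOSX communicator; the function name collectiveSketch, the variable names, and the literal values (localCount, "input.xml", etc.) are invented for illustration.

#include "common/MpiWrapper.hpp"

void collectiveSketch()
{
  using namespace geos;

  // Per-rank contribution; the value is made up for the example.
  localIndex const localCount = 100;

  // MPI_Allreduce convenience wrappers: sum over all ranks, and a min reduction
  // expressed through the strongly typed Reduction enum.
  localIndex const totalCount = MpiWrapper::sum( localCount );
  double const globalMinDt = MpiWrapper::allReduce( 1.0e-3, MpiWrapper::Reduction::Min );

  // Exclusive prefix sum: offset of this rank's entries in a global numbering.
  localIndex const offset = MpiWrapper::prefixSum< localIndex >( localCount );

  // Broadcast a string from rank 0 (uses the broadcast< string > specialization).
  string fileName = ( MpiWrapper::commRank() == 0 ) ? "input.xml" : "";
  MpiWrapper::broadcast( fileName, 0 );

  // Gather one value per rank into an array1d sized to the communicator.
  array1d< int > allRanks;
  MpiWrapper::allGather( MpiWrapper::commRank(), allRanks );

  // Sanity check: a rank's offset plus its own count never exceeds the total.
  GEOS_ERROR_IF( totalCount < offset + localCount, "inconsistent prefix sum" );
}

The scalar overloads (sum, min, max, allReduce with a Reduction value) return the reduced result directly, while the pointer and Span overloads shown in the listing operate on buffers and return the MPI error code.
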