GEOS
LAIHelperFunctions.hpp
Go to the documentation of this file.
1 /*
2  * ------------------------------------------------------------------------------------------------------------
3  * SPDX-License-Identifier: LGPL-2.1-only
4  *
5  * Copyright (c) 2016-2024 Lawrence Livermore National Security LLC
6  * Copyright (c) 2018-2024 Total, S.A
7  * Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University
8  * Copyright (c) 2023-2024 Chevron
9  * Copyright (c) 2019- GEOS/GEOSX Contributors
10  * All rights reserved
11  *
12  * See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details.
13  * ------------------------------------------------------------------------------------------------------------
14  */
15 
20 #ifndef GEOS_LINEARALGEBRA_UTILITIES_LAIHELPERFUNCTIONS_HPP_
21 #define GEOS_LINEARALGEBRA_UTILITIES_LAIHELPERFUNCTIONS_HPP_
22 
23 #include "common/DataTypes.hpp"
26 #include "mesh/MeshBody.hpp"
27 #include "mesh/NodeManager.hpp"
29 
30 namespace geos
31 {
32 namespace LAIHelperFunctions
33 {
34 
42 template< typename MATRIX >
43 void makeIdentity( localIndex const n,
44  MPI_Comm const & comm,
45  MATRIX & mat )
46 {
47  mat.createWithLocalSize( n, 1, comm );
48  mat.open();
49  for( globalIndex i = mat.ilower(); i < mat.iupper(); ++i )
50  {
51  mat.insert( i, i, 1.0 );
52  }
53  mat.close();
54 }
55 
64 template< typename MATRIX >
65 void createPermutationMatrix( NodeManager const & nodeManager,
66  int const nDofPerNode,
67  string const & dofKey,
68  MATRIX & permutationMatrix )
69 {
70  /* Crates a permutation matrix for a given nodal variable specified by the DofKey.
71  * It consider that nDofPerNode dofs are associated with each node (e.g., nDofPerNode = 3 for the displacement).
72  *
73  * The permutation matrix maps from the dofs ordering set by the DOFManager to the ordering based on the global
74  * indexes of the nodes.
75  */
76 
77  localIndex const numLocalRows = nodeManager.getNumberOfLocalIndices() * nDofPerNode;
78  permutationMatrix.createWithLocalSize( numLocalRows, numLocalRows, 1, MPI_COMM_GEOS );
79 
80  arrayView1d< globalIndex const > const & dofNumber = nodeManager.getReference< globalIndex_array >( dofKey );
81  arrayView1d< globalIndex const > const & localToGlobal = nodeManager.localToGlobalMap();
82 
83  permutationMatrix.open();
84  for( localIndex a = 0; a < nodeManager.size(); ++a )
85  {
86  if( dofNumber[a] >= 0 )
87  {
88  for( int d = 0; d < nDofPerNode; ++d )
89  {
90  globalIndex const rowIndex = localToGlobal[a] * nDofPerNode + d;
91  globalIndex const columnIndex = dofNumber[a] + d;
92  permutationMatrix.insert( rowIndex, columnIndex, 1.0 );
93  }
94  }
95  }
96  permutationMatrix.close();
97  permutationMatrix.set( 1.0 );
98 }
99 
108 template< typename MATRIX >
110  int const nDofPerCell,
111  string const & dofKey,
112  MATRIX & permutationMatrix )
113 {
114  /* Crates a permutation matrix for a given cell centered variable specified by the DofKey.
115  * It consider that nDofPerCell dofs are associated with each cell (e.g., nDofPerCell = 3 for the displacement).
116  *
117  * The permutation matrix maps from the dofs ordering set by the DOFManager to the ordering based on the global
118  * indexes of the cells.
119  */
120 
121  // Create permutation matrix
122  localIndex numLocalRows = 0;
123  elemManager.forElementSubRegions< ElementSubRegionBase >( [&]( ElementSubRegionBase const & elementSubRegion )
124  {
125  if( elementSubRegion.hasWrapper( dofKey ) )
126  {
127  numLocalRows += elementSubRegion.getNumberOfLocalIndices() * nDofPerCell;
128  }
129  } );
130  permutationMatrix.createWithLocalSize( numLocalRows, numLocalRows, 1, MPI_COMM_GEOS );
131 
132  permutationMatrix.open();
133  elemManager.forElementSubRegions< ElementSubRegionBase >( [&]( ElementSubRegionBase const & elementSubRegion )
134  {
135  localIndex const numElems = elementSubRegion.size();
136  arrayView1d< globalIndex const > const & dofNumber = elementSubRegion.getReference< array1d< globalIndex > >( dofKey );
137  arrayView1d< globalIndex const > const & localToGlobal = elementSubRegion.localToGlobalMap();
138 
139  for( localIndex k = 0; k < numElems; ++k )
140  {
141  if( dofNumber[k] >= 0 )
142  {
143  for( int d = 0; d < nDofPerCell; ++d )
144  {
145  globalIndex const rowIndex = localToGlobal[k] * nDofPerCell + d;
146  globalIndex const columnIndex = dofNumber[k] + d;
147  permutationMatrix.insert( rowIndex, columnIndex, 1.0 );
148  }
149  }
150  }
151  } );
152  permutationMatrix.close();
153  permutationMatrix.set( 1.0 );
154 }
155 
/**
 * @brief Permute a vector.
 * @tparam VECTOR the linear-algebra-interface vector type
 * @tparam MATRIX the linear-algebra-interface matrix type
 * @param vector the source vector (unchanged)
 * @param permutationMatrix the permutation to apply
 * @return the permuted vector
 */
template< typename VECTOR, typename MATRIX >
VECTOR permuteVector( VECTOR const & vector,
                      MATRIX const & permutationMatrix )
{
  VECTOR result;
  // Same local size and communicator as the input; apply P * v.
  result.create( vector.localSize(), permutationMatrix.comm() );
  permutationMatrix.apply( vector, result );
  return result;
}
173 
/**
 * @brief Permute rows and columns of a square matrix.
 * @tparam MATRIX the linear-algebra-interface matrix type
 * @param matrix the source matrix (unchanged)
 * @param permutationMatrix the permutation to apply to both rows and columns
 * @return the permuted matrix P * A * P^T
 */
template< typename MATRIX >
MATRIX permuteMatrix( MATRIX const & matrix,
                      MATRIX const & permutationMatrix )
{
  MATRIX permutedMatrix;
  matrix.multiplyRARt( permutationMatrix, permutedMatrix );
  // Bug fix: previously returned the unpermuted input `matrix`,
  // silently discarding the computed triple product.
  return permutedMatrix;
}
188 
/**
 * @brief Permute rows and columns of a (possibly rectangular) matrix.
 * @tparam MATRIX the linear-algebra-interface matrix type
 * @param matrix the source matrix (unchanged)
 * @param permutationMatrixLeft the row permutation
 * @param permutationMatrixRight the column permutation
 * @return the permuted matrix R * A * P
 */
template< typename MATRIX >
MATRIX permuteMatrix( MATRIX const & matrix,
                      MATRIX const & permutationMatrixLeft,
                      MATRIX const & permutationMatrixRight )
{
  MATRIX permutedMatrix;
  matrix.multiplyRAP( permutationMatrixLeft, permutationMatrixRight, permutedMatrix );
  // Bug fix: previously returned the unpermuted input `matrix`,
  // silently discarding the computed triple product.
  return permutedMatrix;
}
205 
214 template< typename VECTOR >
215 void computeRigidBodyModes( MeshLevel const & mesh,
216  DofManager const & dofManager,
217  std::vector< string > const & selection,
218  array1d< VECTOR > & rigidBodyModes )
219 {
220  NodeManager const & nodeManager = mesh.getNodeManager();
221 
222  localIndex numComponents = 0;
223  array1d< localIndex > globalNodeList;
224  for( localIndex k = 0; k < LvArray::integerConversion< localIndex >( selection.size() ); ++k )
225  {
226  if( dofManager.location( selection[k] ) == FieldLocation::Node )
227  {
228  string const & dispDofKey = dofManager.getKey( selection[k] );
229  arrayView1d< globalIndex const > const & dofNumber = nodeManager.getReference< globalIndex_array >( dispDofKey );
230  localIndex const numComponentsField = dofManager.numComponents( selection[k] );
231  numComponents = numComponents > 0 ? numComponents : numComponentsField;
232  GEOS_ERROR_IF( numComponents != numComponentsField, "Rigid body modes called with different number of components." );
233  globalIndex const globalOffset = dofManager.globalOffset( selection[k] );
234  globalIndex const numLocalDofs = LvArray::integerConversion< globalIndex >( dofManager.numLocalDofs( selection[k] ) );
235  for( globalIndex i = 0; i < dofNumber.size(); ++i )
236  {
237  if( dofNumber[i] >= globalOffset && ( dofNumber[i] - globalOffset ) < numLocalDofs )
238  {
239  globalNodeList.emplace_back( LvArray::integerConversion< localIndex >( dofNumber[i] - globalOffset ) / numComponentsField );
240  }
241  }
242  }
243  }
244  localIndex const numNodes = globalNodeList.size();
245  arrayView1d< localIndex const > globalNodeListView = globalNodeList.toViewConst();
246 
248 
249  localIndex const numRidigBodyModes = numComponents * ( numComponents + 1 ) / 2;
250  rigidBodyModes.resize( numRidigBodyModes );
251  for( localIndex k = 0; k < numComponents; ++k )
252  {
253  rigidBodyModes[k].create( numNodes * numComponents, MPI_COMM_GEOS );
254  arrayView1d< real64 > const values = rigidBodyModes[k].open();
255  forAll< parallelHostPolicy >( numNodes, [=]( localIndex const i )
256  {
257  values[numComponents * i + k] = 1.0;
258  } );
259  rigidBodyModes[k].close();
260  rigidBodyModes[k].scale( 1.0 / rigidBodyModes[k].norm2() );
261  }
262  switch( numComponents )
263  {
264  case 2:
265  {
266  localIndex const k = 2;
267  rigidBodyModes[k].create( numNodes*numComponents, MPI_COMM_GEOS );
268  {
269  arrayView1d< real64 > const values = rigidBodyModes[k].open();
270  forAll< parallelHostPolicy >( numNodes, [=]( localIndex const i )
271  {
272  values[numComponents * i + 0] = -nodePosition[globalNodeListView[i]][1];
273  values[numComponents * i + 1] = +nodePosition[globalNodeListView[i]][0];
274  } );
275  rigidBodyModes[k].close();
276  }
277 
278  for( localIndex j = 0; j < k; ++j )
279  {
280  rigidBodyModes[k].axpy( -rigidBodyModes[k].dot( rigidBodyModes[j] ), rigidBodyModes[j] );
281  }
282  rigidBodyModes[k].scale( 1.0 / rigidBodyModes[k].norm2() );
283  break;
284  }
285  case 3:
286  {
287  localIndex k = 3;
288  rigidBodyModes[k].create( numNodes*numComponents, MPI_COMM_GEOS );
289  {
290  arrayView1d< real64 > const values = rigidBodyModes[k].open();
291  forAll< parallelHostPolicy >( numNodes, [=]( localIndex const i )
292  {
293  values[numComponents * i + 0] = +nodePosition[globalNodeListView[i]][1];
294  values[numComponents * i + 1] = -nodePosition[globalNodeListView[i]][0];
295  } );
296  rigidBodyModes[k].close();
297  }
298 
299  for( localIndex j = 0; j < k; ++j )
300  {
301  rigidBodyModes[k].axpy( -rigidBodyModes[k].dot( rigidBodyModes[j] ), rigidBodyModes[j] );
302  }
303  rigidBodyModes[k].scale( 1.0 / rigidBodyModes[k].norm2() );
304 
305  ++k;
306  rigidBodyModes[k].create( numNodes*numComponents, MPI_COMM_GEOS );
307  {
308  arrayView1d< real64 > const values = rigidBodyModes[k].open();
309  forAll< parallelHostPolicy >( numNodes, [=]( localIndex const i )
310  {
311  values[numComponents * i + 1] = -nodePosition[globalNodeListView[i]][2];
312  values[numComponents * i + 2] = +nodePosition[globalNodeListView[i]][1];
313  } );
314  rigidBodyModes[k].close();
315  }
316 
317  for( localIndex j = 0; j < k; ++j )
318  {
319  rigidBodyModes[k].axpy( -rigidBodyModes[k].dot( rigidBodyModes[j] ), rigidBodyModes[j] );
320  }
321  rigidBodyModes[k].scale( 1.0 / rigidBodyModes[k].norm2() );
322 
323  ++k;
324  rigidBodyModes[k].create( numNodes*numComponents, MPI_COMM_GEOS );
325  {
326  arrayView1d< real64 > const values = rigidBodyModes[k].open();
327  forAll< parallelHostPolicy >( numNodes, [=]( localIndex const i )
328  {
329  values[numComponents * i + 0] = +nodePosition[globalNodeListView[i]][2];
330  values[numComponents * i + 2] = -nodePosition[globalNodeListView[i]][0];
331  } );
332  rigidBodyModes[k].close();
333  }
334 
335  for( localIndex j = 0; j < k; ++j )
336  {
337  rigidBodyModes[k].axpy( -rigidBodyModes[k].dot( rigidBodyModes[j] ), rigidBodyModes[j] );
338  }
339  rigidBodyModes[k].scale( 1.0 / rigidBodyModes[k].norm2() );
340  break;
341  }
342  default:
343  {
344  GEOS_ERROR( "Rigid body modes computation unsupported for " << numComponents << " components." );
345  }
346  }
347 }
348 
349 } // LAIHelperFunctions namespace
350 
351 } // geos namespace
352 
353 #endif /*GEOS_LINEARALGEBRA_UTILITIES_LAIHELPERFUNCTIONS_HPP_*/
void computeRigidBodyModes(MeshLevel const &mesh, DofManager const &dofManager, std::vector< string > const &selection, array1d< VECTOR > &rigidBodyModes)
Computes rigid body modes.
MATRIX permuteMatrix(MATRIX const &matrix, MATRIX const &permutationMatrix)
Permute rows and columns of a square matrix.
void createPermutationMatrix(NodeManager const &nodeManager, int const nDofPerNode, string const &dofKey, MATRIX &permutationMatrix)
Create a permutation matrix for a given nodal variable.
void makeIdentity(localIndex const n, MPI_Comm const &comm, MATRIX &mat)
Create an identity matrix.
VECTOR permuteVector(VECTOR const &vector, MATRIX const &permutationMatrix)
Permute a vector.
#define GEOS_ERROR(msg)
Raise a hard error and terminate the program.
Definition: Logger.hpp:157
#define GEOS_ERROR_IF(EXP, msg)
Conditionally raise a hard error and terminate the program.
Definition: Logger.hpp:142
The DoFManager is responsible for allocating global dofs, constructing sparsity patterns,...
Definition: DofManager.hpp:44
globalIndex globalOffset(string const &fieldName) const
integer numComponents(string const &fieldName="") const
FieldLocation location(string const &fieldName) const
Get the support location type of the field.
localIndex numLocalDofs(string const &fieldName) const
string const & getKey(string const &fieldName) const
Return the key used to record the field in the DofManager.
The ElementRegionManager class provides an interface to ObjectManagerBase in order to manage ElementR...
void forElementSubRegions(LAMBDA &&lambda)
This function is used to launch kernel function over the element subregions of all the subregion type...
Class facilitating the representation of a multi-level discretization of a MeshBody.
Definition: MeshLevel.hpp:42
NodeManager const & getNodeManager() const
Get the node manager.
Definition: MeshLevel.hpp:155
The NodeManager class provides an interface to ObjectManagerBase in order to manage node data.
Definition: NodeManager.hpp:46
arrayView1d< globalIndex > localToGlobalMap()
Get local to global map.
localIndex getNumberOfLocalIndices() const
Get the number of locally owned objects.
GEOS_DECLTYPE_AUTO_RETURN getReference(LOOKUP_TYPE const &lookup) const
Look up a wrapper and get reference to wrapped object.
Definition: Group.hpp:1273
localIndex size() const
Get the "size" of the group, which determines the number of elements in resizable wrappers.
Definition: Group.hpp:1315
array2d< real64, nodes::REFERENCE_POSITION_PERM > & referencePosition()
Get the mutable reference position array. This table will contain all the node coordinates.
ArrayView< T, 1 > arrayView1d
Alias for 1D array view.
Definition: DataTypes.hpp:180
int MPI_COMM_GEOS
Global MPI communicator used by GEOS.
GEOS_GLOBALINDEX_TYPE globalIndex
Global index type (for indexing objects across MPI partitions).
Definition: DataTypes.hpp:88
array1d< globalIndex > globalIndex_array
A 1-dimensional array of geos::globalIndex types.
Definition: DataTypes.hpp:401
GEOS_LOCALINDEX_TYPE localIndex
Local index type (for indexing objects within an MPI partition).
Definition: DataTypes.hpp:85
@ Node
location is node (like displacements in finite elements)
ArrayView< T, 2, USD > arrayView2d
Alias for 2D array view.
Definition: DataTypes.hpp:196
Array< T, 1 > array1d
Alias for 1D array.
Definition: DataTypes.hpp:176