Intrepid2_CellGeometryDef.hpp
1 // @HEADER
2 // ************************************************************************
3 //
4 // Intrepid2 Package
5 // Copyright (2007) Sandia Corporation
6 //
7 // Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
8 // license for use of this work by or on behalf of the U.S. Government.
9 //
10 // Redistribution and use in source and binary forms, with or without
11 // modification, are permitted provided that the following conditions are
12 // met:
13 //
14 // 1. Redistributions of source code must retain the above copyright
15 // notice, this list of conditions and the following disclaimer.
16 //
17 // 2. Redistributions in binary form must reproduce the above copyright
18 // notice, this list of conditions and the following disclaimer in the
19 // documentation and/or other materials provided with the distribution.
20 //
21 // 3. Neither the name of the Corporation nor the names of the
22 // contributors may be used to endorse or promote products derived from
23 // this software without specific prior written permission.
24 //
25 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
26 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
27 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
28 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
29 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
30 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
31 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
32 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
33 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
34 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
35 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 //
37 // Questions? Contact Kyungjoo Kim (kyukim@sandia.gov),
38 // Mauro Perego (mperego@sandia.gov), or
39 // Nate Roberts (nvrober@sandia.gov),
40 //
41 // ************************************************************************
42 // @HEADER
43 
50 #ifndef Intrepid2_CellGeometryDef_h
51 #define Intrepid2_CellGeometryDef_h
52 
53 namespace Intrepid2
54 {
55 
56  namespace Impl
57  {
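 // Host-only lookup for CellGeometry "members" that cannot live in the device-copyable CellGeometry object
 // itself (shards::CellTopology and Teuchos::RCP are host-only types).  The static maps below are keyed on the
 // CellGeometry pointer: constructors register entries via constructorCalled(), and the destructor removes
 // them via destructorCalled().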
60  template<class PointScalar, int spaceDim, typename DeviceType>
 61  class CellGeometryHostMembers
 62  {
63  using BasisPtr = Teuchos::RCP<Intrepid2::Basis<DeviceType,PointScalar,PointScalar> >;
 64  using CellGeometryType = CellGeometry<PointScalar,spaceDim,DeviceType>;
 65  public:
 66  // conceptually, these should be private members, but for their definitions below, we need them to be externally accessible.
67  static std::map<const CellGeometryType *, shards::CellTopology> cellTopology_;
68  static std::map<const CellGeometryType *, BasisPtr> basisForNodes_;
69 
70  public:
71  static void constructorCalled(const CellGeometryType *cellGeometry, const shards::CellTopology &cellTopo, BasisPtr basisForNodes)
72  {
73  cellTopology_[cellGeometry] = cellTopo;
74  basisForNodes_[cellGeometry] = basisForNodes;
75  }
76 
77  static void destructorCalled(const CellGeometryType *cellGeometry)
78  {
79  cellTopology_.erase(cellGeometry);
80  basisForNodes_.erase(cellGeometry);
81  }
82 
83  static BasisPtr getBasis(const CellGeometryType *cellGeometry)
84  {
85  return basisForNodes_[cellGeometry];
86  }
87 
88  static const shards::CellTopology & getCellTopology(const CellGeometryType *cellGeometry)
89  {
90  return cellTopology_[cellGeometry];
91  }
92  };
93 
94  // member lookup map definitions for CellGeometryHostMembers:
95  template< class PointScalar, int spaceDim, typename DeviceType > typename std::map<const CellGeometry<PointScalar,spaceDim,DeviceType> *, shards::CellTopology> CellGeometryHostMembers< PointScalar,spaceDim,DeviceType>::cellTopology_;
96 
97  template< class PointScalar, int spaceDim, typename DeviceType > typename std::map<const CellGeometry<PointScalar,spaceDim,DeviceType> *, Teuchos::RCP<Intrepid2::Basis<DeviceType,PointScalar,PointScalar> >> CellGeometryHostMembers< PointScalar,spaceDim,DeviceType>::basisForNodes_;
98 
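 // Functor computing weighted cell measures, cellMeasures(C,P) = detData(C,P) * cubatureWeights(P).
 // Used by CellGeometry::computeCellMeasure() below in place of a lambda, to work around the CUDA 10.1.243
 // compiler issue noted there.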
101  template<class PointScalar, int spaceDim, typename DeviceType>
 102  class CellMeasureFunctor
 103  {
104  Kokkos::View<PointScalar**, DeviceType> cellMeasures_; // (C,P)
105  Kokkos::View<PointScalar**, DeviceType> detData_; // (C,P)
106  TensorData<PointScalar,DeviceType> cubatureWeights_; // (P)
107  public:
108  CellMeasureFunctor(Kokkos::View<PointScalar**, DeviceType> cellMeasures,
109  Kokkos::View<PointScalar**, DeviceType> detData, TensorData<PointScalar,DeviceType> cubatureWeights)
110  :
111  cellMeasures_(cellMeasures),
112  detData_(detData),
113  cubatureWeights_(cubatureWeights)
114  {}
115 
116  KOKKOS_INLINE_FUNCTION void
117  operator () (const ordinal_type cellOrdinal, const ordinal_type pointOrdinal) const
118  {
119  cellMeasures_(cellOrdinal,pointOrdinal) = detData_(cellOrdinal,pointOrdinal) * cubatureWeights_(pointOrdinal);
120  }
121  };
122  }
123 
124  template<class PointScalar, int spaceDim, typename DeviceType>
125  KOKKOS_INLINE_FUNCTION
 126  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(const CellGeometry &cellGeometry)
 127 :
128  nodeOrdering_(cellGeometry.nodeOrdering_),
129  cellGeometryType_(cellGeometry.cellGeometryType_),
130  subdivisionStrategy_(cellGeometry.subdivisionStrategy_),
131  affine_(cellGeometry.affine_),
132  orientations_(cellGeometry.orientations_),
133  origin_(cellGeometry.origin_),
134  domainExtents_(cellGeometry.domainExtents_),
135  gridCellCounts_(cellGeometry.gridCellCounts_),
136  tensorVertices_(cellGeometry.tensorVertices_),
137  cellToNodes_(cellGeometry.cellToNodes_),
138  nodes_(cellGeometry.nodes_),
139  numCells_(cellGeometry.numCells_),
140  numNodesPerCell_(cellGeometry.numNodesPerCell_)
141  {
142  // host-only registration with HostMemberLookup:
143 #ifndef INTREPID2_COMPILE_DEVICE_CODE
144  shards::CellTopology cellTopo = cellGeometry.cellTopology();
145  BasisPtr basisForNodes = cellGeometry.basisForNodes();
 146  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar, spaceDim, DeviceType>;
 147  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
148 #endif
149  }
150 
151  template<class PointScalar, int spaceDim, typename DeviceType>
152  KOKKOS_INLINE_FUNCTION
 153  CellGeometry<PointScalar,spaceDim,DeviceType>::~CellGeometry()
 154  {
155  // host-only deregistration with HostMemberLookup:
156 #ifndef INTREPID2_COMPILE_DEVICE_CODE
 157  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar, spaceDim, DeviceType>;
 158  HostMemberLookup::destructorCalled(this);
159 #endif
160  }
161 
162  template<class PointScalar, int spaceDim, typename DeviceType>
163  KOKKOS_INLINE_FUNCTION
 164  int CellGeometry<PointScalar,spaceDim,DeviceType>::numCellsPerGridCell(SubdivisionStrategy subdivisionStrategy)
 165  {
166  switch (subdivisionStrategy) {
167  case NO_SUBDIVISION:
168  return 1;
169  case TWO_TRIANGLES_LEFT:
170  case TWO_TRIANGLES_RIGHT:
171  return 2;
172  case FOUR_TRIANGLES:
173  return 4;
174  case FIVE_TETRAHEDRA:
175  return 5;
176  case SIX_PYRAMIDS:
177  case SIX_TETRAHEDRA:
178  return 6;
179  }
180  return -1;
181  }
182 
183  template<class PointScalar, int spaceDim, typename DeviceType>
 184  Data<PointScalar,DeviceType>
 185  CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianDataPrivate(const ScalarView<PointScalar,DeviceType> &pointComponentView, const int &pointsPerCell, const int startCell, const int endCell) const
186  {
187  ScalarView<PointScalar,DeviceType> data;
188  const int rank = 4; // C,P,D,D
189  const int CELL_DIM = 0;
190  const int POINT_DIM = 1;
191  const int D1_DIM = 2;
192  const int D2_DIM = 3;
193 
194  const int numCellsWorkset = (endCell == -1) ? (numCells_ - startCell) : (endCell - startCell);
195 
196  Kokkos::Array<int,7> extents { numCellsWorkset, pointsPerCell, spaceDim, spaceDim, 1, 1, 1 };
197  Kokkos::Array<DataVariationType,7> variationType { CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT };
198 
199  int blockPlusDiagonalLastNonDiagonal = -1;
200 
201  if (cellGeometryType_ == UNIFORM_GRID)
202  {
203  if (uniformJacobianModulus() != 1)
204  {
205  variationType[CELL_DIM] = MODULAR;
206  variationType[POINT_DIM] = CONSTANT;
207  variationType[D1_DIM] = GENERAL;
208  variationType[D2_DIM] = GENERAL;
209 
210  int cellTypeModulus = uniformJacobianModulus();
211 
212  data = getMatchingViewWithLabel(pointComponentView, "CellGeometryProvider: Jacobian data", cellTypeModulus, spaceDim, spaceDim);
213  }
214  else
215  {
216  // diagonal Jacobian
217  variationType[D1_DIM] = BLOCK_PLUS_DIAGONAL;
218  variationType[D2_DIM] = BLOCK_PLUS_DIAGONAL;
219  blockPlusDiagonalLastNonDiagonal = -1;
220 
221  data = getMatchingViewWithLabel(pointComponentView, "CellGeometryProvider: Jacobian data", spaceDim);
222  }
223  }
224  else if (cellGeometryType_ == TENSOR_GRID)
225  {
226  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "tensor grid support not yet implemented");
227  }
228  else if (cellGeometryType_ == FIRST_ORDER)
229  {
230  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
231  if (simplex)
232  {
233  variationType[CELL_DIM] = GENERAL;
234  variationType[POINT_DIM] = CONSTANT; // affine: no point variation
235  variationType[D1_DIM] = GENERAL;
236  variationType[D2_DIM] = GENERAL;
237 
238  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCells_, spaceDim, spaceDim);
239  }
240  else
241  {
242  variationType[CELL_DIM] = GENERAL;
243  variationType[D1_DIM] = GENERAL;
244  variationType[D2_DIM] = GENERAL;
245  if (affine_)
246  {
247  // no point variation
248  variationType[POINT_DIM] = CONSTANT;
249  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCellsWorkset, spaceDim, spaceDim);
250  }
251  else
252  {
253  variationType[POINT_DIM] = GENERAL;
254  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCellsWorkset, pointsPerCell, spaceDim, spaceDim);
255  }
256  }
257  }
258  else if (cellGeometryType_ == HIGHER_ORDER)
259  {
260  // most general case: varies in all 4 dimensions
261  variationType[CELL_DIM] = GENERAL;
262  variationType[POINT_DIM] = GENERAL;
263  variationType[D1_DIM] = GENERAL;
264  variationType[D2_DIM] = GENERAL;
265  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCellsWorkset, pointsPerCell, spaceDim, spaceDim);
266  }
267  else
268  {
269  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
270  }
271 
272  Data<PointScalar,DeviceType> jacobianData(data,rank,extents,variationType,blockPlusDiagonalLastNonDiagonal);
273  return jacobianData;
274  }
275 
276  template<class PointScalar, int spaceDim, typename DeviceType>
 277  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobianDataPrivate(Data<PointScalar,DeviceType> &jacobianData,
 278  const int &pointsPerCell, const Data<PointScalar,DeviceType> &refData,
279  const int startCell, const int endCell) const
280  {
281  const int numCellsWorkset = (endCell == -1) ? (numCells_ - startCell) : (endCell - startCell);
282 
283  if (cellGeometryType_ == UNIFORM_GRID)
284  {
285  if (uniformJacobianModulus() != 1)
286  {
287  int cellTypeModulus = uniformJacobianModulus();
288 
289  auto dataView3 = jacobianData.getUnderlyingView3(); // (cellTypeModulus, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
290  auto dataHost = Kokkos::create_mirror_view(dataView3);
291 
292  const int startCellType = startCell % cellTypeModulus;
293  const int endCellType = (numCellsWorkset >= cellTypeModulus) ? startCellType + cellTypeModulus : startCellType + numCellsWorkset;
294  const int gridCellOrdinal = 0; // sample cell
295  for (int cellType=startCellType; cellType<endCellType; cellType++)
296  {
297  const int subdivisionOrdinal = cellType % cellTypeModulus;
298  const int nodeZero = 0;
299  // simplex Jacobian formula is J_00 = x1 - x0, J_01 = x2 - x0, etc.
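 // Worked example (illustrative, assuming classic shards quad numbering): for a grid cell [0,h] x [0,h]
 // subdivided via TWO_TRIANGLES_RIGHT, subdivision 0 has vertices x0=(0,0), x1=(h,0), x2=(h,h), so
 // J = [ x1-x0 | x2-x0 ] = [[h, h], [0, h]] and det(J) = h^2, consistent with the physical triangle
 // area h^2/2 = det(J) * (reference triangle area 1/2).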
300  for (int i=0; i<spaceDim; i++)
301  {
302  for (int j=0; j<spaceDim; j++)
303  {
304  const int node = j+1; // this is the only node other than the 0 node that has non-zero derivative in the j direction -- and this has unit derivative
305  // nodeZero has derivative -1 in every dimension.
306  const auto J_ij = subdivisionCoordinate(gridCellOrdinal, subdivisionOrdinal, node, i) - subdivisionCoordinate(gridCellOrdinal, subdivisionOrdinal, nodeZero, i);
307  dataHost(cellType,i,j) = J_ij;
308  }
309  }
310  }
311 
312  Kokkos::deep_copy(dataView3,dataHost);
313  }
314  else
315  {
316  // diagonal Jacobian
317  auto dataView1 = jacobianData.getUnderlyingView1(); // (spaceDim) allocated in allocateJacobianDataPrivate()
318  const auto domainExtents = domainExtents_;
319  const auto gridCellCounts = gridCellCounts_;
320 
321  using ExecutionSpace = typename DeviceType::execution_space;
322  auto policy = Kokkos::RangePolicy<>(ExecutionSpace(),0,spaceDim);
323  Kokkos::parallel_for("fill jacobian", policy, KOKKOS_LAMBDA(const int d1)
324  {
325  // diagonal jacobian
326  const double REF_SPACE_EXTENT = 2.0;
327  dataView1(d1) = (domainExtents[d1] / REF_SPACE_EXTENT) / gridCellCounts[d1];
328  });
329  ExecutionSpace().fence();
330  }
331  }
332  else if (cellGeometryType_ == TENSOR_GRID)
333  {
334  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "tensor grid support not yet implemented");
335  }
336  else if ((cellGeometryType_ == FIRST_ORDER) || (cellGeometryType_ == HIGHER_ORDER))
337  {
338  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
339  if (simplex)
340  {
341  auto dataView3 = jacobianData.getUnderlyingView3(); // (numCells_, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
342 
343  // get local (shallow) copies to avoid implicit references to this
344  auto cellToNodes = cellToNodes_;
345  auto nodes = nodes_;
346 
347  using ExecutionSpace = typename DeviceType::execution_space;
348  auto policy = Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<3>>({startCell,0,0},{numCellsWorkset,spaceDim,spaceDim});
349 
350  Kokkos::parallel_for("compute first-order simplex Jacobians", policy,
351  KOKKOS_LAMBDA (const int &cellOrdinal, const int &d1, const int &d2) {
352  const int nodeZero = 0; // nodeZero has derivative -1 in every dimension.
353  const int node = d2+1; // this is the only node other than the 0 node that has non-zero derivative in the d2 direction -- and this has unit derivative (except in 1D, where each derivative is ±0.5)
354  const auto & nodeCoord = nodes(cellToNodes(cellOrdinal,node), d1);
355  const auto & nodeZeroCoord = nodes(cellToNodes(cellOrdinal,nodeZero), d1);
356  const PointScalar J_ij = nodeCoord - nodeZeroCoord;
357  dataView3(cellOrdinal,d1,d2) = (spaceDim != 1) ? J_ij : J_ij * 0.5;
358  });
359  }
360  else
361  {
363  auto basisForNodes = this->basisForNodes();
364 
365  if (affine_)
366  {
367  // no point variation
368  auto dataView3 = jacobianData.getUnderlyingView3(); // (numCellsWorkset, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
369 
370  // TODO: find an allocation-free way to do this… (consider modifying CellTools::setJacobian() to support affine case.)
371  const int onePoint = 1;
372  auto testPointView = getMatchingViewWithLabel(dataView3, "CellGeometryProvider: test point", onePoint, spaceDim);
373  auto tempData = getMatchingViewWithLabel(dataView3, "CellGeometryProvider: temporary Jacobian data", numCellsWorkset, onePoint, spaceDim, spaceDim);
374 
375  Kokkos::deep_copy(testPointView, 0.0);
376 
377  CellTools::setJacobian(tempData, testPointView, *this, basisForNodes, startCell, endCell);
378 
379  auto tempDataSubview = Kokkos::subview(tempData, Kokkos::ALL(), 0, Kokkos::ALL(), Kokkos::ALL());
380  Kokkos::deep_copy(dataView3, tempDataSubview);
381  }
382  else
383  {
384  auto dataView = jacobianData.getUnderlyingView(); // (numCellsWorkset, pointsPerCell, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
385  TEUCHOS_TEST_FOR_EXCEPTION(basisForNodes == Teuchos::null, std::invalid_argument, "basisForNodes must not be null");
386  TEUCHOS_TEST_FOR_EXCEPTION(dataView.size() == 0, std::invalid_argument, "underlying view is not valid");
387 
388  // refData should contain the basis gradients; shape is (F,P,D) or (C,F,P,D)
389  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!refData.isValid(), std::invalid_argument, "refData should be a valid container for cases with non-affine geometry");
390  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((refData.rank() != 3) && (refData.rank() != 4), std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
391  if (refData.rank() == 3)
392  {
393  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(0) != basisForNodes->getCardinality(), std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
394  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(1) != pointsPerCell, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
395  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(2) != spaceDim, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
396  }
397  else
398  {
399  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(0) != numCellsWorkset, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
400  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(1) != basisForNodes->getCardinality(), std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
401  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(2) != pointsPerCell, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
402  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(3) != spaceDim, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
403  }
404 
405  CellTools::setJacobian(dataView, *this, refData, startCell, endCell);
406  }
407  }
408  }
409  else
410  {
411  // TODO: handle the other cases
412  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
413  }
414  }
415 
416  // Uniform grid constructor, with optional subdivision into simplices
417  template<class PointScalar, int spaceDim, typename DeviceType>
418  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(const Kokkos::Array<PointScalar,spaceDim> &origin,
419  const Kokkos::Array<PointScalar,spaceDim> &domainExtents,
420  const Kokkos::Array<int,spaceDim> &gridCellCounts,
421  SubdivisionStrategy subdivisionStrategy,
422  HypercubeNodeOrdering nodeOrdering)
423  :
424  nodeOrdering_(nodeOrdering),
425  cellGeometryType_(UNIFORM_GRID),
426  subdivisionStrategy_(subdivisionStrategy),
427  affine_(true),
428  origin_(origin),
429  domainExtents_(domainExtents),
430  gridCellCounts_(gridCellCounts)
431  {
432  numCells_ = 1;
433  for (int d=0; d<spaceDim; d++)
434  {
435  numCells_ *= gridCellCounts_[d];
436  }
437  numCells_ *= numCellsPerGridCell(subdivisionStrategy_);
438 
439  shards::CellTopology cellTopo; // will register with HostMemberLookup below
440  if (subdivisionStrategy_ == NO_SUBDIVISION)
441  {
442  // hypercube
443  numNodesPerCell_ = 1 << spaceDim; // 2^D vertices in a D-dimensional hypercube
444 
445  if (spaceDim == 1)
446  {
447  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Line<> >());
448  }
449  else if (spaceDim == 2)
450  {
451  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Quadrilateral<> >());
452  }
453  else if (spaceDim == 3)
454  {
455  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Hexahedron<> >());
456  }
457  else
458  {
459  // TODO: Once shards supports higher-dimensional hypercubes, initialize cellTopo accordingly
460  }
461  }
462  else
463  {
464  // simplex
465  numNodesPerCell_ = spaceDim + 1; // D+1 vertices in a D-dimensional simplex
466  if (spaceDim == 2)
467  {
468  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Triangle<> >());
469  }
470  else if (spaceDim == 3)
471  {
472  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Tetrahedron<> >());
473  }
474  else
475  {
 476  // TODO: Once shards supports higher-dimensional simplices, initialize cellTopo accordingly
477  }
478  }
479 
481  const int linearPolyOrder = 1;
482  BasisPtr basisForNodes = getBasis<BasisFamily>(cellTopo, FUNCTION_SPACE_HGRAD, linearPolyOrder);
483 
484  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
485  {
 486  // override basisForNodes for quad and hexahedron: the lowest-order (C1) bases below follow the classic
 487  // shards::CellTopology node order, whereas their arbitrary-polynomial-order counterparts do not.
488  if (cellTopo.getKey() == shards::Quadrilateral<>::key)
489  {
490  basisForNodes = Teuchos::rcp( new Basis_HGRAD_QUAD_C1_FEM<DeviceType,PointScalar,PointScalar>() );
491  }
492  else if (cellTopo.getKey() == shards::Hexahedron<>::key)
493  {
494  basisForNodes = Teuchos::rcp( new Basis_HGRAD_HEX_C1_FEM<DeviceType,PointScalar,PointScalar>() );
495  }
496  }
497 
 498  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar, spaceDim, DeviceType>;
 499  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
500  }
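 // Example usage of the uniform-grid constructor above (an illustrative sketch; the DeviceType alias, the
 // qualified access to the TWO_TRIANGLES_RIGHT enumerator, and the defaulted trailing arguments are
 // assumptions about the declaring header, not taken from this file):
 //
 //   using DeviceType = Kokkos::DefaultExecutionSpace::device_type;
 //   using Geometry   = CellGeometry<double,2,DeviceType>;
 //   Kokkos::Array<double,2> origin        {0.0, 0.0};
 //   Kokkos::Array<double,2> domainExtents {1.0, 1.0};
 //   Kokkos::Array<int,2>    gridCellCounts{4, 4};
 //   Geometry geometry(origin, domainExtents, gridCellCounts, Geometry::TWO_TRIANGLES_RIGHT);
 //
 // This describes a 4 x 4 grid on the unit square with each grid cell split into two triangles: 32 cells total.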
501 
502  // Node-based constructor for straight-edged cell geometry.
503  // If claimAffine is true, we assume (without checking) that the mapping from reference space is affine.
504  // (If claimAffine is false, we check whether the topology is simplicial; if so, we conclude that the mapping must be affine.)
505  template<class PointScalar, int spaceDim, typename DeviceType>
 505  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(
 506  const shards::CellTopology &cellTopo,
 507  ScalarView<int,DeviceType> cellToNodes,
508  ScalarView<PointScalar,DeviceType> nodes,
509  const bool claimAffine,
510  const HypercubeNodeOrdering nodeOrdering)
511  :
512  nodeOrdering_(nodeOrdering),
513  cellGeometryType_(FIRST_ORDER),
514  cellToNodes_(cellToNodes),
515  nodes_(nodes)
516  {
517  if(cellToNodes.is_allocated())
518  {
519  numCells_ = cellToNodes.extent_int(0);
520  numNodesPerCell_ = cellToNodes.extent_int(1);
521  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(numNodesPerCell_ != cellTopo.getNodeCount(), std::invalid_argument, "cellToNodes.extent(1) does not match the cell topology node count");
522  }
523  else
524  {
525  numCells_ = nodes.extent_int(0);
526  numNodesPerCell_ = nodes.extent_int(1);
527  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(numNodesPerCell_ != cellTopo.getNodeCount(), std::invalid_argument, "nodes.extent(1) does not match the cell topology node count");
528  }
529 
530 
531  if (!claimAffine)
532  {
533  // if cellTopo is simplicial, then since the geometry is straight-edged, it is also affine
534  const bool simplicialTopo = (cellTopo.getNodeCount() == cellTopo.getDimension() + 1);
535  affine_ = simplicialTopo;
536  }
537  else
538  {
539  affine_ = true;
540  }
541 
543  const int linearPolyOrder = 1;
544  BasisPtr basisForNodes = getBasis<BasisFamily>(cellTopo, FUNCTION_SPACE_HGRAD, linearPolyOrder);
545 
546  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
547  {
 548  // override basisForNodes for quad and hexahedron: the lowest-order (C1) bases below follow the classic
 549  // shards::CellTopology node order, whereas their arbitrary-polynomial-order counterparts do not.
550  if (cellTopo.getKey() == shards::Quadrilateral<>::key)
551  {
552  basisForNodes = Teuchos::rcp( new Basis_HGRAD_QUAD_C1_FEM<DeviceType,PointScalar,PointScalar>() );
553  }
554  else if (cellTopo.getKey() == shards::Hexahedron<>::key)
555  {
556  basisForNodes = Teuchos::rcp( new Basis_HGRAD_HEX_C1_FEM<DeviceType,PointScalar,PointScalar>() );
557  }
558  }
559 
 560  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar, spaceDim, DeviceType>;
 561  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
562  }
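 // Example usage of the node-based constructor above (an illustrative sketch; the view labels, the fill of the
 // connectivity and coordinates, and the defaulted claimAffine/nodeOrdering arguments are assumptions):
 //
 //   // two triangles sharing an edge in 2D: 4 nodes, 2 cells with 3 nodes each
 //   ScalarView<int,DeviceType>    cellToNodes("cell to nodes", 2, 3);
 //   ScalarView<double,DeviceType> nodes      ("node coordinates", 4, 2);
 //   // ... fill cellToNodes and nodes ...
 //   shards::CellTopology triTopo(shards::getCellTopologyData<shards::Triangle<> >());
 //   CellGeometry<double,2,DeviceType> geometry(triTopo, cellToNodes, nodes);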
563 
564  // Constructor for higher-order geometry
565  template<class PointScalar, int spaceDim, typename DeviceType>
 566  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(BasisPtr basisForNodes,
 567  ScalarView<PointScalar,DeviceType> cellNodes)
568  :
569  nodeOrdering_(HYPERCUBE_NODE_ORDER_TENSOR),
570  cellGeometryType_(HIGHER_ORDER),
571  nodes_(cellNodes)
572  {
573  numCells_ = cellNodes.extent_int(0);
574  numNodesPerCell_ = cellNodes.extent_int(1);
575 
576  // if basis degree is 1, mark as first-order geometry
577  const bool firstOrderGeometry = (basisForNodes->getDegree() == 1);
578  cellGeometryType_ = firstOrderGeometry ? FIRST_ORDER : HIGHER_ORDER;
579 
580  shards::CellTopology cellTopo = basisForNodes->getBaseCellTopology();
581 
582  if (firstOrderGeometry && (cellTopo.getNodeCount() == spaceDim + 1)) // lowest-order and simplicial
583  {
584  affine_ = true;
585  }
586  else
587  {
588  affine_ = false;
589  }
 590  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar, spaceDim, DeviceType>;
 591  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
592  }
593 
594  template<class PointScalar, int spaceDim, typename DeviceType>
595  KOKKOS_INLINE_FUNCTION
 596  bool CellGeometry<PointScalar,spaceDim,DeviceType>::affine() const
 597  {
598  return affine_;
599  }
600 
601  template<class PointScalar, int spaceDim, typename DeviceType>
 602  TensorData<PointScalar,DeviceType>
 603  CellGeometry<PointScalar,spaceDim,DeviceType>::allocateCellMeasure( const Data<PointScalar,DeviceType> & jacobianDet,
 604  const TensorData<PointScalar,DeviceType> & cubatureWeights ) const
605  {
606  // Output possibilities for a cubatureWeights with N components:
607  // 1. For AFFINE elements (jacobianDet cell-wise constant), returns a container with N+1 tensorial components; the first component corresponds to cells
608  // 2. Otherwise, returns a container with 1 tensorial component
609 
610  INTREPID2_TEST_FOR_EXCEPTION(cubatureWeights.rank() != 1, std::invalid_argument, "cubatureWeights container must have shape (P)");
611 
612  const int numTensorComponents = affine_ ? cubatureWeights.numTensorComponents() + 1 : 1;
613  std::vector< Data<PointScalar,DeviceType> > tensorComponents(numTensorComponents);
614 
615  if (affine_)
616  {
617  const int cellExtent = jacobianDet.extent_int(0);
618  Kokkos::Array<DataVariationType,7> cellVariationTypes {jacobianDet.getVariationTypes()[0], CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT};
619  const int cellDataDim = jacobianDet.getDataExtent(0);
620  Kokkos::Array<int,7> cellExtents{cellExtent,1,1,1,1,1,1};
621 
622  ScalarView<PointScalar,DeviceType> detDataView ("cell relative volumes", cellDataDim);
623  tensorComponents[0] = Data<PointScalar,DeviceType>(detDataView,1,cellExtents,cellVariationTypes);
624 
625  for (int cubTensorComponent=0; cubTensorComponent<numTensorComponents-1; cubTensorComponent++)
626  {
627  auto cubatureComponent = cubatureWeights.getTensorComponent(cubTensorComponent);
628  const auto cubatureExtents = cubatureComponent.getExtents();
629  const auto cubatureVariationTypes = cubatureComponent.getVariationTypes();
630  const int numPoints = cubatureComponent.getDataExtent(0);
631  ScalarView<PointScalar,DeviceType> cubatureWeightView ("cubature component weights", numPoints);
632  const int pointComponentRank = 1;
633  tensorComponents[cubTensorComponent+1] = Data<PointScalar,DeviceType>(cubatureWeightView,pointComponentRank,cubatureExtents,cubatureVariationTypes);
634  }
635  }
636  else
637  {
638  const int cellExtent = jacobianDet.extent_int(0);
639  Kokkos::Array<DataVariationType,7> variationTypes {jacobianDet.getVariationTypes()[0], GENERAL, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT};
640  const int cellDataDim = jacobianDet.getDataExtent(0);
641 
642  const int numPoints = cubatureWeights.extent_int(0);
643  Kokkos::Array<int,7> extents{cellExtent,numPoints,1,1,1,1,1};
644 
645  ScalarView<PointScalar,DeviceType> cubatureWeightView;
646  if (variationTypes[0] != CONSTANT)
647  {
648  cubatureWeightView = ScalarView<PointScalar,DeviceType>("cell measure", cellDataDim, numPoints);
649  }
650  else
651  {
652  cubatureWeightView = ScalarView<PointScalar,DeviceType>("cell measure", numPoints);
653  }
654  const int cellMeasureRank = 2;
655  tensorComponents[0] = Data<PointScalar,DeviceType>(cubatureWeightView,cellMeasureRank,extents,variationTypes);
656  }
657  const bool separateFirstComponent = (numTensorComponents > 1);
658  return TensorData<PointScalar,DeviceType>(tensorComponents, separateFirstComponent);
659  }
660 
661  template<class PointScalar, int spaceDim, typename DeviceType>
 662  void CellGeometry<PointScalar,spaceDim,DeviceType>::computeCellMeasure( TensorData<PointScalar,DeviceType> &cellMeasure,
 663  const Data<PointScalar,DeviceType> & jacobianDet,
664  const TensorData<PointScalar,DeviceType> & cubatureWeights ) const
665  {
666  // Output possibilities for a cubatureWeights with N components:
667  // 1. For AFFINE elements (jacobianDet constant on each cell), returns a container with N+1 tensorial components; the first component corresponds to cells
668  // 2. Otherwise, returns a container with 1 tensorial component
669 
670  INTREPID2_TEST_FOR_EXCEPTION((cellMeasure.numTensorComponents() != cubatureWeights.numTensorComponents() + 1) && (cellMeasure.numTensorComponents() != 1), std::invalid_argument,
671  "cellMeasure must either have a tensor component count of 1 or a tensor component count that is one higher than that of cubatureWeights");
672 
673  INTREPID2_TEST_FOR_EXCEPTION(cubatureWeights.rank() != 1, std::invalid_argument, "cubatureWeights container must have shape (P)");
674 
675  if (cellMeasure.numTensorComponents() == cubatureWeights.numTensorComponents() + 1)
676  {
677  // affine case; the first component should contain the cell volume divided by ref cell volume; this should be stored in jacobianDet
678  Kokkos::deep_copy(cellMeasure.getTensorComponent(0).getUnderlyingView1(), jacobianDet.getUnderlyingView1()); // copy point-invariant data from jacobianDet to the first tensor component of cell measure container
679  const int numTensorDimensions = cubatureWeights.numTensorComponents();
680  for (int i=1; i<numTensorDimensions+1; i++)
681  {
682  Kokkos::deep_copy(cellMeasure.getTensorComponent(i).getUnderlyingView1(), cubatureWeights.getTensorComponent(i-1).getUnderlyingView1());
683  }
684  }
685  else
686  {
687  auto detVaries = jacobianDet.getVariationTypes();
688 
689  const bool detCellVaries = detVaries[0] != CONSTANT;
690  const bool detPointVaries = detVaries[1] != CONSTANT;
691 
692  if (detCellVaries && detPointVaries)
693  {
694  auto cellMeasureData = cellMeasure.getTensorComponent(0).getUnderlyingView2();
695  auto detData = jacobianDet.getUnderlyingView2();
696  const int numCells = detData.extent_int(0);
697  const int numPoints = detData.extent_int(1);
698  INTREPID2_TEST_FOR_EXCEPTION(numCells != cellMeasureData.extent_int(0), std::invalid_argument, "cellMeasureData doesn't match jacobianDet in cell dimension");
699  INTREPID2_TEST_FOR_EXCEPTION(numPoints != cellMeasureData.extent_int(1), std::invalid_argument, "cellMeasureData doesn't match jacobianDet in point dimension");
700 
701  // We implement this case as a functor (rather than a lambda) to work around an apparent CUDA 10.1.243 compiler bug
702  Impl::CellMeasureFunctor<PointScalar,spaceDim,DeviceType> cellMeasureFunctor(cellMeasureData, detData, cubatureWeights);
703 
704  using ExecutionSpace = typename DeviceType::execution_space;
705  Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<2>> rangePolicy({0,0},{numCells,numPoints});
706  Kokkos::parallel_for(rangePolicy, cellMeasureFunctor);
707  }
708  else if (detCellVaries && !detPointVaries)
709  {
710  auto cellMeasureData = cellMeasure.getTensorComponent(0).getUnderlyingView2();
711  auto detData = jacobianDet.getUnderlyingView1();
712  using ExecutionSpace = typename DeviceType::execution_space;
713  Kokkos::parallel_for(
714  Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<2>>({0,0},{detData.extent_int(0),cubatureWeights.extent_int(0)}),
715  KOKKOS_LAMBDA (int cellOrdinal, int pointOrdinal) {
716  cellMeasureData(cellOrdinal,pointOrdinal) = detData(cellOrdinal) * cubatureWeights(pointOrdinal);
717  });
718  }
719  else
720  {
721  // constant jacobian det case
722  // cell measure data has shape (P)
723  auto cellMeasureData = cellMeasure.getTensorComponent(0).getUnderlyingView1();
724  auto detData = jacobianDet.getUnderlyingView1();
725  using ExecutionSpace = typename DeviceType::execution_space;
726  Kokkos::parallel_for(Kokkos::RangePolicy<ExecutionSpace>(0,cellMeasureData.extent_int(0)),
727  KOKKOS_LAMBDA (const int &pointOrdinal) {
728  cellMeasureData(pointOrdinal) = detData(0) * cubatureWeights(pointOrdinal);
729  });
730  }
731  }
732  }
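 // Typical usage of the allocate/compute pair above (a sketch; it assumes jacobianDet and cubatureWeights have
 // already been computed, e.g. via CellTools::setJacobianDet() and a tensor-product cubature rule):
 //
 //   auto cellMeasures = geometry.allocateCellMeasure(jacobianDet, cubatureWeights);
 //   geometry.computeCellMeasure(cellMeasures, jacobianDet, cubatureWeights);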
733 
734  template<class PointScalar, int spaceDim, typename DeviceType>
735  typename CellGeometry<PointScalar,spaceDim,DeviceType>::BasisPtr
 736  CellGeometry<PointScalar,spaceDim,DeviceType>::basisForNodes() const
 737  {
 738  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar, spaceDim, DeviceType>;
 739  return HostMemberLookup::getBasis(this);
740  }
741 
742  template<class PointScalar, int spaceDim, typename DeviceType>
 743  const shards::CellTopology & CellGeometry<PointScalar,spaceDim,DeviceType>::cellTopology() const
 744  {
 745  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar, spaceDim, DeviceType>;
 746  return HostMemberLookup::getCellTopology(this);
747  }
748 
749  template<class PointScalar, int spaceDim, typename DeviceType>
750  KOKKOS_INLINE_FUNCTION
751  ordinal_type CellGeometry<PointScalar,spaceDim,DeviceType>::cellToNode(ordinal_type cell, ordinal_type node) const
752  {
753  if (cellToNodes_.is_allocated())
754  {
755  return cellToNodes_(cell,node);
756  }
757  else if (cellGeometryType_ == UNIFORM_GRID)
758  {
759  const ordinal_type numSubdivisions = numCellsPerGridCell(subdivisionStrategy_);
760  const ordinal_type gridCell = cell / numSubdivisions;
761  const ordinal_type subdivisionOrdinal = cell % numSubdivisions;
762 
763  Kokkos::Array<ordinal_type,spaceDim> gridCellCoords;
764  ordinal_type gridCellDivided = gridCell;
765  ordinal_type numGridNodes = 1;
766  ordinal_type numGridCells = 1;
767  for (int d=0; d<spaceDim; d++)
768  {
769  gridCellCoords[d] = gridCellDivided % gridCellCounts_[d];
770  gridCellDivided = gridCellDivided / gridCellCounts_[d];
771  numGridCells *= gridCellCounts_[d];
772  numGridNodes *= (gridCellCounts_[d] + 1);
773  }
774 
775  const ordinal_type gridCellNode = gridCellNodeForSubdivisionNode(gridCell, subdivisionOrdinal, node);
776 
777  // most subdivision strategies don't add internal node(s), but a couple do. If so, the gridCellNode
778  // will be equal to the number of vertices in the grid cell.
779  const ordinal_type numInteriorNodes = ((subdivisionStrategy_ == FOUR_TRIANGLES) || (subdivisionStrategy_ == SIX_PYRAMIDS)) ? 1 : 0;
780 
781  // the global nodes list the grid-cell-interior nodes first.
782  const ordinal_type gridNodeNumberingOffset = numInteriorNodes * numGridCells;
783 
784  const ordinal_type numNodesPerGridCell = 1 << spaceDim;
785  if (gridCellNode >= numNodesPerGridCell)
786  {
787  const ordinal_type interiorGridNodeOffset = gridCellNode - numNodesPerGridCell;
788  return numNodesPerGridCell * gridCell + interiorGridNodeOffset;
789  }
790  else
791  {
792  // use gridCellCoords, plus hypercubeComponentNodeNumber(gridCellNode, d), to set up a Cartesian vertex numbering of the grid as a whole. Then, add gridNodeNumberingOffset to account for interior nodes.
793  // let x be the fastest-moving index
794  ordinal_type d_index_stride = 1; // number of node indices we move by moving 1 node in dimension d
795  ordinal_type cartesianNodeNumber = 0;
796  for (int d=0; d<spaceDim; d++)
797  {
798  const ordinal_type d_index = gridCellCoords[d] + hypercubeComponentNodeNumber(gridCellNode,d);
799  cartesianNodeNumber += d_index * d_index_stride;
800  d_index_stride *= (gridCellCounts_[d] + 1);
801  }
802  return cartesianNodeNumber + gridNodeNumberingOffset;
803  }
804  }
805  else
806  {
807  // cellToNode() not supported
808  return -1;
809  }
810  }
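 // Worked example (illustrative): for a 2D uniform grid with gridCellCounts = {2,2}, NO_SUBDIVISION, and
 // classic shards node ordering, cell 0's node 2 (its upper-right vertex) has hypercube components (1,1),
 // so cartesianNodeNumber = 1*1 + 1*3 = 4: the center vertex of the 3 x 3 vertex grid, numbered with x
 // fastest.  With no interior nodes, gridNodeNumberingOffset = 0, so cellToNode(0,2) = 4.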
811 
812  template<class PointScalar, int spaceDim, typename DeviceType>
813  KOKKOS_INLINE_FUNCTION
815  {
816  if (cellGeometryType_ == UNIFORM_GRID)
817  {
818  const int numSubdivisions = numCellsPerGridCell(subdivisionStrategy_);
819  if (numSubdivisions == 1)
820  {
821  return CONSTANT;
822  }
823  else
824  {
825  return MODULAR;
826  }
827  }
828  else return GENERAL;
829  }
830 
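 // getJacobianRefData() returns the reference-space data (basis gradients at the given points) that
 // setJacobianData() requires for non-affine geometry.  For uniform/tensor grids, simplicial first-order
 // geometry, and affine geometry, no such data is needed, and an empty Data container is returned.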
831  template<class PointScalar, int spaceDim, typename DeviceType>
 832  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::getJacobianRefData(const ScalarView<PointScalar,DeviceType> &points) const
 833  {
834  Data<PointScalar,DeviceType> emptyRefData;
835  if (cellGeometryType_ == UNIFORM_GRID)
836  {
837  // no need for basis computations
838  return emptyRefData;
839  }
840  else if (cellGeometryType_ == TENSOR_GRID)
841  {
842  // no need for basis values
843  return emptyRefData;
844  }
845  else if ((cellGeometryType_ == FIRST_ORDER) || (cellGeometryType_ == HIGHER_ORDER))
846  {
847  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
848  if (simplex)
849  {
850  // no need for precomputed basis values
851  return emptyRefData;
852  }
853  else
854  {
855  auto basisForNodes = this->basisForNodes();
856 
857  if (affine_)
858  {
859  // no need for precomputed basis values
860  return emptyRefData;
861  }
862  else
863  {
864  // 2 use cases: (P,D) and (C,P,D).
865  // if (P,D), call the TensorPoints variant
866  if (points.rank() == 2)
867  {
868  TensorPoints<PointScalar,DeviceType> tensorPoints(points);
869  return getJacobianRefData(tensorPoints);
870  }
871  else
872  {
873  const int numCells = points.extent_int(0);
874  const int numPoints = points.extent_int(1);
875  const int numFields = basisForNodes->getCardinality();
876 
877  auto cellBasisGradientsView = getMatchingViewWithLabel(points, "CellGeometryProvider: cellBasisGradients", numCells, numFields, numPoints, spaceDim);
878  auto basisGradientsView = getMatchingViewWithLabel(points, "CellGeometryProvider: basisGradients", numFields, numPoints, spaceDim);
879 
880  for (int cellOrdinal=0; cellOrdinal<numCells; cellOrdinal++)
881  {
882  auto refPointsForCell = Kokkos::subview(points, cellOrdinal, Kokkos::ALL(), Kokkos::ALL());
883  basisForNodes->getValues(basisGradientsView, refPointsForCell, OPERATOR_GRAD);
884 
885  // At some (likely relatively small) memory cost, we copy the BasisGradients into an explicit (F,P,D) container.
 886  // Given that we expect to reuse this for a non-trivial number of cells in the common use case, the extra memory
 887  // cost is likely worth it. (One might want to revisit this in cases of high spaceDim
888  // and/or very high polynomial order.)
889 
890  using ExecutionSpace = typename DeviceType::execution_space;
891  auto policy = Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<3>>({0,0,0},{numFields,numPoints,spaceDim});
892 
893  Kokkos::parallel_for("copy basis gradients", policy,
894  KOKKOS_LAMBDA (const int &fieldOrdinal, const int &pointOrdinal, const int &d) {
895  cellBasisGradientsView(cellOrdinal,fieldOrdinal,pointOrdinal,d) = basisGradientsView(fieldOrdinal,pointOrdinal,d);
896  });
897  ExecutionSpace().fence();
898  }
899  Data<PointScalar,DeviceType> basisRefData(cellBasisGradientsView);
900  return basisRefData;
901  }
902  }
903  }
904  }
905  else
906  {
907  // TODO: handle the other cases
908  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
909  }
910  return emptyRefData;
911 
912 
913  }
914 
915  template<class PointScalar, int spaceDim, typename DeviceType>
 916  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::getJacobianRefData(const TensorPoints<PointScalar,DeviceType> &points) const
 917  {
918  Data<PointScalar,DeviceType> emptyRefData;
919  if (cellGeometryType_ == UNIFORM_GRID)
920  {
921  // no need for basis computations
922  return emptyRefData;
923  }
924  else if (cellGeometryType_ == TENSOR_GRID)
925  {
926  // no need for basis values
927  return emptyRefData;
928  }
929  else if ((cellGeometryType_ == FIRST_ORDER) || (cellGeometryType_ == HIGHER_ORDER))
930  {
931  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
932  if (simplex)
933  {
934  // no need for precomputed basis values
935  return emptyRefData;
936  }
937  else
938  {
939  auto basisForNodes = this->basisForNodes();
940 
941  if (affine_)
942  {
943  // no need for precomputed basis values
944  return emptyRefData;
945  }
946  else
947  {
948  auto basisGradients = basisForNodes->allocateBasisValues(points, OPERATOR_GRAD);
949  basisForNodes->getValues(basisGradients, points, OPERATOR_GRAD);
950 
951  int numPoints = points.extent_int(0);
952  int numFields = basisForNodes->getCardinality();
953 
954  // At some (likely relatively small) memory cost, we copy the BasisGradients into an explicit (F,P,D) container.
 955  // Given that we expect to reuse this for a non-trivial number of cells in the common use case, the extra memory
 956  // cost is likely worth it. (One might want to revisit this in cases of high spaceDim
957  // and/or very high polynomial order.)
958 
959  auto firstPointComponentView = points.getTensorComponent(0); // (P,D0)
960  auto basisGradientsView = getMatchingViewWithLabel(firstPointComponentView, "CellGeometryProvider: temporary basisGradients", numFields, numPoints, spaceDim);
961 
962  using ExecutionSpace = typename DeviceType::execution_space;
963  auto policy = Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<3>>({0,0,0},{numFields,numPoints,spaceDim});
964 
965  Kokkos::parallel_for("copy basis gradients", policy,
966  KOKKOS_LAMBDA (const int &fieldOrdinal, const int &pointOrdinal, const int &d) {
967  basisGradientsView(fieldOrdinal,pointOrdinal,d) = basisGradients(fieldOrdinal,pointOrdinal,d);
968  });
969  ExecutionSpace().fence();
970 
971  Data<PointScalar,DeviceType> basisRefData(basisGradientsView);
972  return basisRefData;
973  }
974  }
975  }
976  else
977  {
978  // TODO: handle the other cases
979  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
980  }
981  return emptyRefData;
982  }
983 
984  template<class PointScalar, int spaceDim, typename DeviceType>
985  KOKKOS_INLINE_FUNCTION
 986  int CellGeometry<PointScalar,spaceDim,DeviceType>::hypercubeComponentNodeNumber(int hypercubeNodeNumber, int d) const
 987  {
988  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
989  {
 990  // note that shards numbers the quad nodes counter-clockwise;
 991  // the cube is the tensor product of the (x,y) quad with a line segment in z
992  if (d==0)
993  {
994  if ((hypercubeNodeNumber % 4 == 1) || (hypercubeNodeNumber % 4 == 2))
995  return 1;
996  else
997  return 0;
998  }
999  else if (d==1)
1000  {
1001  if ((hypercubeNodeNumber % 4 == 2) || (hypercubeNodeNumber % 4 == 3))
1002  return 1;
1003  else
1004  return 0;
1005  }
1006  }
1007  // tensor formula coincides with shards formula for d ≥ 2
1008  const int nodesForPriorDimensions = 1 << d;
1009  if ((hypercubeNodeNumber / nodesForPriorDimensions) % 2 == 1)
1010  return 1;
1011  else
1012  return 0;
1013  }
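 // Worked example (illustrative): with the tensor ordering in 3D, node 6 (binary 110) has components
 // d=0: (6/1) % 2 = 0,  d=1: (6/2) % 2 = 1,  d=2: (6/4) % 2 = 1 -- i.e., the vertex at (x_min, y_max, z_max).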
1014 
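 // initializeOrientations() computes cell orientations on the host and stores them in orientations_ as a Data
 // container of shape (C).  For uniform/tensor grids the orientations repeat with period
 // numCellsPerGridCell(subdivisionStrategy_), so only that many distinct entries are stored (CONSTANT or
 // MODULAR variation); otherwise one orientation is stored per cell (GENERAL variation).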
1015  template<class PointScalar, int spaceDim, typename DeviceType>
 1016  void CellGeometry<PointScalar,spaceDim,DeviceType>::initializeOrientations()
 1017  {
1018  using HostExecSpace = Kokkos::DefaultHostExecutionSpace;
1019 
1020  const bool isGridType = (cellGeometryType_ == TENSOR_GRID) || (cellGeometryType_ == UNIFORM_GRID);
1021  const int numOrientations = isGridType ? numCellsPerGridCell(subdivisionStrategy_) : numCells();
1022 
1023  const int nodesPerCell = numNodesPerCell();
1024 
1025  ScalarView<Orientation, DeviceType> orientationsView("orientations", numOrientations);
1026  auto orientationsHost = Kokkos::create_mirror_view(typename HostExecSpace::memory_space(), orientationsView);
1027 
1028  DataVariationType cellVariationType;
1029 
1030  if (isGridType)
1031  {
 1032  // then there are as many distinct orientations possible as there are cells per grid cell
1033  // fill cellNodesHost with sample nodes from grid cell 0
1034  const int numSubdivisions = numCellsPerGridCell(subdivisionStrategy_); // can be up to 6
1035  ScalarView<PointScalar, HostExecSpace> cellNodesHost("cellNodesHost",numOrientations,nodesPerCell); // (C,N) -- where C = numOrientations
1036 
1037 #if defined(INTREPID2_COMPILE_DEVICE_CODE)
1038 #else
1040  const int gridCellOrdinal = 0;
1041  auto hostPolicy = Kokkos::MDRangePolicy<HostExecSpace,Kokkos::Rank<2>>({0,0},{numSubdivisions,nodesPerCell});
1042  Kokkos::parallel_for("fill cellNodesHost", hostPolicy,
1043  [this,gridCellOrdinal,cellNodesHost] (const int &subdivisionOrdinal, const int &nodeInCell) {
1044  auto node = this->gridCellNodeForSubdivisionNode(gridCellOrdinal, subdivisionOrdinal, nodeInCell);
1045  cellNodesHost(subdivisionOrdinal,nodeInCell) = node;
1046  });
1047 #endif
1048  cellVariationType = (numSubdivisions == 1) ? CONSTANT : MODULAR;
1049  OrientationTools<HostExecSpace>::getOrientation(orientationsHost,cellNodesHost,this->cellTopology());
1050  }
1051  else
1052  {
1053  cellVariationType = GENERAL;
1054  auto cellNodesHost = Kokkos::create_mirror_view_and_copy(typename HostExecSpace::memory_space(), cellToNodes_);
1055  OrientationTools<HostExecSpace>::getOrientation(orientationsHost,cellNodesHost,this->cellTopology());
1056  }
1057  Kokkos::deep_copy(orientationsView,orientationsHost);
1058 
1059  const int orientationsRank = 1; // shape (C)
1060  const Kokkos::Array<int,7> orientationExtents {static_cast<int>(numCells_),1,1,1,1,1,1};
1061  const Kokkos::Array<DataVariationType,7> orientationVariationTypes { cellVariationType, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT};
1062  orientations_ = Data<Orientation,DeviceType>(orientationsView, orientationsRank, orientationExtents, orientationVariationTypes);
1063  }
1064 
1065  template<class PointScalar, int spaceDim, typename DeviceType>
1066  KOKKOS_INLINE_FUNCTION
 1067  size_t CellGeometry<PointScalar,spaceDim,DeviceType>::extent(const int& r) const {
 1068  if (r == 0)
1069  {
1070  return numCells_;
1071  }
1072  else if (r == 1)
1073  {
1074  return numNodesPerCell_;
1075  }
1076  else if (r == 2)
1077  {
1078  return spaceDim;
1079  }
1080  else
1081  {
1082  return 1;
1083  }
1084  }
1085 
1086  template<class PointScalar, int spaceDim, typename DeviceType>
1087  template <typename iType>
1088  KOKKOS_INLINE_FUNCTION
1089  typename std::enable_if<std::is_integral<iType>::value, int>::type
 1090  CellGeometry<PointScalar,spaceDim,DeviceType>::extent_int(const iType& r) const
 1091  {
1092  return static_cast<int>(extent(r));
1093  }
1094 
1095  template<class PointScalar, int spaceDim, typename DeviceType>
1096  KOKKOS_INLINE_FUNCTION
 1097  typename CellGeometry<PointScalar,spaceDim,DeviceType>::HypercubeNodeOrdering
 1098  CellGeometry<PointScalar,spaceDim,DeviceType>::nodeOrderingForHypercubes() const
 1099  {
1100  return nodeOrdering_;
1101  }
1102 
1103  template<class PointScalar, int spaceDim, typename DeviceType>
1104  KOKKOS_INLINE_FUNCTION
 1105  int CellGeometry<PointScalar,spaceDim,DeviceType>::numCells() const
 1106  {
1107  return numCells_;
1108  }
1109 
1110  template<class PointScalar, int spaceDim, typename DeviceType>
1111  KOKKOS_INLINE_FUNCTION
 1112  int CellGeometry<PointScalar,spaceDim,DeviceType>::numCellsInDimension(const int &dim) const
 1113  {
1114  if (cellGeometryType_ == UNIFORM_GRID)
1115  {
1116  return gridCellCounts_[dim];
1117  }
1118  else if (cellGeometryType_ == TENSOR_GRID)
1119  {
1120  return tensorVertices_.extent_int(dim);
1121  }
1122  else
1123  {
1124  return -1; // not valid for this cell geometry type
1125  }
1126  }
1127 
1128  template<class PointScalar, int spaceDim, typename DeviceType>
1129  KOKKOS_INLINE_FUNCTION
 1130  int CellGeometry<PointScalar,spaceDim,DeviceType>::numNodesPerCell() const
 1131  {
1132  return numNodesPerCell_;
1133  }
1134 
1135  template<class PointScalar, int spaceDim, typename DeviceType>
1136  KOKKOS_INLINE_FUNCTION
 1137  Orientation CellGeometry<PointScalar,spaceDim,DeviceType>::getOrientation(int &cellNumber) const
 1138  {
1139  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!orientations_.isValid(), std::invalid_argument, "orientations_ not initialized; call initializeOrientations() first");
1140  return orientations_(cellNumber);
1141  }
1142 
1143  template<class PointScalar, int spaceDim, typename DeviceType>
 1144  Data<Orientation,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::getOrientations()
 1145  {
1146  if (!orientations_.isValid())
1147  {
1148  initializeOrientations();
1149  }
1150  return orientations_;
1151  }
1152 
1153  template<class PointScalar, int spaceDim, typename DeviceType>
1154  KOKKOS_INLINE_FUNCTION
1155  PointScalar CellGeometry<PointScalar,spaceDim,DeviceType>::gridCellCoordinate(const int &gridCellOrdinal, const int &localNodeNumber, const int &dim) const
1156  {
1157  const int componentNode = hypercubeComponentNodeNumber(localNodeNumber, dim);
1158  int cellCountForPriorDimensions = 1;
1159  for (int d=0; d<dim; d++)
1160  {
1161  cellCountForPriorDimensions *= numCellsInDimension(d);
1162  }
1163  const int componentGridCellOrdinal = (gridCellOrdinal / cellCountForPriorDimensions) % numCellsInDimension(dim);
1164  const int vertexOrdinal = componentGridCellOrdinal + componentNode;
1165  if (cellGeometryType_ == UNIFORM_GRID)
1166  {
1167  return origin_[dim] + (vertexOrdinal * domainExtents_[dim]) / gridCellCounts_[dim];
1168  }
1169  else if (cellGeometryType_ == TENSOR_GRID)
1170  {
1171  Kokkos::Array<int,spaceDim> pointOrdinalComponents;
1172  for (int d=0; d<spaceDim; d++)
1173  {
1174  pointOrdinalComponents[d] = 0;
1175  }
1176  pointOrdinalComponents[dim] = vertexOrdinal;
1177  return tensorVertices_(pointOrdinalComponents,dim);
1178  }
1179  else
1180  {
1181  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Unsupported geometry type");
1182  return 0; // unreachable; here to avoid compiler warnings
1183  }
1184  }
1185 
1186  template<class PointScalar, int spaceDim, typename DeviceType>
1187  KOKKOS_INLINE_FUNCTION
 1188  unsigned CellGeometry<PointScalar,spaceDim,DeviceType>::rank() const
 1189  {
1190  return 3; // (C,N,D)
1191  }
1192 
1193  template<class PointScalar, int spaceDim, typename DeviceType>
1194  KOKKOS_INLINE_FUNCTION
1195  int CellGeometry<PointScalar,spaceDim,DeviceType>::gridCellNodeForSubdivisionNode(const int &gridCellOrdinal, const int &subdivisionOrdinal,
1196  const int &subdivisionNodeNumber) const
1197  {
1198  // TODO: do something to reuse the nodeLookup containers
1199  switch (subdivisionStrategy_)
1200  {
1201  case NO_SUBDIVISION:
1202  return subdivisionNodeNumber;
1203  case TWO_TRIANGLES_RIGHT:
1204  case TWO_TRIANGLES_LEFT:
1205  case FOUR_TRIANGLES:
1206  {
1207  Kokkos::Array<int,3> nodeLookup;
1208  if (subdivisionStrategy_ == TWO_TRIANGLES_RIGHT)
1209  {
1210  if (subdivisionOrdinal == 0)
1211  {
1212  // bottom-right cell: node numbers coincide with quad node numbers
1213  nodeLookup = {0,1,2};
1214  }
1215  else if (subdivisionOrdinal == 1)
1216  {
1217  // node 0 --> node 2
1218  // node 1 --> node 3
1219  // node 2 --> node 0
1220  nodeLookup = {2,3,0};
1221  }
1222  else
1223  {
1224  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Unsupported subdivision ordinal");
1225  }
1226  }
1227  else if (subdivisionStrategy_ == TWO_TRIANGLES_LEFT)
1228  {
1229  if (subdivisionOrdinal == 0)
1230  {
1231  // bottom-left cell:
1232  // node 0 --> node 3
1233  // node 1 --> node 0
1234  // node 2 --> node 1
1235  nodeLookup = {3,0,1};
1236  }
1237  else if (subdivisionOrdinal == 1)
1238  {
1239  // node 0 --> node 2
1240  // node 1 --> node 3
1241  // node 2 --> node 0
1242  nodeLookup = {2,3,0};
1243  }
1244  else
1245  {
1246  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Unsupported subdivision ordinal");
1247  }
1248  }
1249  else // FOUR_TRIANGLES
1250  {
1251  // counter-clockwise, bottom triangle first
1252  // bottom triangle goes:
1253  // 0 --> 1
1254  // 1 --> center
1255  // 2 --> 0
1256  // and similarly for the other triangles, proceeding counter-clockwise
 1257  // since node 1 is always the center, we special-case it
1258  if (subdivisionNodeNumber == 1)
1259  {
1260  // center coordinate: we call this node 4 in the quadrilateral
1261  return 4;
1262  }
1263  else
1264  {
1265  nodeLookup = {(subdivisionOrdinal + 1) % 4, -1, subdivisionOrdinal};
1266  }
1267  }
1268  const int gridCellNodeNumber = nodeLookup[subdivisionNodeNumber];
1269  return gridCellNodeNumber;
1270  }
1271  case FIVE_TETRAHEDRA:
1272  case SIX_TETRAHEDRA:
1273  {
1274  Kokkos::Array<int,4> nodeLookup;
1275  if (subdivisionStrategy_ == FIVE_TETRAHEDRA)
1276  {
1277  /*
1278  // to discretize a unit cube into 5 tetrahedra, we can take the four vertices
1279  // (1,1,1)
1280  // (0,0,1)
1281  // (0,1,0)
1282  // (1,0,0)
1283  // as an interior tetrahedron. Call this cell 0. The remaining 4 cells can be determined
1284  // by selecting three of the above points (there are exactly 4 such combinations) and then selecting
1285  // from the remaining four vertices of the cube the one nearest the plane defined by those three points.
1286  // The remaining four vertices are:
1287  // (0,0,0)
1288  // (1,1,0)
1289  // (1,0,1)
1290  // (0,1,1)
1291  // For each of these four, we pick one, and then take the three nearest vertices from cell 0 to form a new tetrahedron.
1292  // We enumerate as follows:
1293  // cell 0: (1,1,1), (0,0,1), (0,1,0), (1,0,0)
1294  // cell 1: (0,0,0), (1,0,0), (0,1,0), (0,0,1)
1295  // cell 2: (1,1,0), (1,1,1), (0,1,0), (1,0,0)
1296  // cell 3: (1,0,1), (1,1,1), (0,0,1), (1,0,0)
1297  // cell 4: (0,1,1), (1,1,1), (0,0,1), (0,1,0)
1298  */
1299  // tetrahedra are as follows:
1300  // 0: {1,3,4,6}
1301  // 1: {0,1,3,4}
1302  // 2: {1,2,3,6}
1303  // 3: {1,4,5,6}
1304  // 4: {3,4,6,7}
1305  switch (subdivisionOrdinal) {
1306  case 0:
1307  nodeLookup = {1,3,4,6};
1308  break;
1309  case 1:
1310  nodeLookup = {0,1,3,4};
1311  break;
1312  case 2:
1313  nodeLookup = {1,2,3,6};
1314  break;
1315  case 3:
1316  nodeLookup = {1,4,5,6};
1317  break;
1318  case 4:
1319  nodeLookup = {3,4,6,7};
1320  break;
1321  default:
1322  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "invalid subdivisionOrdinal");
1323  break;
1324  }
1325  }
1326  else if (subdivisionStrategy_ == SIX_TETRAHEDRA)
1327  {
1328  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for SIX_TETRAHEDRA not yet implemented");
1329  }
1330  const int gridCellNodeNumber = nodeLookup[subdivisionNodeNumber];
1331  return gridCellNodeNumber;
1332  }
1333  case SIX_PYRAMIDS:
1334  {
1335  Kokkos::Array<int,5> nodeLookup; // nodeLookup[4] = 8; // center
1336  // we order the subcell divisions as bottom, top, left, right, front, back
1337  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
1338  {
1339  switch (subdivisionOrdinal)
1340  {
1341  case 0: // bottom (min z)
1342  nodeLookup = {0,1,2,3,8};
1343  break;
1344  case 1: // top (max z)
1345  nodeLookup = {4,5,6,7,8};
1346  break;
1347  case 2: // left (min y)
1348  nodeLookup = {0,1,5,4,8};
1349  break;
1350  case 3: // right (max y)
1351  nodeLookup = {3,2,6,7,8};
1352  break;
1353  case 4: // front (max x)
1354  nodeLookup = {1,2,6,5,8};
1355  break;
1356  case 5: // back (min x)
1357  nodeLookup = {0,3,7,4,8};
1358  break;
1359  default:
1360  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Invalid subdivisionOrdinal!");
1361  }
1362  }
1363  else // tensor ordering
1364  {
1365  switch (subdivisionOrdinal)
1366  {
1367  case 0: // bottom (min z)
1368  nodeLookup = {0,1,3,2,8};
1369  break;
1370  case 1: // top (max z)
1371  nodeLookup = {4,5,7,6,8};
1372  break;
1373  case 2: // left (min y)
1374  nodeLookup = {0,1,5,4,8};
1375  break;
1376  case 3: // right (max y)
1377  nodeLookup = {2,3,7,6,8};
1378  break;
1379  case 4: // front (max x)
1380  nodeLookup = {1,3,7,5,8};
1381  break;
1382  case 5: // back (min x)
1383  nodeLookup = {0,2,6,4,8};
1384  break;
1385  default:
1386  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Invalid subdivisionOrdinal!");
1387  }
1388  }
1389  const int gridCellNodeNumber = nodeLookup[subdivisionNodeNumber];
1390  return gridCellNodeNumber;
1391  }
1392  default:
1393  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Subdivision strategy not yet implemented!");
1394  // some compilers complain about missing return
1395  return 0; // statement should be unreachable...
1396  }
1397  }
1398 
1399  template<class PointScalar, int spaceDim, typename DeviceType>
1400  KOKKOS_INLINE_FUNCTION
1401  PointScalar CellGeometry<PointScalar,spaceDim,DeviceType>::subdivisionCoordinate(const int &gridCellOrdinal, const int &subdivisionOrdinal,
1402  const int &subdivisionNodeNumber, const int &d) const
1403  {
1404  int gridCellNode = gridCellNodeForSubdivisionNode(gridCellOrdinal, subdivisionOrdinal, subdivisionNodeNumber);
1405 
1406  if (subdivisionStrategy_ == FOUR_TRIANGLES)
1407  {
1408  // this is a case in which the gridCellNode may not actually be a node in the grid cell
1409  if (gridCellNode == 4) // center vertex
1410  {
1411  // d == 0 means quad vertices 0 and 1 suffice;
1412  // d == 1 means quad vertices 0 and 3 suffice
1413  const int gridVertex0 = 0;
1414  const int gridVertex1 = (d == 0) ? 1 : 3;
1415  return 0.5 * (gridCellCoordinate(gridCellOrdinal, gridVertex0, d) + gridCellCoordinate(gridCellOrdinal, gridVertex1, d));
1416  }
1417  }
1418  else if (subdivisionStrategy_ == SIX_PYRAMIDS)
1419  {
1420  // this is a case in which the gridCellNode may not actually be a node in the grid cell
1421  if (gridCellNode == 8) // center vertex
1422  {
1423  // can compute the center vertex coord in dim d by averaging diagonally opposite vertices'
1424  // coords in dimension d.
1425  const int gridVertex0 = 0; // 0 is diagonally opposite to 6 or 7, depending on nodeOrdering_
1426  const int gridVertex1 = (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS) ? 6 : 7;
1427  return 0.5 * (gridCellCoordinate(gridCellOrdinal, gridVertex0, d) + gridCellCoordinate(gridCellOrdinal, gridVertex1, d));
1428  }
1429  }
1430  return gridCellCoordinate(gridCellOrdinal, gridCellNode, d);
1431  }
1432 
1433  template<class PointScalar, int spaceDim, typename DeviceType>
1434  KOKKOS_INLINE_FUNCTION
1435  PointScalar
1436  CellGeometry<PointScalar,spaceDim,DeviceType>::operator()(const int& cell, const int& node, const int& dim) const {
1437  if ((cellGeometryType_ == UNIFORM_GRID) || (cellGeometryType_ == TENSOR_GRID))
1438  {
1439  const int numSubdivisions = numCellsPerGridCell(subdivisionStrategy_);
1440  if (numSubdivisions == 1)
1441  {
1442  // hypercube
1443  return gridCellCoordinate(cell, node, dim);
1444  }
1445  else
1446  {
1447  const int subdivisionOrdinal = cell % numSubdivisions;
1448  const int gridCellOrdinal = cell / numSubdivisions;
1449  return subdivisionCoordinate(gridCellOrdinal, subdivisionOrdinal, node, dim);
1450  }
1451  }
1452  else
1453  {
1454 #ifdef HAVE_INTREPID2_DEBUG
 1455  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((cell < 0), std::invalid_argument, "cell out of bounds");
 1456  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(static_cast<unsigned>(cell) >= numCells_, std::invalid_argument, "cell out of bounds");
 1457  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((node < 0), std::invalid_argument, "node out of bounds");
 1458  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(static_cast<unsigned>(node) >= numNodesPerCell_, std::invalid_argument, "node out of bounds");
 1459  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((dim < 0), std::invalid_argument, "dim out of bounds" );
 1460  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(dim >= spaceDim, std::invalid_argument, "dim out of bounds" );
1461 #endif
1462  if (cellToNodes_.is_allocated())
1463  {
1464  const int nodeNumber = cellToNodes_(cell,node);
1465  return nodes_(nodeNumber,dim);
1466  }
1467  else
1468  {
1469  return nodes_(cell,node,dim);
1470  }
1471  }
1472  }
1473 
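  // Illustrative usage (a hedged sketch, commented out; the names below are assumptions for the
  // example, not library-provided code): construct a 2x2x2 uniform grid on [0,1]^3, subdivided
  // into pyramids, and read physical node coordinates through operator().  Because the flat
  // cell ordinal is decomposed above into (gridCellOrdinal, subdivisionOrdinal), callers simply
  // iterate over numCells() cells.  With a GPU DeviceType these reads would normally happen
  // inside a Kokkos kernel rather than in host loops.
  //
  //   using DeviceType = Kokkos::DefaultExecutionSpace::device_type;
  //   using Geometry   = Intrepid2::CellGeometry<double,3,DeviceType>;
  //   Kokkos::Array<double,3> origin         {0.0, 0.0, 0.0};
  //   Kokkos::Array<double,3> domainExtents  {1.0, 1.0, 1.0};
  //   Kokkos::Array<int,3>    gridCellCounts {2, 2, 2};
  //   Geometry geometry(origin, domainExtents, gridCellCounts, Geometry::SIX_PYRAMIDS);
  //   for (int cell=0; cell<geometry.numCells(); cell++)
  //     for (int node=0; node<geometry.numNodesPerCell(); node++)
  //       for (int d=0; d<3; d++)
  //       {
  //         const double coord = geometry(cell, node, d); // weight on the node's H^1 basis function
  //       }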
1474  template<class PointScalar, int spaceDim, typename DeviceType>
1475  void CellGeometry<PointScalar,spaceDim,DeviceType>::orientations(ScalarView<Orientation,DeviceType> orientationsView, const int &startCell, const int &endCell)
1476  {
1477  auto orientationsData = getOrientations();
1478  const int numCellsWorkset = (endCell == -1) ? (numCells_ - startCell) : (endCell - startCell);
1479 
1480  using ExecutionSpace = typename DeviceType::execution_space;
1481  auto policy = Kokkos::RangePolicy<ExecutionSpace>(ExecutionSpace(),0,numCellsWorkset);
1482  Kokkos::parallel_for("copy orientations", policy, KOKKOS_LAMBDA(const int cellOrdinal)
1483  {
1484  orientationsView(cellOrdinal) = orientationsData(cellOrdinal);
1485  });
1486  ExecutionSpace().fence();
1487  }
1488 
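  // Illustrative usage (a hedged sketch, commented out): given a CellGeometry object `geometry`,
  // allocate a view sized to the cell count and fill it with per-cell orientations; the default
  // arguments startCell=0, endCell=-1 cover all cells.
  //
  //   ScalarView<Orientation,DeviceType> cellOrientations("cell orientations", geometry.numCells());
  //   geometry.orientations(cellOrientations);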
1489  template<class PointScalar, int spaceDim, typename DeviceType>
1490  KOKKOS_INLINE_FUNCTION
1491  int CellGeometry<PointScalar,spaceDim,DeviceType>::uniformJacobianModulus() const
1492  {
1493  if (cellGeometryType_ == UNIFORM_GRID)
1494  {
1495  return numCellsPerGridCell(subdivisionStrategy_);
1496  }
1497  else
1498  {
1499  return numCells_;
1500  }
1501  }
1502 
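  // Interpretation (hedged): for a uniform grid every grid cell is a translate of every other,
  // so the Jacobian of cell c depends only on c % uniformJacobianModulus().  For example, with
  // SIX_PYRAMIDS subdivision the modulus is 6; without subdivision it is 1, and a single
  // Jacobian serves all cells.  For general cell geometry, every cell may be distinct, so the
  // modulus is simply the cell count.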
1503  template<class PointScalar, int spaceDim, typename DeviceType>
1504  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianData(const TensorPoints<PointScalar,DeviceType> &points, const int startCell, const int endCell) const
1505  {
1506  const int pointsPerCell = points.extent_int(0);
1507  return allocateJacobianDataPrivate(points.getTensorComponent(0),pointsPerCell,startCell,endCell);
1508  }
1509 
1510  template<class PointScalar, int spaceDim, typename DeviceType>
1511  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianData(const ScalarView<PointScalar,DeviceType> &points, const int startCell, const int endCell) const
1512  {
1513  // if points is rank 3, it has shape (C,P,D). If it's rank 2, (P,D).
1514  const int pointDimension = (points.rank() == 3) ? 1 : 0;
1515  const int pointsPerCell = points.extent_int(pointDimension);
1516  return allocateJacobianDataPrivate(points,pointsPerCell,startCell,endCell);
1517  }
1518 
1519  template<class PointScalar, int spaceDim, typename DeviceType>
1520  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianData(const int &numPoints, const int startCell, const int endCell) const
1521  {
1522  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!affine_, std::invalid_argument, "this version of allocateJacobianData() is only supported for affine CellGeometry");
1523 
1524  ScalarView<PointScalar,DeviceType> emptyPoints;
1525  return allocateJacobianDataPrivate(emptyPoints,numPoints,startCell,endCell);
1526  }
1527 
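  // Illustrative usage (a hedged sketch, commented out): for affine geometry the Jacobian is
  // constant on each cell, so no physical points are required -- only a point count, which
  // fixes the logical (C,P,D,D) shape of the allocated container.  The count below is a
  // placeholder for whatever cubature rule the caller uses.
  //
  //   const int numPoints = 27;  // e.g., a 3x3x3 tensor-product cubature (hypothetical)
  //   auto jacobianData   = geometry.allocateJacobianData(numPoints);
  //   geometry.setJacobian(jacobianData, numPoints);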
1528  template<class PointScalar, int spaceDim, typename DeviceType>
1529  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobian(Data<PointScalar,DeviceType> &jacobianData, const TensorPoints<PointScalar,DeviceType> &points,
1530  const Data<PointScalar,DeviceType> &refData, const int startCell, const int endCell) const
1531  {
1532  const int pointsPerCell = points.extent_int(0);
1533  setJacobianDataPrivate(jacobianData,pointsPerCell,refData,startCell,endCell);
1534  }
1535 
1536  template<class PointScalar, int spaceDim, typename DeviceType>
1537  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobian(Data<PointScalar,DeviceType> &jacobianData, const ScalarView<PointScalar,DeviceType> &points,
1538  const Data<PointScalar,DeviceType> &refData, const int startCell, const int endCell) const
1539  {
1540  // if points is rank 3, it has shape (C,P,D). If it's rank 2, (P,D).
1541  const int pointDimension = (points.rank() == 3) ? 1 : 0;
1542  const int pointsPerCell = points.extent_int(pointDimension);
1543  setJacobianDataPrivate(jacobianData,pointsPerCell,refData,startCell,endCell);
1544  }
1545 
1546  template<class PointScalar, int spaceDim, typename DeviceType>
1547  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobian(Data<PointScalar,DeviceType> &jacobianData, const int &numPoints, const int startCell, const int endCell) const
1548  {
1549  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!affine_, std::invalid_argument, "this version of setJacobian() is only supported for affine CellGeometry");
1550 
1551  Data<PointScalar,DeviceType> emptyRefData;
1552  setJacobianDataPrivate(jacobianData,numPoints,emptyRefData,startCell,endCell);
1553  }
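  // Illustrative usage (a hedged sketch, commented out): for the point-based overloads above,
  // reference-space basis data is computed once per point set via getJacobianRefData() and then
  // reused for every setJacobian() call on the same points.  Here `points` stands for a
  // caller-provided (P,D) view of reference-space points.
  //
  //   auto refData      = geometry.getJacobianRefData(points);
  //   auto jacobianData = geometry.allocateJacobianData(points);
  //   geometry.setJacobian(jacobianData, points, refData);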
1554 } // namespace Intrepid2
1555 
1556 #endif /* Intrepid2_CellGeometryDef_h */