Intrepid2_CellGeometryDef.hpp
1 // @HEADER
2 // *****************************************************************************
3 // Intrepid2 Package
4 //
5 // Copyright 2007 NTESS and the Intrepid2 contributors.
6 // SPDX-License-Identifier: BSD-3-Clause
7 // *****************************************************************************
8 // @HEADER
9 
16 #ifndef Intrepid2_CellGeometryDef_h
17 #define Intrepid2_CellGeometryDef_h
18 
19 namespace Intrepid2
20 {
21 
22  namespace Impl
23  {
26  template<class PointScalar, int spaceDim, typename DeviceType>
27  class CellGeometryHostMembers
28  {
29  using BasisPtr = Teuchos::RCP<Intrepid2::Basis<DeviceType,PointScalar,PointScalar> >;
30  using CellGeometryType = CellGeometry<PointScalar,spaceDim,DeviceType>;
31  public:
32  // conceptually, these should be private members, but for the definition of these, we need them to be externally accessible.
33  static std::map<const CellGeometryType *, shards::CellTopology> cellTopology_;
34  static std::map<const CellGeometryType *, BasisPtr> basisForNodes_;
35 
36  public:
37  static void constructorCalled(const CellGeometryType *cellGeometry, const shards::CellTopology &cellTopo, BasisPtr basisForNodes)
38  {
39  cellTopology_[cellGeometry] = cellTopo;
40  basisForNodes_[cellGeometry] = basisForNodes;
41  }
42 
43  static void destructorCalled(const CellGeometryType *cellGeometry)
44  {
45  cellTopology_.erase(cellGeometry);
46  basisForNodes_.erase(cellGeometry);
47  }
48 
49  static BasisPtr getBasis(const CellGeometryType *cellGeometry)
50  {
51  return basisForNodes_[cellGeometry];
52  }
53 
54  static const shards::CellTopology & getCellTopology(const CellGeometryType *cellGeometry)
55  {
56  return cellTopology_[cellGeometry];
57  }
58  };
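
 // A minimal standalone sketch of the host-side "shadow member" idiom used above: data that cannot be
 // captured in device lambdas (shards::CellTopology, Teuchos::RCP) lives in static maps keyed by the
 // owning object's address, registered/deregistered from the host-only parts of the constructor and
 // destructor. The names HostShadowExample and values() are hypothetical; this variant uses a
 // function-local static in place of the explicit static-member definitions that follow below.
 struct HostShadowExample
 {
   static std::map<const void*, int> & values()   // host-only storage, keyed by object address
   {
     static std::map<const void*, int> values_;
     return values_;
   }
   static void constructorCalled(const void *obj, int v) { values()[obj] = v; }    // register on construction
   static void destructorCalled (const void *obj)        { values().erase(obj); }  // deregister on destruction
   static int  getValue         (const void *obj)        { return values()[obj]; } // host-side lookup
 };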
59 
60  // member lookup map definitions for CellGeometryHostMembers:
61  template< class PointScalar, int spaceDim, typename DeviceType > typename std::map<const CellGeometry<PointScalar,spaceDim,DeviceType> *, shards::CellTopology> CellGeometryHostMembers< PointScalar,spaceDim,DeviceType>::cellTopology_;
62 
63  template< class PointScalar, int spaceDim, typename DeviceType > typename std::map<const CellGeometry<PointScalar,spaceDim,DeviceType> *, Teuchos::RCP<Intrepid2::Basis<DeviceType,PointScalar,PointScalar> >> CellGeometryHostMembers< PointScalar,spaceDim,DeviceType>::basisForNodes_;
64 
67  template<class PointScalar, int spaceDim, typename DeviceType>
68  class CellMeasureFunctor
69  {
70  Kokkos::View<PointScalar**, DeviceType> cellMeasures_; // (C,P)
71  Kokkos::View<PointScalar**, DeviceType> detData_; // (C,P)
72  TensorData<PointScalar,DeviceType> cubatureWeights_; // (P)
73  public:
74  CellMeasureFunctor(Kokkos::View<PointScalar**, DeviceType> cellMeasures,
75  Kokkos::View<PointScalar**, DeviceType> detData, TensorData<PointScalar,DeviceType> cubatureWeights)
76  :
77  cellMeasures_(cellMeasures),
78  detData_(detData),
79  cubatureWeights_(cubatureWeights)
80  {}
81 
82  KOKKOS_INLINE_FUNCTION void
83  operator () (const ordinal_type cellOrdinal, const ordinal_type pointOrdinal) const
84  {
85  cellMeasures_(cellOrdinal,pointOrdinal) = detData_(cellOrdinal,pointOrdinal) * cubatureWeights_(pointOrdinal);
86  }
87  };
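
 // The functor above encodes the change-of-variables rule used in quadrature:
 //   cellMeasure(c,p) = detJ(c,p) * w(p), so that the integral of f over physical cell c is
 //   approximated by sum_p f(x_{c,p}) * cellMeasure(c,p) (for positively oriented cells).
 // A minimal sketch of the same arithmetic as a free function; cellMeasureEntry is a hypothetical name.
 template<class Scalar>
 KOKKOS_INLINE_FUNCTION
 Scalar cellMeasureEntry(const Scalar &jacobianDet, const Scalar &cubatureWeight)
 {
   return jacobianDet * cubatureWeight; // same product as operator() above
 }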
88  }
89 
90  template<class PointScalar, int spaceDim, typename DeviceType>
91  KOKKOS_INLINE_FUNCTION
92  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(const CellGeometry<PointScalar,spaceDim,DeviceType> &cellGeometry)
93 :
94  nodeOrdering_(cellGeometry.nodeOrdering_),
95  cellGeometryType_(cellGeometry.cellGeometryType_),
96  subdivisionStrategy_(cellGeometry.subdivisionStrategy_),
97  affine_(cellGeometry.affine_),
98  orientations_(cellGeometry.orientations_),
99  origin_(cellGeometry.origin_),
100  domainExtents_(cellGeometry.domainExtents_),
101  gridCellCounts_(cellGeometry.gridCellCounts_),
102  tensorVertices_(cellGeometry.tensorVertices_),
103  cellToNodes_(cellGeometry.cellToNodes_),
104  nodes_(cellGeometry.nodes_),
105  numCells_(cellGeometry.numCells_),
106  numNodesPerCell_(cellGeometry.numNodesPerCell_)
107  {
108  // host-only registration with HostMemberLookup:
109 #ifndef INTREPID2_COMPILE_DEVICE_CODE
110  shards::CellTopology cellTopo = cellGeometry.cellTopology();
111  BasisPtr basisForNodes = cellGeometry.basisForNodes();
112  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
113  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
114 #endif
115  }
116 
117  template<class PointScalar, int spaceDim, typename DeviceType>
118  KOKKOS_INLINE_FUNCTION
119  CellGeometry<PointScalar,spaceDim,DeviceType>::~CellGeometry()
120  {
121  // host-only deregistration with HostMemberLookup:
122 #ifndef INTREPID2_COMPILE_DEVICE_CODE
123  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
124  HostMemberLookup::destructorCalled(this);
125 #endif
126  }
127 
128  template<class PointScalar, int spaceDim, typename DeviceType>
129  KOKKOS_INLINE_FUNCTION
130  int CellGeometry<PointScalar,spaceDim,DeviceType>::numCellsPerGridCell(SubdivisionStrategy subdivisionStrategy) const
131  {
132  switch (subdivisionStrategy) {
133  case NO_SUBDIVISION:
134  return 1;
135  case TWO_TRIANGLES_LEFT:
136  case TWO_TRIANGLES_RIGHT:
137  return 2;
138  case FOUR_TRIANGLES:
139  return 4;
140  case FIVE_TETRAHEDRA:
141  return 5;
142  case SIX_PYRAMIDS:
143  case SIX_TETRAHEDRA:
144  return 6;
145  }
146  return -1;
147  }
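
 // The modulus above combines multiplicatively with the grid dimensions in the uniform-grid constructor
 // below: numCells_ = (product of gridCellCounts) * numCellsPerGridCell(strategy). A worked example,
 // for illustration only (the example* names are hypothetical): a 4 x 4 x 4 grid with FIVE_TETRAHEDRA.
 constexpr int exampleGridCellCount = 4 * 4 * 4; // 64 grid cells in a 4 x 4 x 4 uniform grid
 constexpr int exampleCellsPerGrid  = 5;         // FIVE_TETRAHEDRA
 static_assert(exampleGridCellCount * exampleCellsPerGrid == 320, "expected total cell count for the example grid");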
148 
149  template<class PointScalar, int spaceDim, typename DeviceType>
150  Data<PointScalar,DeviceType>
151  CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianDataPrivate(const ScalarView<PointScalar,DeviceType> &pointComponentView, const int &pointsPerCell, const int startCell, const int endCell) const
152  {
153  ScalarView<PointScalar,DeviceType> data;
154  const int rank = 4; // C,P,D,D
155  const int CELL_DIM = 0;
156  const int POINT_DIM = 1;
157  const int D1_DIM = 2;
158  const int D2_DIM = 3;
159 
160  const int numCellsWorkset = (endCell == -1) ? (numCells_ - startCell) : (endCell - startCell);
161 
162  Kokkos::Array<int,7> extents { numCellsWorkset, pointsPerCell, spaceDim, spaceDim, 1, 1, 1 };
163  Kokkos::Array<DataVariationType,7> variationType { CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT };
164 
165  int blockPlusDiagonalLastNonDiagonal = -1;
166 
167  if (cellGeometryType_ == UNIFORM_GRID)
168  {
169  if (uniformJacobianModulus() != 1)
170  {
171  variationType[CELL_DIM] = MODULAR;
172  variationType[POINT_DIM] = CONSTANT;
173  variationType[D1_DIM] = GENERAL;
174  variationType[D2_DIM] = GENERAL;
175 
176  int cellTypeModulus = uniformJacobianModulus();
177 
178  data = getMatchingViewWithLabel(pointComponentView, "CellGeometryProvider: Jacobian data", cellTypeModulus, spaceDim, spaceDim);
179  }
180  else
181  {
182  // diagonal Jacobian
183  variationType[D1_DIM] = BLOCK_PLUS_DIAGONAL;
184  variationType[D2_DIM] = BLOCK_PLUS_DIAGONAL;
185  blockPlusDiagonalLastNonDiagonal = -1;
186 
187  data = getMatchingViewWithLabel(pointComponentView, "CellGeometryProvider: Jacobian data", spaceDim);
188  }
189  }
190  else if (cellGeometryType_ == TENSOR_GRID)
191  {
192  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "tensor grid support not yet implemented");
193  }
194  else if (cellGeometryType_ == FIRST_ORDER)
195  {
196  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
197  if (simplex)
198  {
199  variationType[CELL_DIM] = GENERAL;
200  variationType[POINT_DIM] = CONSTANT; // affine: no point variation
201  variationType[D1_DIM] = GENERAL;
202  variationType[D2_DIM] = GENERAL;
203 
204  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCells_, spaceDim, spaceDim);
205  }
206  else
207  {
208  variationType[CELL_DIM] = GENERAL;
209  variationType[D1_DIM] = GENERAL;
210  variationType[D2_DIM] = GENERAL;
211  if (affine_)
212  {
213  // no point variation
214  variationType[POINT_DIM] = CONSTANT;
215  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCellsWorkset, spaceDim, spaceDim);
216  }
217  else
218  {
219  variationType[POINT_DIM] = GENERAL;
220  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCellsWorkset, pointsPerCell, spaceDim, spaceDim);
221  }
222  }
223  }
224  else if (cellGeometryType_ == HIGHER_ORDER)
225  {
226  // most general case: varies in all 4 dimensions
227  variationType[CELL_DIM] = GENERAL;
228  variationType[POINT_DIM] = GENERAL;
229  variationType[D1_DIM] = GENERAL;
230  variationType[D2_DIM] = GENERAL;
231  data = getMatchingViewWithLabel(data, "CellGeometryProvider: Jacobian data", numCellsWorkset, pointsPerCell, spaceDim, spaceDim);
232  }
233  else
234  {
235  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
236  }
237 
238  Data<PointScalar,DeviceType> jacobianData(data,rank,extents,variationType,blockPlusDiagonalLastNonDiagonal);
239  return jacobianData;
240  }
241 
242  template<class PointScalar, int spaceDim, typename DeviceType>
243  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobianDataPrivate(Data<PointScalar,DeviceType> &jacobianData,
244  const int &pointsPerCell, const Data<PointScalar,DeviceType> &refData,
245  const int startCell, const int endCell) const
246  {
247  const int numCellsWorkset = (endCell == -1) ? (numCells_ - startCell) : (endCell - startCell);
248 
249  if (cellGeometryType_ == UNIFORM_GRID)
250  {
251  if (uniformJacobianModulus() != 1)
252  {
253  int cellTypeModulus = uniformJacobianModulus();
254 
255  auto dataView3 = jacobianData.getUnderlyingView3(); // (cellTypeModulus, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
256  auto dataHost = Kokkos::create_mirror_view(dataView3);
257 
258  const int startCellType = startCell % cellTypeModulus;
259  const int endCellType = (numCellsWorkset >= cellTypeModulus) ? startCellType + cellTypeModulus : startCellType + numCellsWorkset;
260  const int gridCellOrdinal = 0; // sample cell
261  for (int cellType=startCellType; cellType<endCellType; cellType++)
262  {
263  const int subdivisionOrdinal = cellType % cellTypeModulus;
264  const int nodeZero = 0;
265  // simplex Jacobian formula is J_00 = x1 - x0, J_01 = x2 - x0, etc.
266  for (int i=0; i<spaceDim; i++)
267  {
268  for (int j=0; j<spaceDim; j++)
269  {
270  const int node = j+1; // this is the only node other than the 0 node that has non-zero derivative in the j direction -- and this has unit derivative
271  // nodeZero has derivative -1 in every dimension.
272  const auto J_ij = subdivisionCoordinate(gridCellOrdinal, subdivisionOrdinal, node, i) - subdivisionCoordinate(gridCellOrdinal, subdivisionOrdinal, nodeZero, i);
273  dataHost(cellType,i,j) = J_ij;
274  }
275  }
276  }
277 
278  Kokkos::deep_copy(dataView3,dataHost);
279  }
280  else
281  {
282  // diagonal Jacobian
283  auto dataView1 = jacobianData.getUnderlyingView1(); // (spaceDim) allocated in allocateJacobianDataPrivate()
284  const auto domainExtents = domainExtents_;
285  const auto gridCellCounts = gridCellCounts_;
286 
287  using ExecutionSpace = typename DeviceType::execution_space;
288  auto policy = Kokkos::RangePolicy<>(ExecutionSpace(),0,spaceDim);
289  Kokkos::parallel_for("fill jacobian", policy, KOKKOS_LAMBDA(const int d1)
290  {
291  // diagonal jacobian
292  const double REF_SPACE_EXTENT = 2.0;
293  dataView1(d1) = (domainExtents[d1] / REF_SPACE_EXTENT) / gridCellCounts[d1];
294  });
295  ExecutionSpace().fence();
296  }
297  }
298  else if (cellGeometryType_ == TENSOR_GRID)
299  {
300  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "tensor grid support not yet implemented");
301  }
302  else if ((cellGeometryType_ == FIRST_ORDER) || (cellGeometryType_ == HIGHER_ORDER))
303  {
304  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
305  if (simplex)
306  {
307  auto dataView3 = jacobianData.getUnderlyingView3(); // (numCells_, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
308 
309  // get local (shallow) copies to avoid implicit references to this
310  auto cellToNodes = cellToNodes_;
311  auto nodes = nodes_;
312 
313  using ExecutionSpace = typename DeviceType::execution_space;
314  auto policy = Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<3>>({startCell,0,0},{numCellsWorkset,spaceDim,spaceDim});
315 
316  Kokkos::parallel_for("compute first-order simplex Jacobians", policy,
317  KOKKOS_LAMBDA (const int &cellOrdinal, const int &d1, const int &d2) {
318  const int nodeZero = 0; // nodeZero has derivative -1 in every dimension.
319  const int node = d2+1; // this is the only node other than the 0 node that has non-zero derivative in the d2 direction -- and this has unit derivative (except in 1D, where each derivative is ±0.5)
320  const auto & nodeCoord = nodes(cellToNodes(cellOrdinal,node), d1);
321  const auto & nodeZeroCoord = nodes(cellToNodes(cellOrdinal,nodeZero), d1);
322  const PointScalar J_ij = nodeCoord - nodeZeroCoord;
323  dataView3(cellOrdinal,d1,d2) = (spaceDim != 1) ? J_ij : J_ij * 0.5;
324  });
325  }
326  else
327  {
327  {
329  auto basisForNodes = this->basisForNodes();
330 
331  if (affine_)
332  {
333  // no point variation
334  auto dataView3 = jacobianData.getUnderlyingView3(); // (numCellsWorkset, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
335 
336  // TODO: find an allocation-free way to do this… (consider modifying CellTools::setJacobian() to support affine case.)
337  const int onePoint = 1;
338  auto testPointView = getMatchingViewWithLabel(dataView3, "CellGeometryProvider: test point", onePoint, spaceDim);
339  auto tempData = getMatchingViewWithLabel(dataView3, "CellGeometryProvider: temporary Jacobian data", numCellsWorkset, onePoint, spaceDim, spaceDim);
340 
341  Kokkos::deep_copy(testPointView, 0.0);
342 
343  CellTools::setJacobian(tempData, testPointView, *this, basisForNodes, startCell, endCell);
344 
345  auto tempDataSubview = Kokkos::subview(tempData, Kokkos::ALL(), 0, Kokkos::ALL(), Kokkos::ALL());
346  Kokkos::deep_copy(dataView3, tempDataSubview);
347  }
348  else
349  {
350  auto dataView = jacobianData.getUnderlyingView(); // (numCellsWorkset, pointsPerCell, spaceDim, spaceDim) allocated in allocateJacobianDataPrivate()
351  TEUCHOS_TEST_FOR_EXCEPTION(basisForNodes == Teuchos::null, std::invalid_argument, "basisForNodes must not be null");
352  TEUCHOS_TEST_FOR_EXCEPTION(dataView.size() == 0, std::invalid_argument, "underlying view is not valid");
353 
354  // refData should contain the basis gradients; shape is (F,P,D) or (C,F,P,D)
355  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!refData.isValid(), std::invalid_argument, "refData should be a valid container for cases with non-affine geometry");
356  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((refData.rank() != 3) && (refData.rank() != 4), std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
357  if (refData.rank() == 3)
358  {
359  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(0) != basisForNodes->getCardinality(), std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
360  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(1) != pointsPerCell, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
361  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(2) != spaceDim, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
362  }
363  else
364  {
365  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(0) != numCellsWorkset, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
366  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(1) != basisForNodes->getCardinality(), std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
367  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(2) != pointsPerCell, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
368  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(refData.extent_int(3) != spaceDim, std::invalid_argument, "refData should have shape (F,P,D) or (C,F,P,D)");
369  }
370 
371  CellTools::setJacobian(dataView, *this, refData, startCell, endCell);
372  }
373  }
374  }
375  else
376  {
377  // TODO: handle the other cases
378  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
379  }
380  }
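
 // For the undivided uniform-grid branch above, the reference hypercube [-1,1]^d has extent 2 in each
 // direction, so the Jacobian is diagonal with entries J_dd = (domainExtent_d / 2) / gridCellCount_d.
 // A minimal sketch of that formula; exampleUniformGridJacobianDiagonal is a hypothetical name.
 template<class Scalar>
 KOKKOS_INLINE_FUNCTION
 Scalar exampleUniformGridJacobianDiagonal(const Scalar &domainExtent, const int &gridCellCount)
 {
   const Scalar REF_SPACE_EXTENT = 2.0;                      // reference cell is [-1,1] in each dimension
   return (domainExtent / REF_SPACE_EXTENT) / gridCellCount; // e.g. extent 3.0 split into 10 cells: 0.15
 }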
381 
382  // Uniform grid constructor, with optional subdivision into simplices
383  template<class PointScalar, int spaceDim, typename DeviceType>
384  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(const Kokkos::Array<PointScalar,spaceDim> &origin,
385  const Kokkos::Array<PointScalar,spaceDim> &domainExtents,
386  const Kokkos::Array<int,spaceDim> &gridCellCounts,
387  SubdivisionStrategy subdivisionStrategy,
388  HypercubeNodeOrdering nodeOrdering)
389  :
390  nodeOrdering_(nodeOrdering),
391  cellGeometryType_(UNIFORM_GRID),
392  subdivisionStrategy_(subdivisionStrategy),
393  affine_(true),
394  origin_(origin),
395  domainExtents_(domainExtents),
396  gridCellCounts_(gridCellCounts)
397  {
398  numCells_ = 1;
399  for (int d=0; d<spaceDim; d++)
400  {
401  numCells_ *= gridCellCounts_[d];
402  }
403  numCells_ *= numCellsPerGridCell(subdivisionStrategy_);
404 
405  shards::CellTopology cellTopo; // will register with HostMemberLookup below
406  if (subdivisionStrategy_ == NO_SUBDIVISION)
407  {
408  // hypercube
409  numNodesPerCell_ = 1 << spaceDim; // 2^D vertices in a D-dimensional hypercube
410 
411  if (spaceDim == 1)
412  {
413  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Line<> >());
414  }
415  else if (spaceDim == 2)
416  {
417  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Quadrilateral<> >());
418  }
419  else if (spaceDim == 3)
420  {
421  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Hexahedron<> >());
422  }
423  else
424  {
425  // TODO: Once shards supports higher-dimensional hypercubes, initialize cellTopo accordingly
426  }
427  }
428  else
429  {
430  // simplex
431  numNodesPerCell_ = spaceDim + 1; // D+1 vertices in a D-dimensional simplex
432  if (spaceDim == 2)
433  {
434  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Triangle<> >());
435  }
436  else if (spaceDim == 3)
437  {
438  cellTopo = shards::CellTopology(shards::getCellTopologyData<shards::Tetrahedron<> >());
439  }
440  else
441  {
442  // TODO: Once shards supports higher-dimensional simplices, initialize cellTopo_ accordingly
443  }
444  }
445 
447  const int linearPolyOrder = 1;
448  BasisPtr basisForNodes = getBasis<BasisFamily>(cellTopo, FUNCTION_SPACE_HGRAD, linearPolyOrder);
449 
450  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
451  {
452  // override basisForNodes for quad, hexahedron. Apparently the lowest-order bases below are *not* in the same order as their
453  // arbitrary-polynomial-order counterparts; the latter do not match the order of the shards::CellTopology nodes.
454  if (cellTopo.getKey() == shards::Quadrilateral<>::key)
455  {
456  basisForNodes = Teuchos::rcp( new Basis_HGRAD_QUAD_C1_FEM<DeviceType,PointScalar,PointScalar>() );
457  }
458  else if (cellTopo.getKey() == shards::Hexahedron<>::key)
459  {
460  basisForNodes = Teuchos::rcp( new Basis_HGRAD_HEX_C1_FEM<DeviceType,PointScalar,PointScalar>() );
461  }
462  }
463 
464  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
465  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
466  }
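
 // A usage sketch for the constructor above (illustration only; exampleGeometry is a hypothetical name,
 // and this assumes SubdivisionStrategy / TWO_TRIANGLES_RIGHT are the nested enumerations of CellGeometry
 // that are referenced unqualified throughout this file):
 //
 //   using Geometry = Intrepid2::CellGeometry<double,2,DeviceType>;
 //   Kokkos::Array<double,2> origin        {0.0, 0.0};
 //   Kokkos::Array<double,2> domainExtents {1.0, 1.0};    // unit square
 //   Kokkos::Array<int,2>    gridCellCounts{8, 8};        // 8 x 8 grid cells
 //   Geometry exampleGeometry(origin, domainExtents, gridCellCounts,
 //                            Geometry::TWO_TRIANGLES_RIGHT);  // 8 * 8 * 2 = 128 triangles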
467 
468  // Node-based constructor for straight-edged cell geometry.
469  // If claimAffine is true, we assume (without checking) that the mapping from reference space is affine.
470  // (If claimAffine is false, we check whether the topology is simplicial; if so, we conclude that the mapping must be affine.)
471  template<class PointScalar, int spaceDim, typename DeviceType>
472  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(const shards::CellTopology &cellTopo,
473  ScalarView<int,DeviceType> cellToNodes,
474  ScalarView<PointScalar,DeviceType> nodes,
475  const bool claimAffine,
476  const HypercubeNodeOrdering nodeOrdering)
477  :
478  nodeOrdering_(nodeOrdering),
479  cellGeometryType_(FIRST_ORDER),
480  cellToNodes_(cellToNodes),
481  nodes_(nodes)
482  {
483  if(cellToNodes.is_allocated())
484  {
485  numCells_ = cellToNodes.extent_int(0);
486  numNodesPerCell_ = cellToNodes.extent_int(1);
487  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(numNodesPerCell_ != cellTopo.getNodeCount(), std::invalid_argument, "cellToNodes.extent(1) does not match the cell topology node count");
488  }
489  else
490  {
491  numCells_ = nodes.extent_int(0);
492  numNodesPerCell_ = nodes.extent_int(1);
493  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(numNodesPerCell_ != cellTopo.getNodeCount(), std::invalid_argument, "nodes.extent(1) does not match the cell topology node count");
494  }
495 
496 
497  if (!claimAffine)
498  {
499  // if cellTopo is simplicial, then since the geometry is straight-edged, it is also affine
500  const bool simplicialTopo = (cellTopo.getNodeCount() == cellTopo.getDimension() + 1);
501  affine_ = simplicialTopo;
502  }
503  else
504  {
505  affine_ = true;
506  }
507 
509  const int linearPolyOrder = 1;
510  BasisPtr basisForNodes = getBasis<BasisFamily>(cellTopo, FUNCTION_SPACE_HGRAD, linearPolyOrder);
511 
512  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
513  {
514  // override basisForNodes for quad, hexahedron. Apparently the lowest-order bases below are *not* in the same order as their
515  // arbitrary-polynomial-order counterparts; the latter do not match the order of the shards::CellTopology nodes.
516  if (cellTopo.getKey() == shards::Quadrilateral<>::key)
517  {
518  basisForNodes = Teuchos::rcp( new Basis_HGRAD_QUAD_C1_FEM<DeviceType,PointScalar,PointScalar>() );
519  }
520  else if (cellTopo.getKey() == shards::Hexahedron<>::key)
521  {
522  basisForNodes = Teuchos::rcp( new Basis_HGRAD_HEX_C1_FEM<DeviceType,PointScalar,PointScalar>() );
523  }
524  }
525 
526  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
527  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
528  }
529 
530  // Constructor for higher-order geometry
531  template<class PointScalar, int spaceDim, typename DeviceType>
532  CellGeometry<PointScalar,spaceDim,DeviceType>::CellGeometry(Teuchos::RCP<Intrepid2::Basis<DeviceType,PointScalar,PointScalar> > basisForNodes,
533  ScalarView<PointScalar,DeviceType> cellNodes)
534  :
535  nodeOrdering_(HYPERCUBE_NODE_ORDER_TENSOR),
536  cellGeometryType_(HIGHER_ORDER),
537  nodes_(cellNodes)
538  {
539  numCells_ = cellNodes.extent_int(0);
540  numNodesPerCell_ = cellNodes.extent_int(1);
541 
542  // if basis degree is 1, mark as first-order geometry
543  const bool firstOrderGeometry = (basisForNodes->getDegree() == 1);
544  cellGeometryType_ = firstOrderGeometry ? FIRST_ORDER : HIGHER_ORDER;
545 
546  shards::CellTopology cellTopo = basisForNodes->getBaseCellTopology();
547 
548  if (firstOrderGeometry && (cellTopo.getNodeCount() == spaceDim + 1)) // lowest-order and simplicial
549  {
550  affine_ = true;
551  }
552  else
553  {
554  affine_ = false;
555  }
556  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
557  HostMemberLookup::constructorCalled(this, cellTopo, basisForNodes);
558  }
559 
560  template<class PointScalar, int spaceDim, typename DeviceType>
561  KOKKOS_INLINE_FUNCTION
562  bool CellGeometry<PointScalar,spaceDim,DeviceType>::affine() const
563  {
564  return affine_;
565  }
566 
567  template<class PointScalar, int spaceDim, typename DeviceType>
568  TensorData<PointScalar,DeviceType>
569  CellGeometry<PointScalar,spaceDim,DeviceType>::allocateCellMeasure( const Data<PointScalar,DeviceType> & jacobianDet,
570  const TensorData<PointScalar,DeviceType> & cubatureWeights ) const
571  {
572  // Output possibilities for a cubatureWeights with N components:
573  // 1. For AFFINE elements (jacobianDet cell-wise constant), returns a container with N+1 tensorial components; the first component corresponds to cells
574  // 2. Otherwise, returns a container with 1 tensorial component
575 
576  INTREPID2_TEST_FOR_EXCEPTION(cubatureWeights.rank() != 1, std::invalid_argument, "cubatureWeights container must have shape (P)");
577 
578  const int numTensorComponents = affine_ ? cubatureWeights.numTensorComponents() + 1 : 1;
579  std::vector< Data<PointScalar,DeviceType> > tensorComponents(numTensorComponents);
580 
581  if (affine_)
582  {
583  const int cellExtent = jacobianDet.extent_int(0);
584  Kokkos::Array<DataVariationType,7> cellVariationTypes {jacobianDet.getVariationTypes()[0], CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT};
585  const int cellDataDim = jacobianDet.getDataExtent(0);
586  Kokkos::Array<int,7> cellExtents{cellExtent,1,1,1,1,1,1};
587 
588  ScalarView<PointScalar,DeviceType> detDataView ("cell relative volumes", cellDataDim);
589  tensorComponents[0] = Data<PointScalar,DeviceType>(detDataView,1,cellExtents,cellVariationTypes);
590 
591  for (int cubTensorComponent=0; cubTensorComponent<numTensorComponents-1; cubTensorComponent++)
592  {
593  auto cubatureComponent = cubatureWeights.getTensorComponent(cubTensorComponent);
594  const auto cubatureExtents = cubatureComponent.getExtents();
595  const auto cubatureVariationTypes = cubatureComponent.getVariationTypes();
596  const int numPoints = cubatureComponent.getDataExtent(0);
597  ScalarView<PointScalar,DeviceType> cubatureWeightView ("cubature component weights", numPoints);
598  const int pointComponentRank = 1;
599  tensorComponents[cubTensorComponent+1] = Data<PointScalar,DeviceType>(cubatureWeightView,pointComponentRank,cubatureExtents,cubatureVariationTypes);
600  }
601  }
602  else
603  {
604  const int cellExtent = jacobianDet.extent_int(0);
605  Kokkos::Array<DataVariationType,7> variationTypes {jacobianDet.getVariationTypes()[0], GENERAL, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT};
606  const int cellDataDim = jacobianDet.getDataExtent(0);
607 
608  const int numPoints = cubatureWeights.extent_int(0);
609  Kokkos::Array<int,7> extents{cellExtent,numPoints,1,1,1,1,1};
610 
611  ScalarView<PointScalar,DeviceType> cubatureWeightView;
612  if (variationTypes[0] != CONSTANT)
613  {
614  cubatureWeightView = ScalarView<PointScalar,DeviceType>("cell measure", cellDataDim, numPoints);
615  }
616  else
617  {
618  cubatureWeightView = ScalarView<PointScalar,DeviceType>("cell measure", numPoints);
619  }
620  const int cellMeasureRank = 2;
621  tensorComponents[0] = Data<PointScalar,DeviceType>(cubatureWeightView,cellMeasureRank,extents,variationTypes);
622  }
623  const bool separateFirstComponent = (numTensorComponents > 1);
624  return TensorData<PointScalar,DeviceType>(tensorComponents, separateFirstComponent);
625  }
626 
627  template<class PointScalar, int spaceDim, typename DeviceType>
628  void CellGeometry<PointScalar,spaceDim,DeviceType>::computeCellMeasure( TensorData<PointScalar,DeviceType> &cellMeasure,
629  const Data<PointScalar,DeviceType> & jacobianDet,
630  const TensorData<PointScalar,DeviceType> & cubatureWeights ) const
631  {
632  // Output possibilities for a cubatureWeights with N components:
633  // 1. For AFFINE elements (jacobianDet constant on each cell), returns a container with N+1 tensorial components; the first component corresponds to cells
634  // 2. Otherwise, returns a container with 1 tensorial component
635 
636  INTREPID2_TEST_FOR_EXCEPTION((cellMeasure.numTensorComponents() != cubatureWeights.numTensorComponents() + 1) && (cellMeasure.numTensorComponents() != 1), std::invalid_argument,
637  "cellMeasure must either have a tensor component count of 1 or a tensor component count that is one higher than that of cubatureWeights");
638 
639  INTREPID2_TEST_FOR_EXCEPTION(cubatureWeights.rank() != 1, std::invalid_argument, "cubatureWeights container must have shape (P)");
640 
641  if (cellMeasure.numTensorComponents() == cubatureWeights.numTensorComponents() + 1)
642  {
643  // affine case; the first component should contain the cell volume divided by ref cell volume; this should be stored in jacobianDet
644  Kokkos::deep_copy(cellMeasure.getTensorComponent(0).getUnderlyingView1(), jacobianDet.getUnderlyingView1()); // copy point-invariant data from jacobianDet to the first tensor component of cell measure container
645  const int numTensorDimensions = cubatureWeights.numTensorComponents();
646  for (int i=1; i<numTensorDimensions+1; i++)
647  {
648  Kokkos::deep_copy(cellMeasure.getTensorComponent(i).getUnderlyingView1(), cubatureWeights.getTensorComponent(i-1).getUnderlyingView1());
649  }
650  }
651  else
652  {
653  auto detVaries = jacobianDet.getVariationTypes();
654 
655  const bool detCellVaries = detVaries[0] != CONSTANT;
656  const bool detPointVaries = detVaries[1] != CONSTANT;
657 
658  if (detCellVaries && detPointVaries)
659  {
660  auto cellMeasureData = cellMeasure.getTensorComponent(0).getUnderlyingView2();
661  auto detData = jacobianDet.getUnderlyingView2();
662  const int numCells = detData.extent_int(0);
663  const int numPoints = detData.extent_int(1);
664  INTREPID2_TEST_FOR_EXCEPTION(numCells != cellMeasureData.extent_int(0), std::invalid_argument, "cellMeasureData doesn't match jacobianDet in cell dimension");
665  INTREPID2_TEST_FOR_EXCEPTION(numPoints != cellMeasureData.extent_int(1), std::invalid_argument, "cellMeasureData doesn't match jacobianDet in point dimension");
666 
667  // We implement this case as a functor (rather than a lambda) to work around an apparent CUDA 10.1.243 compiler bug
668  Impl::CellMeasureFunctor<PointScalar,spaceDim,DeviceType> cellMeasureFunctor(cellMeasureData, detData, cubatureWeights);
669 
670  using ExecutionSpace = typename DeviceType::execution_space;
671  Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<2>> rangePolicy({0,0},{numCells,numPoints});
672  Kokkos::parallel_for(rangePolicy, cellMeasureFunctor);
673  }
674  else if (detCellVaries && !detPointVaries)
675  {
676  auto cellMeasureData = cellMeasure.getTensorComponent(0).getUnderlyingView2();
677  auto detData = jacobianDet.getUnderlyingView1();
678  using ExecutionSpace = typename DeviceType::execution_space;
679  Kokkos::parallel_for(
680  Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<2>>({0,0},{detData.extent_int(0),cubatureWeights.extent_int(0)}),
681  KOKKOS_LAMBDA (int cellOrdinal, int pointOrdinal) {
682  cellMeasureData(cellOrdinal,pointOrdinal) = detData(cellOrdinal) * cubatureWeights(pointOrdinal);
683  });
684  }
685  else
686  {
687  // constant jacobian det case
688  // cell measure data has shape (P)
689  auto cellMeasureData = cellMeasure.getTensorComponent(0).getUnderlyingView1();
690  auto detData = jacobianDet.getUnderlyingView1();
691  using ExecutionSpace = typename DeviceType::execution_space;
692  Kokkos::parallel_for(Kokkos::RangePolicy<ExecutionSpace>(0,cellMeasureData.extent_int(0)),
693  KOKKOS_LAMBDA (const int &pointOrdinal) {
694  cellMeasureData(pointOrdinal) = detData(0) * cubatureWeights(pointOrdinal);
695  });
696  }
697  }
698  }
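
 // A minimal sketch of the expected calling sequence for the two methods above: allocate the
 // cell-measure container once, then fill it with det J * w at each (cell, point). Here
 // exampleComputeCellMeasures is a hypothetical name; jacobianDet is assumed to have been computed elsewhere.
 template<class PointScalar, int spaceDim, typename DeviceType>
 TensorData<PointScalar,DeviceType>
 exampleComputeCellMeasures(const CellGeometry<PointScalar,spaceDim,DeviceType> &geometry,
                            const Data<PointScalar,DeviceType> &jacobianDet,
                            const TensorData<PointScalar,DeviceType> &cubatureWeights)
 {
   // container structure depends on whether the geometry is affine (tensor-factored) or not:
   auto cellMeasures = geometry.allocateCellMeasure(jacobianDet, cubatureWeights);
   geometry.computeCellMeasure(cellMeasures, jacobianDet, cubatureWeights); // fills det * weight
   return cellMeasures;
 }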
699 
700  template<class PointScalar, int spaceDim, typename DeviceType>
701  typename CellGeometry<PointScalar,spaceDim,DeviceType>::BasisPtr
702  CellGeometry<PointScalar,spaceDim,DeviceType>::basisForNodes() const
703  {
704  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
705  return HostMemberLookup::getBasis(this);
706  }
707 
708  template<class PointScalar, int spaceDim, typename DeviceType>
709  const shards::CellTopology & CellGeometry<PointScalar,spaceDim,DeviceType>::cellTopology() const
710  {
711  using HostMemberLookup = Impl::CellGeometryHostMembers<PointScalar,spaceDim,DeviceType>;
712  return HostMemberLookup::getCellTopology(this);
713  }
714 
715  template<class PointScalar, int spaceDim, typename DeviceType>
716  KOKKOS_INLINE_FUNCTION
717  ordinal_type CellGeometry<PointScalar,spaceDim,DeviceType>::cellToNode(ordinal_type cell, ordinal_type node) const
718  {
719  if (cellToNodes_.is_allocated())
720  {
721  return cellToNodes_(cell,node);
722  }
723  else if (cellGeometryType_ == UNIFORM_GRID)
724  {
725  const ordinal_type numSubdivisions = numCellsPerGridCell(subdivisionStrategy_);
726  const ordinal_type gridCell = cell / numSubdivisions;
727  const ordinal_type subdivisionOrdinal = cell % numSubdivisions;
728 
729  Kokkos::Array<ordinal_type,spaceDim> gridCellCoords;
730  ordinal_type gridCellDivided = gridCell;
731  ordinal_type numGridNodes = 1;
732  ordinal_type numGridCells = 1;
733  for (int d=0; d<spaceDim; d++)
734  {
735  gridCellCoords[d] = gridCellDivided % gridCellCounts_[d];
736  gridCellDivided = gridCellDivided / gridCellCounts_[d];
737  numGridCells *= gridCellCounts_[d];
738  numGridNodes *= (gridCellCounts_[d] + 1);
739  }
740 
741  const ordinal_type gridCellNode = gridCellNodeForSubdivisionNode(gridCell, subdivisionOrdinal, node);
742 
743  // most subdivision strategies don't add internal node(s), but a couple do. If so, the gridCellNode
744  // will be equal to the number of vertices in the grid cell.
745  const ordinal_type numInteriorNodes = ((subdivisionStrategy_ == FOUR_TRIANGLES) || (subdivisionStrategy_ == SIX_PYRAMIDS)) ? 1 : 0;
746 
747  // the global nodes list the grid-cell-interior nodes first.
748  const ordinal_type gridNodeNumberingOffset = numInteriorNodes * numGridCells;
749 
750  const ordinal_type numNodesPerGridCell = 1 << spaceDim;
751  if (gridCellNode >= numNodesPerGridCell)
752  {
753  const ordinal_type interiorGridNodeOffset = gridCellNode - numNodesPerGridCell;
754  return numNodesPerGridCell * gridCell + interiorGridNodeOffset;
755  }
756  else
757  {
758  // use gridCellCoords, plus hypercubeComponentNodeNumber(gridCellNode, d), to set up a Cartesian vertex numbering of the grid as a whole. Then, add gridNodeNumberingOffset to account for interior nodes.
759  // let x be the fastest-moving index
760  ordinal_type d_index_stride = 1; // number of node indices we move by moving 1 node in dimension d
761  ordinal_type cartesianNodeNumber = 0;
762  for (int d=0; d<spaceDim; d++)
763  {
764  const ordinal_type d_index = gridCellCoords[d] + hypercubeComponentNodeNumber(gridCellNode,d);
765  cartesianNodeNumber += d_index * d_index_stride;
766  d_index_stride *= (gridCellCounts_[d] + 1);
767  }
768  return cartesianNodeNumber + gridNodeNumberingOffset;
769  }
770  }
771  else
772  {
773  // cellToNode() not supported
774  return -1;
775  }
776  }
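
 // The Cartesian vertex numbering above, with x the fastest-moving index, assigns vertex (i,j) of an
 // (nx x ny)-cell grid the number i + j * (nx + 1), before the interior-node offset is added.
 // A 2D sketch of that formula; exampleCartesianVertexNumber2D is a hypothetical name.
 KOKKOS_INLINE_FUNCTION
 ordinal_type exampleCartesianVertexNumber2D(const ordinal_type i, const ordinal_type j, const ordinal_type nx)
 {
   return i + j * (nx + 1); // e.g. nx = 4: vertex (2,1) --> 2 + 1 * 5 = 7
 }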
777 
778  template<class PointScalar, int spaceDim, typename DeviceType>
779  KOKKOS_INLINE_FUNCTION
781  {
782  if (cellGeometryType_ == UNIFORM_GRID)
783  {
784  const int numSubdivisions = numCellsPerGridCell(subdivisionStrategy_);
785  if (numSubdivisions == 1)
786  {
787  return CONSTANT;
788  }
789  else
790  {
791  return MODULAR;
792  }
793  }
794  else return GENERAL;
795  }
796 
797  template<class PointScalar, int spaceDim, typename DeviceType>
798  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::getJacobianRefData(const ScalarView<PointScalar,DeviceType> &points) const
799  {
800  Data<PointScalar,DeviceType> emptyRefData;
801  if (cellGeometryType_ == UNIFORM_GRID)
802  {
803  // no need for basis computations
804  return emptyRefData;
805  }
806  else if (cellGeometryType_ == TENSOR_GRID)
807  {
808  // no need for basis values
809  return emptyRefData;
810  }
811  else if ((cellGeometryType_ == FIRST_ORDER) || (cellGeometryType_ == HIGHER_ORDER))
812  {
813  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
814  if (simplex)
815  {
816  // no need for precomputed basis values
817  return emptyRefData;
818  }
819  else
820  {
821  auto basisForNodes = this->basisForNodes();
822 
823  if (affine_)
824  {
825  // no need for precomputed basis values
826  return emptyRefData;
827  }
828  else
829  {
830  // 2 use cases: (P,D) and (C,P,D).
831  // if (P,D), call the TensorPoints variant
832  if (points.rank() == 2)
833  {
834  TensorPoints<PointScalar,DeviceType> tensorPoints(points);
835  return getJacobianRefData(tensorPoints);
836  }
837  else
838  {
839  const int numCells = points.extent_int(0);
840  const int numPoints = points.extent_int(1);
841  const int numFields = basisForNodes->getCardinality();
842 
843  auto cellBasisGradientsView = getMatchingViewWithLabel(points, "CellGeometryProvider: cellBasisGradients", numCells, numFields, numPoints, spaceDim);
844  auto basisGradientsView = getMatchingViewWithLabel(points, "CellGeometryProvider: basisGradients", numFields, numPoints, spaceDim);
845 
846  for (int cellOrdinal=0; cellOrdinal<numCells; cellOrdinal++)
847  {
848  auto refPointsForCell = Kokkos::subview(points, cellOrdinal, Kokkos::ALL(), Kokkos::ALL());
849  basisForNodes->getValues(basisGradientsView, refPointsForCell, OPERATOR_GRAD);
850 
851  // At some (likely relatively small) memory cost, we copy the BasisGradients into an explicit (F,P,D) container.
852  // Given that we expect to reuse this for a non-trivial number of cells in the common use case, the extra memory
853  // cost is likely worth the flops saved on reuse. (One might want to revisit this in cases of high spaceDim
854  // and/or very high polynomial order.)
855 
856  using ExecutionSpace = typename DeviceType::execution_space;
857  auto policy = Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<3>>({0,0,0},{numFields,numPoints,spaceDim});
858 
859  Kokkos::parallel_for("copy basis gradients", policy,
860  KOKKOS_LAMBDA (const int &fieldOrdinal, const int &pointOrdinal, const int &d) {
861  cellBasisGradientsView(cellOrdinal,fieldOrdinal,pointOrdinal,d) = basisGradientsView(fieldOrdinal,pointOrdinal,d);
862  });
863  ExecutionSpace().fence();
864  }
865  Data<PointScalar,DeviceType> basisRefData(cellBasisGradientsView);
866  return basisRefData;
867  }
868  }
869  }
870  }
871  else
872  {
873  // TODO: handle the other cases
874  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
875  }
876  return emptyRefData;
877 
878 
879  }
880 
881  template<class PointScalar, int spaceDim, typename DeviceType>
882  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::getJacobianRefData(const TensorPoints<PointScalar,DeviceType> &points) const
883  {
884  Data<PointScalar,DeviceType> emptyRefData;
885  if (cellGeometryType_ == UNIFORM_GRID)
886  {
887  // no need for basis computations
888  return emptyRefData;
889  }
890  else if (cellGeometryType_ == TENSOR_GRID)
891  {
892  // no need for basis values
893  return emptyRefData;
894  }
895  else if ((cellGeometryType_ == FIRST_ORDER) || (cellGeometryType_ == HIGHER_ORDER))
896  {
897  const bool simplex = (spaceDim + 1 == cellToNodes_.extent_int(1));
898  if (simplex)
899  {
900  // no need for precomputed basis values
901  return emptyRefData;
902  }
903  else
904  {
905  auto basisForNodes = this->basisForNodes();
906 
907  if (affine_)
908  {
909  // no need for precomputed basis values
910  return emptyRefData;
911  }
912  else
913  {
914  auto basisGradients = basisForNodes->allocateBasisValues(points, OPERATOR_GRAD);
915  basisForNodes->getValues(basisGradients, points, OPERATOR_GRAD);
916 
917  int numPoints = points.extent_int(0);
918  int numFields = basisForNodes->getCardinality();
919 
920  // At some (likely relatively small) memory cost, we copy the BasisGradients into an explicit (F,P,D) container.
921  // Given that we expect to reuse this for a non-trivial number of cells in the common use case, the extra memory
922  // cost is likely worth the flops saved on reuse. (One might want to revisit this in cases of high spaceDim
923  // and/or very high polynomial order.)
924 
925  auto firstPointComponentView = points.getTensorComponent(0); // (P,D0)
926  auto basisGradientsView = getMatchingViewWithLabel(firstPointComponentView, "CellGeometryProvider: temporary basisGradients", numFields, numPoints, spaceDim);
927 
928  using ExecutionSpace = typename DeviceType::execution_space;
929  auto policy = Kokkos::MDRangePolicy<ExecutionSpace,Kokkos::Rank<3>>({0,0,0},{numFields,numPoints,spaceDim});
930 
931  Kokkos::parallel_for("copy basis gradients", policy,
932  KOKKOS_LAMBDA (const int &fieldOrdinal, const int &pointOrdinal, const int &d) {
933  basisGradientsView(fieldOrdinal,pointOrdinal,d) = basisGradients(fieldOrdinal,pointOrdinal,d);
934  });
935  ExecutionSpace().fence();
936 
937  Data<PointScalar,DeviceType> basisRefData(basisGradientsView);
938  return basisRefData;
939  }
940  }
941  }
942  else
943  {
944  // TODO: handle the other cases
945  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for this CellGeometryType is not yet implemented");
946  }
947  return emptyRefData;
948  }
949 
950  template<class PointScalar, int spaceDim, typename DeviceType>
951  KOKKOS_INLINE_FUNCTION
952  int CellGeometry<PointScalar,spaceDim,DeviceType>::hypercubeComponentNodeNumber(int hypercubeNodeNumber, int d) const
953  {
954  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
955  {
956  // note that Shards numbers nodes for quad counter-clockwise
957  // cube is tensor-product of the (x,y) quad with a z line segment
958  if (d==0)
959  {
960  if ((hypercubeNodeNumber % 4 == 1) || (hypercubeNodeNumber % 4 == 2))
961  return 1;
962  else
963  return 0;
964  }
965  else if (d==1)
966  {
967  if ((hypercubeNodeNumber % 4 == 2) || (hypercubeNodeNumber % 4 == 3))
968  return 1;
969  else
970  return 0;
971  }
972  }
973  // tensor formula coincides with shards formula for d ≥ 2
974  const int nodesForPriorDimensions = 1 << d;
975  if ((hypercubeNodeNumber / nodesForPriorDimensions) % 2 == 1)
976  return 1;
977  else
978  return 0;
979  }
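
 // Under the tensor ordering, component d of hypercube node n is simply bit d of n, which is equivalent
 // to the division/modulo formula above: node 6 = 0b110 of a hexahedron has components (x,y,z) = (0,1,1).
 // The classic shards quad ordering runs counter-clockwise -- (0,0),(1,0),(1,1),(0,1) -- which is why
 // d == 0 and d == 1 are special-cased above. A sketch of the tensor formula (hypothetical helper name):
 KOKKOS_INLINE_FUNCTION
 int exampleTensorNodeComponent(const int hypercubeNodeNumber, const int d)
 {
   return (hypercubeNodeNumber >> d) & 1; // same as (n / 2^d) % 2
 }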
980 
981  template<class PointScalar, int spaceDim, typename DeviceType>
982  void CellGeometry<PointScalar,spaceDim,DeviceType>::initializeOrientations()
983  {
984  using HostExecSpace = Kokkos::DefaultHostExecutionSpace;
985 
986  const bool isGridType = (cellGeometryType_ == TENSOR_GRID) || (cellGeometryType_ == UNIFORM_GRID);
987  const int numOrientations = isGridType ? numCellsPerGridCell(subdivisionStrategy_) : numCells();
988 
989  const int nodesPerCell = numNodesPerCell();
990 
991  ScalarView<Orientation, DeviceType> orientationsView("orientations", numOrientations);
992  auto orientationsHost = Kokkos::create_mirror_view(typename HostExecSpace::memory_space(), orientationsView);
993 
994  DataVariationType cellVariationType;
995 
996  if (isGridType)
997  {
998  // then there are as many distinct orientations possible as there are cells per grid cell
999  // fill cellNodesHost with sample nodes from grid cell 0
1000  const int numSubdivisions = numCellsPerGridCell(subdivisionStrategy_); // can be up to 6
1001  ScalarView<PointScalar, HostExecSpace> cellNodesHost("cellNodesHost",numOrientations,nodesPerCell); // (C,N) -- where C = numOrientations
1002 
1003 #if defined(INTREPID2_COMPILE_DEVICE_CODE)
1004 #else
1006  const int gridCellOrdinal = 0;
1007  auto hostPolicy = Kokkos::MDRangePolicy<HostExecSpace,Kokkos::Rank<2>>({0,0},{numSubdivisions,nodesPerCell});
1008  Kokkos::parallel_for("fill cellNodesHost", hostPolicy,
1009  [this,gridCellOrdinal,cellNodesHost] (const int &subdivisionOrdinal, const int &nodeInCell) {
1010  auto node = this->gridCellNodeForSubdivisionNode(gridCellOrdinal, subdivisionOrdinal, nodeInCell);
1011  cellNodesHost(subdivisionOrdinal,nodeInCell) = node;
1012  });
1013 #endif
1014  cellVariationType = (numSubdivisions == 1) ? CONSTANT : MODULAR;
1015  OrientationTools<HostExecSpace>::getOrientation(orientationsHost,cellNodesHost,this->cellTopology());
1016  }
1017  else
1018  {
1019  cellVariationType = GENERAL;
1020  auto cellNodesHost = Kokkos::create_mirror_view_and_copy(typename HostExecSpace::memory_space(), cellToNodes_);
1021  OrientationTools<HostExecSpace>::getOrientation(orientationsHost,cellNodesHost,this->cellTopology());
1022  }
1023  Kokkos::deep_copy(orientationsView,orientationsHost);
1024 
1025  const int orientationsRank = 1; // shape (C)
1026  const Kokkos::Array<int,7> orientationExtents {static_cast<int>(numCells_),1,1,1,1,1,1};
1027  const Kokkos::Array<DataVariationType,7> orientationVariationTypes { cellVariationType, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT, CONSTANT};
1028  orientations_ = Data<Orientation,DeviceType>(orientationsView, orientationsRank, orientationExtents, orientationVariationTypes);
1029  }
1030 
1031  template<class PointScalar, int spaceDim, typename DeviceType>
1032  KOKKOS_INLINE_FUNCTION
1033  size_t CellGeometry<PointScalar,spaceDim,DeviceType>::extent(const int &r) const {
1034  if (r == 0)
1035  {
1036  return numCells_;
1037  }
1038  else if (r == 1)
1039  {
1040  return numNodesPerCell_;
1041  }
1042  else if (r == 2)
1043  {
1044  return spaceDim;
1045  }
1046  else
1047  {
1048  return 1;
1049  }
1050  }
1051 
1052  template<class PointScalar, int spaceDim, typename DeviceType>
1053  template <typename iType>
1054  KOKKOS_INLINE_FUNCTION
1055  typename std::enable_if<std::is_integral<iType>::value, int>::type
1056  CellGeometry<PointScalar,spaceDim,DeviceType>::extent_int(const iType& r) const
1057  {
1058  return static_cast<int>(extent(r));
1059  }
1060 
1061  template<class PointScalar, int spaceDim, typename DeviceType>
1062  KOKKOS_INLINE_FUNCTION
1065  {
1066  return nodeOrdering_;
1067  }
1068 
1069  template<class PointScalar, int spaceDim, typename DeviceType>
1070  KOKKOS_INLINE_FUNCTION
1071  int CellGeometry<PointScalar,spaceDim,DeviceType>::numCells() const
1072  {
1073  return numCells_;
1074  }
1075 
1076  template<class PointScalar, int spaceDim, typename DeviceType>
1077  KOKKOS_INLINE_FUNCTION
1078  int CellGeometry<PointScalar,spaceDim,DeviceType>::numCellsInDimension(const int &dim) const
1079  {
1080  if (cellGeometryType_ == UNIFORM_GRID)
1081  {
1082  return gridCellCounts_[dim];
1083  }
1084  else if (cellGeometryType_ == TENSOR_GRID)
1085  {
1086  return tensorVertices_.extent_int(dim);
1087  }
1088  else
1089  {
1090  return -1; // not valid for this cell geometry type
1091  }
1092  }
1093 
1094  template<class PointScalar, int spaceDim, typename DeviceType>
1095  KOKKOS_INLINE_FUNCTION
1096  int CellGeometry<PointScalar,spaceDim,DeviceType>::numNodesPerCell() const
1097  {
1098  return numNodesPerCell_;
1099  }
1100 
1101  template<class PointScalar, int spaceDim, typename DeviceType>
1102  KOKKOS_INLINE_FUNCTION
1103  Orientation CellGeometry<PointScalar,spaceDim,DeviceType>::getOrientation(const int &cellNumber) const
1104  {
1105  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!orientations_.isValid(), std::invalid_argument, "orientations_ not initialized; call initializeOrientations() first");
1106  return orientations_(cellNumber);
1107  }
1108 
1109  template<class PointScalar, int spaceDim, typename DeviceType>
1110  Data<Orientation,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::getOrientations()
1111  {
1112  if (!orientations_.isValid())
1113  {
1114  initializeOrientations();
1115  }
1116  return orientations_;
1117  }
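
 // getOrientations() initializes the orientations container lazily, so callers can simply request it;
 // the per-cell accessor above requires that this initialization has already happened. A minimal usage
 // sketch; exampleGetOrientations is a hypothetical name.
 template<class PointScalar, int spaceDim, typename DeviceType>
 Data<Orientation,DeviceType> exampleGetOrientations(CellGeometry<PointScalar,spaceDim,DeviceType> &geometry)
 {
   return geometry.getOrientations(); // shape (C); calls initializeOrientations() on first use
 }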
1118 
1119  template<class PointScalar, int spaceDim, typename DeviceType>
1120  KOKKOS_INLINE_FUNCTION
1121  PointScalar CellGeometry<PointScalar,spaceDim,DeviceType>::gridCellCoordinate(const int &gridCellOrdinal, const int &localNodeNumber, const int &dim) const
1122  {
1123  const int componentNode = hypercubeComponentNodeNumber(localNodeNumber, dim);
1124  int cellCountForPriorDimensions = 1;
1125  for (int d=0; d<dim; d++)
1126  {
1127  cellCountForPriorDimensions *= numCellsInDimension(d);
1128  }
1129  const int componentGridCellOrdinal = (gridCellOrdinal / cellCountForPriorDimensions) % numCellsInDimension(dim);
1130  const int vertexOrdinal = componentGridCellOrdinal + componentNode;
1131  if (cellGeometryType_ == UNIFORM_GRID)
1132  {
1133  return origin_[dim] + (vertexOrdinal * domainExtents_[dim]) / gridCellCounts_[dim];
1134  }
1135  else if (cellGeometryType_ == TENSOR_GRID)
1136  {
1137  Kokkos::Array<int,spaceDim> pointOrdinalComponents;
1138  for (int d=0; d<spaceDim; d++)
1139  {
1140  pointOrdinalComponents[d] = 0;
1141  }
1142  pointOrdinalComponents[dim] = vertexOrdinal;
1143  return tensorVertices_(pointOrdinalComponents,dim);
1144  }
1145  else
1146  {
1147  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Unsupported geometry type");
1148  return 0; // unreachable; here to avoid compiler warnings
1149  }
1150  }
1151 
1152  template<class PointScalar, int spaceDim, typename DeviceType>
1153  KOKKOS_INLINE_FUNCTION
1154  unsigned CellGeometry<PointScalar,spaceDim,DeviceType>::rank() const
1155  {
1156  return 3; // (C,N,D)
1157  }
1158 
1159  template<class PointScalar, int spaceDim, typename DeviceType>
1160  KOKKOS_INLINE_FUNCTION
1161  int CellGeometry<PointScalar,spaceDim,DeviceType>::gridCellNodeForSubdivisionNode(const int &gridCellOrdinal, const int &subdivisionOrdinal,
1162  const int &subdivisionNodeNumber) const
1163  {
1164  // TODO: do something to reuse the nodeLookup containers
1165  switch (subdivisionStrategy_)
1166  {
1167  case NO_SUBDIVISION:
1168  return subdivisionNodeNumber;
1169  case TWO_TRIANGLES_RIGHT:
1170  case TWO_TRIANGLES_LEFT:
1171  case FOUR_TRIANGLES:
1172  {
1173  Kokkos::Array<int,3> nodeLookup;
1174  if (subdivisionStrategy_ == TWO_TRIANGLES_RIGHT)
1175  {
1176  if (subdivisionOrdinal == 0)
1177  {
1178  // bottom-right cell: node numbers coincide with quad node numbers
1179  nodeLookup = {0,1,2};
1180  }
1181  else if (subdivisionOrdinal == 1)
1182  {
1183  // node 0 --> node 2
1184  // node 1 --> node 3
1185  // node 2 --> node 0
1186  nodeLookup = {2,3,0};
1187  }
1188  else
1189  {
1190  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Unsupported subdivision ordinal");
1191  }
1192  }
1193  else if (subdivisionStrategy_ == TWO_TRIANGLES_LEFT)
1194  {
1195  if (subdivisionOrdinal == 0)
1196  {
1197  // bottom-left cell:
1198  // node 0 --> node 3
1199  // node 1 --> node 0
1200  // node 2 --> node 1
1201  nodeLookup = {3,0,1};
1202  }
1203  else if (subdivisionOrdinal == 1)
1204  {
1205  // node 0 --> node 2
1206  // node 1 --> node 3
1207  // node 2 --> node 0
1208  nodeLookup = {2,3,0};
1209  }
1210  else
1211  {
1212  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Unsupported subdivision ordinal");
1213  }
1214  }
1215  else // FOUR_TRIANGLES
1216  {
1217  // counter-clockwise, bottom triangle first
1218  // bottom triangle goes:
1219  // 0 --> 1
1220  // 1 --> center
1221  // 2 --> 0
1222  // and similarly for the other triangles, proceeding counter-clockwise
1223  // node 1 always being the center, we special-case that
1224  if (subdivisionNodeNumber == 1)
1225  {
1226  // center coordinate: we call this node 4 in the quadrilateral
1227  return 4;
1228  }
1229  else
1230  {
1231  nodeLookup = {(subdivisionOrdinal + 1) % 4, -1, subdivisionOrdinal};
1232  }
1233  }
1234  const int gridCellNodeNumber = nodeLookup[subdivisionNodeNumber];
1235  return gridCellNodeNumber;
1236  }
1237  case FIVE_TETRAHEDRA:
1238  case SIX_TETRAHEDRA:
1239  {
1240  Kokkos::Array<int,4> nodeLookup;
1241  if (subdivisionStrategy_ == FIVE_TETRAHEDRA)
1242  {
1243  /*
1244  // to discretize a unit cube into 5 tetrahedra, we can take the four vertices
1245  // (1,1,1)
1246  // (0,0,1)
1247  // (0,1,0)
1248  // (1,0,0)
1249  // as an interior tetrahedron. Call this cell 0. The remaining 4 cells can be determined
1250  // by selecting three of the above points (there are exactly 4 such combinations) and then selecting
1251  // from the remaining four vertices of the cube the one nearest the plane defined by those three points.
1252  // The remaining four vertices are:
1253  // (0,0,0)
1254  // (1,1,0)
1255  // (1,0,1)
1256  // (0,1,1)
1257  // For each of these four, we pick one, and then take the three nearest vertices from cell 0 to form a new tetrahedron.
1258  // We enumerate as follows:
1259  // cell 0: (1,1,1), (0,0,1), (0,1,0), (1,0,0)
1260  // cell 1: (0,0,0), (1,0,0), (0,1,0), (0,0,1)
1261  // cell 2: (1,1,0), (1,1,1), (0,1,0), (1,0,0)
1262  // cell 3: (1,0,1), (1,1,1), (0,0,1), (1,0,0)
1263  // cell 4: (0,1,1), (1,1,1), (0,0,1), (0,1,0)
1264  */
1265  // tetrahedra are as follows:
1266  // 0: {1,3,4,6}
1267  // 1: {0,1,3,4}
1268  // 2: {1,2,3,6}
1269  // 3: {1,4,5,6}
1270  // 4: {3,4,6,7}
1271  switch (subdivisionOrdinal) {
1272  case 0:
1273  nodeLookup = {1,3,4,6};
1274  break;
1275  case 1:
1276  nodeLookup = {0,1,3,4};
1277  break;
1278  case 2:
1279  nodeLookup = {1,2,3,6};
1280  break;
1281  case 3:
1282  nodeLookup = {1,4,5,6};
1283  break;
1284  case 4:
1285  nodeLookup = {3,4,6,7};
1286  break;
1287  default:
1288  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "invalid subdivisionOrdinal");
1289  break;
1290  }
1291  }
1292  else if (subdivisionStrategy_ == SIX_TETRAHEDRA)
1293  {
1294  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "support for SIX_TETRAHEDRA not yet implemented");
1295  }
1296  const int gridCellNodeNumber = nodeLookup[subdivisionNodeNumber];
1297  return gridCellNodeNumber;
1298  }
1299  case SIX_PYRAMIDS:
1300  {
1301  Kokkos::Array<int,5> nodeLookup; // nodeLookup[4] = 8; // center
1302  // we order the subcell divisions as bottom, top, left, right, front, back
1303  if (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS)
1304  {
1305  switch (subdivisionOrdinal)
1306  {
1307  case 0: // bottom (min z)
1308  nodeLookup = {0,1,2,3,8};
1309  break;
1310  case 1: // top (max z)
1311  nodeLookup = {4,5,6,7,8};
1312  break;
1313  case 2: // left (min y)
1314  nodeLookup = {0,1,5,4,8};
1315  break;
1316  case 3: // right (max y)
1317  nodeLookup = {3,2,6,7,8};
1318  break;
1319  case 4: // front (max x)
1320  nodeLookup = {1,2,6,5,8};
1321  break;
1322  case 5: // back (min x)
1323  nodeLookup = {0,3,7,4,8};
1324  break;
1325  default:
1326  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Invalid subdivisionOrdinal!");
1327  }
1328  }
1329  else // tensor ordering
1330  {
1331  switch (subdivisionOrdinal)
1332  {
1333  case 0: // bottom (min z)
1334  nodeLookup = {0,1,3,2,8};
1335  break;
1336  case 1: // top (max z)
1337  nodeLookup = {4,5,7,6,8};
1338  break;
1339  case 2: // left (min y)
1340  nodeLookup = {0,1,5,4,8};
1341  break;
1342  case 3: // right (max y)
1343  nodeLookup = {2,3,7,6,8};
1344  break;
1345  case 4: // front (max x)
1346  nodeLookup = {1,3,7,5,8};
1347  break;
1348  case 5: // back (min x)
1349  nodeLookup = {0,2,6,4,8};
1350  break;
1351  default:
1352  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Invalid subdivisionOrdinal!");
1353  }
1354  }
1355  const int gridCellNodeNumber = nodeLookup[subdivisionNodeNumber];
1356  return gridCellNodeNumber;
1357  }
1358  default:
1359  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(true, std::invalid_argument, "Subdivision strategy not yet implemented!");
1360  // some compilers complain about missing return
1361  return 0; // statement should be unreachable...
1362  }
1363  }
1364 
1365  template<class PointScalar, int spaceDim, typename DeviceType>
1366  KOKKOS_INLINE_FUNCTION
1367  PointScalar CellGeometry<PointScalar,spaceDim,DeviceType>::subdivisionCoordinate(const int &gridCellOrdinal, const int &subdivisionOrdinal,
1368  const int &subdivisionNodeNumber, const int &d) const
1369  {
1370  int gridCellNode = gridCellNodeForSubdivisionNode(gridCellOrdinal, subdivisionOrdinal, subdivisionNodeNumber);
1371 
1372  if (subdivisionStrategy_ == FOUR_TRIANGLES)
1373  {
1374  // this is a case in which the gridCellNode may not actually be a node in the grid cell
1375  if (gridCellNode == 4) // center vertex
1376  {
1377  // d == 0 means quad vertices 0 and 1 suffice;
1378  // d == 1 means quad vertices 0 and 3 suffice
1379  const int gridVertex0 = 0;
1380  const int gridVertex1 = (d == 0) ? 1 : 3;
1381  return 0.5 * (gridCellCoordinate(gridCellOrdinal, gridVertex0, d) + gridCellCoordinate(gridCellOrdinal, gridVertex1, d));
1382  }
1383  }
1384  else if (subdivisionStrategy_ == SIX_PYRAMIDS)
1385  {
1386  // this is a case in which the gridCellNode may not actually be a node in the grid cell
1387  if (gridCellNode == 8) // center vertex
1388  {
1389  // can compute the center vertex coord in dim d by averaging diagonally opposite vertices'
1390  // coords in dimension d.
1391  const int gridVertex0 = 0; // 0 is diagonally opposite to 6 or 7, depending on nodeOrdering_
1392  const int gridVertex1 = (nodeOrdering_ == HYPERCUBE_NODE_ORDER_CLASSIC_SHARDS) ? 6 : 7;
1393  return 0.5 * (gridCellCoordinate(gridCellOrdinal, gridVertex0, d) + gridCellCoordinate(gridCellOrdinal, gridVertex1, d));
1394  }
1395  }
1396  return gridCellCoordinate(gridCellOrdinal, gridCellNode, d);
1397  }
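
 // The interior node added by FOUR_TRIANGLES or SIX_PYRAMIDS has, in each dimension, the average of the
 // coordinates of two grid-cell vertices that differ in that dimension (diagonally opposite vertices in
 // the pyramid case), as computed above. A sketch of that average; exampleCenterCoordinate is a
 // hypothetical name.
 template<class Scalar>
 KOKKOS_INLINE_FUNCTION
 Scalar exampleCenterCoordinate(const Scalar &vertexCoord, const Scalar &oppositeVertexCoord)
 {
   return 0.5 * (vertexCoord + oppositeVertexCoord); // midpoint in the given dimension
 }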
1398 
1399  template<class PointScalar, int spaceDim, typename DeviceType>
1400  KOKKOS_INLINE_FUNCTION
1401  PointScalar
1402  CellGeometry<PointScalar,spaceDim,DeviceType>::operator()(const int& cell, const int& node, const int& dim) const {
1403  if ((cellGeometryType_ == UNIFORM_GRID) || (cellGeometryType_ == TENSOR_GRID))
1404  {
1405  const int numSubdivisions = numCellsPerGridCell(subdivisionStrategy_);
1406  if (numSubdivisions == 1)
1407  {
1408  // hypercube
1409  return gridCellCoordinate(cell, node, dim);
1410  }
1411  else
1412  {
1413  const int subdivisionOrdinal = cell % numSubdivisions;
1414  const int gridCellOrdinal = cell / numSubdivisions;
1415  return subdivisionCoordinate(gridCellOrdinal, subdivisionOrdinal, node, dim);
1416  }
1417  }
1418  else
1419  {
1420 #ifdef HAVE_INTREPID2_DEBUG
1421  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((cell < 0), std::invalid_argument, "cell out of bounds");
1422  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(static_cast<unsigned>(cell) >= numCells_, std::invalid_argument, "cell out of bounds");
1423  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((node < 0), std::invalid_argument, "node out of bounds");
1424  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(static_cast<unsigned>(node) >= numNodesPerCell_, std::invalid_argument, "node out of bounds");
1425  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE((dim < 0), std::invalid_argument, "dim out of bounds" );
1426  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(dim >= spaceDim, std::invalid_argument, "dim out of bounds" );
1427 #endif
1428  if (cellToNodes_.is_allocated())
1429  {
1430  const int nodeNumber = cellToNodes_(cell,node);
1431  return nodes_(nodeNumber,dim);
1432  }
1433  else
1434  {
1435  return nodes_(cell,node,dim);
1436  }
1437  }
1438  }
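
 // CellGeometry thus behaves as a rank-3 (C,N,D) container of physical node coordinates, regardless of
 // which storage variant (grid, cellToNodes_ + nodes_, or per-cell nodes_) backs it. A minimal access
 // sketch; exampleNodeCoordinate is a hypothetical name.
 template<class PointScalar, int spaceDim, typename DeviceType>
 KOKKOS_INLINE_FUNCTION
 PointScalar exampleNodeCoordinate(const CellGeometry<PointScalar,spaceDim,DeviceType> &geometry,
                                   const int cell, const int node, const int dim)
 {
   return geometry(cell, node, dim); // coordinate `dim` of node `node` in cell `cell`
 }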
1439 
1440  template<class PointScalar, int spaceDim, typename DeviceType>
1441  void CellGeometry<PointScalar,spaceDim,DeviceType>::orientations(ScalarView<Orientation,DeviceType> orientationsView, const int &startCell, const int &endCell)
1442  {
1443  auto orientationsData = getOrientations();
1444  const int numCellsWorkset = (endCell == -1) ? (numCells_ - startCell) : (endCell - startCell);
1445 
1446  using ExecutionSpace = typename DeviceType::execution_space;
1447  auto policy = Kokkos::RangePolicy<ExecutionSpace>(ExecutionSpace(),0,numCellsWorkset);
1448  Kokkos::parallel_for("copy orientations", policy, KOKKOS_LAMBDA(const int cellOrdinal)
1449  {
1450  orientationsView(cellOrdinal) = orientationsData(cellOrdinal);
1451  });
1452  ExecutionSpace().fence();
1453  }
1454 
1455  template<class PointScalar, int spaceDim, typename DeviceType>
1456  KOKKOS_INLINE_FUNCTION
1457  int CellGeometry<PointScalar,spaceDim,DeviceType>::uniformJacobianModulus() const
1458  {
1459  if (cellGeometryType_ == UNIFORM_GRID)
1460  {
1461  return numCellsPerGridCell(subdivisionStrategy_);
1462  }
1463  else
1464  {
1465  return numCells_;
1466  }
1467  }
1468 
1469  template<class PointScalar, int spaceDim, typename DeviceType>
1470  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianData(const TensorPoints<PointScalar,DeviceType> &points, const int startCell, const int endCell) const
1471  {
1472  const int pointsPerCell = points.extent_int(0);
1473  return allocateJacobianDataPrivate(points.getTensorComponent(0),pointsPerCell,startCell,endCell);
1474  }
1475 
1476  template<class PointScalar, int spaceDim, typename DeviceType>
1477  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianData(const ScalarView<PointScalar,DeviceType> &points, const int startCell, const int endCell) const
1478  {
1479  // if points is rank 3, it has shape (C,P,D). If it's rank 2, (P,D).
1480  const int pointDimension = (points.rank() == 3) ? 1 : 0;
1481  const int pointsPerCell = points.extent_int(pointDimension);
1482  return allocateJacobianDataPrivate(points,pointsPerCell,startCell,endCell);
1483  }
1484 
1485  template<class PointScalar, int spaceDim, typename DeviceType>
1486  Data<PointScalar,DeviceType> CellGeometry<PointScalar,spaceDim,DeviceType>::allocateJacobianData(const int &numPoints, const int startCell, const int endCell) const
1487  {
1488  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!affine_, std::invalid_argument, "this version of allocateJacobianData() is only supported for affine CellGeometry");
1489 
1490  ScalarView<PointScalar,DeviceType> emptyPoints;
1491  return allocateJacobianDataPrivate(emptyPoints,numPoints,startCell,endCell);
1492  }
1493 
1494  template<class PointScalar, int spaceDim, typename DeviceType>
1495  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobian(Data<PointScalar,DeviceType> &jacobianData, const TensorPoints<PointScalar,DeviceType> &points,
1496  const Data<PointScalar,DeviceType> &refData, const int startCell, const int endCell) const
1497  {
1498  const int pointsPerCell = points.extent_int(0);
1499  setJacobianDataPrivate(jacobianData,pointsPerCell,refData,startCell,endCell);
1500  }
1501 
1502  template<class PointScalar, int spaceDim, typename DeviceType>
1503  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobian(Data<PointScalar,DeviceType> &jacobianData, const ScalarView<PointScalar,DeviceType> &points,
1504  const Data<PointScalar,DeviceType> &refData, const int startCell, const int endCell) const
1505  {
1506  // if points is rank 3, it has shape (C,P,D). If it's rank 2, (P,D).
1507  const int pointDimension = (points.rank() == 3) ? 1 : 0;
1508  const int pointsPerCell = points.extent_int(pointDimension);
1509  setJacobianDataPrivate(jacobianData,pointsPerCell,refData,startCell,endCell);
1510  }
1511 
1512  template<class PointScalar, int spaceDim, typename DeviceType>
1513  void CellGeometry<PointScalar,spaceDim,DeviceType>::setJacobian(Data<PointScalar,DeviceType> &jacobianData, const int &numPoints, const int startCell, const int endCell) const
1514  {
1515  INTREPID2_TEST_FOR_EXCEPTION_DEVICE_SAFE(!affine_, std::invalid_argument, "this version of setJacobian() is only supported for affine CellGeometry");
1516 
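  // As in the affine allocateJacobianData() overload above, no reference-space basis data is needed;
  // the cell-wise-constant Jacobian is determined by the cell geometry alone.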
1517  Data<PointScalar,DeviceType> emptyRefData;
1518  setJacobianDataPrivate(jacobianData,numPoints,emptyRefData,startCell,endCell);
1519  }
1520 } // namespace Intrepid2
1521 
1522 #endif /* Intrepid2_CellGeometryDef_h */