Thyra_EpetraThyraWrappers.cpp
// @HEADER
// *****************************************************************************
// Thyra: Interfaces and Support for Abstract Numerical Algorithms
//
// Copyright 2004 NTESS and the Thyra contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#include "Thyra_EpetraThyraWrappers.hpp"
#include "Thyra_DefaultSpmdVectorSpace.hpp"
#include "Thyra_DefaultSpmdMultiVector.hpp"
#include "Thyra_DefaultSpmdVector.hpp"
#include "Thyra_DetachedVectorView.hpp"
#include "Thyra_DetachedMultiVectorView.hpp"
#include "Thyra_VectorSpaceFactoryBase.hpp"
#include "Thyra_ProductVectorSpaceBase.hpp"
#include "Teuchos_Assert.hpp"
#include "Teuchos_dyn_cast.hpp"

#include "Teuchos_DefaultSerialComm.hpp"
#ifdef HAVE_MPI
#include "Teuchos_DefaultMpiComm.hpp"
#endif

#include "Epetra_Map.h"
#include "Epetra_Comm.h"
#include "Epetra_SerialComm.h"
#ifdef HAVE_MPI
# include "Epetra_MpiComm.h"
#endif
#include "Epetra_MultiVector.h"
#include "Epetra_Vector.h"

//
// Helpers
//

namespace {


Teuchos::RCP<const Thyra::VectorSpaceBase<double> >
unwrapSingleProductVectorSpace(
  const Teuchos::RCP<const Thyra::VectorSpaceBase<double> > &vs_in
  )
{
  using Teuchos::RCP;
  using Teuchos::rcp_dynamic_cast;
  using Thyra::ProductVectorSpaceBase;
  const RCP<const ProductVectorSpaceBase<double> > pvs =
    rcp_dynamic_cast<const ProductVectorSpaceBase<double> >(vs_in);
  if (nonnull(pvs)) {
    TEUCHOS_ASSERT_EQUALITY( pvs->numBlocks(), 1 );
    return pvs->getBlock(0);
  }
  return vs_in;
}


} // namespace


//
// Implementations of user functions
//

Teuchos::RCP<const Teuchos::Comm<Thyra::Ordinal> >
Thyra::create_Comm( const RCP<const Epetra_Comm> &epetraComm )
{
  using Teuchos::rcp;
  using Teuchos::rcp_dynamic_cast;
  using Teuchos::set_extra_data;

  RCP<const Epetra_SerialComm>
    serialEpetraComm = rcp_dynamic_cast<const Epetra_SerialComm>(epetraComm);
  if( serialEpetraComm.get() ) {
    RCP<const Teuchos::SerialComm<Ordinal> >
      serialComm = rcp(new Teuchos::SerialComm<Ordinal>());
    set_extra_data( serialEpetraComm, "serialEpetraComm", Teuchos::inOutArg(serialComm) );
    return serialComm;
  }

#ifdef HAVE_MPI

  RCP<const Epetra_MpiComm>
    mpiEpetraComm = rcp_dynamic_cast<const Epetra_MpiComm>(epetraComm);
  if( mpiEpetraComm.get() ) {
    RCP<const Teuchos::OpaqueWrapper<MPI_Comm> >
      rawMpiComm = Teuchos::opaqueWrapper(mpiEpetraComm->Comm());
    set_extra_data( mpiEpetraComm, "mpiEpetraComm", Teuchos::inOutArg(rawMpiComm) );
    RCP<const Teuchos::MpiComm<Ordinal> >
      mpiComm = rcp(new Teuchos::MpiComm<Ordinal>(rawMpiComm));
    return mpiComm;
  }

#endif // HAVE_MPI

  // If you get here then the conversion failed!
  return Teuchos::null;

}
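
// Usage sketch for create_Comm(); the variables below are hypothetical and
// only illustrate the expected call pattern:
//
//   Teuchos::RCP<const Epetra_Comm> epetraComm =
//     Teuchos::rcp(new Epetra_SerialComm());
//   Teuchos::RCP<const Teuchos::Comm<Thyra::Ordinal> > teuchosComm =
//     Thyra::create_Comm(epetraComm);
//   teuchosComm.assert_not_null();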


Teuchos::RCP<const Thyra::VectorSpaceBase<double> >
Thyra::create_VectorSpace(
  const RCP<const Epetra_Map> &epetra_map
  )
{
  using Teuchos::as; using Teuchos::inoutArg;

#ifdef TEUCHOS_DEBUG
  TEUCHOS_TEST_FOR_EXCEPTION(
    !epetra_map.get(), std::invalid_argument,
    "create_VectorSpace::initialize(...): Error!" );
#endif // TEUCHOS_DEBUG

  RCP<const Teuchos::Comm<Ordinal> > comm =
    create_Comm(Teuchos::rcpFromRef(epetra_map->Comm())).assert_not_null();

  Teuchos::set_extra_data(epetra_map, "epetra_map", inoutArg(comm));
  const Ordinal localSubDim = epetra_map->NumMyElements();

  RCP<DefaultSpmdVectorSpace<double> > vs =
    defaultSpmdVectorSpace<double>(
      comm, localSubDim, epetra_map->NumGlobalElements64(),
      !epetra_map->DistributedGlobal());

  TEUCHOS_ASSERT_EQUALITY(vs->dim(), as<Ordinal>(epetra_map->NumGlobalElements64()));
  // NOTE: The above assert just checks that the Ordinal type can hold the
  // size returned from NumGlobalElements64(). A 64 bit system will always
  // have Ordinal=ptrdiff_t by default, which is 64 bit, so this should be
  // fine. However, if Ordinal were defined to be only 32 bit then this
  // exception could trigger. Because this assert would likely trigger only
  // in a non-debug build, we leave it unguarded since it is very cheap to
  // perform.
  Teuchos::set_extra_data( epetra_map, "epetra_map", inoutArg(vs) );

  return vs;
}
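
// Usage sketch (the map construction is hypothetical; any valid Epetra_Map
// works):
//
//   Epetra_SerialComm comm;
//   RCP<const Epetra_Map> map = rcp(new Epetra_Map(100, 0, comm));
//   RCP<const VectorSpaceBase<double> > vs = Thyra::create_VectorSpace(map);
//   // vs->dim() == 100, and the map rides along as extra data on vs.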


Teuchos::RCP<const Thyra::VectorSpaceBase<double> >
Thyra::create_LocallyReplicatedVectorSpace(
  const RCP<const VectorSpaceBase<double> > &parentSpace,
  const int dim
  )
{
  using Teuchos::rcp_dynamic_cast;
#ifdef TEUCHOS_DEBUG
  TEUCHOS_TEST_FOR_EXCEPT(parentSpace.get()==NULL);
  Teuchos::dyn_cast<const SpmdVectorSpaceBase<double> >(*parentSpace);
  TEUCHOS_TEST_FOR_EXCEPT(dim <= 0);
#endif
  return parentSpace->smallVecSpcFcty()->createVecSpc(dim);
}
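
// Usage sketch (assumes `vs` is an SPMD vector space as created above and
// `numCols` is a positive int; both names are hypothetical):
//
//   RCP<const VectorSpaceBase<double> > domain =
//     Thyra::create_LocallyReplicatedVectorSpace(vs, numCols);
//   // Suitable as the domain space of a multi-vector with numCols columns.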


Teuchos::RCP<Thyra::VectorBase<double> >
Thyra::create_Vector(
  const RCP<Epetra_Vector> &epetra_v,
  const RCP<const VectorSpaceBase<double> > &space_in
  )
{
#ifdef TEUCHOS_DEBUG
  TEUCHOS_TEST_FOR_EXCEPT(space_in.get()==NULL);
#endif
  RCP<const SpmdVectorSpaceBase<double> >
    space = Teuchos::rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
      space_in,true);
  // mfh 06 Dec 2017: This return should not trigger an issue like
  // #1941, because if epetra_v is NULL on some process but not
  // others, then that process should not be participating in
  // collectives with the other processes anyway.
  if(!epetra_v.get())
    return Teuchos::null;
  // New local view of raw data
  double *localValues;
  epetra_v->ExtractView( &localValues );
  // Build the Vector
  RCP<VectorBase<double> >
    v = Teuchos::rcp(
      new DefaultSpmdVector<double>(
        space,
        Teuchos::arcp(localValues,0,epetra_v->Map().NumMyElements(),false),
        1
        )
      );
  Teuchos::set_extra_data<RCP<Epetra_Vector> >( epetra_v, "Epetra_Vector",
    Teuchos::inOutArg(v) );
  return v;
}
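
// Usage sketch (assumes `map` and `vs` were created consistently as above):
//
//   RCP<Epetra_Vector> x_epetra = rcp(new Epetra_Vector(*map));
//   RCP<VectorBase<double> > x = Thyra::create_Vector(x_epetra, vs);
//   // x is a view: writes through x modify x_epetra's data, and x_epetra
//   // is kept alive as extra data on x.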


Teuchos::RCP<const Thyra::VectorBase<double> >
Thyra::create_Vector(
  const RCP<const Epetra_Vector> &epetra_v,
  const RCP<const VectorSpaceBase<double> > &space_in
  )
{
#ifdef TEUCHOS_DEBUG
  TEUCHOS_TEST_FOR_EXCEPT(space_in.get()==NULL);
#endif
  RCP<const SpmdVectorSpaceBase<double> >
    space = Teuchos::rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
      space_in,true);
  // mfh 06 Dec 2017: This return should not trigger an issue like
  // #1941, because if epetra_v is NULL on some process but not
  // others, then that process should not be participating in
  // collectives with the other processes anyway.
  if(!epetra_v.get())
    return Teuchos::null;
  // New local view of raw data
  double *localValues;
  epetra_v->ExtractView( &localValues );
  // Build the Vector
  RCP<const VectorBase<double> >
    v = Teuchos::rcp(
      new DefaultSpmdVector<double>(
        space,
        Teuchos::arcp(localValues,0,epetra_v->Map().NumMyElements(),false),
        1
        )
      );
  Teuchos::set_extra_data<RCP<const Epetra_Vector> >( epetra_v, "Epetra_Vector",
    Teuchos::inOutArg(v) );
  return v;
}


Teuchos::RCP<Thyra::MultiVectorBase<double> >
Thyra::create_MultiVector(
  const RCP<Epetra_MultiVector> &epetra_mv,
  const RCP<const VectorSpaceBase<double> > &range_in,
  const RCP<const VectorSpaceBase<double> > &domain_in
  )
{
  using Teuchos::rcp_dynamic_cast;
#ifdef TEUCHOS_DEBUG
  TEUCHOS_TEST_FOR_EXCEPT(range_in.get()==NULL);
#endif
  const RCP<const SpmdVectorSpaceBase<double> > range =
    Teuchos::rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
      unwrapSingleProductVectorSpace(range_in),
      true
      );
  RCP<const ScalarProdVectorSpaceBase<double> > domain =
    Teuchos::rcp_dynamic_cast<const ScalarProdVectorSpaceBase<double> >(
      unwrapSingleProductVectorSpace(domain_in),
      true
      );
  // mfh 06 Dec 2017: This return should not trigger an issue like
  // #1941, because if epetra_mv is NULL on some process but not
  // others, then that process should not be participating in
  // collectives with the other processes anyway.
  if (!epetra_mv.get() )
    return Teuchos::null;
  if ( is_null(domain) ) {
    domain = rcp_dynamic_cast<const ScalarProdVectorSpaceBase<double> >(
      create_LocallyReplicatedVectorSpace(range,epetra_mv->NumVectors())
      );
  }
  // New local view of raw data
  double *localValues; int leadingDim;
  if( epetra_mv->ConstantStride() ) {
    epetra_mv->ExtractView( &localValues, &leadingDim );
  }
  else {
    TEUCHOS_TEST_FOR_EXCEPT(true); // ToDo: Implement views of non-contiguous multi-vectors!
  }
  // Build the MultiVector
  RCP<MultiVectorBase<double> >
    mv = Teuchos::rcp(
      new DefaultSpmdMultiVector<double>(
        range,
        domain,
        Teuchos::arcp(localValues,0,leadingDim*epetra_mv->NumVectors(),false),
        leadingDim
        )
      );
  Teuchos::set_extra_data<RCP<Epetra_MultiVector> >(
    epetra_mv, "Epetra_MultiVector", Teuchos::inOutArg(mv) );
  return mv;
}
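
// Usage sketch (assumes `map` and `vs` as above; the domain space is created
// automatically when not supplied):
//
//   RCP<Epetra_MultiVector> X_epetra =
//     rcp(new Epetra_MultiVector(*map, 4));
//   RCP<MultiVectorBase<double> > X =
//     Thyra::create_MultiVector(X_epetra, vs);
//   // X->domain()->dim() == 4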


Teuchos::RCP<const Thyra::MultiVectorBase<double> >
Thyra::create_MultiVector(
  const RCP<const Epetra_MultiVector> &epetra_mv,
  const RCP<const VectorSpaceBase<double> > &range_in,
  const RCP<const VectorSpaceBase<double> > &domain_in
  )
{
  using Teuchos::rcp_dynamic_cast;
#ifdef TEUCHOS_DEBUG
  TEUCHOS_TEST_FOR_EXCEPT(range_in.get()==NULL);
#endif
  const RCP<const SpmdVectorSpaceBase<double> > range =
    Teuchos::rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
      unwrapSingleProductVectorSpace(range_in),
      true
      );
  RCP<const ScalarProdVectorSpaceBase<double> > domain =
    Teuchos::rcp_dynamic_cast<const ScalarProdVectorSpaceBase<double> >(
      unwrapSingleProductVectorSpace(domain_in),
      true
      );
  // mfh 06 Dec 2017: This return should not trigger an issue like
  // #1941, because if epetra_mv is NULL on some process but not
  // others, then that process should not be participating in
  // collectives with the other processes anyway.
  if (!epetra_mv.get())
    return Teuchos::null;
  if ( is_null(domain) ) {
    domain = rcp_dynamic_cast<const ScalarProdVectorSpaceBase<double> >(
      create_LocallyReplicatedVectorSpace(range,epetra_mv->NumVectors())
      );
  }
  // New local view of raw data
  double *localValues; int leadingDim;
  if( epetra_mv->ConstantStride() ) {
    epetra_mv->ExtractView( &localValues, &leadingDim );
  }
  else {
    TEUCHOS_TEST_FOR_EXCEPT(true); // ToDo: Implement views of non-contiguous multi-vectors!
  }
  // Build the MultiVector
  RCP<const MultiVectorBase<double> >
    mv = Teuchos::rcp(
      new DefaultSpmdMultiVector<double>(
        range,
        domain,
        Teuchos::arcp(localValues,0,leadingDim*epetra_mv->NumVectors(),false),
        leadingDim
        )
      );
  Teuchos::set_extra_data<RCP<const Epetra_MultiVector> >(
    epetra_mv, "Epetra_MultiVector", Teuchos::inOutArg(mv) );
  return mv;
}


Teuchos::RCP<const Epetra_Comm>
Thyra::get_Epetra_Comm(const Teuchos::Comm<Ordinal> &comm_in)
{

  using Teuchos::rcp;
  using Teuchos::ptrFromRef;
  using Teuchos::ptr_dynamic_cast;
  using Teuchos::SerialComm;
#ifdef HAVE_MPI
  using Teuchos::MpiComm;
#endif

  const Ptr<const Teuchos::Comm<Ordinal> > comm = Teuchos::ptrFromRef(comm_in);

  const Ptr<const SerialComm<Ordinal> > serialComm =
    ptr_dynamic_cast<const SerialComm<Ordinal> >(comm);

  RCP<const Epetra_Comm> epetraComm;

#ifdef HAVE_MPI

  const Ptr<const MpiComm<Ordinal> > mpiComm =
    ptr_dynamic_cast<const MpiComm<Ordinal> >(comm);

  TEUCHOS_TEST_FOR_EXCEPTION(is_null(mpiComm) && is_null(serialComm),
    std::runtime_error,
    "SPMD vector space has a communicator that is "
    "neither a serial comm nor an MPI comm");

  if (nonnull(mpiComm)) {
    epetraComm = rcp(new Epetra_MpiComm(*mpiComm->getRawMpiComm()()));
  }
  else {
    epetraComm = rcp(new Epetra_SerialComm());
  }

#else

  TEUCHOS_TEST_FOR_EXCEPTION(is_null(serialComm), std::runtime_error,
    "SPMD vector space has a communicator that is "
    "neither a serial comm nor an MPI comm");

  epetraComm = rcp(new Epetra_SerialComm());

#endif

  TEUCHOS_TEST_FOR_EXCEPTION(is_null(epetraComm), std::runtime_error,
    "null communicator created");

  return epetraComm;

}
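
// Usage sketch (round-trips the Teuchos comm created by create_Comm() back
// to an Epetra_Comm; `teuchosComm` is hypothetical):
//
//   RCP<const Epetra_Comm> epetraComm2 =
//     Thyra::get_Epetra_Comm(*teuchosComm);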


Teuchos::RCP<const Epetra_Map>
Thyra::get_Epetra_Map(
  const VectorSpaceBase<double>& vs_in,
  const RCP<const Epetra_Comm>& comm)
{

  using Teuchos::rcpFromRef;
  using Teuchos::rcpFromPtr;
  using Teuchos::rcp_dynamic_cast;
  using Teuchos::ptrFromRef;
  using Teuchos::ptr_dynamic_cast;

  const Ptr<const VectorSpaceBase<double> > vs_ptr = ptrFromRef(vs_in);

  const Ptr<const SpmdVectorSpaceBase<double> > spmd_vs =
    ptr_dynamic_cast<const SpmdVectorSpaceBase<double> >(vs_ptr);

  const Ptr<const ProductVectorSpaceBase<double> > prod_vs =
    ptr_dynamic_cast<const ProductVectorSpaceBase<double> >(vs_ptr);

  TEUCHOS_TEST_FOR_EXCEPTION( is_null(spmd_vs) && is_null(prod_vs), std::logic_error,
    "Error, the concrete VectorSpaceBase object of type "
    +Teuchos::demangleName(typeid(vs_in).name())+" does not support the"
    " SpmdVectorSpaceBase or the ProductVectorSpaceBase interfaces!" );

  const int numBlocks = (nonnull(prod_vs) ? prod_vs->numBlocks() : 1);

  // Get an array of SpmdVectorSpaceBase objects for the blocks

  Array<RCP<const SpmdVectorSpaceBase<double> > > spmd_vs_blocks;
  if (nonnull(prod_vs)) {
    for (int block_i = 0; block_i < numBlocks; ++block_i) {
      const RCP<const SpmdVectorSpaceBase<double> > spmd_vs_i =
        rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
          prod_vs->getBlock(block_i), true);
      spmd_vs_blocks.push_back(spmd_vs_i);
    }
  }
  else {
    spmd_vs_blocks.push_back(rcpFromPtr(spmd_vs));
  }

  // Find the number of local elements, summed over all blocks

  int myLocalElements = 0;
  for (int block_i = 0; block_i < numBlocks; ++block_i) {
    myLocalElements += spmd_vs_blocks[block_i]->localSubDim();
  }

  // Find the GIDs owned by this processor, taken from all blocks

  int count=0;
  int blockOffset = 0;
#ifndef EPETRA_NO_32BIT_GLOBAL_INDICES
  Array<int> myGIDs(myLocalElements);
#else
  Array<long long> myGIDs(myLocalElements);
#endif
  for (int block_i = 0; block_i < numBlocks; ++block_i) {
    const RCP<const SpmdVectorSpaceBase<double> > spmd_vs_i = spmd_vs_blocks[block_i];
    const int lowGIDInBlock = spmd_vs_i->localOffset();
    const int numLocalElementsInBlock = spmd_vs_i->localSubDim();
    for (int i=0; i < numLocalElementsInBlock; ++i, ++count) {
      myGIDs[count] = blockOffset + lowGIDInBlock + i;
    }
    blockOffset += spmd_vs_i->dim();
  }

#ifndef EPETRA_NO_32BIT_GLOBAL_INDICES
  const int globalDim = vs_in.dim();
#else
  const long long globalDim = vs_in.dim();
#endif

  return Teuchos::rcp(
    new Epetra_Map(globalDim, myLocalElements, myGIDs.getRawPtr(), 0, *comm));

}
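
// Usage sketch (recovers an Epetra_Map from a possibly blocked vector space;
// `vs` and `epetraComm` are hypothetical):
//
//   RCP<const Epetra_Map> map2 = Thyra::get_Epetra_Map(*vs, epetraComm);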

// Almost like the above one, but since the input vs is an RCP we can check for
// the presence of an RCP<const Epetra_Map> in the RCP extra data, to save us time.
Teuchos::RCP<const Epetra_Map>
Thyra::get_Epetra_Map(
  const RCP<const VectorSpaceBase<double>>& vs,
  const RCP<const Epetra_Comm>& comm)
{
  //
  // First, try to grab the Epetra_Map straight out of the
  // RCP since this is the fastest way.
  //
  const Ptr<const RCP<const Epetra_Map> >
    epetra_map_ptr = Teuchos::get_optional_extra_data<RCP<const Epetra_Map> >(
      vs,"epetra_map");
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_map_ptr)) {
    return *epetra_map_ptr;
  }

  // No luck. We need to call get_Epetra_Map(*vs,comm).
  TEUCHOS_TEST_FOR_EXCEPTION(comm.is_null(), std::runtime_error,
    "Error! No RCP Epetra_Map attached to the input vector space RCP, "
    "and the input comm RCP is null.\n");

  return get_Epetra_Map(*vs,comm);
}

Teuchos::RCP<Epetra_Vector>
Thyra::get_Epetra_Vector(
  const Epetra_Map &map,
  const RCP<VectorBase<double> > &v
  )
{
  using Teuchos::get_optional_extra_data;
#ifdef TEUCHOS_DEBUG
  RCP<const VectorSpaceBase<double> >
    epetra_vs = create_VectorSpace(Teuchos::rcp(&map,false));
  THYRA_ASSERT_VEC_SPACES(
    "Thyra::get_Epetra_Vector(map,v)", *epetra_vs, *v->space() );
#endif
  //
  // First, try to grab the Epetra_Vector straight out of the
  // RCP since this is the fastest way.
  //
  const Ptr<const RCP<Epetra_Vector> >
    epetra_v_ptr = get_optional_extra_data<RCP<Epetra_Vector> >(
      v,"Epetra_Vector");
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_v_ptr)) {
    return *epetra_v_ptr;
  }
  //
  // The assumption that we (rightly) make here is that if the vector spaces
  // are compatible, then either the multi-vectors are both in-core or the
  // vector spaces are both derived from SpmdVectorSpaceBase and have
  // compatible maps.
  //
  const VectorSpaceBase<double> &vs = *v->range();
  const SpmdVectorSpaceBase<double> *mpi_vs
    = dynamic_cast<const SpmdVectorSpaceBase<double>*>(&vs);
  const Ordinal localOffset = ( mpi_vs ? mpi_vs->localOffset() : 0 );
  const Ordinal localSubDim = ( mpi_vs ? mpi_vs->localSubDim() : vs.dim() );
  //
  // Here we will extract a view of the local elements in the underlying
  // VectorBase object. In most cases, no data will be allocated or copied
  // and only some small objects will be created and a few virtual functions
  // will be called, so the overhead should be low and fixed.
  //
  // Create a *mutable* view of the local elements; this view will be set on
  // the RCP that is returned. As a result, this view will be released
  // when the returned Epetra_Vector is released.
  //
  // Note that the input vector 'v' will be remembered through this detached
  // view!
  //
  RCP<DetachedVectorView<double> >
    emvv = Teuchos::rcp(
      new DetachedVectorView<double>(
        v
        ,Range1D(localOffset,localOffset+localSubDim-1)
        ,true // forceContiguous
        )
      );
  // Create a temporary Epetra_Vector object and give it
  // the above local view
  RCP<Epetra_Vector>
    epetra_v = Teuchos::rcp(
      new Epetra_Vector(
        ::View // CV
        ,map // Map
        ,const_cast<double*>(emvv->values()) // V
        )
      );
  // Give the explicit view object to the above Epetra_Vector smart pointer
  // object. In this way, when the client is finished with the Epetra_Vector
  // view, the destructor of the object in emvv will automatically commit the
  // changes to the elements in the input v VectorBase object (regardless of
  // its implementation). This is truly an elegant result!
  Teuchos::set_extra_data( emvv, "emvv", Teuchos::inOutArg(epetra_v),
    Teuchos::PRE_DESTROY );
  // We are done!
  return epetra_v;
}
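
// Usage sketch (assumes `map` and a Thyra vector `x` with a compatible
// space; the names are hypothetical):
//
//   {
//     RCP<Epetra_Vector> x_view = Thyra::get_Epetra_Vector(*map, x);
//     x_view->PutScalar(1.0);
//   } // when x_view is released, the changes are committed back to x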

// Same as above, except the map may be omitted (in case the RCP of v
// already has an attached RCP<Epetra_Vector>)
Teuchos::RCP<Epetra_Vector>
Thyra::get_Epetra_Vector(
  const RCP<VectorBase<double> > &v,
  const RCP<const Epetra_Map>& map
  )
{
  //
  // First, try to grab the Epetra_Vector straight out of the
  // RCP since this is the fastest way.
  //
  const Ptr<const RCP<Epetra_Vector> >
    epetra_v_ptr = Teuchos::get_optional_extra_data<RCP<Epetra_Vector> >(
      v,"Epetra_Vector");
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_v_ptr)) {
    return *epetra_v_ptr;
  }

  // No luck. We need to call get_Epetra_Vector(*map,v).
  TEUCHOS_TEST_FOR_EXCEPTION(map.is_null(), std::runtime_error,
    "Error! No RCP Epetra_Vector attached to the input vector RCP, "
    "and the input map RCP is null.\n");

  return get_Epetra_Vector(*map,v);
}

Teuchos::RCP<const Epetra_Vector>
Thyra::get_Epetra_Vector(
  const Epetra_Map &map,
  const RCP<const VectorBase<double> > &v
  )
{
  using Teuchos::get_optional_extra_data;
#ifdef TEUCHOS_DEBUG
  RCP<const VectorSpaceBase<double> >
    epetra_vs = create_VectorSpace(Teuchos::rcp(&map,false));
  THYRA_ASSERT_VEC_SPACES(
    "Thyra::get_Epetra_Vector(map,v)", *epetra_vs, *v->space() );
#endif
  //
  // First, try to grab the Epetra_Vector straight out of the
  // RCP since this is the fastest way.
  //
  const Ptr<const RCP<const Epetra_Vector> >
    epetra_v_ptr = get_optional_extra_data<RCP<const Epetra_Vector> >(
      v,"Epetra_Vector");
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_v_ptr))
    return *epetra_v_ptr;
  const Ptr<const RCP<Epetra_Vector> >
    epetra_nonconst_v_ptr = get_optional_extra_data<RCP<Epetra_Vector> >(
      v,"Epetra_Vector");
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_nonconst_v_ptr))
    return *epetra_nonconst_v_ptr;
  //
  // Same as the above function except as stated below
  //
  const VectorSpaceBase<double> &vs = *v->range();
  const SpmdVectorSpaceBase<double> *mpi_vs
    = dynamic_cast<const SpmdVectorSpaceBase<double>*>(&vs);
  const Ordinal localOffset = ( mpi_vs ? mpi_vs->localOffset() : 0 );
  const Ordinal localSubDim = ( mpi_vs ? mpi_vs->localSubDim() : vs.dim() );
  // Get an explicit *non-mutable* view of all of the elements in the multi
  // vector. Note that 'v' will be remembered by this view!
  RCP<ConstDetachedVectorView<double> >
    evv = Teuchos::rcp(
      new ConstDetachedVectorView<double>(
        v
        ,Range1D(localOffset,localOffset+localSubDim-1)
        ,true // forceContiguous
        )
      );
  // Create a temporary Epetra_Vector object and give it
  // the above view
  RCP<Epetra_Vector>
    epetra_v = Teuchos::rcp(
      new Epetra_Vector(
        ::View // CV
        ,map // Map
        ,const_cast<double*>(evv->values()) // V
        )
      );
  // This next statement will cause the destructor to free the view if
  // needed (see the above function). Since this view is non-mutable,
  // only a releaseDetachedView(...) and not a commit will be called.
  // This is the whole reason there is a separate implementation for
  // the const and non-const cases.
  Teuchos::set_extra_data( evv, "evv", Teuchos::inOutArg(epetra_v),
    Teuchos::PRE_DESTROY );
  // We are done!
  return epetra_v;
}

// Same as above, except the map may be omitted (in case the RCP of v
// already has an attached RCP<const Epetra_Vector>)
Teuchos::RCP<const Epetra_Vector>
Thyra::get_Epetra_Vector(
  const RCP<const VectorBase<double> > &v,
  const RCP<const Epetra_Map>& map
  )
{
  //
  // First, try to grab the Epetra_Vector straight out of the
  // RCP since this is the fastest way.
  //
  const Ptr<const RCP<const Epetra_Vector> >
    epetra_v_ptr = Teuchos::get_optional_extra_data<RCP<const Epetra_Vector> >(
      v,"Epetra_Vector");
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_v_ptr)) {
    return *epetra_v_ptr;
  }

  // No luck. We need to call get_Epetra_Vector(*map,v).
  TEUCHOS_TEST_FOR_EXCEPTION(map.is_null(), std::runtime_error,
    "Error! No RCP to Epetra_Vector attached to the input vector RCP, "
    "and the input map RCP is null.\n");

  return get_Epetra_Vector(*map,v);
}

Teuchos::RCP<Epetra_MultiVector>
Thyra::get_Epetra_MultiVector(
  const Epetra_Map &map,
  const RCP<MultiVectorBase<double> > &mv
  )
{
  using Teuchos::get_optional_extra_data;
#ifdef TEUCHOS_DEBUG
  RCP<const VectorSpaceBase<double> >
    epetra_vs = create_VectorSpace(Teuchos::rcp(&map,false));
  THYRA_ASSERT_VEC_SPACES(
    "Thyra::get_Epetra_MultiVector(map,mv)", *epetra_vs, *mv->range() );
#endif
  //
  // First, try to grab the Epetra_MultiVector straight out of the
  // RCP since this is the fastest way.
  //
  const Ptr<const RCP<Epetra_MultiVector> >
    epetra_mv_ptr = get_optional_extra_data<RCP<Epetra_MultiVector> >(
      mv,"Epetra_MultiVector");
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_mv_ptr)) {
    return *epetra_mv_ptr;
  }
  //
  // The assumption that we (rightly) make here is that if the vector spaces
  // are compatible, then either the multi-vectors are both in-core or the
  // vector spaces are both derived from SpmdVectorSpaceBase and have
  // compatible maps.
  //
  const VectorSpaceBase<double> &vs = *mv->range();
  const SpmdVectorSpaceBase<double> *mpi_vs
    = dynamic_cast<const SpmdVectorSpaceBase<double>*>(&vs);
  const Ordinal localOffset = ( mpi_vs ? mpi_vs->localOffset() : 0 );
  const Ordinal localSubDim = ( mpi_vs ? mpi_vs->localSubDim() : vs.dim() );
  //
  // Here we will extract a view of the local elements in the underlying
  // MultiVectorBase object. In most cases, no data will be allocated or
  // copied and only some small objects will be created and a few virtual
  // functions will be called, so the overhead should be low and fixed.
  //
  // Create a *mutable* view of the local elements; this view will be set on
  // the RCP that is returned. As a result, this view will be released
  // when the returned Epetra_MultiVector is released.
  //
  RCP<DetachedMultiVectorView<double> >
    emmvv = Teuchos::rcp(
      new DetachedMultiVectorView<double>(
        *mv
        ,Range1D(localOffset,localOffset+localSubDim-1)
        )
      );
  // Create a temporary Epetra_MultiVector object and give it
  // the above local view
  RCP<Epetra_MultiVector>
    epetra_mv = Teuchos::rcp(
      new Epetra_MultiVector(
        ::View // CV
        ,map // Map
        ,const_cast<double*>(emmvv->values()) // A
        ,emmvv->leadingDim() // MyLDA
        ,emmvv->numSubCols() // NumVectors
        )
      );
  // Give the explicit view object to the above Epetra_MultiVector
  // smart pointer object. In this way, when the client is finished
  // with the Epetra_MultiVector view, the destructor of the object
  // in emmvv will automatically commit the changes to the elements in
  // the input mv MultiVectorBase object (regardless of its
  // implementation). This is truly an elegant result!
  Teuchos::set_extra_data( emmvv, "emmvv", Teuchos::inOutArg(epetra_mv),
    Teuchos::PRE_DESTROY );
  // Also set the mv itself as extra data just to be safe
  Teuchos::set_extra_data( mv, "mv", Teuchos::inOutArg(epetra_mv) );
  // We are done!
  return epetra_mv;
}
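
// Usage sketch (mirrors the vector case; `map` and a Thyra multi-vector `X`
// with a compatible range are assumed):
//
//   {
//     RCP<Epetra_MultiVector> X_view = Thyra::get_Epetra_MultiVector(*map, X);
//     X_view->Random();
//   } // changes are committed back to X when X_view is released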

// Same as above, except the map may be omitted (in case the RCP of mv
// already has an attached RCP<Epetra_MultiVector>)
Teuchos::RCP<Epetra_MultiVector>
Thyra::get_Epetra_MultiVector(
  const RCP<MultiVectorBase<double> > &mv,
  const RCP<const Epetra_Map>& map
  )
{
  //
  // First, try to grab the Epetra_MultiVector straight out of the
  // RCP since this is the fastest way.
  //
  const Ptr<const RCP<Epetra_MultiVector> >
    epetra_mv_ptr = Teuchos::get_optional_extra_data<RCP<Epetra_MultiVector> >(
      mv,"Epetra_MultiVector");
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_mv_ptr)) {
    return *epetra_mv_ptr;
  }

  // No luck. We need to call get_Epetra_MultiVector(*map,mv).
  TEUCHOS_TEST_FOR_EXCEPTION(map.is_null(), std::runtime_error,
    "Error! No RCP to Epetra_MultiVector attached to the input vector RCP, "
    "and the input map RCP is null.\n");

  return get_Epetra_MultiVector(*map,mv);
}

Teuchos::RCP<const Epetra_MultiVector>
Thyra::get_Epetra_MultiVector(
  const Epetra_Map &map,
  const RCP<const MultiVectorBase<double> > &mv
  )
{
  using Teuchos::get_optional_extra_data;

#ifdef TEUCHOS_DEBUG
  RCP<const VectorSpaceBase<double> >
    epetra_vs = create_VectorSpace(Teuchos::rcp(&map,false));

  THYRA_ASSERT_VEC_SPACES(
    "Thyra::get_Epetra_MultiVector(map,mv)",
    *epetra_vs, *mv->range() );
#endif

  //
  // First, try to grab the Epetra_MultiVector straight out of the
  // RCP since this is the fastest way.
  //
  const Ptr<const RCP<const Epetra_MultiVector> >
    epetra_mv_ptr
    = get_optional_extra_data<RCP<const Epetra_MultiVector> >(
      mv,"Epetra_MultiVector" );
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_mv_ptr)) {
    return *epetra_mv_ptr;
  }

  //
  // Same as the above function except as stated below
  //
  const VectorSpaceBase<double> &vs = *mv->range();
  const SpmdVectorSpaceBase<double> *mpi_vs
    = dynamic_cast<const SpmdVectorSpaceBase<double>*>(&vs);
  const Ordinal localOffset = ( mpi_vs ? mpi_vs->localOffset() : 0 );
  const Ordinal localSubDim = ( mpi_vs ? mpi_vs->localSubDim() : vs.dim() );
  // Get an explicit *non-mutable* view of all of the elements in
  // the multi vector.
  RCP<ConstDetachedMultiVectorView<double> >
    emvv = Teuchos::rcp(
      new ConstDetachedMultiVectorView<double>(
        *mv
        ,Range1D(localOffset,localOffset+localSubDim-1)
        )
      );

  // Create a temporary Epetra_MultiVector object and give it
  // the above view
  RCP<Epetra_MultiVector>
    epetra_mv = Teuchos::rcp(
      new Epetra_MultiVector(
        ::View // CV
        ,map // Map
        ,const_cast<double*>(emvv->values()) // A
        ,emvv->leadingDim() // MyLDA
        ,emvv->numSubCols() // NumVectors
        )
      );

  // This next statement will cause the destructor to free the view if
  // needed (see the above function). Since this view is non-mutable,
  // only a releaseDetachedView(...) and not a commit will be called.
  // This is the whole reason there is a separate implementation for
  // the const and non-const cases.
  Teuchos::set_extra_data( emvv, "emvv", Teuchos::inOutArg(epetra_mv),
    Teuchos::PRE_DESTROY );
  // Also set the mv itself as extra data just to be safe
  Teuchos::set_extra_data( mv, "mv", Teuchos::inOutArg(epetra_mv) );

  // We are done!
  return epetra_mv;
}

// Same as above, except the map may be omitted (in case the RCP of mv
// already has an attached RCP<const Epetra_MultiVector>)
Teuchos::RCP<const Epetra_MultiVector>
Thyra::get_Epetra_MultiVector(
  const RCP<const MultiVectorBase<double> > &mv,
  const RCP<const Epetra_Map>& map
  )
{
  //
  // First, try to grab the Epetra_MultiVector straight out of the
  // RCP since this is the fastest way.
  //
  const Ptr<const RCP<const Epetra_MultiVector> >
    epetra_mv_ptr = Teuchos::get_optional_extra_data<RCP<const Epetra_MultiVector> >(
      mv,"Epetra_MultiVector");
  // mfh 06 Dec 2017: This should be consistent over all processes
  // that participate in v's communicator.
  if(!is_null(epetra_mv_ptr)) {
    return *epetra_mv_ptr;
  }

  // No luck. We need to call get_Epetra_MultiVector(*map,mv).
  TEUCHOS_TEST_FOR_EXCEPTION(map.is_null(), std::runtime_error,
    "Error! No RCP to Epetra_MultiVector attached to the input vector RCP, "
    "and the input map RCP is null.\n");

  return get_Epetra_MultiVector(*map,mv);
}

Teuchos::RCP<Epetra_MultiVector>
Thyra::get_Epetra_MultiVector(
  const Epetra_Map &map,
  MultiVectorBase<double> &mv
  )
{
  using Teuchos::rcpWithEmbeddedObj;
  using Teuchos::ptrFromRef;
  using Teuchos::ptr_dynamic_cast;
  using Teuchos::outArg;

  const Ptr<SpmdMultiVectorBase<double> > mvSpmdMv =
    ptr_dynamic_cast<SpmdMultiVectorBase<double> >(ptrFromRef(mv));

  ArrayRCP<double> mvData;
  Ordinal mvLeadingDim = 0;
  if (nonnull(mvSpmdMv)) {
    mvSpmdMv->getNonconstLocalData(outArg(mvData), outArg(mvLeadingDim));
  }

  return rcpWithEmbeddedObj(
    new Epetra_MultiVector(
      ::View, map, mvData.getRawPtr(), mvLeadingDim, mv.domain()->dim()
      ),
    mvData
    );
}


Teuchos::RCP<const Epetra_MultiVector>
Thyra::get_Epetra_MultiVector(
  const Epetra_Map &map,
  const MultiVectorBase<double> &mv
  )
{
  using Teuchos::rcpWithEmbeddedObj;
  using Teuchos::ptrFromRef;
  using Teuchos::ptr_dynamic_cast;
  using Teuchos::outArg;

  const Ptr<const SpmdMultiVectorBase<double> > mvSpmdMv =
    ptr_dynamic_cast<const SpmdMultiVectorBase<double> >(ptrFromRef(mv));

  ArrayRCP<const double> mvData;
  Ordinal mvLeadingDim = 0;
  if (nonnull(mvSpmdMv)) {
    mvSpmdMv->getLocalData(outArg(mvData), outArg(mvLeadingDim));
  }

  return rcpWithEmbeddedObj(
    new Epetra_MultiVector(
      ::View, map, const_cast<double*>(mvData.getRawPtr()), mvLeadingDim, mv.domain()->dim()
      ),
    mvData
    );
}
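
// Usage sketch (these reference overloads embed the extracted ArrayRCP in
// the returned RCP, so the view stays valid for the Epetra_MultiVector's
// lifetime; `map` and `X` are hypothetical):
//
//   RCP<const Epetra_MultiVector> X_view =
//     Thyra::get_Epetra_MultiVector(*map, *X);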