Domi
Multi-dimensional, distributed data structures
Domi_MDVector.hpp
1 // @HEADER
2 // ***********************************************************************
3 //
4 // Domi: Multi-dimensional Distributed Linear Algebra Services
5 // Copyright (2014) Sandia Corporation
6 //
7 // Under the terms of Contract DE-AC04-94AL85000 with Sandia
8 // Corporation, the U.S. Government retains certain rights in this
9 // software.
10 //
11 // Redistribution and use in source and binary forms, with or without
12 // modification, are permitted provided that the following conditions are
13 // met:
14 //
15 // 1. Redistributions of source code must retain the above copyright
16 // notice, this list of conditions and the following disclaimer.
17 //
18 // 2. Redistributions in binary form must reproduce the above copyright
19 // notice, this list of conditions and the following disclaimer in the
20 // documentation and/or other materials provided with the distribution.
21 //
22 // 3. Neither the name of the Corporation nor the names of the
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
25 //
26 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
27 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
28 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
29 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
30 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
31 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
32 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
33 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
34 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
35 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
36 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37 //
38 // Questions? Contact William F. Spotz (wfspotz@sandia.gov)
39 //
40 // ***********************************************************************
41 // @HEADER
42 
43 #ifndef DOMI_MDVECTOR_HPP
44 #define DOMI_MDVECTOR_HPP
45 
46 // #define DOMI_MDVECTOR_VERBOSE
47 
48 // Standard includes
49 #include <ctime>
50 
51 // Domi includes
52 #include "Domi_ConfigDefs.hpp"
53 #include "Domi_DefaultNode.hpp"
54 #include "Domi_MDMap.hpp"
55 #include "Domi_MDArrayRCP.hpp"
56 
57 // Teuchos includes
58 #include "Teuchos_DataAccess.hpp"
59 #include "Teuchos_Describable.hpp"
60 #include "Teuchos_ScalarTraitsDecl.hpp"
61 #include "Teuchos_Comm.hpp"
62 #include "Teuchos_CommHelpers.hpp"
63 
64 #ifdef HAVE_EPETRA
65 #include "Epetra_IntVector.h"
66 #include "Epetra_Vector.h"
67 #endif
68 
69 #ifdef HAVE_TPETRA
70 #include "Tpetra_Vector.hpp"
71 #endif
72 
73 
74 #ifdef OPEN_MPI
75 // I provide dummy definitions for MPI structs so that
76 // typeid(MPI_Datatype) and typeid(MPI_Request) will compile.
77 // Defining empty structs like this should be fine since only the guts
78 // of OpenMPI will ever see the real definitions. This is a silly game
79 // we are playing with the C++ type system here but it should work
80 // just fine.
81 struct ompi_datatype_t {};
82 //struct ompi_request_t {};
83 #endif
84 
85 #include <iostream>
86 using std::cout;
87 using std::endl;
88 
89 
90 namespace Domi
91 {
92 
175 template< class Scalar,
176  class Node = DefaultNode::DefaultNodeType >
177 class MDVector : public Teuchos::Describable
178 {
179 public:
180 
183 
191  MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
192  bool zeroOut = true);
193 
215  MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
216  const dim_type leadingDim,
217  const dim_type trailingDim = 0,
218  bool zeroOut = true);
219 
227  MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
228  const MDArrayView< Scalar > & source);
229 
237  MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
238  const MDArrayRCP< Scalar > & source);
239 
252  MDVector(const MDVector< Scalar, Node > & source,
253  Teuchos::DataAccess access = Teuchos::View);
254 
268  MDVector(const Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
269  Teuchos::ParameterList & plist);
270 
283  MDVector(const Teuchos::RCP< const MDComm > mdComm,
284  Teuchos::ParameterList & plist);
285 
295  MDVector(const MDVector< Scalar, Node > & parent,
296  int axis,
297  dim_type index);
298 
314  MDVector(const MDVector< Scalar, Node > & parent,
315  int axis,
316  const Slice & slice,
317  int bndryPad = 0);
318 
332  MDVector(const MDVector< Scalar, Node > & parent,
333  const Teuchos::ArrayView< Slice > & slices,
334  const Teuchos::ArrayView< int > & bndryPad =
335  Teuchos::ArrayView< int >());
336 
342  MDVector< Scalar, Node > & operator=(const MDVector< Scalar, Node > & source);
343 
346  virtual ~MDVector();
347 
349 
352 
355  inline const Teuchos::RCP< const MDMap< Node > >
356  getMDMap() const;
357 
364  inline bool onSubcommunicator() const;
365 
372  inline Teuchos::RCP< const Teuchos::Comm< int > > getTeuchosComm() const;
373 
380  inline int numDims() const;
381 
391  inline int getCommDim(int axis) const;
392 
402  inline bool isPeriodic(int axis) const;
403 
413  inline int getCommIndex(int axis) const;
414 
431  inline int getLowerNeighbor(int axis) const;
432 
450  inline int getUpperNeighbor(int axis) const;
451 
460  inline dim_type getGlobalDim(int axis, bool withBndryPad=false) const;
461 
470  inline Slice getGlobalBounds(int axis, bool withBndryPadding=false) const;
471 
486  inline Slice getGlobalRankBounds(int axis, bool withBndryPad=false) const;
487 
496  inline dim_type getLocalDim(int axis, bool withCommPad=false) const;
497 
505  inline Teuchos::ArrayView< const Slice > getLocalBounds() const;
506 
520  inline Slice getLocalBounds(int axis, bool withPad=false) const;
521 
539  inline Slice getLocalInteriorBounds(int axis) const;
540 
551  inline bool hasPadding() const;
552 
561  inline int getLowerPadSize(int axis) const;
562 
571  inline int getUpperPadSize(int axis) const;
572 
582  inline int getCommPadSize(int axis) const;
583 
590  inline int getLowerBndryPad(int axis) const;
591 
598  inline int getUpperBndryPad(int axis) const;
599 
609  inline int getBndryPadSize(int axis) const;
610 
618  void setLowerPad(int axis, const Scalar value);
619 
627  void setUpperPad(int axis, const Scalar value);
628 
631  inline bool isReplicatedBoundary(int axis) const;
632 
635  inline Layout getLayout() const;
636 
646  inline bool isContiguous() const;
647 
649 
652 
653 #ifdef HAVE_EPETRA
654 
670  Teuchos::RCP< Epetra_IntVector > getEpetraIntVectorView() const;
671 
687  Teuchos::RCP< Epetra_Vector > getEpetraVectorView() const;
688 
709  Teuchos::RCP< Epetra_MultiVector > getEpetraMultiVectorView() const;
710 
720  Teuchos::RCP< Epetra_IntVector > getEpetraIntVectorCopy() const;
721 
731  Teuchos::RCP< Epetra_Vector > getEpetraVectorCopy() const;
732 
749  Teuchos::RCP< Epetra_MultiVector > getEpetraMultiVectorCopy() const;
750 
751 #endif
752 
753 #ifdef HAVE_TPETRA
754 
765  template< class LocalOrdinal,
766  class GlobalOrdinal = LocalOrdinal,
767  class Node2 = Node>
768  Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal, Node2 > >
769  getTpetraVectorView() const;
770 
788  template < class LocalOrdinal,
789  class GlobalOrdinal = LocalOrdinal,
790  class Node2 = Node >
791  Teuchos::RCP< Tpetra::MultiVector< Scalar,
792  LocalOrdinal,
793  GlobalOrdinal,
794  Node2 > >
795  getTpetraMultiVectorView() const;
796 
802  template< class LocalOrdinal,
803  class GlobalOrdinal = LocalOrdinal,
804  class Node2 = Node >
805  Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal, Node2 > >
806  getTpetraVectorCopy() const;
807 
820  template < class LocalOrdinal,
821  class GlobalOrdinal = LocalOrdinal,
822  class Node2 = Node >
823  Teuchos::RCP< Tpetra::MultiVector< Scalar,
824  LocalOrdinal,
825  GlobalOrdinal,
826  Node2 > >
827  getTpetraMultiVectorCopy() const;
828 
829 #endif
830 
832 
835 
841  MDArrayView< Scalar > getDataNonConst(bool includePadding = true);
842 
848  MDArrayView< const Scalar > getData(bool includePadding = true) const;
849 
856  MDArrayView< Scalar > getLowerPadDataNonConst(int axis);
863  MDArrayView< const Scalar > getLowerPadData(int axis) const;
870  MDArrayView< Scalar > getUpperPadDataNonConst(int axis);
877  MDArrayView< const Scalar > getUpperPadData(int axis) const;
879 
882 
887  Scalar
888  dot(const MDVector< Scalar, Node > & a) const;
889 
892  typename Teuchos::ScalarTraits< Scalar >::magnitudeType norm1() const;
893 
896  typename Teuchos::ScalarTraits< Scalar >::magnitudeType norm2() const;
897 
900  typename Teuchos::ScalarTraits< Scalar >::magnitudeType normInf() const;
901 
906  typename Teuchos::ScalarTraits< Scalar >::magnitudeType
907  normWeighted(const MDVector< Scalar, Node > & weights) const;
908 
911  Scalar meanValue() const;
912 
914 
917 
920  virtual std::string description() const;
921 
929  virtual void
930  describe(Teuchos::FancyOStream &out,
931  const Teuchos::EVerbosityLevel verbLevel =
932  Teuchos::Describable::verbLevel_default) const;
933 
935 
938 
946  void putScalar(const Scalar & value,
947  bool includePadding = true);
948 
951  void randomize();
952 
954 
957 
958  // /** \brief Sum values of a locally replicated multivector across all
959  // * processes.
960  // */
961  // void reduce();
962 
967  void updateCommPad();
968 
969  /* \brief Update the data in the communication padding along the
970  * specified axis
971  *
972  * \param axis [in] the axis along which communication will be
973  * performed
974  */
975  void updateCommPad(int axis);
976 
985  void startUpdateCommPad(int axis);
986 
995  void endUpdateCommPad(int axis);
996 
998 
1001 
1013  MDVector< Scalar, Node > operator[](dim_type index) const;
1014 
1032  MDVector< Scalar, Node > operator[](Slice slice) const;
1033 
1035 
1038 
1046  void writeBinary(const std::string & filename,
1047  bool includeBndryPad = false) const;
1048 
1056  void readBinary(const std::string & filename,
1057  bool includeBndryPad = false);
1058 
1060 
1061 private:
1062 
1063  // The Teuchos communicator. Note that this is always a reference
1064  // to the communicator of the _mdMap, and is stored only for
1065  // convenience
1066  Teuchos::RCP< const Teuchos::Comm< int > > _teuchosComm;
1067 
1068  // The MDMap that describes the domain decomposition of this
1069  // MDVector
1070  Teuchos::RCP< const MDMap< Node > > _mdMap;
1071 
1072  // The MDArrayRCP that stores the data of this MDVector
1073  MDArrayRCP< Scalar > _mdArrayRcp;
1074 
1075  // The MDArrayView of the (possibly whole) sub-view into this
1076  // MDVector's MDArrayRCP
1077  MDArrayView< Scalar > _mdArrayView;
1078 
1079  // The operator[](int) and operator[](Slice) methods are applied to
1080  // a specific axis, namely this internally stored and updated next
1081  // axis
1082  int _nextAxis;
1083 
1085  // *** Communication Support *** //
1087 
1088 #ifdef HAVE_MPI
1089  // An array of MPI_Request objects for supporting non-blocking sends
1090  // and receives
1091  Teuchos::Array< MPI_Request > _requests;
1092 #endif
1093 
1094  // Define a struct for storing all the information needed for a
1095  // single message: a pointer to the buffer, a reference to the
1096  // message's MPI data type, the rank of the communication partner,
1097  // and the axis along which we are communicating
1098  struct MessageInfo
1099  {
1100  // Pointer to first element of data buffer
1101  void *buffer;
1102 #ifdef HAVE_MPI
1103  // MPI data type (strided vector)
1104  Teuchos::RCP< MPI_Datatype > datatype;
1105 #else
1106  // Teuchos ArrayView for periodic domains
1107  MDArrayView< Scalar > dataview;
1108 #endif
1109  // Processor rank for communication partner
1110  int proc;
1111  // Communication is along this axis
1112  int axis;
1113  };
1114 
1115  // An array of MessageInfo objects representing all active send
1116  // buffers. The outer array represents the axis and the inner
1117  // 2-Tuple represents the lower and upper boundaries.
1118  Teuchos::Array< Teuchos::Tuple< MessageInfo, 2 > > _sendMessages;
1119 
1120  // An array of MessageInfo objects representing all active receive
1121  // buffers. The outer array represents the axis and the inner
1122  // 2-Tuple represents the lower and upper boundaries.
1123  Teuchos::Array< Teuchos::Tuple< MessageInfo, 2 > > _recvMessages;
1124 
1125  // A private method to initialize the _sendMessages and
1126  // _recvMessages arrays.
1127  void initializeMessages();
1128 
1130  // *** Input/Output Support *** //
1132 
1133  // Define a struct for storing all of the information needed to
1134  // write or read the MDVector to a file: arrays that store the file
1135  // shape, the local buffer shape, the local data shape, the file
1136  // starting coordinates, and the data starting coordinates.
1137  struct FileInfo
1138  {
1139  Teuchos::Array< dim_type > fileShape;
1140  Teuchos::Array< dim_type > bufferShape;
1141  Teuchos::Array< dim_type > dataShape;
1142  Teuchos::Array< dim_type > fileStart;
1143  Teuchos::Array< dim_type > dataStart;
1144 #ifdef HAVE_MPI
1145  Teuchos::RCP< MPI_Datatype > filetype;
1146  Teuchos::RCP< MPI_Datatype > datatype;
1147 #endif
1148  };
1149 
1150  // FileInfo struct for an input or output file that does not store
1151  // boundary padding. This is mutable because it does not get set
1152  // until the first time the MDVector is read or written to a file,
1153  // and the writeBinary() method should logically be const.
1154  mutable Teuchos::RCP< FileInfo > _fileInfo;
1155 
1156  // FileInfo struct for an input or output file that does store
1157  // boundary padding. This is mutable because it does not get set
1158  // until the first time the MDVector is read or written to a file,
1159  // and the writeBinary() method should logically be const.
1160  mutable Teuchos::RCP< FileInfo > _fileInfoWithBndry;
1161 
1162  // Compute either the _fileInfo or _fileInfoWithBndry data members.
1163  // This private method gets called by the writeBinary() and
1164  // readBinary() methods, and sets the requested fileInfo object,
1165  // unless it has already been set. This is const so that it can be
1166  // called by writeBinary(), but its whole purpose is to change
1167  // mutable data members.
1168  Teuchos::RCP< FileInfo > & computeFileInfo(bool includeBndryPad) const;
1169 
1170 };
1171 
1173 // *** Implementations *** //
1175 
1177 
1178 template< class Scalar,
1179  class Node >
1181 MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
1182  bool zeroOut) :
1183  _teuchosComm(mdMap->getTeuchosComm()),
1184  _mdMap(mdMap),
1185  _mdArrayRcp(),
1186  _mdArrayView(),
1187  _nextAxis(0),
1188 #ifdef HAVE_MPI
1189  _requests(),
1190 #endif
1191  _sendMessages(),
1192  _recvMessages()
1193 {
1194  setObjectLabel("Domi::MDVector");
1195 
1196  // Obtain the array of dimensions
1197  int numDims = _mdMap->numDims();
1198  Teuchos::Array< dim_type > dims(numDims);
1199  for (int axis = 0; axis < numDims; ++axis)
1200  dims[axis] = _mdMap->getLocalDim(axis,true);
1201 
1202  // Resize the MDArrayRCP and set the MDArrayView
1203  _mdArrayRcp.resize(dims);
1204  _mdArrayView = _mdArrayRcp();
1205 }
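// Editorial usage sketch (not part of the original header): constructing an
// MDVector from an existing MDMap via the constructor above.  The
// ParameterList key "dimensions" used to build the MDMap is an assumption
// about the MDMap interface and may differ in your Domi version; the
// Teuchos calls themselves are standard.
//
//   #include "Teuchos_DefaultComm.hpp"
//
//   Teuchos::RCP< const Teuchos::Comm< int > > comm =
//     Teuchos::DefaultComm< int >::getComm();
//   Teuchos::ParameterList plist;
//   plist.set("dimensions", Teuchos::Array< Domi::dim_type >(2, 100)); // hypothetical key
//   Teuchos::RCP< const Domi::MDMap<> > mdMap =
//     Teuchos::rcp(new Domi::MDMap<>(comm, plist));
//   Domi::MDVector< double > x(mdMap);   // zeroOut defaults to true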
1206 
1208 
1209 template< class Scalar,
1210  class Node >
1212 MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
1213  const dim_type leadingDim,
1214  const dim_type trailingDim,
1215  bool zeroOut) :
1216  _teuchosComm(mdMap->getTeuchosComm()),
1217  _mdMap(mdMap->getAugmentedMDMap(leadingDim, trailingDim)),
1218  _mdArrayRcp(),
1219  _mdArrayView(),
1220  _nextAxis(0),
1221 #ifdef HAVE_MPI
1222  _requests(),
1223 #endif
1224  _sendMessages(),
1225  _recvMessages()
1226 {
1227  setObjectLabel("Domi::MDVector");
1228 
1229  // Obtain the array of dimensions
1230  int numDims = _mdMap->numDims();
1231  Teuchos::Array< dim_type > dims(numDims);
1232  for (int axis = 0; axis < numDims; ++axis)
1233  dims[axis] = _mdMap->getLocalDim(axis,true);
1234 
1235  // Resize the MDArrayRCP and set the MDArrayView
1236  _mdArrayRcp.resize(dims);
1237  _mdArrayView = _mdArrayRcp();
1238 }
1239 
1241 
1242 template< class Scalar,
1243  class Node >
1245 MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
1246  const MDArrayView< Scalar > & source) :
1247  _mdMap(mdMap),
1248  _mdArrayRcp(source),
1249  _mdArrayView(_mdArrayRcp()),
1250  _nextAxis(0),
1251 #ifdef HAVE_MPI
1252  _requests(),
1253 #endif
1254  _sendMessages(),
1255  _recvMessages()
1256 {
1257  setObjectLabel("Domi::MDVector");
1258  int numDims = _mdMap->numDims();
1259  TEUCHOS_TEST_FOR_EXCEPTION(
1260  numDims != _mdArrayRcp.numDims(),
1262  "MDMap and source array do not have the same number of dimensions");
1263 
1264  for (int axis = 0; axis < numDims; ++axis)
1265  {
1266  TEUCHOS_TEST_FOR_EXCEPTION(
1267  _mdMap->getLocalDim(axis) != _mdArrayRcp.dimension(axis),
1269  "Axis " << axis << ": MDMap dimension = " << _mdMap->getLocalDim(axis)
1270  << ", MDArray dimension = " << _mdArrayRcp.dimension(axis));
1271  }
1272 }
1273 
1275 
1276 template< class Scalar,
1277  class Node >
1279 MDVector(const Teuchos::RCP< const MDMap< Node > > & mdMap,
1280  const MDArrayRCP< Scalar > & mdArrayRcp) :
1281  _mdMap(mdMap),
1282  _mdArrayRcp(mdArrayRcp),
1283  _mdArrayView(_mdArrayRcp()),
1284  _nextAxis(0),
1285 #ifdef HAVE_MPI
1286  _requests(),
1287 #endif
1288  _sendMessages(),
1289  _recvMessages()
1290 {
1291 #ifdef DOMI_MDVECTOR_VERBOSE
1292  cout << "_mdArrayRcp = " << _mdArrayRcp << endl;
1293  cout << "_mdArrayView.getRawPtr() = " << _mdArrayView.getRawPtr()
1294  << " (constructor)" << endl;
1295  cout << "_mdArrayView = " << _mdArrayView << endl;
1296 #endif
1297  setObjectLabel("Domi::MDVector");
1298  int numDims = _mdMap->numDims();
1299  TEUCHOS_TEST_FOR_EXCEPTION(
1300  numDims != _mdArrayRcp.numDims(),
1302  "MDMap and source array do not have the same number of dimensions");
1303 
1304  for (int axis = 0; axis < numDims; ++axis)
1305  {
1306  TEUCHOS_TEST_FOR_EXCEPTION(
1307  _mdMap->getLocalDim(axis) != _mdArrayRcp.dimension(axis),
1309  "Axis " << axis << ": MDMap dimension = " << _mdMap->getLocalDim(axis)
1310  << ", MDArray dimension = " << _mdArrayRcp.dimension(axis));
1311  }
1312 }
1313 
1315 
1316 template< class Scalar,
1317  class Node >
1318 MDVector< Scalar, Node >::
1319 MDVector(const MDVector< Scalar, Node > & source,
1320  Teuchos::DataAccess access) :
1321  _teuchosComm(source.getMDMap()->getTeuchosComm()),
1322  _mdMap(source.getMDMap()),
1323  _mdArrayRcp(source._mdArrayRcp),
1324  _mdArrayView(source._mdArrayView),
1325  _nextAxis(0),
1326 #ifdef HAVE_MPI
1327  _requests(),
1328 #endif
1329  _sendMessages(),
1330  _recvMessages()
1331 {
1332  setObjectLabel("Domi::MDVector");
1333 
1334  if (access == Teuchos::Copy)
1335  {
1336 #ifdef DOMI_MDVECTOR_VERBOSE
1337  cout << "Inside MDVector copy constructor with copy access" << endl;
1338 #endif
1339  // Obtain the array of dimensions
1340  int numDims = _mdMap->numDims();
1341  Teuchos::Array< dim_type > dims(numDims);
1342  for (int axis = 0; axis < numDims; ++axis)
1343  dims[axis] = _mdMap->getLocalDim(axis,true);
1344 
1345  // Reset the MDArrayRCP and set the MDArrayView
1346  _mdArrayRcp = MDArrayRCP< Scalar >(dims, 0, source.getLayout());
1347  _mdArrayView = _mdArrayRcp();
1348 
1349  // Copy the source data to the new MDVector
1350  typedef typename MDArrayView< Scalar >::iterator iterator;
1351  typedef typename MDArrayView< Scalar >::const_iterator const_iterator;
1352  const_iterator src = source.getData().begin();
1353  for (iterator trg = _mdArrayView.begin(); trg != _mdArrayView.end(); ++trg)
1354  {
1355  *trg = *src;
1356  ++src;
1357  }
1358  }
1359 #ifdef DOMI_MDVECTOR_VERBOSE
1360  else
1361  {
1362  cout << "Inside MDVector copy constructor with view access"
1363  << endl;
1364  }
1365 #endif
1366 }
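// Editorial note (not part of the original header): as the constructor above
// shows, the default Teuchos::View access produces a shallow copy that
// shares the source's MDArrayRCP buffer, while Teuchos::Copy allocates new
// storage and copies element by element.  A minimal sketch, assuming an
// existing MDVector x:
//
//   Domi::MDVector< double > xView(x);                 // shares x's data
//   Domi::MDVector< double > xCopy(x, Teuchos::Copy);  // independent storage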
1367 
1369 
1370 template< class Scalar,
1371  class Node >
1373 MDVector(const Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
1374  Teuchos::ParameterList & plist) :
1375  _teuchosComm(teuchosComm),
1376  _mdMap(),
1377  _mdArrayRcp(),
1378  _mdArrayView(),
1379  _nextAxis(0),
1380 #ifdef HAVE_MPI
1381  _requests(),
1382 #endif
1383  _sendMessages(),
1384  _recvMessages()
1385 {
1386  setObjectLabel("Domi::MDVector");
1387 
1388  // Compute the MDComm and MDMap
1389  MDMap< Node > * myMdMap = new MDMap< Node >(teuchosComm, plist);
1390  dim_type leadingDim = plist.get("leading dimension" , 0);
1391  dim_type trailingDim = plist.get("trailing dimension", 0);
1392  if (leadingDim + trailingDim > 0)
1393  {
1394  _mdMap = myMdMap->getAugmentedMDMap(leadingDim, trailingDim);
1395  delete myMdMap;
1396  }
1397  else
1398  _mdMap = Teuchos::rcp(myMdMap);
1399 
1400  // Obtain the array of dimensions
1401  int numDims = _mdMap->numDims();
1402  Teuchos::Array< dim_type > dims(numDims);
1403  for (int axis = 0; axis < numDims; ++axis)
1404  dims[axis] = _mdMap->getLocalDim(axis,true);
1405 
1406  // Resize the MDArrayRCP and set the MDArrayView
1407  _mdArrayRcp.resize(dims);
1408  _mdArrayView = _mdArrayRcp();
1409 }
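// Editorial usage sketch (not part of the original header): the keys
// "leading dimension" and "trailing dimension" read above add undistributed
// axes to the MDMap.  The "dimensions" key is again an assumption about the
// MDMap ParameterList.
//
//   Teuchos::ParameterList plist;
//   plist.set("dimensions", Teuchos::Array< Domi::dim_type >(2, 64)); // hypothetical key
//   plist.set("leading dimension", 3);   // e.g. 3 unknowns per grid point
//   Domi::MDVector< double > v(comm, plist);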
1410 
1412 
1413 template< class Scalar,
1414  class Node >
1416 MDVector(const Teuchos::RCP< const MDComm > mdComm,
1417  Teuchos::ParameterList & plist) :
1418  _teuchosComm(mdComm->getTeuchosComm()),
1419  _mdMap(Teuchos::rcp(new MDMap< Node >(mdComm, plist))),
1420  _mdArrayRcp(),
1421  _mdArrayView(),
1422  _nextAxis(0),
1423 #ifdef HAVE_MPI
1424  _requests(),
1425 #endif
1426  _sendMessages(),
1427  _recvMessages()
1428 {
1429  setObjectLabel("Domi::MDVector");
1430 
1431  // Compute the MDMap
1432  MDMap< Node > * myMdMap = new MDMap< Node >(mdComm, plist);
1433  dim_type leadingDim = plist.get("leading dimension" , 0);
1434  dim_type trailingDim = plist.get("trailing dimension", 0);
1435  if (leadingDim + trailingDim > 0)
1436  {
1437  _mdMap = myMdMap->getAugmentedMDMap(leadingDim, trailingDim);
1438  delete myMdMap;
1439  }
1440  else
1441  _mdMap = Teuchos::rcp(myMdMap);
1442 
1443  // Obtain the array of dimensions
1444  int numDims = _mdMap->numDims();
1445  Teuchos::Array< dim_type > dims(numDims);
1446  for (int axis = 0; axis < numDims; ++axis)
1447  dims[axis] = _mdMap->getLocalDim(axis,true);
1448 
1449  // Resize the MDArrayRCP and set the MDArrayView
1450  _mdArrayRcp.resize(dims);
1451  _mdArrayView = _mdArrayRcp();
1452 }
1453 
1455 
1456 template< class Scalar,
1457  class Node >
1458 MDVector< Scalar, Node >::
1459 MDVector(const MDVector< Scalar, Node > & parent,
1460  int axis,
1461  dim_type globalIndex) :
1462  _teuchosComm(parent._teuchosComm),
1463  _mdMap(),
1464  _mdArrayRcp(parent._mdArrayRcp),
1465  _mdArrayView(parent._mdArrayView),
1466  _nextAxis(0),
1467 #ifdef HAVE_MPI
1468  _requests(),
1469 #endif
1470  _sendMessages(),
1471  _recvMessages()
1472 {
1473  setObjectLabel("Domi::MDVector");
1474 
1475  // Obtain the parent MDMap
1476  Teuchos::RCP< const MDMap< Node > > parentMdMap = parent.getMDMap();
1477 
1478  // Obtain the new, sliced MDMap
1479  _mdMap = Teuchos::rcp(new MDMap< Node >(*parentMdMap,
1480  axis,
1481  globalIndex));
1482 
1483  // Check that we are on the new sub-communicator
1484  if (_mdMap->onSubcommunicator())
1485  {
1486  // Convert the index from global to local. We start by
1487  // determining the starting global index on this processor along
1488  // the given axis, ignoring the boundary padding. We then
1489  // subtract the lower padding, whether it is communication padding
1490  // or boundary padding.
1491  dim_type origin = parentMdMap->getGlobalRankBounds(axis,false).start() -
1492  parentMdMap->getLowerPadSize(axis);
1493 
1494  // The local index along the given axis is the global axis minus
1495  // the starting index. Since we are on the subcommunicator, this
1496  // should be valid.
1497  dim_type localIndex = globalIndex - origin;
1498 
1499  // Obtain the new MDArrayView using the local index
1500  MDArrayView< Scalar > newView(_mdArrayView, axis, localIndex);
1501  _mdArrayView = newView;
1502  }
1503  else
1504  {
1505  // We are not on the sub-communicator, so clear out the data
1506  // buffer and view
1507  _mdArrayRcp.clear();
1508  _mdArrayView = MDArrayView< Scalar >();
1509  }
1510 }
1511 
1513 
1514 template< class Scalar,
1515  class Node >
1516 MDVector< Scalar, Node >::
1517 MDVector(const MDVector< Scalar, Node > & parent,
1518  int axis,
1519  const Slice & slice,
1520  int bndryPad) :
1521  _teuchosComm(),
1522  _mdMap(),
1523  _mdArrayRcp(parent._mdArrayRcp),
1524  _mdArrayView(parent._mdArrayView),
1525  _nextAxis(0),
1526 #ifdef HAVE_MPI
1527  _requests(),
1528 #endif
1529  _sendMessages(),
1530  _recvMessages()
1531 {
1532 #ifdef DOMI_MDVECTOR_VERBOSE
1533  cout << "slice axis " << axis << endl;
1534  cout << " _mdArrayRcp @ " << _mdArrayRcp.getRawPtr() << endl;
1535  cout << " _mdArrayView @ " << _mdArrayView.getRawPtr() << endl;
1536 #endif
1537 
1538  setObjectLabel("Domi::MDVector");
1539 
1540  // Obtain the parent MDMap
1541  Teuchos::RCP< const MDMap< Node > > parentMdMap = parent.getMDMap();
1542 
1543  // Obtain the new, sliced MDMap
1544  _mdMap = Teuchos::rcp(new MDMap< Node >(*parentMdMap,
1545  axis,
1546  slice,
1547  bndryPad));
1548  _teuchosComm = _mdMap->getTeuchosComm();
1549 
1550  // Check that we are on the new sub-communicator
1551  if (_mdMap->onSubcommunicator())
1552  {
1553  // Get the concrete bounds
1554  Slice bounds = slice.bounds(parentMdMap->getGlobalDim(axis,true));
1555 
1556  // Convert the given Slice start index from global to local. We
1557  // start by determining the starting global index on this
1558  // processor along the given axis, ignoring the boundary padding.
1559  // We then subtract the lower padding, whether it is communication
1560  // padding or boundary padding.
1561  dim_type origin = parentMdMap->getGlobalRankBounds(axis,false).start() -
1562  parentMdMap->getLowerPadSize(axis);
1563 
1564  // Determine the starting index of our local slice. This will be
1565  // the start of the given slice minus the starting global index on
1566  // this processor minus the given boundary pad. If this is less
1567  // than zero, then the start is on a lower processor, so set the
1568  // local start to zero.
1569  dim_type start = std::max(0, bounds.start() - origin - bndryPad);
1570 
1571  // Now get the stop index of the local slice. This will be the
1572  // stop of the given slice minus the starting global index on this
1573  // processor plus the given boundary pad. If this is larger than
1574  // the local dimension, then set the local stop to the local
1575  // dimension.
1576  dim_type stop = std::min(bounds.stop() - origin + bndryPad,
1577  parentMdMap->getLocalDim(axis,true));
1578 
1579  // Obtain the new MDArrayView using the local slice
1580  MDArrayView< Scalar > newView(_mdArrayView, axis, Slice(start,stop));
1581  _mdArrayView = newView;
1582  }
1583  else
1584  {
1585  // We are not on the sub-communicator, so clear out the data
1586  // buffer and view
1587  _mdArrayRcp.clear();
1588  _mdArrayView = MDArrayView< Scalar >();
1589  }
1590 #ifdef DOMI_MDVECTOR_VERBOSE
1591  cout << " _mdArrayView @ " << _mdArrayView.getRawPtr() << endl;
1592  cout << " offset = " << int(_mdArrayView.getRawPtr() -
1593  _mdArrayRcp.getRawPtr()) << endl;
1594 #endif
1595 }
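// Editorial usage sketch (not part of the original header): taking
// sub-vectors with the slicing constructors above, assuming a parent
// MDVector x with at least two dimensions.
//
//   using Domi::Slice;
//   // Select the single global index 0 along axis 0:
//   Domi::MDVector< double > plane(x, 0, 0);
//   // Select global indices 10 <= i < 20 along axis 1 (no boundary pad):
//   Domi::MDVector< double > strip(x, 1, Slice(10, 20));
//
// Processes whose subdomain falls outside the slice end up off the new
// sub-communicator with empty data buffers, as handled above.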
1596 
1598 
1599 template< class Scalar,
1600  class Node >
1601 MDVector< Scalar, Node >::
1602 MDVector(const MDVector< Scalar, Node > & parent,
1603  const Teuchos::ArrayView< Slice > & slices,
1604  const Teuchos::ArrayView< int > & bndryPad)
1605 {
1606  setObjectLabel("Domi::MDVector");
1607 
1608  // Temporarily store the number of dimensions
1609  int numDims = parent.numDims();
1610 
1611  // Sanity check on dimensions
1612  TEUCHOS_TEST_FOR_EXCEPTION(
1613  (slices.size() != numDims),
1615  "number of slices = " << slices.size() << " != parent MDVector number of "
1616  "dimensions = " << numDims);
1617 
1618  // Apply the single-Slice constructor to each axis in succession
1619  MDVector< Scalar, Node > tempMDVector1(parent);
1620  for (int axis = 0; axis < numDims; ++axis)
1621  {
1622  int bndryPadding = (axis < bndryPad.size()) ? bndryPad[axis] : 0;
1623  MDVector< Scalar, Node > tempMDVector2(tempMDVector1,
1624  axis,
1625  slices[axis],
1626  bndryPadding);
1627  tempMDVector1 = tempMDVector2;
1628  }
1629  *this = tempMDVector1;
1630 }
1631 
1633 
1634 template< class Scalar,
1635  class Node >
1636 MDVector< Scalar, Node > &
1637 MDVector< Scalar, Node >::
1638 operator=(const MDVector< Scalar, Node > & source)
1639 {
1640  _teuchosComm = source._teuchosComm;
1641  _mdMap = source._mdMap;
1642  _mdArrayRcp = source._mdArrayRcp;
1643  _mdArrayView = source._mdArrayView;
1644  _nextAxis = source._nextAxis;
1645 #ifdef HAVE_MPI
1646  _requests = source._requests;
1647 #endif
1648  _sendMessages = source._sendMessages;
1649  _recvMessages = source._recvMessages;
1650  return *this;
1651 }
1652 
1654 
1655 template< class Scalar,
1656  class Node >
1657 MDVector< Scalar, Node >::~MDVector()
1658 {
1659 }
1660 
1662 
1663 template< class Scalar,
1664  class Node >
1665 const Teuchos::RCP< const MDMap< Node > >
1667 getMDMap() const
1668 {
1669  return _mdMap;
1670 }
1671 
1673 
1674 template< class Scalar,
1675  class Node >
1676 bool
1677 MDVector< Scalar, Node >::
1678 onSubcommunicator() const
1679 {
1680  return _mdMap->onSubcommunicator();
1681 }
1682 
1684 
1685 template< class Scalar,
1686  class Node >
1687 Teuchos::RCP< const Teuchos::Comm< int > >
1688 MDVector< Scalar, Node >::
1689 getTeuchosComm() const
1690 {
1691  return _mdMap->getTeuchosComm();
1692 }
1693 
1695 
1696 template< class Scalar,
1697  class Node >
1698 int
1700 numDims() const
1701 {
1702  return _mdMap->numDims();
1703 }
1704 
1706 
1707 template< class Scalar,
1708  class Node >
1709 int
1711 getCommDim(int axis) const
1712 {
1713  return _mdMap->getCommDim(axis);
1714 }
1715 
1717 
1718 template< class Scalar,
1719  class Node >
1720 bool
1722 isPeriodic(int axis) const
1723 {
1724  return _mdMap->isPeriodic(axis);
1725 }
1726 
1728 
1729 template< class Scalar,
1730  class Node >
1731 int
1733 getCommIndex(int axis) const
1734 {
1735  return _mdMap->getCommIndex(axis);
1736 }
1737 
1739 
1740 template< class Scalar,
1741  class Node >
1742 int
1744 getLowerNeighbor(int axis) const
1745 {
1746  return _mdMap->getLowerNeighbor(axis);
1747 }
1748 
1750 
1751 template< class Scalar,
1752  class Node >
1753 int
1755 getUpperNeighbor(int axis) const
1756 {
1757  return _mdMap->getUpperNeighbor(axis);
1758 }
1759 
1761 
1762 template< class Scalar,
1763  class Node >
1764 dim_type
1766 getGlobalDim(int axis, bool withBndryPad) const
1767 {
1768  return _mdMap->getGlobalDim(axis, withBndryPad);
1769 }
1770 
1772 
1773 template< class Scalar,
1774  class Node >
1775 Slice
1777 getGlobalBounds(int axis, bool withBndryPad) const
1778 {
1779  return _mdMap->getGlobalBounds(axis, withBndryPad);
1780 }
1781 
1783 
1784 template< class Scalar,
1785  class Node >
1786 Slice
1788 getGlobalRankBounds(int axis, bool withBndryPad) const
1789 {
1790  return _mdMap->getGlobalRankBounds(axis, withBndryPad);
1791 }
1792 
1794 
1795 template< class Scalar,
1796  class Node >
1797 dim_type
1799 getLocalDim(int axis, bool withCommPad) const
1800 {
1801  return _mdMap->getLocalDim(axis, withCommPad);
1802 }
1803 
1805 
1806 template< class Scalar,
1807  class Node >
1808 Teuchos::ArrayView< const Slice >
1809 MDVector< Scalar, Node >::
1810 getLocalBounds() const
1811 {
1812  return _mdMap->getLocalBounds();
1813 }
1814 
1816 
1817 template< class Scalar,
1818  class Node >
1819 Slice
1821 getLocalBounds(int axis, bool withCommPad) const
1822 {
1823  return _mdMap->getLocalBounds(axis, withCommPad);
1824 }
1825 
1827 
1828 template< class Scalar,
1829  class Node >
1830 Slice
1832 getLocalInteriorBounds(int axis) const
1833 {
1834  return _mdMap->getLocalInteriorBounds(axis);
1835 }
1836 
1838 
1839 template< class Scalar,
1840  class Node >
1841 bool
1843 hasPadding() const
1844 {
1845  return _mdMap->hasPadding();
1846 }
1847 
1849 
1850 template< class Scalar,
1851  class Node >
1852 int
1854 getLowerPadSize(int axis) const
1855 {
1856  return _mdMap->getLowerPadSize(axis);
1857 }
1858 
1860 
1861 template< class Scalar,
1862  class Node >
1863 int
1865 getUpperPadSize(int axis) const
1866 {
1867  return _mdMap->getUpperPadSize(axis);
1868 }
1869 
1871 
1872 template< class Scalar,
1873  class Node >
1874 int
1876 getCommPadSize(int axis) const
1877 {
1878  return _mdMap->getCommPadSize(axis);
1879 }
1880 
1882 
1883 template< class Scalar,
1884  class Node >
1885 int
1887 getLowerBndryPad(int axis) const
1888 {
1889  return _mdMap->getLowerBndryPad(axis);
1890 }
1891 
1893 
1894 template< class Scalar,
1895  class Node >
1896 int
1898 getUpperBndryPad(int axis) const
1899 {
1900  return _mdMap->getUpperBndryPad(axis);
1901 }
1902 
1904 
1905 template< class Scalar,
1906  class Node >
1907 int
1909 getBndryPadSize(int axis) const
1910 {
1911  return _mdMap->getBndryPadSize(axis);
1912 }
1913 
1915 
1916 template< class Scalar,
1917  class Node >
1918 void
1920 setLowerPad(int axis,
1921  const Scalar value)
1922 {
1923  MDArrayView< Scalar > lowerPad = getLowerPadDataNonConst(axis);
1924  lowerPad.assign(value);
1925 }
1926 
1928 
1929 template< class Scalar,
1930  class Node >
1931 void
1933 setUpperPad(int axis,
1934  const Scalar value)
1935 {
1936  MDArrayView< Scalar > upperPad = getUpperPadDataNonConst(axis);
1937  upperPad.assign(value);
1938 }
1939 
1941 
1942 template< class Scalar,
1943  class Node >
1944 bool
1946 isReplicatedBoundary(int axis) const
1947 {
1948  return _mdMap->isReplicatedBoundary(axis);
1949 }
1950 
1952 
1953 template< class Scalar,
1954  class Node >
1955 Layout
1957 getLayout() const
1958 {
1959  return _mdMap->getLayout();
1960 }
1961 
1963 
1964 template< class Scalar,
1965  class Node >
1966 bool
1967 MDVector< Scalar, Node >::
1968 isContiguous() const
1969 {
1970  return _mdMap->isContiguous();
1971 }
1972 
1974 
1975 #ifdef HAVE_EPETRA
1976 
1977 // The getEpetraIntVectorView() method only makes sense for Scalar =
1978 // int, because Epetra_IntVectors store data buffers of type int only.
1979 // There is no convenient way to specialize just one (or a small
1980 // handful of) methods; instead, you have to specialize the whole
1981 // class. So we allow getEpetraIntVectorView() to compile for any
1982 // Scalar, but we will throw an exception if Scalar is not int.
1983 
1984 template< class Scalar,
1985  class Node >
1986 Teuchos::RCP< Epetra_IntVector >
1988 getEpetraIntVectorView() const
1989 {
1990  // Throw an exception if Scalar is not int
1991  TEUCHOS_TEST_FOR_EXCEPTION(
1992  typeid(Scalar) != typeid(int),
1993  TypeError,
1994  "MDVector is of scalar type '" << typeid(Scalar).name() << "', but "
1995  "Epetra_IntVector requires scalar type 'int'");
1996 
1997  // Throw an exception if this MDVector's MDMap is not contiguous
1998  TEUCHOS_TEST_FOR_EXCEPTION(
1999  !isContiguous(),
2000  MDMapNoncontiguousError,
2001  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2002  "a slice of a parent MDVector.");
2003 
2004  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2005  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2006 
2007  // Return the result. We are changing a Scalar* to an int*, which
2008  // looks sketchy, but we have thrown an exception if Scalar is not
2009  // int, so everything is kosher.
2010  void * buffer = (void*) _mdArrayView.getRawPtr();
2011  return Teuchos::rcp(new Epetra_IntVector(View,
2012  *epetraMap,
2013  (int*) buffer));
2014 }
2015 
2017 
2018 // The getEpetraVectorView() method only makes sense for Scalar =
2019 // double, because Epetra_Vectors store data buffers of type double
2020 // only. There is no convenient way to specialize just one (or a
2021 // small handful of) methods; instead, you have to specialize the
2022 // whole class. So we allow getEpetraVectorView() to compile for any
2023 // Scalar, but we will throw an exception if Scalar is not double.
2024 
2025 template< class Scalar,
2026  class Node >
2027 Teuchos::RCP< Epetra_Vector >
2028 MDVector< Scalar, Node >::
2029 getEpetraVectorView() const
2030 {
2031  // Throw an exception if Scalar is not double
2032  TEUCHOS_TEST_FOR_EXCEPTION(
2033  typeid(Scalar) != typeid(double),
2034  TypeError,
2035  "MDVector is of scalar type '" << typeid(Scalar).name() << "', but "
2036  "Epetra_Vector requires scalar type 'double'");
2037 
2038  // Throw an exception if this MDVector's MDMap is not contiguous
2039  TEUCHOS_TEST_FOR_EXCEPTION(
2040  !isContiguous(),
2041  MDMapNoncontiguousError,
2042  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2043  "a slice of a parent MDVector.");
2044 
2045  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2046  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2047 
2048  // Return the result. We are changing a Scalar* to a double*, which
2049  // looks sketchy, but we have thrown an exception if Scalar is not
2050  // double, so everything is kosher.
2051  void * buffer = (void*) _mdArrayView.getRawPtr();
2052  return Teuchos::rcp(new Epetra_Vector(View,
2053  *epetraMap,
2054  (double*) buffer));
2055 }
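// Editorial usage sketch (not part of the original header): obtaining an
// Epetra view of a contiguous, double-valued MDVector, assuming Trilinos
// was configured with Epetra support (HAVE_EPETRA).  The call throws if
// Scalar is not double or if this MDVector is a non-contiguous slice.
//
//   Domi::MDVector< double > x(mdMap);
//   Teuchos::RCP< Epetra_Vector > ex = x.getEpetraVectorView();
//   ex->PutScalar(1.0);   // writes through to x's data buffer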
2056 
2058 
2059 // The getEpetraMultiVectorView() method only makes sense for Scalar =
2060 // double, because Epetra_MultiVectors store data buffers of type
2061 // double only. There is no convenient way to specialize just one (or
2062 // a small handful of) methods; instead, you have to specialize the
2063 // whole class. So we allow getEpetraMultiVectorView() to compile for any
2064 // Scalar, but we will throw an exception if Scalar is not double.
2065 
2066 template< class Scalar,
2067  class Node >
2068 Teuchos::RCP< Epetra_MultiVector >
2069 MDVector< Scalar, Node >::
2070 getEpetraMultiVectorView() const
2071 {
2072  // Throw an exception if Scalar is not double
2073  TEUCHOS_TEST_FOR_EXCEPTION(
2074  typeid(Scalar) != typeid(double),
2075  TypeError,
2076  "MDVector is of scalar type '" << typeid(Scalar).name() << "', but "
2077  "Epetra_Vector requires scalar type 'double'");
2078 
2079  // Determine the vector axis and related info
2080  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2081  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2082  int commDim = getCommDim(vectorAxis);
2083  int numVectors = getGlobalDim(vectorAxis);
2084 
2085  // Obtain the appropriate MDMap and check that it is contiguous
2086  Teuchos::RCP< const MDMap< Node > > newMdMap;
2087  if (padding == 0 && commDim == 1)
2088  newMdMap = Teuchos::rcp(new MDMap< Node >(*_mdMap, vectorAxis, 0));
2089  else
2090  {
2091  newMdMap = _mdMap;
2092  numVectors = 1;
2093  }
2094  TEUCHOS_TEST_FOR_EXCEPTION(
2095  ! newMdMap->isContiguous(),
2096  MDMapNoncontiguousError,
2097  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2098  "a slice of a parent MDVector.");
2099 
2100  // Get the stride between vectors. The MDMap strides are private,
2101  // but we know the new MDMap is contiguous, so we can calculate it
2102  // as the product of the new MDMap dimensions (including padding)
2103  size_type stride = newMdMap->getLocalDim(0,true);
2104  for (int axis = 1; axis < newMdMap->numDims(); ++axis)
2105  stride *= newMdMap->getLocalDim(axis,true);
2106  TEUCHOS_TEST_FOR_EXCEPTION(
2107  stride*numVectors > Teuchos::OrdinalTraits<int>::max(),
2108  MapOrdinalError,
2109  "Buffer size " << stride*numVectors << " is too large for Epetra int "
2110  "ordinals");
2111  int lda = (int)stride;
2112 
2113  // Get the Epetra_Map that is equivalent to newMdMap
2114  Teuchos::RCP< const Epetra_Map > epetraMap = newMdMap->getEpetraMap(true);
2115 
2116  // Return the result. We are changing a Scalar* to a double*, which
2117  // looks sketchy, but we have thrown an exception if Scalar is not
2118  // double, so everything is kosher.
2119  void * buffer = (void*) _mdArrayView.getRawPtr();
2120  return Teuchos::rcp(new Epetra_MultiVector(View,
2121  *epetraMap,
2122  (double*) buffer,
2123  lda,
2124  numVectors));
2125 }
2126 
2128 
2129 template< class Scalar,
2130  class Node >
2131 Teuchos::RCP< Epetra_IntVector >
2132 MDVector< Scalar, Node >::
2133 getEpetraIntVectorCopy() const
2134 {
2135  typedef typename MDArrayView< const Scalar >::iterator iterator;
2136 
2137  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2138  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2139 
2140  // Construct the result
2141  Teuchos::RCP< Epetra_IntVector > result =
2142  Teuchos::rcp(new Epetra_IntVector(*epetraMap));
2143 
2144  // Copy the MDVector data buffer to the Epetra_IntVector, even if the
2145  // MDVector is non-contiguous
2146  int ii = 0;
2147  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2148  result->operator[](ii++) = (int) *it;
2149 
2150  // Return the result
2151  return result;
2152 }
2153 
2155 
2156 template< class Scalar,
2157  class Node >
2158 Teuchos::RCP< Epetra_Vector >
2159 MDVector< Scalar, Node >::
2160 getEpetraVectorCopy() const
2161 {
2162  typedef typename MDArrayView< const Scalar >::iterator iterator;
2163 
2164  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2165  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2166 
2167  // Construct the result
2168  Teuchos::RCP< Epetra_Vector > result =
2169  Teuchos::rcp(new Epetra_Vector(*epetraMap));
2170 
2171  // Copy the MDVector data buffer to the Epetra_Vector, even if the
2172  // MDVector is non-contiguous
2173  int ii = 0;
2174  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2175  result->operator[](ii++) = (double) *it;
2176 
2177  // Return the result
2178  return result;
2179 }
2180 
2182 
2183 template< class Scalar,
2184  class Node >
2185 Teuchos::RCP< Epetra_MultiVector >
2186 MDVector< Scalar, Node >::
2187 getEpetraMultiVectorCopy() const
2188 {
2189  typedef typename MDArrayView< Scalar >::iterator iterator;
2190  typedef typename MDArrayView< const Scalar >::iterator citerator;
2191 
2192  // Determine the vector axis and related info
2193  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2194  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2195  int commDim = getCommDim(vectorAxis);
2196  int numVectors = getGlobalDim(vectorAxis);
2197 
2198  // Obtain the appropriate MDMap
2199  Teuchos::RCP< const MDMap< Node > > newMdMap;
2200  if (padding == 0 && commDim == 1)
2201  newMdMap = Teuchos::rcp(new MDMap< Node >(*_mdMap, vectorAxis, 0));
2202  else
2203  {
2204  newMdMap = _mdMap;
2205  numVectors = 1;
2206  }
2207 
2208  // Get the Epetra_Map that is equivalent to newMdMap
2209  Teuchos::RCP< const Epetra_Map > epetraMap = newMdMap->getEpetraMap(true);
2210 
2211  // Construct the result
2212  Teuchos::RCP< Epetra_MultiVector > result =
2213  Teuchos::rcp(new Epetra_MultiVector(*epetraMap, numVectors));
2214 
2215  // Copy the MDVector data to the Epetra_MultiVector, even if the
2216  // MDVector is non-contiguous
2217  int ii = 0;
2218  if (numVectors == 1)
2219  {
2220  for (citerator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2221  result->operator[](0)[ii++] = (double) *it;
2222  }
2223  else
2224  {
2225  for (int iv = 0; iv < numVectors; ++iv)
2226  {
2227  ii = 0;
2228  MDArrayView< Scalar > subArray(_mdArrayView, vectorAxis, iv);
2229  for (iterator it = subArray.begin(); it != subArray.end(); ++it)
2230  result->operator[](iv)[ii++] = (double) *it;
2231  }
2232  }
2233 
2234  // Return the result
2235  return result;
2236 }
2237 
2238 #endif
2239 
2241 
2242 #ifdef HAVE_TPETRA
2243 
2244 template< class Scalar, class Node >
2245 template< class LocalOrdinal,
2246  class GlobalOrdinal,
2247  class Node2 >
2248 Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal, Node2 > >
2249 MDVector< Scalar, Node >::
2250 getTpetraVectorView() const
2251 {
2252  // Throw an exception if this MDVector's MDMap is not contiguous
2253  TEUCHOS_TEST_FOR_EXCEPTION(
2254  !isContiguous(),
2255  MDMapNoncontiguousError,
2256  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2257  "a slice of a parent MDVector.");
2258 
2259  // Get the Tpetra::Map that is equivalent to this MDVector's MDMap
2260  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2261  GlobalOrdinal,
2262  Node2 > > tpetraMap =
2263  _mdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal, Node2 >(true);
2264 
2265  // Return the result
2266  return Teuchos::rcp(new Tpetra::Vector< Scalar,
2267  LocalOrdinal,
2268  GlobalOrdinal,
2269  Node2 >(tpetraMap,
2270  _mdArrayView.arrayView()));
2271 }
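// Editorial usage sketch (not part of the original header): the Tpetra
// accessors are member templates, so the ordinal types are chosen at the
// call site.  The ordinal types below are illustrative, not required.
//
//   typedef Tpetra::Vector< double, int, long long > vec_type;
//   Domi::MDVector< double > x(mdMap);
//   Teuchos::RCP< vec_type > tx =
//     x.getTpetraVectorView< int, long long >();   // view of x's buffer
//   Teuchos::RCP< vec_type > txCopy =
//     x.getTpetraVectorCopy< int, long long >();   // independent deep copy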
2272 
2274 
2275 template< class Scalar, class Node >
2276 template< class LocalOrdinal,
2277  class GlobalOrdinal,
2278  class Node2 >
2279 Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal, Node2 > >
2280 MDVector< Scalar, Node >::
2281 getTpetraVectorCopy() const
2282 {
2283  typedef typename MDArrayView< const Scalar >::iterator iterator;
2284 
2285  // Get the Tpetra::Map that is equivalent to this MDVector's MDMap
2286  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2287  GlobalOrdinal,
2288  Node2 > > tpetraMap =
2289  _mdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal, Node2 >(true);
2290 
2291  // Construct the result
2292  Teuchos::RCP< Tpetra::Vector< Scalar,
2293  LocalOrdinal,
2294  GlobalOrdinal,
2295  Node2 > > result =
2296  Teuchos::rcp(new Tpetra::Vector< Scalar,
2297  LocalOrdinal,
2298  GlobalOrdinal,
2299  Node2 >(tpetraMap) );
2300 
2301  // Copy the MDVector data to the Tpetra::Vector, even if the
2302  // MDVector is non-contiguous
2303  Teuchos::ArrayRCP< Scalar > tpetraVectorArray = result->getDataNonConst();
2304  int ii = 0;
2305  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2306  tpetraVectorArray[ii++] = *it;
2307 
2308  // Return the result
2309  return result;
2310 }
2311 
2313 
2314 template< class Scalar, class Node >
2315 template < class LocalOrdinal,
2316  class GlobalOrdinal,
2317  class Node2 >
2318 Teuchos::RCP< Tpetra::MultiVector< Scalar,
2319  LocalOrdinal,
2320  GlobalOrdinal,
2321  Node2 > >
2322 MDVector< Scalar, Node >::
2323 getTpetraMultiVectorView() const
2324 {
2325  // Determine the vector axis and related info
2326  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2327  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2328  int commDim = getCommDim(vectorAxis);
2329  size_t numVectors = getGlobalDim(vectorAxis);
2330 
2331  // Obtain the appropriate MDMap and check that it is contiguous
2332  Teuchos::RCP< const MDMap< Node > > newMdMap;
2333  if (padding == 0 && commDim == 1)
2334  newMdMap = Teuchos::rcp(new MDMap< Node >(*_mdMap, vectorAxis, 0));
2335  else
2336  {
2337  newMdMap = _mdMap;
2338  numVectors = 1;
2339  }
2340  TEUCHOS_TEST_FOR_EXCEPTION(
2341  ! newMdMap->isContiguous(),
2342  MDMapNoncontiguousError,
2343  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2344  "a slice of a parent MDVector.");
2345 
2346  // Get the stride between vectors. The MDMap strides are private,
2347  // but we know the new MDMap is contiguous, so we can calculate it
2348  // as the product of the new MDMap dimensions (including padding)
2349  size_type stride = newMdMap->getLocalDim(0,true);
2350  for (int axis = 1; axis < newMdMap->numDims(); ++axis)
2351  stride *= newMdMap->getLocalDim(axis,true);
2352  TEUCHOS_TEST_FOR_EXCEPTION(
2353  stride*numVectors > Teuchos::OrdinalTraits<GlobalOrdinal>::max(),
2354  MapOrdinalError,
2355  "Buffer size " << stride*numVectors << " is too large for Tpetra "
2356  "GlobalOrdinal = " << typeid(GlobalOrdinal).name() );
2357  size_t lda = (size_t)stride;
2358 
2359  // Get the Tpetra::Map that is equivalent to newMdMap
2360  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2361  GlobalOrdinal,
2362  Node2> > tpetraMap =
2363  newMdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal, Node2 >(true);
2364 
2365  // Return the result
2366  return Teuchos::rcp(new Tpetra::MultiVector< Scalar,
2367  LocalOrdinal,
2368  GlobalOrdinal,
2369  Node2 >(tpetraMap,
2370  _mdArrayView.arrayView(),
2371  lda,
2372  numVectors));
2373 }
2374 
2376 
2377 template< class Scalar, class Node >
2378 template < class LocalOrdinal,
2379  class GlobalOrdinal,
2380  class Node2 >
2381 Teuchos::RCP< Tpetra::MultiVector< Scalar,
2382  LocalOrdinal,
2383  GlobalOrdinal,
2384  Node2 > >
2385 MDVector< Scalar, Node >::
2386 getTpetraMultiVectorCopy() const
2387 {
2388  typedef typename MDArrayView< Scalar >::iterator iterator;
2389  typedef typename MDArrayView< const Scalar >::iterator citerator;
2390 
2391  // Determine the vector axis and related info
2392  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2393  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2394  int commDim = getCommDim(vectorAxis);
2395  int numVectors = getGlobalDim(vectorAxis);
2396 
2397  // Obtain the appropriate MDMap
2398  Teuchos::RCP< const MDMap< Node > > newMdMap;
2399  if (padding == 0 && commDim == 1)
2400  newMdMap = Teuchos::rcp(new MDMap< Node >(*_mdMap, vectorAxis, 0));
2401  else
2402  {
2403  newMdMap = _mdMap;
2404  numVectors = 1;
2405  }
2406 
2407  // Get the Tpetra::Map that is equivalent to newMdMap
2408  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2409  GlobalOrdinal,
2410  Node2 > > tpetraMap =
2411  newMdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal, Node2 >(true);
2412 
2413  // Construct the result
2414  Teuchos::RCP< Tpetra::MultiVector< Scalar,
2415  LocalOrdinal,
2416  GlobalOrdinal,
2417  Node2 > > result =
2418  Teuchos::rcp(new Tpetra::MultiVector< Scalar,
2419  LocalOrdinal,
2420  GlobalOrdinal,
2421  Node2 >(tpetraMap, numVectors));
2422 
2423  // Copy the MDVector to the Tpetra::MultiVector, even if the
2424  // MDVector is non-contiguous
2425  int ii = 0;
2426  if (numVectors == 1)
2427  {
2428  Teuchos::ArrayRCP< Scalar > tpetraVectorArray = result->getDataNonConst(0);
2429  for (citerator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2430  tpetraVectorArray[ii++] = *it;
2431  }
2432  else
2433  {
2434  for (int iv = 0; iv < numVectors; ++iv)
2435  {
2436  ii = 0;
2437  Teuchos::ArrayRCP< Scalar > tpetraVectorArray =
2438  result->getDataNonConst(iv);
2439  MDArrayView< Scalar > subArray(_mdArrayView, vectorAxis, iv);
2440  for (iterator it = subArray.begin(); it != subArray.end(); ++it)
2441  tpetraVectorArray[ii++] = *it;
2442  }
2443  }
2444 
2445  // Return the result
2446  return result;
2447 }
2448 
2449 #endif
2450 
2452 
2453 template< class Scalar,
2454  class Node >
2457 getDataNonConst(bool includePadding)
2458 {
2459 #ifdef DOMI_MDVECTOR_VERBOSE
2460  cout << "_mdArrayView.getRawPtr() = " << _mdArrayView.getRawPtr()
2461  << endl;
2462  cout << "_mdArrayView = " << _mdArrayView << endl;
2463 #endif
2464  if (includePadding)
2465  return _mdArrayView;
2466  MDArrayView< Scalar > newArray(_mdArrayView);
2467  for (int axis = 0; axis < numDims(); ++axis)
2468  {
2469  int lo = getLowerPadSize(axis);
2470  int hi = getLocalDim(axis,true) - getUpperPadSize(axis);
2471  newArray = MDArrayView< Scalar >(newArray, axis, Slice(lo,hi));
2472  }
2473  return newArray;
2474 }
2475 
2477 
2478 template< class Scalar,
2479  class Node >
2482 getData(bool includePadding) const
2483 {
2484  if (includePadding)
2485  return _mdArrayView.getConst();
2486  MDArrayView< Scalar > newArray(_mdArrayView);
2487  for (int axis = 0; axis < numDims(); ++axis)
2488  {
2489  int lo = getLowerPadSize(axis);
2490  int hi = getLocalDim(axis,true) - getUpperPadSize(axis);
2491  newArray = MDArrayView< Scalar >(newArray, axis, Slice(lo,hi));
2492  }
2493  return newArray.getConst();
2494 }
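// Editorial usage sketch (not part of the original header): reading and
// writing local data through the accessors above.  Indexing an MDArrayView
// with operator() (one index per dimension) is assumed to follow the usual
// Domi MDArray convention.
//
//   Domi::MDArrayView< double > a = x.getDataNonConst(false); // no padding
//   for (Domi::dim_type j = 0; j < x.getLocalDim(1); ++j)
//     for (Domi::dim_type i = 0; i < x.getLocalDim(0); ++i)
//       a(i,j) = 1.0;
//   Domi::MDArrayView< const double > b = x.getData();  // padded, read-only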
2495 
2497 
2498 template< class Scalar,
2499  class Node >
2500 MDArrayView< Scalar >
2501 MDVector< Scalar, Node >::
2502 getLowerPadDataNonConst(int axis)
2503 {
2504  MDArrayView< Scalar > newArrayView(_mdArrayView,
2505  axis,
2506  Slice(getLowerPadSize(axis)));
2507  return newArrayView;
2508 }
2509 
2511 
2512 template< class Scalar,
2513  class Node >
2516 getLowerPadData(int axis) const
2517 {
2518  MDArrayView< const Scalar > newArrayView(_mdArrayView.getConst(),
2519  axis,
2520  Slice(getLowerPadSize(axis)));
2521  return newArrayView;
2522 }
2523 
2525 
2526 template< class Scalar,
2527  class Node >
2528 MDArrayView< Scalar >
2529 MDVector< Scalar, Node >::
2530 getUpperPadDataNonConst(int axis)
2531 {
2532  dim_type n = getLocalDim(axis,true);
2533  int pad = getUpperPadSize(axis);
2534  Slice slice;
2535  if (pad) slice = Slice(n-pad,n);
2536  else slice = Slice(n-1,n-1);
2537  MDArrayView< Scalar > newArrayView(_mdArrayView,
2538  axis,
2539  slice);
2540  return newArrayView;
2541 }
2542 
2544 
2545 template< class Scalar,
2546  class Node >
2549 getUpperPadData(int axis) const
2550 {
2551  dim_type n = getLocalDim(axis,true);
2552  int pad = getUpperPadSize(axis);
2553  Slice slice;
2554  if (pad) slice = Slice(n-pad,n);
2555  else slice = Slice(n-1,n-1);
2556  MDArrayView< const Scalar > newArrayView(_mdArrayView.getConst(),
2557  axis,
2558  slice);
2559  return newArrayView;
2560 }
2561 
2563 
2564 template< class Scalar,
2565  class Node >
2566 Scalar
2567 MDVector< Scalar, Node >::
2568 dot(const MDVector< Scalar, Node > & a) const
2569 {
2570  typedef typename MDArrayView< const Scalar >::iterator iterator;
2571 
2572  TEUCHOS_TEST_FOR_EXCEPTION(
2573  ! _mdMap->isCompatible(*(a._mdMap)),
2574  MDMapError,
2575  "MDMap of calling MDVector and argument 'a' are incompatible");
2576 
2577  MDArrayView< const Scalar > aView = a.getData();
2578  Scalar local_dot = 0;
2579  iterator a_it = aView.begin();
2580  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end();
2581  ++it, ++a_it)
2582  local_dot += *it * *a_it;
2583  Scalar global_dot = 0;
2584  Teuchos::reduceAll(*_teuchosComm,
2585  Teuchos::REDUCE_SUM,
2586  1,
2587  &local_dot,
2588  &global_dot);
2589  return global_dot;
2590 }
2591 
2593 
2594 template< class Scalar,
2595  class Node >
2596 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2598 norm1() const
2599 {
2600  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2601  typedef typename MDArrayView< const Scalar >::iterator iterator;
2602 
2603  mag local_norm1 = 0;
2604  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2605  local_norm1 += std::abs(*it);
2606  mag global_norm1 = 0;
2607  Teuchos::reduceAll(*_teuchosComm,
2608  Teuchos::REDUCE_SUM,
2609  1,
2610  &local_norm1,
2611  &global_norm1);
2612  return global_norm1;
2613 }
2614 
2616 
2617 template< class Scalar,
2618  class Node >
2619 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2621 norm2() const
2622 {
2623  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2624  mag norm2 = dot(*this);
2625  return Teuchos::ScalarTraits<mag>::squareroot(norm2);
2626 }
2627 
2629 
2630 template< class Scalar,
2631  class Node >
2632 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2634 normInf() const
2635 {
2636  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2637  typedef typename MDArrayView< const Scalar >::iterator iterator;
2638 
2639  mag local_normInf = 0;
2640  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2641  local_normInf = std::max(local_normInf, std::abs(*it));
2642  mag global_normInf = 0;
2643  Teuchos::reduceAll(*_teuchosComm,
2644  Teuchos::REDUCE_MAX,
2645  1,
2646  &local_normInf,
2647  &global_normInf);
2648  return global_normInf;
2649 }
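// Editorial note (not part of the original header): dot(), norm1(), norm2()
// and normInf() above loop over the elements of the local view and then
// reduce the partial results across the Teuchos communicator.  Typical use:
//
//   double d  = x.dot(y);      // requires compatible MDMaps
//   double n1 = x.norm1();
//   double n2 = x.norm2();     // equals sqrt(x.dot(x))
//   double ni = x.normInf();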
2650 
2652 
2653 template< class Scalar,
2654  class Node >
2655 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2656 MDVector< Scalar, Node >::
2657 normWeighted(const MDVector< Scalar, Node > & weights) const
2658 {
2659  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2660  typedef typename MDArrayView< const Scalar >::iterator iterator;
2661 
2662  TEUCHOS_TEST_FOR_EXCEPTION(
2663  ! _mdMap->isCompatible(*(weights._mdMap)),
2664  MDMapError,
2665  "MDMap of calling MDVector and argument 'weights' are incompatible");
2666 
2667  MDArrayView< const Scalar > wView = weights.getData();
2668  mag local_wNorm = 0;
2669  iterator w_it = wView.begin();
2670  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end();
2671  ++it, ++w_it)
2672  local_wNorm += *it * *it * *w_it;
2673  mag global_wNorm = 0;
2674  Teuchos::reduceAll(*_teuchosComm,
2675  Teuchos::REDUCE_SUM,
2676  1,
2677  &local_wNorm,
2678  &global_wNorm);
2679  Teuchos::Array< dim_type > dimensions(numDims());
2680  for (int i = 0; i < numDims(); ++i)
2681  dimensions[i] = _mdMap->getGlobalDim(i);
2682  size_type n = computeSize(dimensions);
2683  if (n == 0) return 0;
2684  return Teuchos::ScalarTraits<mag>::squareroot(global_wNorm / n);
2685 }
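// Editorial note (not part of the original header): reading off the code
// above, the weighted norm computed here is
//
//   normWeighted(w) = sqrt( (sum_i w_i * x_i^2) / N ),
//
// where the sum runs over the locally stored elements on every process and
// N is the product of the global dimensions (boundary padding excluded).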
2686 
2688 
2689 template< class Scalar,
2690  class Node >
2691 Scalar
2693 meanValue() const
2694 {
2695  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2696  typedef typename MDArrayView< const Scalar >::iterator iterator;
2697 
2698  mag local_sum = 0;
2699  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2700  local_sum += *it;
2701  mag global_sum = 0;
2702  Teuchos::reduceAll(*_teuchosComm,
2703  Teuchos::REDUCE_SUM,
2704  1,
2705  &local_sum,
2706  &global_sum);
2707  Teuchos::Array< dim_type > dimensions(numDims());
2708  for (int i = 0; i < numDims(); ++i)
2709  dimensions[i] = _mdMap->getGlobalDim(i);
2710  size_type n = computeSize(dimensions);
2711  if (n == 0) return 0;
2712  return global_sum / n;
2713 }
2714 
2716 
2717 template< class Scalar,
2718  class Node >
2719 std::string
2720 MDVector< Scalar, Node >::
2721 description() const
2722 {
2723  using Teuchos::TypeNameTraits;
2724 
2725  Teuchos::Array< dim_type > dims(numDims());
2726  for (int axis = 0; axis < numDims(); ++axis)
2727  dims[axis] = getGlobalDim(axis, true);
2728 
2729  std::ostringstream oss;
2730  oss << "\"Domi::MDVector\": {"
2731  << "Template parameters: {"
2732  << "Scalar: " << TypeNameTraits<Scalar>::name()
2733  << ", Node: " << TypeNameTraits< Node >::name()
2734  << "}";
2735  if (this->getObjectLabel() != "")
2736  oss << ", Label: \"" << this->getObjectLabel () << "\", ";
2737  oss << "Global dimensions: " << dims << " }";
2738  return oss.str();
2739 }
2740 
2742 
2743 template< class Scalar,
2744  class Node >
2745 void
2746 MDVector< Scalar, Node >::
2747 describe(Teuchos::FancyOStream &out,
2748  const Teuchos::EVerbosityLevel verbLevel) const
2749 {
2750  using std::setw;
2751  using Teuchos::Comm;
2752  using Teuchos::RCP;
2753  using Teuchos::TypeNameTraits;
2754  using Teuchos::VERB_DEFAULT;
2755  using Teuchos::VERB_NONE;
2756  using Teuchos::VERB_LOW;
2757  using Teuchos::VERB_MEDIUM;
2758  using Teuchos::VERB_HIGH;
2759  using Teuchos::VERB_EXTREME;
2760 
2761  const Teuchos::EVerbosityLevel vl =
2762  (verbLevel == VERB_DEFAULT) ? VERB_LOW : verbLevel;
2763 
2764  const MDMap< Node > & mdMap = *(getMDMap());
2765  Teuchos::RCP< const Teuchos::Comm< int > > comm = mdMap.getTeuchosComm();
2766  const int myImageID = comm->getRank();
2767  const int numImages = comm->getSize();
2768  Teuchos::OSTab tab0(out);
2769 
2770  if (vl != VERB_NONE)
2771  {
2772  if (myImageID == 0)
2773  {
2774  out << "\"Domi::MDVector\":" << endl;
2775  }
2776  Teuchos::OSTab tab1(out);// applies to all processes
2777  if (myImageID == 0)
2778  {
2779  out << "Template parameters:";
2780  {
2781  Teuchos::OSTab tab2(out);
2782  out << "Scalar: " << TypeNameTraits<Scalar>::name() << endl
2783  << "Node: " << TypeNameTraits< Node >::name() << endl;
2784  }
2785  out << endl;
2786  if (this->getObjectLabel() != "")
2787  {
2788  out << "Label: \"" << getObjectLabel() << "\"" << endl;
2789  }
2790  Teuchos::Array< dim_type > globalDims(numDims());
2791  for (int axis = 0; axis < numDims(); ++axis)
2792  globalDims[axis] = getGlobalDim(axis, true);
2793  out << "Global dimensions: " << globalDims << endl;
2794  }
2795  for (int imageCtr = 0; imageCtr < numImages; ++imageCtr)
2796  {
2797  if (myImageID == imageCtr)
2798  {
2799  if (vl != VERB_LOW)
2800  {
2801  // VERB_MEDIUM and higher prints the local dimensions
2802  out << "Process: " << myImageID << endl;
2803  Teuchos::OSTab tab2(out);
2804  Teuchos::Array< dim_type > localDims(numDims());
2805  for (int axis = 0; axis < numDims(); ++axis)
2806  localDims[axis] = getLocalDim(axis, true);
2807  out << "Local dimensions: " << localDims << endl;
2808  }
2809  std::flush(out); // give output time to complete
2810  }
2811  comm->barrier(); // give output time to complete
2812  comm->barrier();
2813  comm->barrier();
2814  }
2815  }
2816 }
2817 
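Because MDVector inherits from Teuchos::Describable, the two methods above plug into the standard Teuchos output machinery. A short sketch of how they might be called, with 'v' standing in for an existing vector; describe() is collective, so every process must participate.

#include <iostream>

#include "Teuchos_FancyOStream.hpp"
#include "Teuchos_VerbosityLevel.hpp"
#include "Domi_MDVector.hpp"

// Print a one-line summary, then a rank-by-rank report at medium verbosity.
void printInfo(const Domi::MDVector< double > & v)
{
  std::cout << v.description() << std::endl;

  Teuchos::RCP< Teuchos::FancyOStream > out =
    Teuchos::getFancyOStream(Teuchos::rcpFromRef(std::cout));
  v.describe(*out, Teuchos::VERB_MEDIUM);
}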
2819 
2820 template< class Scalar,
2821  class Node >
2822 void
2823 MDVector< Scalar, Node >::
2824 putScalar(const Scalar & value,
2825  bool includePadding)
2826 {
2827  typedef typename MDArrayView< Scalar >::iterator iterator;
2828 
2829  MDArrayView< Scalar > newArray = getDataNonConst(includePadding);
2830  for (iterator it = newArray.begin(); it != newArray.end(); ++it)
2831  *it = value;
2832 }
2833 
2835 
2836 template< class Scalar,
2837  class Node >
2838 void
2839 MDVector< Scalar, Node >::
2840 randomize()
2841 {
2842  typedef typename MDArrayView< Scalar >::iterator iterator;
2843 
2844  Teuchos::ScalarTraits< Scalar >::seedrandom(time(NULL));
2845  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2846  *it = Teuchos::ScalarTraits< Scalar >::random();
2847 }
2848 
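putScalar() and randomize() are the two bulk initializers. A minimal usage sketch, with 'v' a placeholder for an already-constructed vector; note that randomize() reseeds from time(NULL) on every call, so results are not reproducible across runs.

#include "Domi_MDVector.hpp"

// Zero every element, padding included, then overwrite with pseudorandom values.
void fill(Domi::MDVector< double > & v)
{
  v.putScalar(0.0, true);   // true: also set the communication/boundary padding
  v.randomize();
}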
2850 
2851 template< class Scalar,
2852  class Node >
2853 void
2854 MDVector< Scalar, Node >::
2855 updateCommPad()
2856 {
2857  for (int axis = 0; axis < numDims(); ++axis)
2858  {
2859  updateCommPad(axis);
2860  }
2861 }
2862 
2864 
2865 template< class Scalar,
2866  class Node >
2867 void
2868 MDVector< Scalar, Node >::
2869 updateCommPad(int axis)
2870 {
2871  startUpdateCommPad(axis);
2872  endUpdateCommPad(axis);
2873 }
2874 
2876 
2877 template< class Scalar,
2878  class Node >
2879 void
2880 MDVector< Scalar, Node >::
2881 startUpdateCommPad(int axis)
2882 {
2883  // #define DOMI_MDVECTOR_OUTPUT_UPDATECOMMPAD
2884 
2885  // Initialize the _sendMessages and _recvMessages members on the
2886  // first call to startUpdateCommPad(int).
2887  if (_sendMessages.empty()) initializeMessages();
2888 
2889 #ifdef HAVE_MPI
2890  int rank = _teuchosComm->getRank();
2891  int numProc = _teuchosComm->getSize();
2892  int tag;
2893  // Since HAVE_MPI is defined, we know that _teuchosComm points to a
2894  // const Teuchos::MpiComm< int >. We downcast, extract and
2895  // dereference so that we can get access to the MPI_Comm used to
2896  // construct it.
2897  Teuchos::RCP< const Teuchos::MpiComm< int > > mpiComm =
2898  Teuchos::rcp_dynamic_cast< const Teuchos::MpiComm< int > >(_teuchosComm);
2899  const Teuchos::OpaqueWrapper< MPI_Comm > & communicator =
2900  *(mpiComm->getRawMpiComm());
2901 
2902  // Post the non-blocking sends
2903  MPI_Request request;
2904  for (int boundary = 0; boundary < 2; ++boundary)
2905  {
2906  MessageInfo message = _sendMessages[axis][boundary];
2907  if (message.proc >= 0)
2908  {
2909  tag = 2 * (rank * numProc + message.proc) + boundary;
2910 
2911 #ifdef DOMI_MDVECTOR_OUTPUT_UPDATECOMMPAD
2912  cout << rank << ": post send for axis " << axis << ", boundary "
2913  << boundary << ", buffer = " << message.buffer << ", proc = "
2914  << message.proc << ", tag = " << tag << endl;
2915 #endif
2916 
2917  if (MPI_Isend(message.buffer,
2918  1,
2919  *(message.datatype),
2920  message.proc,
2921  tag,
2922  communicator(),
2923  &request))
2924  throw std::runtime_error("Domi::MDVector: Error in MPI_Isend");
2925  _requests.push_back(request);
2926  }
2927  }
2928 
2929  // Post the non-blocking receives
2930  for (int boundary = 0; boundary < 2; ++boundary)
2931  {
2932  MessageInfo message = _recvMessages[axis][boundary];
2933  if (message.proc >= 0)
2934  {
2935  tag = 2 * (message.proc * numProc + rank) + (1-boundary);
2936 
2937 #ifdef DOMI_MDVECTOR_OUTPUT_UPDATECOMMPAD
2938  cout << rank << ": post recv for axis " << axis << ", boundary "
2939  << boundary << ", buffer = " << message.buffer << ", proc = "
2940  << message.proc << ", tag = " << tag << endl;
2941 #endif
2942 
2943  if (MPI_Irecv(message.buffer,
2944  1,
2945  *(message.datatype),
2946  message.proc,
2947  tag,
2948  communicator(),
2949  &request))
2950  throw std::runtime_error("Domi::MDVector: Error in MPI_Irecv");
2951  _requests.push_back(request);
2952  }
2953  }
2954 #else
2955  // HAVE_MPI is not defined, so we are on a single processor.
2956  // However, if the axis is periodic, we need to copy the appropriate
2957  // data to the communication padding.
2958  if (isPeriodic(axis))
2959  {
2960  for (int sendBndry = 0; sendBndry < 2; ++sendBndry)
2961  {
2962  int recvBndry = 1 - sendBndry;
2963  // Get the receive and send data views
2964  MDArrayView< Scalar > recvView = _recvMessages[axis][recvBndry].dataview;
2965  MDArrayView< Scalar > sendView = _sendMessages[axis][sendBndry].dataview;
2966 
2967  // Initialize the receive and send data view iterators
2968  typename MDArrayView< Scalar >::iterator it_recv = recvView.begin();
2969  typename MDArrayView< Scalar >::iterator it_send = sendView.begin();
2970 
2971  // Copy the send buffer to the receive buffer
2972  for ( ; it_recv != recvView.end(); ++it_recv, ++it_send)
2973  *it_recv = *it_send;
2974  }
2975  }
2976 #endif
2977 }
2978 
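The tag arithmetic above is what lets a send and its matching receive find each other: the sender tags with 2*(rank*numProc + proc) + boundary, and the receiver, whose data for that exchange arrives on the opposite boundary, computes the identical value as 2*(proc*numProc + rank) + (1 - boundary). A tiny stand-alone check of that pairing (the function name is illustrative only, not part of this header):

#include <cassert>

// For every ordered (sender, receiver) pair, the tag posted with the send
// from boundary b equals the tag posted with the receive into boundary 1-b.
void checkTagPairing(int numProc)
{
  for (int sender = 0; sender < numProc; ++sender)
    for (int receiver = 0; receiver < numProc; ++receiver)
      for (int sendBoundary = 0; sendBoundary < 2; ++sendBoundary)
      {
        int recvBoundary = 1 - sendBoundary;
        int sendTag = 2 * (sender * numProc + receiver) + sendBoundary;
        int recvTag = 2 * (sender * numProc + receiver) + (1 - recvBoundary);
        assert(sendTag == recvTag);
      }
}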
2980 
2981 template< class Scalar,
2982  class Node >
2983 void
2984 MDVector< Scalar, Node >::
2985 endUpdateCommPad(int axis)
2986 {
2987 #ifdef HAVE_MPI
2988  if (_requests.size() > 0)
2989  {
2990  Teuchos::Array< MPI_Status > status(_requests.size());
2991  if (MPI_Waitall(_requests.size(),
2992  &(_requests[0]),
2993  &(status[0]) ) )
2994  throw std::runtime_error("Domi::MDVector: Error in MPI_Waitall");
2995  _requests.clear();
2996  }
2997 #endif
2998 }
2999 
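The start/end split above exists so that halo exchange can be overlapped with computation: post the messages, work on the interior cells that do not touch the communication pad, then wait. A sketch of that pattern, where 'v' is an existing vector and computeInterior()/computeBoundary() are hypothetical user routines:

#include "Domi_MDVector.hpp"

void computeInterior(Domi::MDVector< double > & v);   // hypothetical user code
void computeBoundary(Domi::MDVector< double > & v);   // hypothetical user code

// Overlap the communication-pad update with interior work.
void haloExchange(Domi::MDVector< double > & v)
{
  for (int axis = 0; axis < v.numDims(); ++axis)
    v.startUpdateCommPad(axis);        // post non-blocking sends/receives

  computeInterior(v);                  // must not read the communication pad

  for (int axis = 0; axis < v.numDims(); ++axis)
    v.endUpdateCommPad(axis);          // wait for the messages to complete

  computeBoundary(v);                  // pad values are now current
}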
3001 
3002 template< class Scalar,
3003  class Node >
3004 MDVector< Scalar, Node >
3005 MDVector< Scalar, Node >::
3006 operator[](dim_type index) const
3007 {
3008  MDVector< Scalar, Node > result(*this,
3009  _nextAxis,
3010  index);
3011  int newAxis = _nextAxis + 1;
3012  if (newAxis >= result.numDims()) newAxis = 0;
3013  result._nextAxis = newAxis;
3014  return result;
3015 }
3016 
3018 
3019 template< class Scalar,
3020  class Node >
3021 MDVector< Scalar, Node >
3022 MDVector< Scalar, Node >::
3023 operator[](Slice slice) const
3024 {
3025  MDVector< Scalar, Node > result(*this,
3026  _nextAxis,
3027  slice );
3028  int newAxis = _nextAxis + 1;
3029  if (newAxis >= result.numDims()) newAxis = 0;
3030  result._nextAxis = newAxis;
3031  return result;
3032 }
3033 
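Chained applications of operator[] consume one axis at a time, starting from axis 0: an integer index pins that axis to a single global position, while a Slice keeps a range. A small sketch on a vector assumed to be two-dimensional; 'v', 'rows' and 'plane' are placeholder names.

#include "Domi_MDVector.hpp"
#include "Domi_Slice.hpp"

// Take views of a 2-D MDVector without copying the underlying data.
void takeViews(const Domi::MDVector< double > & v)
{
  // Global indices 2 <= i < 5 along axis 0, everything along axis 1
  Domi::MDVector< double > rows = v[Domi::Slice(2, 5)];

  // Everything along axis 0, then global index 3 along axis 1
  Domi::MDVector< double > plane =
    v[Domi::Slice(0, v.getGlobalDim(0))][3];
}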
3035 
3036 template< class Scalar,
3037  class Node >
3038 void
3039 MDVector< Scalar, Node >::
3040 initializeMessages()
3041 {
3042  // #define DOMI_MDVECTOR_MESSAGE_INITIALIZE
3043 
3044  int ndims = numDims();
3045  Teuchos::Array<int> sizes(ndims);
3046  Teuchos::Array<int> subsizes(ndims);
3047  Teuchos::Array<int> starts(ndims);
3048  MessageInfo messageInfo;
3049 
3050  _sendMessages.resize(ndims);
3051  _recvMessages.resize(ndims);
3052 
3053 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3054  std::stringstream msg;
3055  int rank = _teuchosComm->getRank();
3056 #endif
3057 
3058 #ifdef HAVE_MPI
3059  int order = mpiOrder(getLayout());
3060  MPI_Datatype datatype = mpiType< Scalar >();
3061 #endif
3062 
3063  // Loop over the axes we are going to send messages along
3064  for (int msgAxis = 0; msgAxis < ndims; ++msgAxis)
3065  {
3066  // Set the initial values for sizes, subsizes and starts
3067  for (int axis = 0; axis < ndims; ++axis)
3068  {
3069  sizes[axis] = _mdArrayRcp.dimension(axis);
3070  subsizes[axis] = _mdArrayView.dimension(axis);
3071  starts[axis] = 0;
3072  }
3073 
3075  // Set the lower receive and send messages
3077 
3078  int proc = getLowerNeighbor(msgAxis);
3079 
3080 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3081  msg << endl << "P" << rank << ": axis " << msgAxis << ", lower neighbor = "
3082  << proc << endl;
3083 #endif
3084 
3085  // Fix the subsize along this message axis
3086  subsizes[msgAxis] = getLowerPadSize(msgAxis);
3087  // Fix the proc if the subsize is zero
3088  if (subsizes[msgAxis] == 0) proc = -1;
3089  // Assign the non-MPI members of messageInfo
3090  messageInfo.buffer = (void*) getData().getRawPtr();
3091  messageInfo.proc = proc;
3092  messageInfo.axis = msgAxis;
3093 
3094  if (proc >= 0)
3095  {
3097  // Lower receive message
3099 
3100 #ifdef HAVE_MPI
3101 
3102 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3103  msg << endl << "P" << rank << ": axis " << msgAxis
3104  << ", Lower receive message:" << endl << " ndims = " << ndims
3105  << endl << " sizes = " << sizes << endl << " subsizes = "
3106  << subsizes << endl << " starts = " << starts << endl
3107  << " order = " << order << endl;
3108 #endif
3109  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3110  MPI_Type_create_subarray(ndims,
3111  &sizes[0],
3112  &subsizes[0],
3113  &starts[0],
3114  order,
3115  datatype,
3116  commPad.get());
3117  MPI_Type_commit(commPad.get());
3118  messageInfo.datatype = commPad;
3119 #else
3120  messageInfo.dataview = _mdArrayView;
3121  for (int axis = 0; axis < numDims(); ++axis)
3122  {
3123  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3124  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3125  axis,
3126  slice);
3127  }
3128 #endif
3129 
3130  }
3131  _recvMessages[msgAxis][0] = messageInfo;
3132 
3134  // Lower send message
3136 
3137  starts[msgAxis] = getLowerPadSize(msgAxis);
3138  if (isReplicatedBoundary(msgAxis) && getCommIndex(msgAxis) == 0)
3139  starts[msgAxis] += 1;
3140  if (proc >= 0)
3141  {
3142 
3143 #ifdef HAVE_MPI
3144 
3145 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3146  msg << endl << "P" << rank << ": axis " << msgAxis
3147  << ", Lower send message:" << endl << " ndims = " << ndims
3148  << endl << " sizes = " << sizes << endl << " subsizes = "
3149  << subsizes << endl << " starts = " << starts << endl
3150  << " order = " << order << endl;
3151 #endif
3152  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3153  MPI_Type_create_subarray(ndims,
3154  &sizes[0],
3155  &subsizes[0],
3156  &starts[0],
3157  order,
3158  datatype,
3159  commPad.get());
3160  MPI_Type_commit(commPad.get());
3161  messageInfo.datatype = commPad;
3162 #else
3163  messageInfo.dataview = _mdArrayView;
3164  for (int axis = 0; axis < numDims(); ++axis)
3165  {
3166  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3167  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3168  axis,
3169  slice);
3170  }
3171 #endif
3172 
3173  }
3174  _sendMessages[msgAxis][0] = messageInfo;
3175 
3177  // Set the upper receive and send messages
3179 
3180  proc = getUpperNeighbor(msgAxis);
3181 
3182 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3183  msg << endl << "P" << rank << ": axis " << msgAxis << ", upper neighbor = "
3184  << proc << endl;
3185 #endif
3186 
3187  subsizes[msgAxis] = getUpperPadSize(msgAxis);
3188  starts[msgAxis] = _mdArrayView.dimension(msgAxis) -
3189  getUpperPadSize(msgAxis);
3190  if (subsizes[msgAxis] == 0) proc = -1;
3191  messageInfo.proc = proc;
3192  if (proc >= 0)
3193  {
3195  // Upper receive message
3197 
3198 #ifdef HAVE_MPI
3199 
3200 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3201  msg << endl << "P" << rank << ": axis " << msgAxis
3202  << ", Upper receive message:" << endl << " ndims = " << ndims
3203  << endl << " sizes = " << sizes << endl << " subsizes = "
3204  << subsizes << endl << " starts = " << starts << endl
3205  << " order = " << order << endl;
3206 #endif
3207  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3208  MPI_Type_create_subarray(ndims,
3209  &sizes[0],
3210  &subsizes[0],
3211  &starts[0],
3212  order,
3213  datatype,
3214  commPad.get());
3215  MPI_Type_commit(commPad.get());
3216  messageInfo.datatype = commPad;
3217 #else
3218  messageInfo.dataview = _mdArrayView;
3219  for (int axis = 0; axis < numDims(); ++axis)
3220  {
3221  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3222  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3223  axis,
3224  slice);
3225  }
3226 #endif
3227  }
3228  _recvMessages[msgAxis][1] = messageInfo;
3229 
3231  // Upper send message
3233 
3234  starts[msgAxis] -= getUpperPadSize(msgAxis);
3235  if (isReplicatedBoundary(msgAxis) &&
3236  getCommIndex(msgAxis) == getCommDim(msgAxis)-1)
3237  starts[msgAxis] -= 1;
3238  if (proc >= 0)
3239  {
3240 
3241 #ifdef HAVE_MPI
3242 
3243 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3244  msg << endl << "P" << rank << ": axis " << msgAxis
3245  << ", Upper send message:" << endl << " ndims = " << ndims
3246  << endl << " sizes = " << sizes << endl << " subsizes = "
3247  << subsizes << endl << " starts = " << starts << endl
3248  << " order = " << order << endl;
3249 #endif
3250  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3251  MPI_Type_create_subarray(ndims,
3252  &sizes[0],
3253  &subsizes[0],
3254  &starts[0],
3255  order,
3256  datatype,
3257  commPad.get());
3258  MPI_Type_commit(commPad.get());
3259  messageInfo.datatype = commPad;
3260 #else
3261  messageInfo.dataview = _mdArrayView;
3262  for (int axis = 0; axis < numDims(); ++axis)
3263  {
3264  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3265  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3266  axis,
3267  slice);
3268  }
3269 #endif
3270 
3271  }
3272  _sendMessages[msgAxis][1] = messageInfo;
3273  }
3274 
3275 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3276  for (int proc = 0; proc < _teuchosComm->getSize(); ++proc)
3277  {
3278  if (proc == rank)
3279  {
3280  cout << msg.str();
3281  msg.flush();
3282  }
3283  _teuchosComm->barrier();
3284  }
3285 #endif
3286 
3287 }
3288 
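Rather than packing the pad regions into scratch buffers, the messages above describe them in place with MPI subarray datatypes. A stand-alone sketch of the same construction for a 2-D, C-ordered block of doubles whose lower pad along axis 0 is one layer thick; all of the sizes are made-up placeholders.

#include <mpi.h>

// Build a datatype that picks the one-layer lower pad out of a 10 x 12
// local allocation.  The committed type can be handed directly to
// MPI_Isend/MPI_Irecv with a count of 1, as startUpdateCommPad() does.
MPI_Datatype makeLowerPadType()
{
  int sizes[2]    = {10, 12};   // full local allocation, padding included
  int subsizes[2] = { 1, 12};   // the pad layer being sent or received
  int starts[2]   = { 0,  0};   // the pad sits at the start of axis 0

  MPI_Datatype padType;
  MPI_Type_create_subarray(2, sizes, subsizes, starts,
                           MPI_ORDER_C, MPI_DOUBLE, &padType);
  MPI_Type_commit(&padType);
  return padType;
}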
3290 
3291 template< class Scalar,
3292  class Node >
3293 void
3294 MDVector< Scalar, Node >::
3295 writeBinary(const std::string & filename,
3296  bool includeBndryPad) const
3297 {
3298  FILE * datafile;
3299  // If we are using MPI and overwriting an existing file, and the new
3300  // file is shorter than the old file, it appears that the new file
3301  // will retain the old file's length and trailing data. To prevent
3302  // this behavior, we open and close the file to give it zero length.
3303  int pid = _teuchosComm->getRank();
3304  if (pid == 0)
3305  {
3306  datafile = fopen(filename.c_str(), "w");
3307  fclose(datafile);
3308  }
3309  _teuchosComm->barrier();
3310 
3311  // Get the pointer to this MDVector's MDArray, including all padding
3312  const Scalar * buffer = getData(true).getRawPtr();
3313 
3314  // Compute either _fileInfo or _fileInfoWithBndry, whichever is
3315  // appropriate, and return a reference to that fileInfo object
3316  Teuchos::RCP< FileInfo > & fileInfo = computeFileInfo(includeBndryPad);
3317 
3318  // Parallel output
3319 #ifdef HAVE_MPI
3320 
3321  // Since HAVE_MPI is defined, we know that _teuchosComm points to a
3322  // const Teuchos::MpiComm< int >. We downcast, extract and
3323  // dereference so that we can get access to the MPI_Comm used to
3324  // construct it.
3325  Teuchos::RCP< const Teuchos::MpiComm< int > > mpiComm =
3326  Teuchos::rcp_dynamic_cast< const Teuchos::MpiComm< int > >(_teuchosComm);
3327  const Teuchos::OpaqueWrapper< MPI_Comm > & communicator =
3328  *(mpiComm->getRawMpiComm());
3329 
3330  // Compute the access mode
3331  int access = MPI_MODE_WRONLY | MPI_MODE_CREATE;
3332 
3333  // I copy the filename C string, because the c_str() method returns
3334  // a const char*, and the MPI_File_open() function requires
3335  // (incorrectly) a non-const char*.
3336  char * cstr = new char[filename.size()+1];
3337  std::strcpy(cstr, filename.c_str());
3338 
3339  // Use MPI I/O to write the binary file
3340  MPI_File mpiFile;
3341  MPI_Status status;
3342  char datarep[7] = "native";
3343  MPI_File_open(communicator(), cstr, access, MPI_INFO_NULL, &mpiFile);
3344  MPI_File_set_view(mpiFile, 0, mpiType< Scalar >(),
3345  *(fileInfo->filetype), datarep, MPI_INFO_NULL);
3346  MPI_File_write_all(mpiFile, (void*)buffer, 1, *(fileInfo->datatype),
3347  &status);
3348  MPI_File_close(&mpiFile);
3349 
3350  // Delete the C string
3351  delete [] cstr;
3352 
3353  // Serial output
3354 #else
3355 
3356  // Get the number of dimensions
3357  // int ndims = _mdMap->numDims();
3358 
3359  // Initialize the data file
3360  datafile = fopen(filename.c_str(), "w");
3361 
3362  // Obtain the data to write, including the boundary padding if requested
3363  MDArrayView< const Scalar > mdArrayView = getData(includeBndryPad);
3364 
3365  // Iterate over the data and write it to the data file
3366  typedef typename MDArrayView< Scalar >::const_iterator const_iterator;
3367  for (const_iterator it = mdArrayView.begin(); it != mdArrayView.end(); ++it)
3368  {
3369  fwrite((const void *) &(*it), sizeof(Scalar), 1, datafile);
3370  }
3371 
3372  // Close the data file
3373  fclose(datafile);
3374 
3375 #endif
3376 
3377 }
3378 
3380 
3381 template< class Scalar,
3382  class Node >
3383 void
3384 MDVector< Scalar, Node >::
3385 readBinary(const std::string & filename,
3386  bool includeBndryPad)
3387 {
3388  // Get the pointer to this MDVector's MDArray, including all padding
3389  const Scalar * buffer = getDataNonConst(true).getRawPtr();
3390 
3391  // Compute either _fileInfo or _fileInfoWithBndry, whichever is
3392  // appropriate, and return a reference to that fileInfo object
3393  Teuchos::RCP< FileInfo > & fileInfo = computeFileInfo(includeBndryPad);
3394 
3395  // Parallel input
3396 #ifdef HAVE_MPI
3397 
3398  // Since HAVE_MPI is defined, we know that _teuchosComm points to a
3399  // const Teuchos::MpiComm< int >. We downcast, extract and
3400  // dereference so that we can get access to the MPI_Comm used to
3401  // construct it.
3402  Teuchos::RCP< const Teuchos::MpiComm< int > > mpiComm =
3403  Teuchos::rcp_dynamic_cast< const Teuchos::MpiComm< int > >(_teuchosComm);
3404  const Teuchos::OpaqueWrapper< MPI_Comm > & communicator =
3405  *(mpiComm->getRawMpiComm());
3406 
3407  // Compute the access mode
3408  int access = MPI_MODE_RDONLY;
3409 
3410  // I copy the filename C string, because the c_str() method returns
3411  // a const char*, and the MPI_File_open() function requires
3412  // (incorrectly) a non-const char*.
3413  char * cstr = new char[filename.size()+1];
3414  std::strcpy(cstr, filename.c_str());
3415 
3416  // Use MPI I/O to read the binary file
3417  MPI_File mpiFile;
3418  MPI_Status status;
3419  char datarep[7] = "native";
3420  MPI_File_open(communicator(), cstr, access, MPI_INFO_NULL, &mpiFile);
3421  MPI_File_set_view(mpiFile, 0, mpiType< Scalar >(),
3422  *(fileInfo->filetype), datarep, MPI_INFO_NULL);
3423  MPI_File_read_all(mpiFile, (void*)buffer, 1, *(fileInfo->datatype),
3424  &status);
3425  MPI_File_close(&mpiFile);
3426 
3427  // Delete the C string
3428  delete [] cstr;
3429 
3430  // Serial input
3431 #else
3432 
3433  // Get the number of dimensions
3434  int ndims = _mdMap->numDims();
3435 
3436  // Initialize the data file
3437  FILE * datafile;
3438  datafile = fopen(filename.c_str(), "r");
3439 
3440  // Obtain the MDArrayView to read into, including the boundary
3441  // padding if requested
3442  MDArrayView< Scalar > mdArrayView = getDataNonConst(includeBndryPad);
3443 
3444  // Initialize the set of indexes
3445  Teuchos::Array< Ordinal > index(ndims);
3446  for (int axis = 0; axis < ndims; ++axis)
3447  index[axis] = fileInfo->dataStart[axis];
3448 
3449  // Iterate over the data and read it from the data file
3450  typedef typename MDArrayView< Scalar >::iterator iterator;
3451  for (iterator it = mdArrayView.begin(); it != mdArrayView.end(); ++it)
3452  {
3453  fread(&(*it), sizeof(Scalar), 1, datafile);
3454  }
3455 
3456  // Close the data file
3457  fclose(datafile);
3458 
3459 #endif
3460 
3461 }
3462 
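writeBinary() and readBinary() pair up naturally as a checkpoint/restart mechanism. A usage sketch with 'v' an existing vector and an arbitrary placeholder file name; both calls are collective, and under MPI each process touches only its own block of the file.

#include <string>

#include "Domi_MDVector.hpp"

// Write the vector to disk, clobber it, then restore it from the file.
void checkpointRoundTrip(Domi::MDVector< double > & v)
{
  const std::string filename("mdvector.bin");   // placeholder name
  v.writeBinary(filename, false);   // false: exclude boundary padding
  v.putScalar(0.0);
  v.readBinary(filename, false);
}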
3464 
3465 template< class Scalar,
3466  class Node >
3467 Teuchos::RCP< typename MDVector< Scalar, Node >::FileInfo > &
3468 MDVector< Scalar, Node >::
3469 computeFileInfo(bool includeBndryPad) const
3470 {
3471  // Work with the appropriate FileInfo object. By using a reference
3472  // here, we are working directly with the member data.
3473  Teuchos::RCP< MDVector< Scalar, Node >::FileInfo > & fileInfo =
3474  includeBndryPad ? _fileInfoWithBndry : _fileInfo;
3475 
3476  // If the fileInfo object already has been set, our work is done
3477  if (!fileInfo.is_null()) return fileInfo;
3478 
3479  // Initialize the new FileInfo object
3480  int ndims = _mdMap->numDims();
3481  fileInfo.reset(new MDVector< Scalar, Node >::FileInfo);
3482  fileInfo->fileShape.resize(ndims);
3483  fileInfo->bufferShape.resize(ndims);
3484  fileInfo->dataShape.resize(ndims);
3485  fileInfo->fileStart.resize(ndims);
3486  fileInfo->dataStart.resize(ndims);
3487 
3488  // Initialize the shapes and starts.
3489  for (int axis = 0; axis < ndims; ++axis)
3490  {
3491  // Initialize the FileInfo arrays using includeBndryPad where
3492  // appropriate
3493  fileInfo->fileShape[axis] = getGlobalDim(axis,includeBndryPad);
3494  fileInfo->bufferShape[axis] = getLocalDim(axis,true );
3495  fileInfo->dataShape[axis] = getLocalDim(axis,false);
3496  fileInfo->fileStart[axis] = getGlobalRankBounds(axis,includeBndryPad).start();
3497  fileInfo->dataStart[axis] = getLocalBounds(axis).start();
3498  // Modify dataShape and dataStart if boundary padding is included
3499  if (includeBndryPad)
3500  {
3501  int commIndex = _mdMap->getCommIndex(axis);
3502  if (commIndex == 0)
3503  {
3504  int pad = getLowerBndryPad(axis);
3505  fileInfo->dataShape[axis] += pad;
3506  fileInfo->dataStart[axis] -= pad;
3507  }
3508  if (commIndex == _mdMap->getCommDim(axis)-1)
3509  {
3510  fileInfo->dataShape[axis] += getUpperBndryPad(axis);
3511  }
3512  }
3513  }
3514 
3515 #ifdef DOMI_MDVECTOR_DEBUG_IO
3516  cout << _teuchosComm->getRank() << ": fileShape = " << fileInfo->fileShape() << endl;
3517  cout << _teuchosComm->getRank() << ": bufferShape = " << fileInfo->bufferShape() << endl;
3518  cout << _teuchosComm->getRank() << ": dataShape = " << fileInfo->dataShape() << endl;
3519  cout << _teuchosComm->getRank() << ": fileStart = " << fileInfo->fileStart() << endl;
3520  cout << _teuchosComm->getRank() << ": dataStart = " << fileInfo->dataStart() << endl;
3521 #endif
3522 
3523 #ifdef HAVE_MPI
3524  int mpi_order = getLayout() == C_ORDER ? MPI_ORDER_C : MPI_ORDER_FORTRAN;
3525  // Build the MPI_Datatype for the file
3526  fileInfo->filetype = Teuchos::rcp(new MPI_Datatype);
3527  MPI_Type_create_subarray(ndims,
3528  fileInfo->fileShape.getRawPtr(),
3529  fileInfo->dataShape.getRawPtr(),
3530  fileInfo->fileStart.getRawPtr(),
3531  mpi_order,
3532  mpiType< Scalar >(),
3533  fileInfo->filetype.get());
3534  MPI_Type_commit(fileInfo->filetype.get());
3535 
3536  // Build the MPI_Datatype for the data
3537  fileInfo->datatype = Teuchos::rcp(new MPI_Datatype);
3538  MPI_Type_create_subarray(ndims,
3539  fileInfo->bufferShape.getRawPtr(),
3540  fileInfo->dataShape.getRawPtr(),
3541  fileInfo->dataStart.getRawPtr(),
3542  mpi_order,
3543  mpiType< Scalar >(),
3544  fileInfo->datatype.get());
3545  MPI_Type_commit(fileInfo->datatype.get());
3546 #endif // HAVE_MPI
3547 
3548  return fileInfo;
3549 }
3550 
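The two datatypes built above play complementary roles in the MPI I/O calls of writeBinary()/readBinary(): 'filetype' places this process's block within the global file via MPI_File_set_view, while 'datatype' selects the owned entries out of the padded local buffer. A stand-alone sketch of that pairing; every identifier is a placeholder and error checking is omitted.

#include <mpi.h>

// Collectively write one process's block of a distributed array of doubles.
void writeBlock(MPI_Comm comm, char * filename, const double * buffer,
                MPI_Datatype filetype, MPI_Datatype datatype)
{
  MPI_File   mpiFile;
  MPI_Status status;
  char datarep[7] = "native";

  MPI_File_open(comm, filename, MPI_MODE_WRONLY | MPI_MODE_CREATE,
                MPI_INFO_NULL, &mpiFile);
  MPI_File_set_view(mpiFile, 0, MPI_DOUBLE, filetype, datarep, MPI_INFO_NULL);
  MPI_File_write_all(mpiFile, (void*) buffer, 1, datatype, &status);
  MPI_File_close(&mpiFile);
}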
3552 
3553 } // Namespace Domi
3554 
3555 #endif