Zoltan2
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Pages
Zoltan2_Directory_Comm.hpp
Go to the documentation of this file.
1 /*
2  * @HEADER
3  *
4  * ***********************************************************************
5  *
6  * Zoltan2 Directory for Load-balancing, Partitioning, Ordering and Coloring
7  * Copyright 2012 Sandia Corporation
8  *
9  * Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
10  * the U.S. Government retains certain rights in this software.
11  *
12  * Redistribution and use in source and binary forms, with or without
13  * modification, are permitted provided that the following conditions are
14  * met:
15  *
16  * 1. Redistributions of source code must retain the above copyright
17  * notice, this list of conditions and the following disclaimer.
18  *
19  * 2. Redistributions in binary form must reproduce the above copyright
20  * notice, this list of conditions and the following disclaimer in the
21  * documentation and/or other materials provided with the distribution.
22  *
23  * 3. Neither the name of the Corporation nor the names of the
24  * contributors may be used to endorse or promote products derived from
25  * this software without specific prior written permission.
26  *
27  * THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
28  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
29  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
30  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
31  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
32  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
33  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
34  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
35  * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
36  * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
37  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
38  *
39  * Questions? Contact Karen Devine kddevin@sandia.gov
40  * Erik Boman egboman@sandia.gov
41  *
42  * ***********************************************************************
43  *
44  * @HEADER
45  */
46 
47 #ifndef ZOLTAN2_DIRECTORY_COMM_H_
48 #define ZOLTAN2_DIRECTORY_COMM_H_
49 
50 #include <Teuchos_CommHelpers.hpp>
51 #include <vector>
52 #include <mpi.h>
53 #include <Teuchos_ArrayRCP.hpp>
54 
55 namespace Zoltan2 {
56 
57 class Zoltan2_Directory_Plan { /* data for mapping between decompositions */
58  public:
 // NOTE(review): the inline constructor/destructor declarations (doc lines
 // 59, 62, 65) were elided by the documentation extraction; the stray '}'
 // lines below are their closing braces. Consult the original header.
60  }
61 
63  }
64 
66 
 // Debug/diagnostic dump of the plan's state, prefixed with headerMessage.
67  void print(const std::string& headerMessage) const;
68 
69  Teuchos::ArrayRCP<int> procs_to; /* processors I'll send to */
70  Teuchos::ArrayRCP<int> procs_from; /* processors I'll receive from*/
71  Teuchos::ArrayRCP<int> lengths_to; /* # items I send in my messages */
72  Teuchos::ArrayRCP<int> lengths_from; /* # items I recv in my messages */
73 
74  /* Following arrays used if send/recv data is packed contiguously */
75  Teuchos::ArrayRCP<int> starts_to; /* where in item lists each send starts */
76  Teuchos::ArrayRCP<int> starts_from; /* where in item lists each recv starts */
77 
78  /* Following arrays used if send/recv data not packed contiguously */
79  Teuchos::ArrayRCP<int> indices_to; /* indices of items I send in my msgs */
80 
81  /* ordered consistent with lengths_to */
82  Teuchos::ArrayRCP<int> indices_from; /* indices for where to put arriving data */
83 
84  /* ordered consistent with lengths_from */
85 
86  /* Above information is sufficient if items are all of the same size */
87  /* If item sizes are variable, then need following additional arrays */
88  Teuchos::ArrayRCP<int> sizes; /* size of each item to send (if items vary) */
89  bool using_sizes; /* may refactor this so it's out - tracks whether we are in size mode even if 0 size */
90 
91  Teuchos::ArrayRCP<int> sizes_to; /* size of each msg to send (if items vary) */
92  Teuchos::ArrayRCP<int> sizes_from; /* size of each msg to recv (if items vary) */
93 
94  /* Following used if send/recv data is packed contiguously & items vary */
95  Teuchos::ArrayRCP<int> starts_to_ptr; /* where in dense array sends starts */
96  Teuchos::ArrayRCP<int> starts_from_ptr; /* where in dense each recv starts */
97 
98  /* Following used if send/recv data not packed contiguously & items vary */
99  Teuchos::ArrayRCP<int> indices_to_ptr; /* where to find items I send in my msgs */
100  /* ordered consistent with lengths_to */
101  Teuchos::ArrayRCP<int> indices_from_ptr; /* where to find items I recv */
102  /* ordered consistent with lengths_from */
103 
104  /* Note: ALL above arrays include data for self-msg */
105 
106  int nvals; /* number of values I own to start */
107  int nvals_recv; /* number of values I own after remapping */
108  int nrecvs; /* number of msgs I'll recv (w/o self_msg) */
109  int nsends; /* number of msgs I'll send (w/o self_msg) */
110  int self_msg; /* do I have data for myself? */
111  int max_send_size; /* size of longest message I send (w/o self) */
112  int total_recv_size; /* total amount of data I'll recv (w/ self) */
113  int maxed_recvs; /* use MPI_Alltoallv if too many receives */
114  Teuchos::RCP<const Teuchos::Comm<int> > comm; /* communicator */
115 
116  // making this ArrayRCP is causing issues with name() demangling for gcc
117  // sems build, but not clang... will need to work on this further
118  // back to std::vector for the moment
119  // there is probably an option in the sems building turning on
120  // something that creates this conflict
121  std::vector<MPI_Request> request; /* MPI requests for posted recvs */
122  std::vector<MPI_Status> status; /* MPI status for those recvs */
123 
124  Zoltan2_Directory_Plan* plan_reverse; /* to support POST & WAIT */
125 
126  Teuchos::ArrayRCP<char> recv_buff; /* To support POST & WAIT */
 // Returns the receive buffer by value; ArrayRCP copy is reference-counted,
 // so this shares (not clones) the underlying buffer.
127  Teuchos::ArrayRCP<char> getRecvBuff() const { return recv_buff; }
128 };
129 
131  public:
 // NOTE(review): the class declaration line (doc line 130) and the
 // constructor declaration line (doc line 132) were elided by the
 // documentation extraction. Per the member list at the bottom of this page,
 // the lines below are the parameter list of
 //   Zoltan2_Directory_Comm(int nvals, const Teuchos::ArrayRCP<int>& assign,
 //                          Teuchos::RCP<const Teuchos::Comm<int>> comm, int tag);
 // which builds the forward communication plan — confirm against the
 // original header.
133  int nvals, /* number of values I currently own */
134  const Teuchos::ArrayRCP<int> &assign, /* processor assignment for all values */
135  Teuchos::RCP<const Teuchos::Comm<int> > comm, /* communicator */
136  int tag); /* message tag I can use */
137 
139 
 // Forward communication: send my owned data to the processors named in the
 // plan; received bytes land in recv_data. nbytes is the fixed per-item size.
140  int do_forward(
141  int tag, /* message tag for communicating */
142  const Teuchos::ArrayRCP<char> &send_data, /* array of data I currently own */
143  int nbytes, /* msg size */
144  Teuchos::ArrayRCP<char> &recv_data); /* array of data to receive */
145 
 // Reverse communication: route data back along the inverse of the forward
 // plan; sizes allows variable-sized items on the reverse leg.
146  int do_reverse(
147  int tag, /* message tag for communicating */
148  const Teuchos::ArrayRCP<char> &send_data, /* array of data I currently own */
149  int nbytes, /* msg size */
150  const Teuchos::ArrayRCP<int> &sizes,
151  Teuchos::ArrayRCP<char> &recv_data); /* array of data owned after reverse */
152 
153  int getNRec() const { return nrec; } /* accessor for nrec */
154 
 // NOTE(review): the declaration line of this inline accessor (doc line 155)
 // was elided by the extraction; presumably a getTotalRecvSize()-style
 // accessor returning the forward plan's total receive size — confirm
 // against the original header.
156  return plan_forward->total_recv_size;
157  }
158 
 // Re-sizes the (forward) plan for variable-length items; optionally reports
 // the summed receive size through sum_recv_sizes.
159  int resize(const Teuchos::ArrayRCP<int> &sizes, int tag,
160  int *sum_recv_sizes);
161 
162  private:
 // Overload used internally to resize an arbitrary plan (forward or reverse).
163  int resize(Zoltan2_Directory_Plan *plan,
164  const Teuchos::ArrayRCP<int> &sizes, int tag, int *sum_recv_sizes);
165 
 // Posts the receives/sends for a plan (first half of a POST/WAIT exchange).
166  int do_post(Zoltan2_Directory_Plan *plan, int tag,
167  const Teuchos::ArrayRCP<char> &send_data,
168  int nbytes, /* msg size */
169  Teuchos::ArrayRCP<char> &recv_data);
170 
 // Completes the exchange started by do_post (second half of POST/WAIT).
171  int do_wait(Zoltan2_Directory_Plan *plan, int tag,
172  const Teuchos::ArrayRCP<char> &send_data,
173  int nbytes, /* msg size */
174  Teuchos::ArrayRCP<char> &recv_data);
175 
 // Collective fallback path; see maxed_recvs in the plan — used when the
 // number of posted receives would exceed the platform limit.
176  int do_all_to_all(Zoltan2_Directory_Plan *plan,
177  const Teuchos::ArrayRCP<char> &send_data,
178  int nbytes, /* msg size */
179  Teuchos::ArrayRCP<char> &recv_data);
180 
 // Sorts vals_sort, applying the same permutation to vals_other.
181  int sort_ints(Teuchos::ArrayRCP<int> &vals_sort, Teuchos::ArrayRCP<int> &vals_other);
182 
 // Derives the receive side of a plan (lengths_from/procs_from/nrecvs) from
 // the send side via communication.
183  int invert_map(const Teuchos::ArrayRCP<int> &lengths_to,
184  const Teuchos::ArrayRCP<int> &procs_to, int nsends, int self_msg,
185  Teuchos::ArrayRCP<int> &lengths_from, Teuchos::ArrayRCP<int> &procs_from,
186  int *pnrecvs, int my_proc,int nprocs, int out_of_mem, int tag,
187  Teuchos::RCP<const Teuchos::Comm<int> > comm);
188 
 // Exchanges per-message sizes with peers; accumulates total_recv_size.
189  int exchange_sizes(const Teuchos::ArrayRCP<int> &sizes_to,
190  const Teuchos::ArrayRCP<int> &procs_to, int nsends,
191  int self_msg, Teuchos::ArrayRCP<int> &sizes_from,
192  const Teuchos::ArrayRCP<int> &procs_from,
193  int nrecvs, int *total_recv_size, int my_proc, int tag,
194  Teuchos::RCP<const Teuchos::Comm<int> > comm);
195 
196  void free_reverse_plan(Zoltan2_Directory_Plan *plan);
197 
 // Builds plan_forward->plan_reverse so do_reverse can route data back.
198  int create_reverse_plan(int tag, const Teuchos::ArrayRCP<int> &sizes);
199 
 // NOTE(review): falls back to MPI_COMM_WORLD when HAVE_MPI is undefined,
 // yet <mpi.h> is included unconditionally at the top of this header —
 // confirm the non-MPI build actually compiles this file.
200  MPI_Comm getRawComm() {
201  #ifdef HAVE_MPI
202  return Teuchos::getRawMpiComm(*comm_);
203  #else
204  return MPI_COMM_WORLD;
205  #endif
206  }
207 
208  Teuchos::RCP<const Teuchos::Comm<int> > comm_;
209  Zoltan2_Directory_Plan * plan_forward; // for efficient MPI communication
210  int nrec;
211 };
212 
213 // -----------------------------------------------------------------------------
214 // TODO: Decide how to handle this code - copied from zoltan - some may be relic
215  /* Red Storm MPI permits a maximum of 2048 receives. We set our
216  * limit of posted receives to 2000, leaving some for the application.
217  */
218  #ifndef MPI_RECV_LIMIT
219  /* Decided for Trilinos v10/Zoltan v3.2 would almost always use */
220  /* MPI_Alltoall communication instead of point-to-point. */
221  /* August 2009 */
222  /* #define MPI_RECV_LIMIT 4 */
223 
224  /* Decided for zoltan_gid_64 branch to always used posted receives because
225  * Alltoall requires that offsets be 32-bit integers. October 2010
226  */
227  #define MPI_RECV_LIMIT 0
228  /* #define MPI_RECV_LIMIT 2000 */
229  #endif
230 // -----------------------------------------------------------------------------
231 
232 } // end namespace Zoltan2
233 
234 #endif
Teuchos::ArrayRCP< int > indices_from_ptr
void print(const std::string &headerMessage) const
int do_reverse(int tag, const Teuchos::ArrayRCP< char > &send_data, int nbytes, const Teuchos::ArrayRCP< int > &sizes, Teuchos::ArrayRCP< char > &recv_data)
Teuchos::ArrayRCP< char > getRecvBuff() const
Teuchos::ArrayRCP< int > indices_to_ptr
Teuchos::ArrayRCP< int > starts_from_ptr
int do_forward(int tag, const Teuchos::ArrayRCP< char > &send_data, int nbytes, Teuchos::ArrayRCP< char > &recv_data)
int resize(const Teuchos::ArrayRCP< int > &sizes, int tag, int *sum_recv_sizes)
Teuchos::RCP< const Teuchos::Comm< int > > comm
Zoltan2_Directory_Comm(int nvals, const Teuchos::ArrayRCP< int > &assign, Teuchos::RCP< const Teuchos::Comm< int > > comm, int tag)
Teuchos::ArrayRCP< int > starts_to_ptr
void getInvertedValues(Zoltan2_Directory_Plan *from)