Dune Core Modules (2.7.1)

communication.hh
1 // -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 // vi: set et ts=4 sw=2 sts=2:
3 #ifndef DUNE_COMMON_PARALLEL_COMMUNICATION_HH
4 #define DUNE_COMMON_PARALLEL_COMMUNICATION_HH
12 #include <iostream>
13 #include <complex>
14 #include <algorithm>
15 #include <vector>
16 
17 #include <dune/common/binaryfunctions.hh>
18 #include <dune/common/exceptions.hh>
19 #include <dune/common/unused.hh>
20 #include <dune/common/parallel/future.hh>
21 
41 namespace Dune
42 {
43 
44  /* define some type that definitely differs from MPI_Comm */
45  struct No_Comm {};
46 
79  template<typename Communicator>
80  class Communication
81  {
82  public:
84  Communication()
85  {}
86 
91  Communication (const Communicator&)
92  {}
93 
95  int rank () const
96  {
97  return 0;
98  }
99 
101  int size () const
102  {
103  return 1;
104  }
105 
109  template<class T>
110  int send(const T& data, int dest_rank, int tag){
111  DUNE_UNUSED_PARAMETER(data);
112  DUNE_UNUSED_PARAMETER(dest_rank);
113  DUNE_UNUSED_PARAMETER(tag);
114  DUNE_THROW(ParallelError, "This method is not supported in sequential programs");
115  }
116 
120  template<class T>
121  PseudoFuture<T> isend(const T&& data, int dest_rank, int tag){
122  DUNE_UNUSED_PARAMETER(data);
123  DUNE_UNUSED_PARAMETER(dest_rank);
124  DUNE_UNUSED_PARAMETER(tag);
125  DUNE_THROW(ParallelError, "This method is not supported in sequential programs");
126  }
127 
131  template<class T>
132  T recv(T&& data, int source_rank, int tag, void* status = 0){
133  DUNE_UNUSED_PARAMETER(data);
134  DUNE_UNUSED_PARAMETER(source_rank);
135  DUNE_UNUSED_PARAMETER(tag);
136  DUNE_UNUSED_PARAMETER(status);
137  DUNE_THROW(ParallelError, "This method is not supported in sequential programs");
138  }
139 
143  template<class T>
144  PseudoFuture<T> irecv(T&& data, int source_rank, int tag){
145  DUNE_UNUSED_PARAMETER(data);
146  DUNE_UNUSED_PARAMETER(source_rank);
147  DUNE_UNUSED_PARAMETER(tag);
148  DUNE_THROW(ParallelError, "This method is not supported in sequential programs");
149  }
150 
151  template<class T>
152  T rrecv(T&& data, int source_rank, int tag, void* status = 0) const
153  {
154  DUNE_UNUSED_PARAMETER(data);
155  DUNE_UNUSED_PARAMETER(source_rank);
156  DUNE_UNUSED_PARAMETER(tag);
157  DUNE_UNUSED_PARAMETER(status);
158  DUNE_THROW(ParallelError, "This method is not supported in sequential programs");
159  }
163  template<typename T>
164  T sum (const T& in) const
165  {
166  return in;
167  }
168 
174  template<typename T>
175  int sum (T* inout, int len) const
176  {
177  DUNE_UNUSED_PARAMETER(inout);
178  DUNE_UNUSED_PARAMETER(len);
179  return 0;
180  }
181 
185  template<typename T>
186  T prod (const T& in) const
187  {
188  return in;
189  }
190 
196  template<typename T>
197  int prod (T* inout, int len) const
198  {
199  DUNE_UNUSED_PARAMETER(inout);
200  DUNE_UNUSED_PARAMETER(len);
201  return 0;
202  }
203 
207  template<typename T>
208  T min (const T& in) const
209  {
210  return in;
211  }
212 
218  template<typename T>
219  int min (T* inout, int len) const
220  {
221  DUNE_UNUSED_PARAMETER(inout);
222  DUNE_UNUSED_PARAMETER(len);
223  return 0;
224  }
225 
229  template<typename T>
230  T max (const T& in) const
231  {
232  return in;
233  }
234 
240  template<typename T>
241  int max (T* inout, int len) const
242  {
243  DUNE_UNUSED_PARAMETER(inout);
244  DUNE_UNUSED_PARAMETER(len);
245  return 0;
246  }
247 
251  int barrier () const
252  {
253  return 0;
254  }
255 
259  PseudoFuture<void> ibarrier() const
260  {
261  return {true}; // return a valid future
262  }
263 
267  template<typename T>
268  int broadcast (T* inout, int len, int root) const
269  {
270  DUNE_UNUSED_PARAMETER(inout);
271  DUNE_UNUSED_PARAMETER(len);
272  DUNE_UNUSED_PARAMETER(root);
273  return 0;
274  }
275 
279  template<class T>
280  PseudoFuture<T> ibroadcast(T&& data, int root) const{
281  return {std::forward<T>(data)};
282  }
283 
284 
297  template<typename T>
298  int gather (const T* in, T* out, int len, int root) const // note out must have same size as in
299  {
300  DUNE_UNUSED_PARAMETER(root);
301  for (int i=0; i<len; i++)
302  out[i] = in[i];
303  return 0;
304  }
305 
309  template<class TIN, class TOUT = std::vector<TIN>>
310  PseudoFuture<TOUT> igather(TIN&& data_in, TOUT&& data_out, int root){
311  *(data_out.begin()) = std::forward<TIN>(data_in);
312  return {std::forward<TOUT>(data_out)};
313  }
314 
315 
335  template<typename T>
336  int gatherv (const T* in, int sendlen, T* out, int* recvlen, int* displ, int root) const
337  {
338  DUNE_UNUSED_PARAMETER(recvlen);
339  DUNE_UNUSED_PARAMETER(root);
340  for (int i=*displ; i<sendlen; i++)
341  out[i] = in[i];
342  return 0;
343  }
344 
358  template<typename T>
359  int scatter (const T* send, T* recv, int len, int root) const // note out must have same size as in
360  {
361  DUNE_UNUSED_PARAMETER(root);
362  for (int i=0; i<len; i++)
363  recv[i] = send[i];
364  return 0;
365  }
366 
370  template<class TIN, class TOUT = TIN>
371  PseudoFuture<TOUT> iscatter(TIN&& data_in, TOUT&& data_out, int root){
372  data_out = *(std::forward<TIN>(data_in).begin());
373  return {std::forward<TOUT>(data_out)};
374  }
375 
394  template<typename T>
395  int scatterv (const T* send, int* sendlen, int* displ, T* recv, int recvlen, int root) const
396  {
397  DUNE_UNUSED_PARAMETER(recvlen);
398  DUNE_UNUSED_PARAMETER(root);
399  for (int i=*displ; i<*sendlen; i++)
400  recv[i] = send[i];
401  return 0;
402  }
403 
417  template<typename T>
418  int allgather(const T* sbuf, int count, T* rbuf) const
419  {
420  for(const T* end=sbuf+count; sbuf < end; ++sbuf, ++rbuf)
421  *rbuf=*sbuf;
422  return 0;
423  }
424 
429  template<class TIN, class TOUT = TIN>
430  PseudoFuture<TOUT> iallgather(TIN&& data_in, TOUT&& data_out){
431  return {std::forward<TOUT>(data_out)};
432  }
433 
450  template<typename T>
451  int allgatherv (const T* in, int sendlen, T* out, int* recvlen, int* displ) const
452  {
453  DUNE_UNUSED_PARAMETER(recvlen);
454  for (int i=*displ; i<sendlen; i++)
455  out[i] = in[i];
456  return 0;
457  }
458 
471  template<typename BinaryFunction, typename Type>
472  int allreduce(Type* inout, int len) const
473  {
474  DUNE_UNUSED_PARAMETER(inout);
475  DUNE_UNUSED_PARAMETER(len);
476  return 0;
477  }
478 
483  template<class BinaryFunction, class TIN, class TOUT = TIN>
484  PseudoFuture<TOUT> iallreduce(TIN&& data_in, TOUT&& data_out){
485  data_out = std::forward<TIN>(data_in);
486  return {std::forward<TOUT>(data_out)};
487  }
488 
493  template<class BinaryFunction, class T>
494  PseudoFuture<T> iallreduce(T&& data){
495  return {std::forward<T>(data)};
496  }
497 
498 
512  template<typename BinaryFunction, typename Type>
513  int allreduce(const Type* in, Type* out, int len) const
514  {
515  std::copy(in, in+len, out);
516  return 0;
517  }
518 
519  };
520 
521  template<class T>
522  using CollectiveCommunication
523  // Will be deprecated after the 2.7 release
524  //[[deprecated("CollectiveCommunication is deprecated. Use Communication instead.")]]
525  = Communication<T>;
526 }
527 
528 #endif
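
The value of this sequential default implementation is that code written against the Communication interface compiles and behaves sensibly in a build without MPI: rank() is always 0, size() is always 1, the reductions hand back their argument, and barrier() is a no-op. A minimal usage sketch, built only on members shown in the listing above (the helper globalAverage and all variable names are illustrative, not part of Dune):

#include <cstddef>
#include <iostream>
#include <vector>

#include <dune/common/parallel/communication.hh>

// Generic code written against the Communication interface: only rank(),
// size() and sum() are used, so the same function works with the sequential
// Communication<No_Comm> from this header and with an MPI-backed one.
template<class Comm>
double globalAverage(const Comm& comm, const std::vector<double>& local)
{
  double s = 0.0;
  for (double x : local)
    s += x;
  s = comm.sum(s);                          // sequentially: returns s unchanged
  std::size_t n = comm.sum(local.size());   // sequentially: returns local.size()
  return s / n;
}

int main()
{
  Dune::Communication<Dune::No_Comm> comm;      // the sequential fallback
  std::cout << "rank " << comm.rank()           // always 0
            << " of " << comm.size() << "\n";   // always 1
  std::cout << globalAverage(comm, {1.0, 2.0, 3.0}) << "\n";  // prints 2
  comm.barrier();                               // no-op, returns 0
  return 0;
}
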
Helper classes to provide unique types for standard functions (binaryfunctions.hh).
Collective communication interface and sequential default implementation.
Definition: communication.hh:81
PseudoFuture< void > ibarrier() const
Nonblocking barrier.
Definition: communication.hh:259
int send(const T &data, int dest_rank, int tag)
Sends the data to the dest_rank.
Definition: communication.hh:110
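
Unlike the collective operations, the point-to-point methods of the sequential implementation are not harmless no-ops: send, isend, recv, irecv and rrecv all throw Dune::ParallelError, since there is no second process to communicate with. A small sketch of what that means for calling code (the catch handler and its message are illustrative):

#include <iostream>

#include <dune/common/exceptions.hh>
#include <dune/common/parallel/communication.hh>

int main()
{
  Dune::Communication<Dune::No_Comm> comm;
  try {
    // There is no other rank to talk to in a sequential program, so this
    // throws Dune::ParallelError (line 114 of the listing above).
    comm.send(42, /*dest_rank=*/1, /*tag=*/0);
  }
  catch (const Dune::ParallelError& e) {
    std::cout << "expected: " << e.what() << "\n";
  }
  return 0;
}
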
int allreduce(const Type *in, Type *out, int len) const
Compute something over all processes for each component of an array and return the result in every process....
Definition: communication.hh:513
T max(const T &in) const
Compute the maximum of the argument over all processes and return the result in every process....
Definition: communication.hh:230
int rank() const
Return rank, which is between 0 and size()-1.
Definition: communication.hh:95
T sum(const T &in) const
Compute the sum of the argument over all processes and return the result in every process....
Definition: communication.hh:164
int prod(T *inout, int len) const
Compute the product over all processes for each component of an array and return the result in every ...
Definition: communication.hh:197
T recv(T &&data, int source_rank, int tag, void *status=0)
Receives the data from the source_rank.
Definition: communication.hh:132
int allreduce(Type *inout, int len) const
Compute something over all processes for each component of an array and return the result in every process....
Definition: communication.hh:472
int scatter(const T *send, T *recv, int len, int root) const
Scatter array from a root to all other tasks.
Definition: communication.hh:359
PseudoFuture< T > irecv(T &&data, int source_rank, int tag)
Receives the data from the source_rank nonblocking.
Definition: communication.hh:144
int allgatherv(const T *in, int sendlen, T *out, int *recvlen, int *displ) const
Gathers data of variable length from all tasks and distributes it to all.
Definition: communication.hh:451
int size() const
Number of processes in the set, which is greater than 0.
Definition: communication.hh:101
int gatherv(const T *in, int sendlen, T *out, int *recvlen, int *displ, int root) const
Gather arrays of variable size on root task.
Definition: communication.hh:336
PseudoFuture< T > isend(const T &&data, int dest_rank, int tag)
Sends the data to the dest_rank nonblocking.
Definition: communication.hh:121
int sum(T *inout, int len) const
Compute the sum over all processes for each component of an array and return the result in every proc...
Definition: communication.hh:175
PseudoFuture< TOUT > iallreduce(TIN &&data_in, TOUT &&data_out)
Compute something over all processes nonblocking.
Definition: communication.hh:484
T min(const T &in) const
Compute the minimum of the argument over all processes and return the result in every process....
Definition: communication.hh:208
int allgather(const T *sbuf, int count, T *rbuf) const
Gathers data from all tasks and distributes it to all.
Definition: communication.hh:418
PseudoFuture< TOUT > iallgather(TIN &&data_in, TOUT &&data_out)
Gathers data from all tasks and distributes it to all, nonblocking.
Definition: communication.hh:430
int gather(const T *in, T *out, int len, int root) const
Gather arrays on root task.
Definition: communication.hh:298
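
In the sequential implementation gather, scatter and allgather degenerate to element-wise copies, and broadcast to a no-op: the loop at lines 301-302 copies len entries, so the output buffer must hold len elements per rank, which here means exactly len. A small sketch (the buffer names are illustrative):

#include <array>
#include <cassert>

#include <dune/common/parallel/communication.hh>

int main()
{
  Dune::Communication<Dune::No_Comm> comm;

  std::array<int, 3> local    = {1, 2, 3};
  std::array<int, 3> gathered = {};   // size() == 1, so room for one contribution

  comm.gather(local.data(), gathered.data(), 3, /*root=*/0);  // plain copy here
  assert(gathered == local);

  // broadcast is a no-op sequentially: the "root" data is already in place
  // on the only rank there is.
  comm.broadcast(local.data(), 3, /*root=*/0);
  return 0;
}
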
Communication(const Communicator &)
Constructor with a given communicator.
Definition: communication.hh:91
int max(T *inout, int len) const
Compute the maximum over all processes for each component of an array and return the result in every ...
Definition: communication.hh:241
PseudoFuture< T > iallreduce(T &&data)
Compute something over all processes nonblocking and in-place.
Definition: communication.hh:494
T prod(const T &in) const
Compute the product of the argument over all processes and return the result in every process....
Definition: communication.hh:186
PseudoFuture< T > ibroadcast(T &&data, int root) const
Distribute an array from the process with rank root to all other processes nonblocking.
Definition: communication.hh:280
PseudoFuture< TOUT > igather(TIN &&data_in, TOUT &&data_out, int root)
Gather arrays on root task nonblocking.
Definition: communication.hh:310
int broadcast(T *inout, int len, int root) const
Distribute an array from the process with rank root to all other processes.
Definition: communication.hh:268
int min(T *inout, int len) const
Compute the minimum over all processes for each component of an array and return the result in every ...
Definition: communication.hh:219
int scatterv(const T *send, int *sendlen, int *displ, T *recv, int recvlen, int root) const
Scatter arrays of variable length from a root to all other tasks.
Definition: communication.hh:395
int barrier() const
Wait until all processes have arrived at this point in the program.
Definition: communication.hh:251
PseudoFuture< TOUT > iscatter(TIN &&data_in, TOUT &&data_out, int root)
Scatter array from a root to all other tasks, nonblocking.
Definition: communication.hh:371
Communication()
Construct default object.
Definition: communication.hh:84
Default exception if an error in the parallel communication of the program occurred.
Definition: exceptions.hh:285
A wrapper-class for an object which is ready immediately.
Definition: future.hh:120
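
All nonblocking variants of the sequential implementation (apart from isend/irecv, which throw) return such a PseudoFuture, i.e. a future whose result is ready the moment it is created. The sketch below assumes the Future interface from future.hh (valid(), ready(), wait(), get()); the variable names are illustrative:

#include <cassert>
#include <iostream>

#include <dune/common/parallel/communication.hh>

int main()
{
  Dune::Communication<Dune::No_Comm> comm;

  // ibroadcast hands the data straight back, wrapped in a PseudoFuture.
  auto fut = comm.ibroadcast(42, /*root=*/0);

  // Assumed Future interface from future.hh: the result is available at once.
  assert(fut.valid());
  assert(fut.ready());
  fut.wait();                      // returns immediately
  std::cout << fut.get() << "\n";  // prints 42

  // The nonblocking barrier behaves the same way.
  auto done = comm.ibarrier();
  done.wait();
  return 0;
}
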
A few common exception classes (exceptions.hh).
#define DUNE_UNUSED_PARAMETER(parm)
A macro to mark intentionally unused function parameters with.
Definition: unused.hh:25
#define DUNE_THROW(E, m)
Definition: exceptions.hh:216
Dune namespace.
Definition: alignedallocator.hh:14
Definition of the DUNE_UNUSED macro for the case that config.h is not available.
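
Finally, lines 521-525 of the listing keep the old name CollectiveCommunication alive as a plain alias for Communication, with deprecation announced for after the 2.7 release. Existing code using the old spelling keeps compiling; new code should use Communication. A minimal sketch of the equivalence:

#include <type_traits>

#include <dune/common/parallel/communication.hh>

int main()
{
  // Old spelling, kept only as an alias (lines 521-525 of the listing).
  Dune::CollectiveCommunication<Dune::No_Comm> oldStyle;

  // New spelling, to be preferred from 2.7 on.
  Dune::Communication<Dune::No_Comm> newStyle;

  // Both name exactly the same type.
  static_assert(std::is_same<decltype(oldStyle), decltype(newStyle)>::value,
                "CollectiveCommunication<T> is an alias for Communication<T>");

  return oldStyle.rank() + newStyle.rank();  // 0 + 0
}
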