/* +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
   Copyright (c) 2011-2019 The plumed team
   (see the PEOPLE file at the root of the distribution for a list of names)

   See http://www.plumed.org for more information.

   This file is part of plumed, version 2.

   plumed is free software: you can redistribute it and/or modify
   it under the terms of the GNU Lesser General Public License as published by
   the Free Software Foundation, either version 3 of the License, or
   (at your option) any later version.

   plumed is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public License
   along with plumed. If not, see <http://www.gnu.org/licenses/>.
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
#ifndef __PLUMED_tools_Communicator_h
#define __PLUMED_tools_Communicator_h
#ifdef __PLUMED_HAS_MPI
#include <mpi.h>
#endif
#include <cstdlib>
#include "Exception.h"
#include <vector>
#include <string>
#include "Vector.h"
#include "Tensor.h"
#include "Matrix.h"

namespace PLMD {

#ifndef __PLUMED_HAS_MPI
/// Surrogate of MPI_Comm when the MPI library is not available
class MPI_Comm {};
/// Surrogate of MPI_Datatype when the MPI library is not available
class MPI_Datatype {};
/// Surrogate of MPI_Status when the MPI library is not available
class MPI_Status {};
/// Surrogate of MPI_Request when the MPI library is not available
class MPI_Request {};
#endif

/// \ingroup TOOLBOX
/// Class containing wrappers to MPI.
/// All the MPI-related code is relegated here.
class Communicator {
/// Communicator
  MPI_Comm communicator;
/// Function returning the MPI type.
/// Use it to obtain the MPI type corresponding to a C++ type, e.g.
/// `MPI_Datatype type=getMPIType<double>();`
  template <class T>
  static MPI_Datatype getMPIType();
/// Structure defining a buffer for MPI.
/// It packs a pointer to the data together with its type and size, so that
/// wrappers of MPI functions can pass the (buffer,type,size) triplet around
/// as a single object. It can be built from several kinds of data; to make
/// the MPI wrappers compatible with further containers, add constructors here.
  struct Data {
    void*pointer;
    int size;
    int nbytes=0;
    MPI_Datatype type;
/// Init from pointer and size
    template <typename T> Data(T*p,int s): pointer(p), size(s), nbytes(sizeof(T)), type(getMPIType<T>()) {}
/// Init from reference
    template <typename T> explicit Data(T&p): pointer(&p), size(1), nbytes(sizeof(T)), type(getMPIType<T>()) {}
/// Init from pointer to VectorGeneric
    template <unsigned n> explicit Data(VectorGeneric<n> *p,int s): pointer(p), size(n*s), nbytes(sizeof(double)), type(getMPIType<double>()) {}
/// Init from reference to VectorGeneric
    template <unsigned n> explicit Data(VectorGeneric<n> &p): pointer(&p), size(n), nbytes(sizeof(double)), type(getMPIType<double>()) {}
/// Init from pointer to TensorGeneric
    template <unsigned n,unsigned m> explicit Data(TensorGeneric<n,m> *p,int s): pointer(p), size(n*m*s), nbytes(sizeof(double)), type(getMPIType<double>()) {}
/// Init from reference to TensorGeneric
    template <unsigned n,unsigned m> explicit Data(TensorGeneric<n,m> &p): pointer(&p), size(n*m), nbytes(sizeof(double)), type(getMPIType<double>()) {}
/// Init from reference to std::vector.
/// Note that nbytes is propagated from the delegate as well.
    template <typename T> explicit Data(std::vector<T>&v) {
      Data d(v.data(),v.size()); pointer=d.pointer; size=d.size; nbytes=d.nbytes; type=d.type;
    }
/// Init from reference to PLMD::Matrix
    template <typename T> explicit Data(Matrix<T>&m) {
      if(m.nrows()*m.ncols()>0) { Data d(&m(0,0),m.nrows()*m.ncols()); pointer=d.pointer; size=d.size; nbytes=d.nbytes; type=d.type; }
      else { pointer=nullptr; size=0; }
    }
/// Init from reference to std::string
    explicit Data(std::string&s) {
      if(s.size()>0) { Data d(&s[0],s.size()); pointer=d.pointer; size=d.size; nbytes=d.nbytes; type=d.type; }
      else { pointer=nullptr; size=0; }
    }
  };
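  // Example (illustrative sketch, not part of the interface): the
  // constructors above all collapse to a (pointer,size,type) triplet, so
  // the wrappers below accept scalars, arrays, and containers alike.
  // The variable names here are hypothetical:
  //   double x;                   // Data(x)   -> pointer=&x,       size=1
  //   std::vector<double> v(10);  // Data(v)   -> pointer=v.data(), size=10
  //   PLMD::Vector pos;           // Data(pos) -> pointer=&pos,     size=3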
/// Const version of Communicator::Data
/// See Communicator::Data documentation
  struct ConstData {
    const void*pointer;
    int size;
    int nbytes=0;
    MPI_Datatype type;
    template <typename T> explicit ConstData(const T*p,int s): pointer(p), size(s), nbytes(sizeof(T)), type(getMPIType<T>()) {}
    template <typename T> explicit ConstData(const T&p): pointer(&p), size(1), nbytes(sizeof(T)), type(getMPIType<T>()) {}
    template <unsigned n> explicit ConstData(const VectorGeneric<n> *p,int s): pointer(p), size(n*s), nbytes(sizeof(double)), type(getMPIType<double>()) {}
    template <unsigned n> explicit ConstData(const VectorGeneric<n> &p): pointer(&p), size(n), nbytes(sizeof(double)), type(getMPIType<double>()) {}
    template <unsigned n,unsigned m> explicit ConstData(const TensorGeneric<n,m> *p,int s): pointer(p), size(n*m*s), nbytes(sizeof(double)), type(getMPIType<double>()) {}
    template <unsigned n,unsigned m> explicit ConstData(const TensorGeneric<n,m> &p): pointer(&p), size(n*m), nbytes(sizeof(double)), type(getMPIType<double>()) {}
    template <typename T> explicit ConstData(const std::vector<T>&v) {
      ConstData d(v.data(),v.size()); pointer=d.pointer; size=d.size; nbytes=d.nbytes; type=d.type;
    }
    template <typename T> explicit ConstData(const Matrix<T>&m) {
      if(m.nrows()*m.ncols()>0) { ConstData d(&m(0,0),m.nrows()*m.ncols()); pointer=d.pointer; size=d.size; nbytes=d.nbytes; type=d.type; }
      else { pointer=nullptr; size=0; }
    }
    explicit ConstData(const std::string&s) {
      if(s.size()>0) { ConstData d(&s[0],s.size()); pointer=d.pointer; size=d.size; nbytes=d.nbytes; type=d.type; }
      else { pointer=nullptr; size=0; }
    }
  };
public:
/// Wrapper class for MPI_Status
  class Status {
    int Get_count(MPI_Datatype)const;
  public:
    MPI_Status s;
    template <class T>
    int Get_count()const {return Get_count(getMPIType<T>());}
  };
/// Special status used when the status should be ignored.
/// E.g. `Recv(a,0,1,Communicator::StatusIgnore);`
/// Notice that this is the default for Recv, so the call above is equivalent to
/// `Recv(a,0,1);`
  static Status StatusIgnore;
/// Wrapper class for MPI_Request
  class Request {
  public:
    MPI_Request r;
    void wait(Status&s=StatusIgnore);
  };
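  // Example (sketch): a Request returned by Isend() below must be waited on
  // before the send buffer is reused. `comm`, `buf`, `n`, `dest`, and `tag`
  // are hypothetical names:
  //   Communicator::Request req=comm.Isend(buf,n,dest,tag);
  //   // ... overlap other work with the communication ...
  //   req.wait();   // blocks until the send buffer is safe to reuse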
143 : Communicator();
144 : /// Copy constructor.
145 : /// It effectively "clones" the communicator, providing a new one acting on the same group
146 : Communicator(const Communicator&);
147 : /// Assignment operator.
148 : /// It effectively "clones" the communicator, providing a new one acting on the same group
149 : Communicator& operator=(const Communicator&);
150 : /// Destructor
151 : virtual ~Communicator();
152 : /// Obtain the rank of the present process
153 : int Get_rank()const;
154 : /// Obtain the number of processes
155 : int Get_size()const;
156 : /// Set from a real MPI communicator.
157 : /// \param comm MPI communicator
158 : void Set_comm(MPI_Comm comm);
159 : /// Reference to MPI communicator
160 : MPI_Comm & Get_comm();
161 : /// Set from a pointer to a real MPI communicator (C).
162 : /// \param comm Pointer to a C MPI communicator
163 : void Set_comm(void*comm);
164 : /// Set from a pointer to a real MPI communicator (FORTRAN).
165 : /// \param comm Pointer to a FORTRAN MPI communicator (INTEGER)
166 : void Set_fcomm(void*comm);
167 : /// Wrapper to MPI_Abort.
168 : /// \param code Error code
169 : void Abort(int code);
170 : /// Wrapper to MPI_Barrier
171 : void Barrier()const;
172 : /// Tests if MPI library is initialized
173 : static bool initialized();
174 : /// Wrapper for MPI_Allreduce with MPI_SUM (data struct)
175 : void Sum(Data);
176 : /// Wrapper for MPI_Allreduce with MPI_SUM (pointer)
177 171354 : template <class T> void Sum(T*buf,int count) {Sum(Data(buf,count));}
178 : /// Wrapper for MPI_Allreduce with MPI_SUM (reference)
179 8293515 : template <class T> void Sum(T&buf) {Sum(Data(buf));}
180 : /// Wrapper for MPI_Allreduce with MPI_PROD (data struct)
181 : void Prod(Data);
182 : /// Wrapper for MPI_Allreduce with MPI_PROD (pointer)
183 : template <class T> void Prod(T*buf,int count) {Prod(Data(buf,count));}
184 : /// Wrapper for MPI_Allreduce with MPI_PROD (reference)
185 : template <class T> void Prod(T&buf) {Prod(Data(buf));}
186 : /// Wrapper for MPI_Allreduce with MPI_MAX (data struct)
187 : void Max(Data);
188 : /// Wrapper for MPI_Allreduce with MPI_MAX (pointer)
189 : template <class T> void Max(T*buf,int count) {Max(Data(buf,count));}
190 : /// Wrapper for MPI_Allreduce with MPI_MAX (reference)
191 : template <class T> void Max(T&buf) {Max(Data(buf));}
192 : /// Wrapper for MPI_Allreduce with MPI_MIN (data struct)
193 : void Min(Data);
194 : /// Wrapper for MPI_Allreduce with MPI_MIN (pointer)
195 : template <class T> void Min(T*buf,int count) {Min(Data(buf,count));}
196 : /// Wrapper for MPI_Allreduce with MPI_MIN (reference)
197 0 : template <class T> void Min(T&buf) {Min(Data(buf));}
198 :
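  // Example (sketch): in-place reductions over all ranks of this
  // communicator. `comm` and the variables are hypothetical:
  //   double e;                  // rank-local partial result
  //   std::vector<double> f(n);  // rank-local partial contributions
  //   comm.Sum(e);               // every rank ends up with the global sum
  //   comm.Sum(f);               // element-wise sum of the whole vector
  //   comm.Max(e);               // global maximum instead of the sum
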
/// Wrapper for MPI_Bcast (data struct)
  void Bcast(Data,int);
/// Wrapper for MPI_Bcast (pointer)
  template <class T> void Bcast(T*buf,int count,int root) {Bcast(Data(buf,count),root);}
/// Wrapper for MPI_Bcast (reference)
  template <class T> void Bcast(T&buf,int root) {Bcast(Data(buf),root);}

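  // Example (sketch): broadcasting data prepared on rank 0 to all ranks;
  // the call is collective and in-place everywhere. Names are hypothetical:
  //   std::vector<double> params(n);
  //   if(comm.Get_rank()==0) readParams(params);  // readParams: hypothetical
  //   comm.Bcast(params,0);                       // 0 is the root rank
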
/// Wrapper for MPI_Isend (data struct)
  Request Isend(ConstData,int,int);
/// Wrapper for MPI_Isend (pointer)
  template <class T> Request Isend(const T*buf,int count,int dest,int tag) {return Isend(ConstData(buf,count),dest,tag);}
/// Wrapper for MPI_Isend (reference)
  template <class T> Request Isend(const T&buf,int dest,int tag) {return Isend(ConstData(buf),dest,tag);}

/// Wrapper for MPI_Allgatherv (data struct)
  void Allgatherv(ConstData in,Data out,const int*,const int*);
/// Wrapper for MPI_Allgatherv (pointer)
  template <class T,class S> void Allgatherv(const T*sendbuf,int sendcount,S*recvbuf,const int*recvcounts,const int*displs) {
    Allgatherv(ConstData(sendbuf,sendcount),Data(recvbuf,0),recvcounts,displs);
  }
/// Wrapper for MPI_Allgatherv (reference)
  template <class T,class S> void Allgatherv(const T&sendbuf,S&recvbuf,const int*recvcounts,const int*displs) {
    Allgatherv(ConstData(sendbuf),Data(recvbuf),recvcounts,displs);
  }

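  // Example (sketch): gathering differently-sized blocks from each rank.
  // `recvcounts[i]` is the element count coming from rank i and `displs[i]`
  // its offset in the output buffer. All names are hypothetical:
  //   std::vector<int> recvcounts(comm.Get_size()),displs(comm.Get_size());
  //   comm.Allgather(nlocal,recvcounts);    // collect the per-rank sizes
  //   for(int i=1; i<comm.Get_size(); i++) displs[i]=displs[i-1]+recvcounts[i-1];
  //   comm.Allgatherv(local.data(),nlocal,global.data(),recvcounts.data(),displs.data());
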
/// Wrapper for MPI_Allgather (data struct)
  void Allgather(ConstData in,Data out);
/// Wrapper for MPI_Allgather (pointer)
  template <class T,class S> void Allgather(const T*sendbuf,int sendcount,S*recvbuf,int recvcount) {
    Allgather(ConstData(sendbuf,sendcount),Data(recvbuf,recvcount*Get_size()));
  }
/// Wrapper for MPI_Allgather (reference)
  template <class T,class S> void Allgather(const T&sendbuf,S&recvbuf) {
    Allgather(ConstData(sendbuf),Data(recvbuf));
  }

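  // Example (sketch): in the pointer form, recvcount is the per-rank element
  // count, so the receive buffer must hold recvcount*Get_size() elements.
  // Names are hypothetical:
  //   std::vector<double> mine(3),all(3*comm.Get_size());
  //   comm.Allgather(mine.data(),3,all.data(),3);
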
/// Wrapper for MPI_Recv (data struct)
  void Recv(Data,int,int,Status&s=StatusIgnore);
/// Wrapper for MPI_Recv (pointer)
  template <class T> void Recv(T*buf,int count,int source,int tag,Status&s=StatusIgnore) {Recv(Data(buf,count),source,tag,s);}
/// Wrapper for MPI_Recv (reference)
  template <class T> void Recv(T&buf,int source,int tag,Status&s=StatusIgnore) {Recv(Data(buf),source,tag,s);}

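  // Example (sketch): a simple non-blocking exchange between two ranks;
  // Status::Get_count() reports how many elements actually arrived.
  // `comm`, `buf`, and `n` are hypothetical:
  //   Communicator::Request req;
  //   Communicator::Status st;
  //   if(comm.Get_rank()==0) {
  //     req=comm.Isend(buf,n,1,77);     // send n elements to rank 1, tag 77
  //     req.wait();
  //   } else if(comm.Get_rank()==1) {
  //     comm.Recv(buf,n,0,77,st);       // receive from rank 0, tag 77
  //     int m=st.Get_count<double>();   // elements actually received
  //   }
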
/// Wrapper to MPI_Comm_split
  void Split(int,int,Communicator&)const;
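  // Example (sketch): splitting this communicator into sub-communicators;
  // ranks passing the same color end up in the same group, ordered by the
  // second (key) argument. Names are hypothetical:
  //   Communicator sub;
  //   int color=comm.Get_rank()%2;            // even/odd ranks -> two groups
  //   comm.Split(color,comm.Get_rank(),sub);  // sub now spans one group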
};

}

#endif
|