/*
 *                This source code is part of
 *
 *          GROningen MAchine for Chemical Simulations
 *
 * Written by David van der Spoel, Erik Lindahl, Berk Hess, and others.
 * Copyright (c) 1991-2000, University of Groningen, The Netherlands.
 * Copyright (c) 2001-2004, The GROMACS development team,
 * check out http://www.gromacs.org for more information.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * If you want to redistribute modifications, please consider that
 * scientific software is very special. Version control is crucial -
 * bugs must be traceable. We will be happy to consider code for
 * inclusion in the official distribution, but derived work must not
 * be called official GROMACS. Details are found in the README & COPYING
 * files - if they are missing, get the official version at www.gromacs.org.
 *
 * To help us fund GROMACS development, we humbly ask that you cite
 * the papers on the package - you can find them in the top README file.
 *
 * For more info, check our website at http://www.gromacs.org
 *
 * GRoups of Organic Molecules in ACtion for Science
 */
typedef void* MPI_Comm;
typedef void* MPI_Request;
typedef void* MPI_Group;
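/*
 * Illustrative sketch (assumption: a build without a real MPI library, where
 * the dummy handles above are used): the handles are plain pointers, so they
 * can be stored and compared, but they must not be passed to real MPI calls.
 *
 *   MPI_Comm comm = NULL;    // placeholder communicator handle
 *   if (comm == NULL)
 *   {
 *       // single-rank code path
 *   }
 */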
typedef struct gmx_domdec_master *gmx_domdec_master_p_t;
typedef struct {
  int  j0;     /* j-cell start               */
  int  j1;     /* j-cell end                 */
  int  cg1;    /* i-charge-group end         */
  int  jcg0;   /* j-charge-group start       */
  int  jcg1;   /* j-charge-group end         */
  ivec shift0; /* Minimum shifts to consider */
  ivec shift1; /* Maximum shifts to consider */
} gmx_domdec_ns_ranges_t;
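/*
 * Illustrative sketch (hypothetical names: "zones", "zi" and the loop body
 * are not part of this header): a neighbor-search routine can use one
 * gmx_domdec_ns_ranges_t to bound its i- and j-charge-group loops.
 *
 *   const gmx_domdec_ns_ranges_t *iz = &zones->izone[zi];
 *   int cg_i, cg_j;
 *
 *   for (cg_i = 0; cg_i < iz->cg1; cg_i++)
 *   {
 *       for (cg_j = iz->jcg0; cg_j < iz->jcg1; cg_j++)
 *       {
 *           // consider the pair (cg_i, cg_j), limited to the periodic
 *           // shifts between iz->shift0 and iz->shift1
 *       }
 *   }
 */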
typedef struct {
  /* The number of zones including the home zone */
  int  n;
  /* The shift of the zones with respect to the home zone */
  ivec shift[DD_MAXZONE];
  /* The charge group boundaries for the zones */
  int  cg_range[DD_MAXZONE+1];
  /* The number of neighbor search zones with i-particles */
  int  nizone;
  /* The neighbor search charge group ranges for each i-zone */
  gmx_domdec_ns_ranges_t izone[DD_MAXIZONE];
} gmx_domdec_zones_t;
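/*
 * Illustrative sketch (hypothetical variable names; "zones" is assumed to be
 * a filled-in gmx_domdec_zones_t): walking the charge groups of every zone,
 * including the home zone (zone 0).
 *
 *   int z, cg;
 *
 *   for (z = 0; z < zones->n; z++)
 *   {
 *       for (cg = zones->cg_range[z]; cg < zones->cg_range[z+1]; cg++)
 *       {
 *           // zones->shift[z] is the shift of zone z relative to the
 *           // home zone
 *       }
 *   }
 */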
typedef struct gmx_ga2la *gmx_ga2la_t;

typedef struct gmx_reverse_top *gmx_reverse_top_p_t;

typedef struct gmx_domdec_constraints *gmx_domdec_constraints_p_t;

typedef struct gmx_domdec_specat_comm *gmx_domdec_specat_comm_p_t;

typedef struct gmx_domdec_comm *gmx_domdec_comm_p_t;

typedef struct gmx_pme_comm_n_box *gmx_pme_comm_n_box_p_t;
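/*
 * Note (illustrative): the *_p_t typedefs above are opaque handles; the
 * struct definitions live in the corresponding .c files, so code that only
 * includes this header can merely store and pass the pointers, e.g.
 *
 *   gmx_domdec_comm_p_t comm = NULL;   // filled in by the DD code
 */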
/* Tells whether the box is skewed in each of the three Cartesian directions */

/* Orthogonal vectors for triclinic cells, Cartesian index */

/* Normal vectors for the cell walls */
typedef struct {
  /* these buffers are used as destination buffers if MPI_IN_PLACE isn't
   * supported.
   */
  int    *ibuf;  /* for ints    */
  float  *fbuf;  /* for floats  */
  double *dbuf;  /* for doubles */
} mpi_in_place_buf_t;
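/*
 * Illustrative sketch (assumptions: a real MPI build, a hypothetical helper
 * name "sum_ints", an MPI_IN_PLACE_EXISTS-style configure define, and an
 * ibuf allocated with at least n elements): a global integer sum can fall
 * back to the scratch buffer when MPI_IN_PLACE is not available.
 *
 *   static void sum_ints(int n, int r[], mpi_in_place_buf_t *buf, MPI_Comm comm)
 *   {
 *   #ifdef MPI_IN_PLACE_EXISTS
 *       MPI_Allreduce(MPI_IN_PLACE, r, n, MPI_INT, MPI_SUM, comm);
 *   #else
 *       int i;
 *
 *       MPI_Allreduce(r, buf->ibuf, n, MPI_INT, MPI_SUM, comm);
 *       for (i = 0; i < n; i++)
 *       {
 *           r[i] = buf->ibuf[i];
 *       }
 *   #endif
 *   }
 */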
typedef struct {
  /* The DD particle-particle nodes only */
  /* The communication setup within the 'all' communicator is
   * defined in dd->comm in domdec.c
   */
  MPI_Comm mpi_comm_all;
  /* Use MPI_Sendrecv communication instead of non-blocking calls */
  /* The local DD cell index and rank */

  /* Communication with the PME only nodes */
  gmx_bool pme_receive_vir_ener;
  gmx_pme_comm_n_box_p_t cnb;
  MPI_Request req_pme[4];

  /* The communication setup, identical for each cell, Cartesian index */
  ivec dim;  /* indexed by 0 to ndim */

  /* PBC from dim 0 to npbcdim */

  /* Forward and backward neighboring cells, indexed by 0 to ndim */
  int  neighbor[DIM][2];

  /* Only available on the master node */
  gmx_domdec_master_p_t ma;

  /* Are there inter charge group constraints */
  gmx_bool bInterCGcons;

  /* Global atom number to interaction list */
  gmx_reverse_top_p_t reverse_top;

  /* The number of inter charge-group exclusions */

  gmx_domdec_specat_comm_p_t vsite_comm;

  /* Constraint stuff */
  gmx_domdec_constraints_p_t constraints;
  gmx_domdec_specat_comm_p_t constraint_comm;

  /* The local to global charge group index and local cg to local atom index */

  /* Local atom to local cg index, only for special cases */

  /* The number of home atoms */

  /* The total number of atoms: home and received zones */

  /* Index from the local atoms to the global atoms */

  /* Global atom number to local atom number list */

  /* Communication stuff */
  gmx_domdec_comm_p_t comm;

  /* The partitioning count, to keep track of the state */
  gmx_large_int_t ddp_count;

  /* gmx_pme_recv_f buffer */
  int  pme_recv_f_alloc;
  rvec *pme_recv_f_buf;
} gmx_domdec_t;
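/*
 * Illustrative sketch (assumptions: "dd" is a set-up gmx_domdec_t, "n" is the
 * number of incoming force vectors, and srenew()/over_alloc_dd() are the
 * usual GROMACS allocation helpers): growing the PME force receive buffer
 * before receiving forces from a PME-only node.
 *
 *   if (n > dd->pme_recv_f_alloc)
 *   {
 *       dd->pme_recv_f_alloc = over_alloc_dd(n);
 *       srenew(dd->pme_recv_f_buf, dd->pme_recv_f_alloc);
 *   }
 */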
typedef struct gmx_partdec *gmx_partdec_p_t;
typedef struct {
  MPI_Group mpi_group_masters;
  MPI_Comm mpi_comm_masters;
  /* these buffers are used as destination buffers if MPI_IN_PLACE isn't
   * supported.
   */
  mpi_in_place_buf_t *mpb;
} gmx_multisim_t;
#define DUTY_PP  (1<<0)
#define DUTY_PME (1<<1)
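/*
 * Illustrative sketch (assumption: "cr" is a set-up communication record): a
 * rank may carry both duties, so the flags are tested as a bitmask rather
 * than compared for equality.
 *
 *   if (cr->duty & DUTY_PP)
 *   {
 *       // short-range particle-particle work
 *   }
 *   if (cr->duty & DUTY_PME)
 *   {
 *       // long-range PME work
 *   }
 */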
} gmx_commrec_thread_t;
typedef struct {
  /* The nodeids in one sim are numbered sequentially from 0.
   * All communication within some simulation should happen
   * in mpi_comm_mysim, or its subset mpi_comm_mygroup.
   */
  int sim_nodeid, nnodes, npmenodes;

  /* thread numbers: */
  /* Not used yet: int threadid, nthreads; */
  /* The nodeid in the PP/PME, PP or PME group */

  MPI_Comm mpi_comm_mysim;
  MPI_Comm mpi_comm_mygroup;

#ifdef GMX_THREAD_SHM_FDECOMP
  gmx_commrec_thread_t thread;
#endif
  /* For domain decomposition */

  /* For particle decomposition */

  /* The duties of this node, see the defines above */

  /* these buffers are used as destination buffers if MPI_IN_PLACE isn't
   * supported.
   */
  mpi_in_place_buf_t *mpb;
} t_commrec;
#define MASTERNODE(cr) ((cr)->nodeid == 0)
/* #define MASTERTHREAD(cr) ((cr)->threadid == 0) */
/* #define MASTER(cr) (MASTERNODE(cr) && MASTERTHREAD(cr)) */
#define MASTER(cr) MASTERNODE(cr)
#define SIMMASTER(cr) (MASTER(cr) && ((cr)->duty & DUTY_PP))
#define NODEPAR(cr) ((cr)->nnodes > 1)
/* #define THREADPAR(cr) ((cr)->nthreads > 1) */
/* #define PAR(cr) (NODEPAR(cr) || THREADPAR(cr)) */
#define PAR(cr) NODEPAR(cr)
#define RANK(cr,nodeid) (nodeid)
#define MASTERRANK(cr) (0)
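/*
 * Illustrative sketch (assumption: "cr" is a set-up communication record):
 * MASTER() selects the master rank, SIMMASTER() additionally requires PP
 * duty, and PAR() tests whether more than one rank is in use.
 *
 *   if (SIMMASTER(cr))
 *   {
 *       // read input and write log output only once per simulation
 *   }
 *   if (PAR(cr))
 *   {
 *       // broadcast the data read on the master to all other ranks
 *   }
 */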
#define DOMAINDECOMP(cr) ((cr)->dd != NULL)
#define DDMASTER(dd) ((dd)->rank == (dd)->masterrank)

#define PARTDECOMP(cr) ((cr)->pd != NULL)

#define MULTISIM(cr) ((cr)->ms)
#define MSRANK(ms,nodeid) (nodeid)
#define MASTERSIM(ms) ((ms)->sim == 0)

/* The master of all (the node that prints the remaining run time etc.) */
#define MULTIMASTER(cr) (SIMMASTER(cr) && (!MULTISIM(cr) || MASTERSIM((cr)->ms)))
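/*
 * Illustrative sketch (assumption: "cr" is a set-up communication record):
 * DOMAINDECOMP() and PARTDECOMP() distinguish the two parallelization
 * schemes, while MULTIMASTER() singles out the one rank of a (possibly
 * multi-) simulation that, e.g., prints the remaining run time.
 *
 *   if (DOMAINDECOMP(cr))
 *   {
 *       // domain-decomposition communication through cr->dd
 *   }
 *   else if (PARTDECOMP(cr))
 *   {
 *       // particle-decomposition communication through cr->pd
 *   }
 *   if (MULTIMASTER(cr))
 *   {
 *       // console output for the whole run
 *   }
 */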