[gromacs/qmmm-gamess-us.git] / include / types / commrec.h
/*
 *
 * This source code is part of
 *
 * G R O M A C S
 *
 * GROningen MAchine for Chemical Simulations
 *
 * VERSION 3.2.0
 * Written by David van der Spoel, Erik Lindahl, Berk Hess, and others.
 * Copyright (c) 1991-2000, University of Groningen, The Netherlands.
 * Copyright (c) 2001-2004, The GROMACS development team,
 * check out http://www.gromacs.org for more information.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * If you want to redistribute modifications, please consider that
 * scientific software is very special. Version control is crucial -
 * bugs must be traceable. We will be happy to consider code for
 * inclusion in the official distribution, but derived work must not
 * be called official GROMACS. Details are found in the README & COPYING
 * files - if they are missing, get the official version at www.gromacs.org.
 *
 * To help us fund GROMACS development, we humbly ask that you cite
 * the papers on the package - you can find them in the top README file.
 *
 * For more info, check our website at http://www.gromacs.org
 *
 * And Hey:
 * GRoups of Organic Molecules in ACtion for Science
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#ifdef GMX_LIB_MPI
#include <mpi.h>
#endif
#ifdef GMX_THREADS
#include "tmpi.h"
#endif

#include "idef.h"

#ifdef __cplusplus
extern "C" {
#endif

#define DD_MAXZONE  8
#define DD_MAXIZONE 4

typedef struct gmx_domdec_master *gmx_domdec_master_p_t;

typedef struct {
  int  j0;       /* j-cell start               */
  int  j1;       /* j-cell end                 */
  int  cg1;      /* i-charge-group end         */
  int  jcg0;     /* j-charge-group start       */
  int  jcg1;     /* j-charge-group end         */
  ivec shift0;   /* Minimum shifts to consider */
  ivec shift1;   /* Maximum shifts to consider */
} gmx_domdec_ns_ranges_t;

typedef struct {
  /* The number of zones including the home zone */
  int  n;
  /* The shift of the zones with respect to the home zone */
  ivec shift[DD_MAXZONE];
  /* The charge group boundaries for the zones */
  int  cg_range[DD_MAXZONE+1];
  /* The number of neighbor search zones with i-particles */
  int  nizone;
  /* The neighbor search charge group ranges for each i-zone */
  gmx_domdec_ns_ranges_t izone[DD_MAXIZONE];
} gmx_domdec_zones_t;
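
/* Illustrative sketch (not part of the original header): one way the zone
 * and i-zone ranges above could be traversed.  The function name is an
 * assumption for illustration only, and the snippet assumes <stdio.h>.
 *
 *   void print_zone_ranges(const gmx_domdec_zones_t *zones)
 *   {
 *       int iz;
 *       for (iz = 0; iz < zones->nizone; iz++)
 *       {
 *           const gmx_domdec_ns_ranges_t *r = &zones->izone[iz];
 *           printf("i-zone %d: cg %d-%d, j-cells %d-%d, j-cg %d-%d\n",
 *                  iz, zones->cg_range[iz], zones->cg_range[iz+1] - 1,
 *                  r->j0, r->j1 - 1, r->jcg0, r->jcg1 - 1);
 *       }
 *   }
 */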

typedef struct gmx_ga2la *gmx_ga2la_t;

typedef struct gmx_reverse_top *gmx_reverse_top_p_t;

typedef struct gmx_domdec_constraints *gmx_domdec_constraints_p_t;

typedef struct gmx_domdec_specat_comm *gmx_domdec_specat_comm_p_t;

typedef struct gmx_domdec_comm *gmx_domdec_comm_p_t;

typedef struct gmx_pme_comm_n_box *gmx_pme_comm_n_box_p_t;

typedef struct {
  int  npbcdim;
  int  nboundeddim;
  rvec box0;
  rvec box_size;
  /* Tells if the box is skewed for each of the three Cartesian directions */
  ivec tric_dir;
  rvec skew_fac;
  /* Orthogonal vectors for triclinic cells, Cartesian index */
  rvec v[DIM][DIM];
  /* Normal vectors for the cell walls */
  rvec normal[DIM];
} gmx_ddbox_t;

#if defined(GMX_MPI) && !defined(GMX_THREADS) && !defined(MPI_IN_PLACE_EXISTS)
typedef struct {
  /* these buffers are used as destination buffers if MPI_IN_PLACE isn't
     supported.*/
  int    *ibuf; /* for ints */
  int     ibuf_alloc;

  float  *fbuf; /* for floats */
  int     fbuf_alloc;

  double *dbuf; /* for doubles */
  int     dbuf_alloc;
} mpi_in_place_buf_t;
#endif
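
/* Illustrative sketch (not part of the original header): how such a buffer
 * can stand in for MPI_IN_PLACE as a separate reduction destination.  The
 * helper name and the reallocation scheme are assumptions; the snippet
 * assumes <stdlib.h> and <string.h>.
 *
 *   static void sumf_no_in_place(int n, float r[], MPI_Comm comm,
 *                                mpi_in_place_buf_t *buf)
 *   {
 *       if (n > buf->fbuf_alloc)
 *       {
 *           buf->fbuf_alloc = n;
 *           buf->fbuf = realloc(buf->fbuf, n*sizeof(float));
 *       }
 *       MPI_Allreduce(r, buf->fbuf, n, MPI_FLOAT, MPI_SUM, comm);
 *       memcpy(r, buf->fbuf, n*sizeof(float));
 *   }
 */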

typedef struct {
  /* The DD particle-particle nodes only */
  /* The communication setup within the communicator all,
   * defined in dd->comm in domdec.c
   */
  int  nnodes;
#ifdef GMX_MPI
  MPI_Comm mpi_comm_all;
#endif
  /* Use MPI_Sendrecv communication instead of non-blocking calls */
  bool bSendRecv2;
  /* The local DD cell index and rank */
  ivec ci;
  int  rank;
  ivec master_ci;
  int  masterrank;
  /* Communication with the PME only nodes */
  int  pme_nodeid;
  bool pme_receive_vir_ener;
  gmx_pme_comm_n_box_p_t cnb;
#ifdef GMX_MPI
  int  nreq_pme;
  MPI_Request req_pme[4];
#endif

  /* The communication setup, identical for each cell, Cartesian index */
  ivec nc;
  int  ndim;
  ivec dim;  /* indexed by 0 to ndim */
  bool bGridJump;

  /* PBC from dim 0 to npbcdim */
  int  npbcdim;

  /* Screw PBC? */
  bool bScrewPBC;

  /* Forward and backward neighboring cells, indexed by 0 to ndim */
  int  neighbor[DIM][2];

  /* Only available on the master node */
  gmx_domdec_master_p_t ma;

  /* Are there inter charge group constraints */
  bool bInterCGcons;

  /* Global atom number to interaction list */
  gmx_reverse_top_p_t reverse_top;
  int  nbonded_global;
  int  nbonded_local;

  /* The number of inter charge-group exclusions */
  int  n_intercg_excl;

  /* Vsite stuff */
  int  *ga2la_vsite;
  gmx_domdec_specat_comm_p_t vsite_comm;

  /* Constraint stuff */
  gmx_domdec_constraints_p_t constraints;
  gmx_domdec_specat_comm_p_t constraint_comm;

  /* The local to global charge group index and local cg to local atom index */
  int  ncg_home;
  int  ncg_tot;
  int  *index_gl;
  int  *cgindex;
  int  cg_nalloc;
  /* Local atom to local cg index, only for special cases */
  int  *la2lc;
  int  la2lc_nalloc;

  /* The number of home atoms */
  int  nat_home;
  /* The total number of atoms: home and received zones */
  int  nat_tot;
  /* Index from the local atoms to the global atoms */
  int  *gatindex;
  int  gatindex_nalloc;

  /* Global atom number to local atom number list */
  gmx_ga2la_t ga2la;

  /* Communication stuff */
  gmx_domdec_comm_p_t comm;

  /* The partitioning count, to keep track of the state */
  gmx_large_int_t ddp_count;

  /* gmx_pme_recv_f buffer */
  int  pme_recv_f_alloc;
  rvec *pme_recv_f_buf;

} gmx_domdec_t;
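
/* Illustrative sketch (not part of the original header): how the index
 * arrays in gmx_domdec_t relate.  The function name is an assumption for
 * illustration only, and the snippet assumes <stdio.h>.
 *
 *   static void print_global_atoms_of_cg(const gmx_domdec_t *dd, int cg)
 *   {
 *       int a;
 *       // cgindex gives the local atom range of local charge group cg,
 *       // gatindex translates each local atom to its global atom number.
 *       for (a = dd->cgindex[cg]; a < dd->cgindex[cg+1]; a++)
 *       {
 *           printf("local atom %d -> global atom %d\n", a, dd->gatindex[a]);
 *       }
 *   }
 */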

typedef struct gmx_partdec *gmx_partdec_p_t;

typedef struct {
  int nsim;
  int sim;
#ifdef GMX_MPI
  MPI_Group mpi_group_masters;
  MPI_Comm  mpi_comm_masters;
#if !defined(GMX_THREADS) && !defined(MPI_IN_PLACE_EXISTS)
  /* these buffers are used as destination buffers if MPI_IN_PLACE isn't
     supported.*/
  mpi_in_place_buf_t *mpb;
#endif
#endif
} gmx_multisim_t;

#define DUTY_PP  (1<<0)
#define DUTY_PME (1<<1)

typedef struct {
  int bUse;
#ifdef GMX_MPI
  MPI_Comm comm_intra;
  int      rank_intra;
  MPI_Comm comm_inter;
#endif

} gmx_nodecomm_t;

typedef struct {
  int dummy;
} gmx_commrec_thread_t;

typedef struct {
  /* The node ids in one simulation are numbered sequentially from 0.
   * All communication within a simulation should happen
   * in mpi_comm_mysim, or its subset mpi_comm_mygroup.
   */
  int sim_nodeid,nnodes,npmenodes;
  int threadid,nthreads;
  /* The nodeid in the PP/PME, PP or PME group */
  int nodeid;
#ifdef GMX_MPI
  MPI_Comm mpi_comm_mysim;
  MPI_Comm mpi_comm_mygroup;
#endif

#ifdef GMX_THREAD_SHM_FDECOMP
  gmx_commrec_thread_t thread;
#endif

  gmx_nodecomm_t nc;

  /* For domain decomposition */
  gmx_domdec_t *dd;

  /* For particle decomposition */
  gmx_partdec_p_t pd;

  /* The duties of this node, see the defines above */
  int duty;

  gmx_multisim_t *ms;

#if defined(GMX_MPI) && !defined(GMX_THREADS) && !defined(MPI_IN_PLACE_EXISTS)
  /* these buffers are used as destination buffers if MPI_IN_PLACE isn't
     supported.*/
  mpi_in_place_buf_t *mpb;
#endif
} t_commrec;

#define MASTERNODE(cr)     ((cr)->nodeid == 0)
#define MASTERTHREAD(cr)   ((cr)->threadid == 0)
#define MASTER(cr)         (MASTERNODE(cr) && MASTERTHREAD(cr))
#define SIMMASTER(cr)      (MASTER(cr) && ((cr)->duty & DUTY_PP))
#define NODEPAR(cr)        ((cr)->nnodes > 1)
#define THREADPAR(cr)      ((cr)->nthreads > 1)
#define PAR(cr)            (NODEPAR(cr) || THREADPAR(cr))
#define RANK(cr,nodeid)    (nodeid)
#define MASTERRANK(cr)     (0)

#define DOMAINDECOMP(cr)   ((cr)->dd != NULL)
#define DDMASTER(dd)       ((dd)->rank == (dd)->masterrank)

#define PARTDECOMP(cr)     ((cr)->pd != NULL)

#define MULTISIM(cr)       ((cr)->ms)
#define MSRANK(ms,nodeid)  (nodeid)
#define MASTERSIM(ms)      ((ms)->sim == 0)

/* The master of all (the node that prints the remaining run time etc.) */
#define MULTIMASTER(cr)    (SIMMASTER(cr) && (!MULTISIM(cr) || MASTERSIM((cr)->ms)))
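
/* Illustrative sketch (not part of the original header): typical use of the
 * macros above in simulation code.  The functions called here are
 * assumptions for illustration, not the GROMACS API.
 *
 *   void step_report(const t_commrec *cr)
 *   {
 *       if (PAR(cr) && DOMAINDECOMP(cr))
 *       {
 *           // Running in parallel with domain decomposition
 *           do_repartitioning(cr);
 *       }
 *       if (MASTER(cr))
 *       {
 *           // Only the master node/thread writes output
 *           print_status();
 *       }
 *   }
 */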

#ifdef __cplusplus
}
#endif