c NOTE(review): this is a line-numbered, non-contiguous excerpt -- the
c leading integer on each line is the original file's line number and
c many lines are missing.  Code is annotated as-is, not edited.
2 c Creation/update of the database of conformations
8 cMS$ATTRIBUTES C :: proc_proc
11 include "DIMENSIONS.ZSCOPT"
12 include "DIMENSIONS.FREE"
15 integer IERROR,ERRCODE
18 include "COMMON.IOUNITS"
20 include "COMMON.CONTROL"
21 include "COMMON.ALLPARM"
23 double precision rr,x(max_paropt)
c Initialize MPI and query this process's rank (me) and world size
c (nprocs).
27 call MPI_Init( IERROR )
28 call MPI_Comm_rank( MPI_COMM_WORLD, me, IERROR )
29 call MPI_Comm_size( MPI_COMM_WORLD, nprocs, IERROR )
c MPI-init failure path (the guarding condition is on lines missing
c from this excerpt): report and shut MPI down.
32 write(iout,*) "SEVERE ERROR - Can't initialize MPI."
33 call mpi_finalize(ierror)
c Refuse to run with more ranks than the compiled-in limit MaxProcs+1.
36 if (nprocs.gt.MaxProcs+1) then
37 write (2,*) "Error - too many processors",
39 write (2,*) "Increase MaxProcs and recompile"
40 call MPI_Finalize(IERROR)
c Input phase.  The "*10" arguments are F77 alternate returns: on error
c the callee jumps to label 10 (error termination) near the program end.
57 call read_general_data(*10)
61 if (constr_dist.gt.0) call read_dist_constr
63 write (iout,*) "Calling proc_groups"
65 write (iout,*) "proc_groups exited"
c Read force-field parameter set ipar; when separate_parset is set,
c only the set matching myparm is stored (presumably one set per
c processor group -- TODO confirm against proc_groups).
69 write (iout,*) "Calling parmread",ipar
70 call parmread(ipar,*10)
71 if (.not.separate_parset) then
73 write (iout,*) "Finished storing parameters",ipar
74 else if (ipar.eq.myparm) then
76 write (iout,*) "Finished storing parameters",ipar
81 write (iout,*) "Finished READ_EFREE"
83 call read_protein_data(*10)
84 write (iout,*) "Finished READ_PROTEIN_DATA"
89 call read_ref_structure(*10)
93 C if (constr_dist.gt.0) call read_dist_constr
94 write (iout,*) "Begin read_database"
96 call read_database(*10)
97 write (iout,*) "Finished read_database"
c With separate parameter-set runs each group handles a single set.
99 if (separate_parset) nparmset=1
c Per-slice pipeline (slice loop header not visible in this excerpt):
c partition the conformations, compute energies, run the WHAM
c calculation, write results, and optionally build ensembles.
101 if (ntot(islice).gt.0) then
103 call work_partition(islice,.true.)
104 write (iout,*) "work_partition OK"
107 call enecalc(islice,*10)
108 write (iout,*) "enecalc OK"
110 call WHAM_CALC(islice,*10)
111 write (iout,*) "wham_calc OK"
113 call write_dbase(islice,*10)
114 write (iout,*) "write_dbase OK"
116 if (ensembles.gt.0) then
117 call make_ensembles(islice,*10)
118 write (iout,*) "make_ensembles OK"
c Normal termination.
124 call MPI_Finalize( IERROR )
c Label 10: common error exit targeted by all the alternate returns
c above; MPI is still finalized before the program stops.
127 10 write (iout,*) "Error termination of the program"
128 call MPI_Finalize( IERROR )
131 c------------------------------------------------------------------------------
133 subroutine proc_groups
134 C Split the processors into the Master and Workers group, if needed.
c NOTE(review): non-contiguous excerpt -- interior lines (including the
c kolor/key computation and the subroutine's end) are missing.
137 include "DIMENSIONS.ZSCOPT"
138 include "DIMENSIONS.FREE"
140 include "COMMON.IOUNITS"
142 include "COMMON.FREE"
143 integer n,chunk,i,j,ii,remainder
144 integer kolor,key,ierror,errcode
148 C Split the communicator if independent runs for different parameter
149 C sets will be performed.
c Single parameter set, or no separate runs requested: every rank
c works in MPI_COMM_WORLD.
151 if (nparmset.eq.1 .or. .not.separate_parset) then
152 WHAM_COMM = MPI_COMM_WORLD
153 else if (separate_parset) then
c Each parameter set needs at least one rank; otherwise abort the run.
154 if (nprocs.lt.nparmset) then
156 & "*** Cannot split parameter sets for fewer processors than sets",
158 call MPI_Finalize(ierror)
161 write (iout,*) "nparmset",nparmset
c nprocs becomes the size of each per-set sub-communicator.
162 nprocs = nprocs/nparmset
c kolor selects the sub-communicator and key orders ranks within it;
c both are computed on lines missing from this excerpt -- TODO confirm.
165 write (iout,*) "My old rank",me," kolor",kolor," key",key
166 call MPI_Comm_split(MPI_COMM_WORLD,kolor,key,WHAM_COMM,ierror)
c Refresh nprocs/me so the rest of the code sees the sub-communicator's
c size and this rank's position within it.
167 call MPI_Comm_size(WHAM_COMM,nprocs,ierror)
168 call MPI_Comm_rank(WHAM_COMM,me,ierror)
169 write (iout,*) "My new rank",me," comm size",nprocs
170 write (iout,*) "MPI_COMM_WORLD",MPI_COMM_WORLD,
171 & " WHAM_COMM",WHAM_COMM
173 write (iout,*) "My parameter set is",myparm
182 c------------------------------------------------------------------------------
183 subroutine work_partition(islice,lprint)
184 c Split the conformations between processors
c NOTE(review): non-contiguous excerpt -- loop headers (do statements),
c several endif/enddo lines, and the subroutine's end are missing.
187 include "DIMENSIONS.ZSCOPT"
188 include "DIMENSIONS.FREE"
190 include "COMMON.IOUNITS"
192 include "COMMON.PROT"
194 integer n,chunk,i,j,ii,remainder
195 integer kolor,key,ierror,errcode
198 C Divide conformations between processors; the first and
199 C the last conformation to handle by ith processor is stored in
200 C indstart(i) and indend(i), respectively.
202 C First try to assign equal number of conformations to each processor.
205 write (iout,*) "n=",n
209 c print *,"i",0," indstart",indstart(0)," scount",
c Equal-share pass: rank i starts chunk conformations after rank i-1
c and receives the same count.
212 indstart(i)=chunk+indstart(i-1)
213 scount(i)=scount(i-1)
214 c print *,"i",i," indstart",indstart(i)," scount",
218 C Determine how many conformations remained yet unassigned.
220 remainder=N-(indstart(nprocs1-1)
221 & +scount(nprocs1-1)-1)
222 c print *,"remainder",remainder
224 C Assign the remainder conformations to consecutive processors, starting
225 C from the lowest rank; this continues until the list is exhausted.
227 if (remainder .gt. 0) then
c The first `remainder` ranks each take one extra conformation; their
c starts shift by how many extras precede them.
229 scount(i-1) = scount(i-1) + 1
230 indstart(i) = indstart(i) + i
c Ranks past the remainder keep their count but shift their start by
c the full remainder.
232 do i=remainder+1,nprocs1-1
233 indstart(i) = indstart(i) + remainder
c Sentinel entry: one past the last conformation index.
237 indstart(nprocs1)=N+1
c Derive the last index per rank and the 0-based displacement
c (presumably for MPI gatherv/scatterv counts -- TODO confirm).
241 indend(i)=indstart(i)+scount(i)-1
242 idispl(i)=indstart(i)-1
c Checksum: re-accumulate the range sizes; they must add back up to
c ntot(islice), otherwise the partition is inconsistent.
247 N=N+indend(i)-indstart(i)+1
250 c print *,"N",n," NTOT",ntot(islice)
251 if (N.ne.ntot(islice)) then
252 write (iout,*) "!!! Checksum error on processor",me,
c Hard abort of the whole job on an inconsistent partition.
255 call MPI_Abort( MPI_COMM_WORLD, Ierror, Errcode )
c When lprint is true, report the partition per processor (the loop
c around this write continues past the end of this excerpt).
259 write (iout,*) "Partition of work between processors"
261 write (iout,'(a,i5,a,i7,a,i7,a,i7)')
262 & "Processor",i," indstart",indstart(i),
263 & " indend",indend(i)," count",scount(i)