2 c Creation/update of the database of conformations
c NOTE(review): this is a fragmentary extract of the WHAM main driver;
c many interior lines (program statement, endif/enddo, continuations)
c are missing from this view.  Comments below describe only the
c statements that are visible here.
8 cMS$ATTRIBUTES C :: proc_proc
c Compile-time dimensioning and shared common blocks.
11 include "DIMENSIONS.ZSCOPT"
12 include "DIMENSIONS.FREE"
15 integer IERROR,ERRCODE
18 include "COMMON.IOUNITS"
20 include "COMMON.CONTROL"
21 include "COMMON.ALLPARM"
23 double precision rr,x(max_paropt)
c Initialize MPI and determine this process' rank (me) and the total
c number of processes (nprocs) in MPI_COMM_WORLD.
27 call MPI_Init( IERROR )
28 call MPI_Comm_rank( MPI_COMM_WORLD, me, IERROR )
29 call MPI_Comm_size( MPI_COMM_WORLD, nprocs, IERROR )
c Bail out if MPI could not be initialized.
32 write(iout,*) "SEVERE ERROR - Can't initialize MPI."
33 call mpi_finalize(ierror)
c Refuse to run with more ranks than the compiled-in limit MaxProcs+1;
c the user must raise MaxProcs and rebuild.
36 if (nprocs.gt.MaxProcs+1) then
37 write (2,*) "Error - too many processors",
39 write (2,*) "Increase MaxProcs and recompile"
40 call MPI_Finalize(IERROR)
c Read run-control input.  "*10" is a Fortran alternate-return label:
c any called routine can jump straight to the error exit at label 10.
57 call read_general_data(*10)
61 if ((constr_dist.gt.0).and.(genconstr.eq.0)) call read_dist_constr
c Split processors into groups per parameter set (see proc_groups).
63 write (iout,*) "Calling proc_groups"
65 write (iout,*) "proc_groups exited"
c Read energy-parameter sets.  When separate_parset is off, every set
c is stored; otherwise only the set assigned to this rank (myparm).
69 write (iout,*) "Calling parmread",ipar
70 call parmread(ipar,*10)
71 if (.not.separate_parset) then
73 write (iout,*) "Finished storing parameters",ipar
74 else if (ipar.eq.myparm) then
76 write (iout,*) "Finished storing parameters",ipar
c Read free-energy input, per-protein data, the reference structure,
c and finally the conformation database itself.
81 write (iout,*) "Finished READ_EFREE"
83 call read_protein_data(*10)
84 write (iout,*) "Finished READ_PROTEIN_DATA"
89 call read_ref_structure(*10)
90 if ((constr_dist.gt.0).and.(genconstr.gt.0)) call gen_dist_constr
95 C if (constr_dist.gt.0) call read_dist_constr
96 write (iout,*) "Begin read_database"
98 call read_database(*10)
99 write (iout,*) "Finished read_database"
c With separate parameter sets each group handles exactly one set.
101 if (separate_parset) nparmset=1
c Per-slice pipeline (runs only for non-empty slices): partition the
c conformations over ranks, compute energies, do the WHAM iteration,
c write the database back, and optionally build conformational
c ensembles.
103 if (ntot(islice).gt.0) then
105 call work_partition(islice,.true.)
106 write (iout,*) "work_partition OK"
109 call enecalc(islice,*10)
110 write (iout,*) "enecalc OK"
112 call WHAM_CALC(islice,*10)
113 write (iout,*) "wham_calc OK"
115 call write_dbase(islice,*10)
116 write (iout,*) "write_dbase OK"
118 if (ensembles.gt.0) then
119 call make_ensembles(islice,*10)
120 write (iout,*) "make_ensembles OK"
c Normal termination.
126 call MPI_Finalize( IERROR )
c Label 10: common error exit, reached through the "*10" alternate
c returns of the calls above.
129 10 write (iout,*) "Error termination of the program"
130 call MPI_Finalize( IERROR )
133 c------------------------------------------------------------------------------
135 subroutine proc_groups
136 C Split the processors into the Master and Workers group, if needed.
c NOTE(review): fragmentary extract — declarations/terminators between
c the visible lines are missing from this view.
139 include "DIMENSIONS.ZSCOPT"
140 include "DIMENSIONS.FREE"
142 include "COMMON.IOUNITS"
144 include "COMMON.FREE"
145 integer n,chunk,i,j,ii,remainder
146 integer kolor,key,ierror,errcode
150 C Split the communicator if independent runs for different parameter
151 C sets will be performed.
c Single parameter set (or no separation requested): everyone works in
c MPI_COMM_WORLD.
153 if (nparmset.eq.1 .or. .not.separate_parset) then
154 WHAM_COMM = MPI_COMM_WORLD
155 else if (separate_parset) then
c Cannot give each parameter set its own group with fewer ranks than
c there are sets; abort the whole run.
156 if (nprocs.lt.nparmset) then
158 & "*** Cannot split parameter sets for fewer processors than sets",
160 call MPI_Finalize(ierror)
163 write (iout,*) "nparmset",nparmset
c nprocs becomes the per-group size (integer division).
164 nprocs = nprocs/nparmset
167 write (iout,*) "My old rank",me," kolor",kolor," key",key
c Partition MPI_COMM_WORLD by color into one communicator per
c parameter set, then refresh this rank's size/rank within its group.
168 call MPI_Comm_split(MPI_COMM_WORLD,kolor,key,WHAM_COMM,ierror)
169 call MPI_Comm_size(WHAM_COMM,nprocs,ierror)
170 call MPI_Comm_rank(WHAM_COMM,me,ierror)
171 write (iout,*) "My new rank",me," comm size",nprocs
172 write (iout,*) "MPI_COMM_WORLD",MPI_COMM_WORLD,
173 & " WHAM_COMM",WHAM_COMM
175 write (iout,*) "My parameter set is",myparm
184 c------------------------------------------------------------------------------
185 subroutine work_partition(islice,lprint)
186 c Split the conformations between processors
c NOTE(review): fragmentary extract — several do-loop headers and
c terminators between the visible lines are missing from this view;
c comments describe only the visible statements.
189 include "DIMENSIONS.ZSCOPT"
190 include "DIMENSIONS.FREE"
192 include "COMMON.IOUNITS"
194 include "COMMON.PROT"
196 integer n,chunk,i,j,ii,remainder
197 integer kolor,key,ierror,errcode
200 C Divide conformations between processors; the first and
201 C the last conformation to handle by ith processor is stored in
202 C indstart(i) and indend(i), respectively.
204 C First try to assign equal number of conformations to each processor.
207 write (iout,*) "n=",n
211 c print *,"i",0," indstart",indstart(0)," scount",
c Each subsequent processor starts one equal-size chunk after the
c previous one, with the same count.
214 indstart(i)=chunk+indstart(i-1)
215 scount(i)=scount(i-1)
216 c print *,"i",i," indstart",indstart(i)," scount",
220 C Determine how many conformations remained yet unassigned.
222 remainder=N-(indstart(nprocs1-1)
223 & +scount(nprocs1-1)-1)
224 c print *,"remainder",remainder
226 C Assign the remainder conformations to consecutive processors, starting
227 C from the lowest rank; this continues until the list is exhausted.
229 if (remainder .gt. 0) then
c Ranks below the remainder each absorb one extra conformation, so
c every later start index shifts by the number of extras before it.
231 scount(i-1) = scount(i-1) + 1
232 indstart(i) = indstart(i) + i
c Ranks at or beyond the remainder shift by the full remainder.
234 do i=remainder+1,nprocs1-1
235 indstart(i) = indstart(i) + remainder
c Sentinel: one past the last conformation.
239 indstart(nprocs1)=N+1
c Derive last index and scatter/gather displacement for each rank.
243 indend(i)=indstart(i)+scount(i)-1
244 idispl(i)=indstart(i)-1
c Recompute the total from the per-rank ranges as a checksum.
249 N=N+indend(i)-indstart(i)+1
252 c print *,"N",n," NTOT",ntot(islice)
c Abort the whole run if the partition does not cover ntot(islice)
c exactly.
253 if (N.ne.ntot(islice)) then
254 write (iout,*) "!!! Checksum error on processor",me,
257 call MPI_Abort( MPI_COMM_WORLD, Ierror, Errcode )
c Optional report of the final partition (presumably when lprint is
c .true. -- the guarding if is not visible in this extract).
261 write (iout,*) "Partition of work between processors"
263 write (iout,'(a,i5,a,i7,a,i7,a,i7)')
264 & "Processor",i," indstart",indstart(i),
265 & " indend",indend(i)," count",scount(i)