c Creation/update of the database of conformations
cMS$ATTRIBUTES C :: proc_proc
      include "DIMENSIONS.ZSCOPT"
      include "DIMENSIONS.FREE"
      integer IERROR,ERRCODE
      include "COMMON.IOUNITS"
      include "COMMON.CONTROL"
      include "COMMON.ALLPARM"
      double precision rr,x(max_paropt)
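C
C Initialize MPI and determine the rank of this processor (me) and the
C total number of processors (nprocs).
C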
      call MPI_Init( IERROR )
      call MPI_Comm_rank( MPI_COMM_WORLD, me, IERROR )
      call MPI_Comm_size( MPI_COMM_WORLD, nprocs, IERROR )
      if (ierror.gt.0) then
        write(iout,*) "SEVERE ERROR - Can't initialize MPI."
        call mpi_finalize(ierror)
        stop
      endif
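C
C Check that the number of processors does not exceed the compiled-in
C limit (MaxProcs+1).
C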
      if (nprocs.gt.MaxProcs+1) then
        write (2,*) "Error - too many processors",
     &   nprocs,MaxProcs+1
        write (2,*) "Increase MaxProcs and recompile"
        call MPI_Finalize(IERROR)
        stop
      endif
      call read_general_data(*10)
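C
C Split the processors into groups if independent runs for separate
C parameter sets were requested.
C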
      write (iout,*) "Calling proc_groups"
      call proc_groups
      write (iout,*) "proc_groups exited"
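C
C Read the energy-function parameter sets; when the parameter sets are
C handled separately, each processor group stores only its own set.
C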
      write (iout,*) "Calling parmread",ipar
      call parmread(ipar,*10)
      if (.not.separate_parset) then
        write (iout,*) "Finished storing parameters",ipar
      else if (ipar.eq.myparm) then
        write (iout,*) "Finished storing parameters",ipar
      call read_efree(*10)
      write (iout,*) "Finished READ_EFREE"
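C
C Read the data characterizing the studied protein(s).
C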
      call read_protein_data(*10)
      write (iout,*) "Finished READ_PROTEIN_DATA"
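C
C Read the reference structure of the protein.
C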
      call read_ref_structure(*10)
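C
C Read the conformations and their energies from the database file(s).
C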
      write (iout,*) "Begin read_database"
      call read_database(*10)
      write (iout,*) "Finished read_database"
      if (separate_parset) nparmset=1
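C
C For each slice of the database: partition the conformations among the
C processors, calculate the conformational energies, run the WHAM
C calculation, and write the results to the database.
C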
      if (ntot(islice).gt.0) then
        call work_partition(islice,.true.)
        write (iout,*) "work_partition OK"
        call enecalc(islice,*10)
        write (iout,*) "enecalc OK"
        call WHAM_CALC(islice,*10)
        write (iout,*) "wham_calc OK"
        call write_dbase(islice,*10)
        write (iout,*) "write_dbase OK"
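C
C Construct conformational ensembles if requested (ensembles > 0).
C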
        if (ensembles.gt.0) then
          call make_ensembles(islice,*10)
          write (iout,*) "make_ensembles OK"
      call MPI_Finalize( IERROR )
   10 write (iout,*) "Error termination of the program"
      call MPI_Finalize( IERROR )
      stop
      end
c------------------------------------------------------------------------------
      subroutine proc_groups
C Split the processors into the Master and Workers group, if needed.
      include "DIMENSIONS.ZSCOPT"
      include "DIMENSIONS.FREE"
      include "COMMON.IOUNITS"
      include "COMMON.FREE"
      integer n,chunk,i,j,ii,remainder
      integer kolor,key,ierror,errcode
C Split the communicator if independent runs for different parameter
C sets will be performed.
      if (nparmset.eq.1 .or. .not.separate_parset) then
        WHAM_COMM = MPI_COMM_WORLD
      else if (separate_parset) then
        if (nprocs.lt.nparmset) then
          write (iout,*)
     & "*** Cannot split parameter sets for fewer processors than sets",
     &  nprocs,nparmset
          call MPI_Finalize(ierror)
          stop
        endif
        write (iout,*) "nparmset",nparmset
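C Divide the processors evenly among the parameter sets and split
C MPI_COMM_WORLD into a separate communicator (WHAM_COMM) for each set.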
        nprocs = nprocs/nparmset
        kolor = me/nprocs
        key = mod(me,nprocs)
        write (iout,*) "My old rank",me," kolor",kolor," key",key
        call MPI_Comm_split(MPI_COMM_WORLD,kolor,key,WHAM_COMM,ierror)
        call MPI_Comm_size(WHAM_COMM,nprocs,ierror)
        call MPI_Comm_rank(WHAM_COMM,me,ierror)
        write (iout,*) "My new rank",me," comm size",nprocs
        write (iout,*) "MPI_COMM_WORLD",MPI_COMM_WORLD,
     &   " WHAM_COMM",WHAM_COMM
        write (iout,*) "My parameter set is",myparm
      endif
      end
c------------------------------------------------------------------------------
      subroutine work_partition(islice,lprint)
c Split the conformations between processors
      include "DIMENSIONS.ZSCOPT"
      include "DIMENSIONS.FREE"
      include "COMMON.IOUNITS"
      include "COMMON.PROT"
      integer n,chunk,i,j,ii,remainder
      integer kolor,key,ierror,errcode
C
C Divide conformations between processors; the first and the last
C conformation to be handled by the ith processor are stored in
C indstart(i) and indend(i), respectively.
C
C First try to assign an equal number of conformations to each processor.
C
      n=ntot(islice)
      write (iout,*) "n=",n
      indstart(0)=1
      chunk = N/nprocs1
      scount(0) = chunk
c      print *,"i",0," indstart",indstart(0)," scount",
      do i=1,nprocs1-1
        indstart(i)=chunk+indstart(i-1)
        scount(i)=scount(i-1)
c        print *,"i",i," indstart",indstart(i)," scount",
      enddo
C Determine how many conformations remain unassigned.
      remainder=N-(indstart(nprocs1-1)
     &  +scount(nprocs1-1)-1)
c      print *,"remainder",remainder
C Assign the remaining conformations to consecutive processors, starting
C from the lowest rank; this continues until the list is exhausted.
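C For example, with N=10 conformations and nprocs1=3 processors, chunk=3
C and remainder=1, so processor 0 handles conformations 1-4, processor 1
C handles 5-7, and processor 2 handles 8-10.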
      if (remainder .gt. 0) then
        do i=1,remainder
          scount(i-1) = scount(i-1) + 1
          indstart(i) = indstart(i) + i
        enddo
        do i=remainder+1,nprocs1-1
          indstart(i) = indstart(i) + remainder
        enddo
      endif
      indstart(nprocs1)=N+1
      do i=0,nprocs1-1
        indend(i)=indstart(i)+scount(i)-1
        idispl(i)=indstart(i)-1
      enddo
      N=0
      do i=0,nprocs1-1
        N=N+indend(i)-indstart(i)+1
      enddo
c      print *,"N",n," NTOT",ntot(islice)
      if (N.ne.ntot(islice)) then
        write (iout,*) "!!! Checksum error on processor",me,
     &   " slice",islice
        call MPI_Abort( MPI_COMM_WORLD, Ierror, Errcode )
      endif
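C Print the resulting partition of conformations among the processors.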
      if (lprint) then
        write (iout,*) "Partition of work between processors"
        do i=0,nprocs1-1
          write (iout,'(a,i5,a,i7,a,i7,a,i7)')
     &      "Processor",i," indstart",indstart(i),
     &      " indend",indend(i)," count",scount(i)
        enddo
      endif