c NOTE(review): partial listing -- many original source lines are missing
c between the numbered lines below.  Code kept byte-identical; only
c comments added.  The PROGRAM statement and final END are not visible.
2 c Optimize the UNRES energy function by minimization of a quartic target
3 c function or by the VMC method.
9 cMS$ATTRIBUTES C :: proc_proc
c Array-size limits for the Z-score/optimization build (max_paropt etc.).
12 include "DIMENSIONS.ZSCOPT"
15 integer IERROR,ERRCODE,kolor,key
18 include "COMMON.IOUNITS"
19 include "COMMON.OPTIM"
c x is the vector of optimized energy-function parameters.
21 double precision rr,x(max_paropt)
24 c print *,"Starting..."
26 c print *,"Initializing MPI..."
c Bring up MPI and establish this process's rank (me) and pool size (nprocs).
27 call MPI_Init( IERROR )
28 call MPI_Comm_rank( MPI_COMM_WORLD, me, IERROR )
29 call MPI_Comm_size( MPI_COMM_WORLD, nprocs, IERROR )
30 c print *,"Finished initializing MPI..."
33 c print *,"Me",me," Master",master," Ierror",ierror
c Fatal-error path when MPI start-up failed; the guarding IF is not
c visible in this listing -- presumably tests IERROR (TODO confirm).
35 write(iout,*) "SEVERE ERROR - Can't initialize MPI."
36 call mpi_finalize(ierror)
51 c print *,"calling openunits"
53 c print *,"openunits called"
c Input phase.  Each reader takes an alternate-return argument (*10):
c on a read error control jumps to label 10 (error termination) below.
54 call read_general_data(*10)
55 write (iout,'(80(1h-)/10x,
56 & "Maximum likelihood optimization of UNRES energy function",
57 & " v. 05/10/16"/80(1h-))')
61 write (iout,*) "Finished READ_GENERAL_DATA"
64 call parmread(iparm,*10)
66 write (iout,*) "Finished parmread"
68 call read_optim_parm(*10)
69 call print_general_data(*10)
70 call read_protein_data(*10)
71 write (iout,*) "Finished READ_PROTEIN_DATA"
73 call read_database(*10)
74 write (iout,*) "Finished READ_DATABASE"
77 c write (iout,*) Me,' calling PROC_GROUPS'
79 c write (iout,*) Me,' calling WORK_PARTITION_MAP'
80 c call work_partition_map(nvarr)
c Distribute conformational data over processors; also error-exits to 10.
82 call proc_data(nvarr,x,*10)
c Only the Master drives the optimization proper (ENDIF not visible here).
85 if (me.eq.Master) then
87 call maxlikopt(nvarr,x)
c Normal termination.
94 call MPI_Finalize( IERROR )
c Label 10: common error exit reached via the alternate returns above.
99 10 write (iout,*) "Error termination of the program"
100 call MPI_Finalize( IERROR )
103 c------------------------------------------------------------------------------
c NOTE(review): the SUBROUTINE statement for this unit is missing from this
c partial listing.  The visible body prints per-task call counts and timings
c for this processor and, via MPI_Reduce, the sums over all processors of
c WHAM_COMM (Master prints the aggregate table).
107 parameter (ntasks=11)
108 include "DIMENSIONS.ZSCOPT"
c Receive buffers for the cross-processor reductions.
113 double precision ttask_all(ntasks)
114 integer nctask_all(ntasks)
116 include "COMMON.IOUNITS"
118 double precision ttask
c Timing accumulators filled elsewhere: ttask(i) = total time spent in
c task i, nctask(i) = number of calls of task i.
120 common /timing/ ttask(ntasks),nctask(ntasks)
c Task labels; only the first two are named, the rest are blank.
121 character*16 tname(ntasks) /"function","gradient",9*''/
124 write (iout,'(80(1h-))')
126 write (iout,*) "Routine call info from the processor ",me," ..."
128 write (iout,*) "Routine call info ..."
131 write (iout,'(65(1h-))')
132 write (iout,100) "task"," # calls"," total time",
134 write (iout,'(65(1h-))')
c Mean time per call; the +1.0d-10 guards against division by zero for
c tasks that were never called.
136 write (iout,200) tname(i),nctask(i),ttask(i),
137 & ttask(i)/(nctask(i)+1.0d-10)
c Sum the timings and call counts over all processors onto the Master.
141 call MPI_Reduce(ttask(1),ttask_all(1),ntasks,
142 & MPI_DOUBLE_PRECISION, MPI_SUM, Master, WHAM_COMM,IERROR)
143 call MPI_Reduce(nctask(1),nctask_all(1),ntasks,
144 & MPI_INTEGER, MPI_SUM, Master, WHAM_COMM,IERROR)
145 if (Me.eq.Master) then
146 write (iout,'(80(1h-))')
147 write (iout,*) "Routine call info from all processors ..."
149 write (iout,'(65(1h-))')
150 write (iout,100) "task"," # calls"," total time",
152 write (iout,'(65(1h-))')
c Same table as above but with the reduced (all-processor) totals.
154 write (iout,200) tname(i),nctask_all(i),ttask_all(i),
155 & ttask_all(i)/(nctask_all(i)+1.0d-10)
c Shared formats for the header (100) and data rows (200) of both tables.
161 100 format(a,t21,a,t31,a,t46,a)
162 200 format(a,t21,i10,f15.2,f15.8)
164 c------------------------------------------------------------------------------
166 subroutine proc_groups
167 C Split the processors into the Master and Workers group, if needed.
c NOTE(review): almost the whole body of this subroutine is absent from
c this partial listing; only declarations are visible below.
170 include "DIMENSIONS.ZSCOPT"
172 include "COMMON.IOUNITS"
174 include "COMMON.VMCPAR"
175 integer n,chunk,iprot,i,j,ii,remainder
c kolor/key suggest an MPI_Comm_split-style grouping -- TODO confirm,
c the splitting code itself is not visible here.
176 integer kolor,key,ierror,errcode
179 C No secondary structure constraints.
185 c-------------------------------------------------------------------------------
186 subroutine work_partition(lprint)
187 c Split the conformations between processors
c NOTE(review): partial listing -- several enclosing DO/IF statements of
c this routine are missing; code kept byte-identical, comments only.
190 include "DIMENSIONS.ZSCOPT"
192 include "COMMON.CLASSES"
193 include "COMMON.IOUNITS"
195 include "COMMON.VMCPAR"
196 integer n,chunk,iprot,i,j,ii,remainder
197 integer kolor,key,ierror,errcode
200 C Divide conformations between processors; for each proteins the first and
201 C the last conformation to handle by ith processor is stored in
202 C indstart(i,iprot) and indend(i,iprot), respectively.
204 C First try to assign equal number of conformations to each processor.
208 write (iout,*) "Protein",iprot," n=",n
c Processor 0 gets the first chunk; subsequent ranks follow contiguously.
211 scount(0,iprot) = chunk
212 c print *,"i",0," indstart",indstart(0,iprot)," scount",
215 indstart(i,iprot)=chunk+indstart(i-1,iprot)
216 scount(i,iprot)=scount(i-1,iprot)
217 c print *,"i",i," indstart",indstart(i,iprot)," scount",
221 C Determine how many conformations remained yet unassigned.
223 remainder=N-(indstart(nprocs1-1,iprot)
224 & +scount(nprocs1-1,iprot)-1)
225 c print *,"remainder",remainder
227 C Assign the remainder conformations to consecutive processors, starting
228 C from the lowest rank; this continues until the list is exhausted.
230 if (remainder .gt. 0) then
c NOTE(review): the DO statement enclosing the next two lines (over the
c first `remainder` ranks, presumably) is missing from this listing.
232 scount(i-1,iprot) = scount(i-1,iprot) + 1
233 indstart(i,iprot) = indstart(i,iprot) + i
c Shift the start indices of the ranks that did not receive an extra item.
235 do i=remainder+1,nprocs1-1
236 indstart(i,iprot) = indstart(i,iprot) + remainder
c Sentinel entry one past the last rank: empty range starting at N+1.
240 indstart(nprocs1,iprot)=N+1
241 scount(nprocs1,iprot)=0
c Derive the inclusive end index and the 0-based displacement (for
c MPI scatter/gather-style calls) from start index and count.
244 indend(i,iprot)=indstart(i,iprot)+scount(i,iprot)-1
245 idispl(i,iprot)=indstart(i,iprot)-1
c Recount all assigned conformations as a consistency check.
250 N=N+indend(i,iprot)-indstart(i,iprot)+1
253 c print *,"N",n," NTOT",ntot_work(iprot)
254 if (N.ne.ntot_work(iprot)) then
255 write (iout,*) "!!! Checksum error on processor",me
c Partitioning is inconsistent -- abort the whole communicator.
257 call MPI_Abort( WHAM_COMM, Ierror, Errcode )
c Build i2ii: map from global conformation index to this processor's
c local index (body of the mapping not fully visible here).
261 do i=1,ntot_work(iprot)
262 if (i.ge.indstart(me1,iprot) .and. i.le.indend(me1,iprot))
269 c write (iout,*) "i",i," iprot",iprot," i2ii",i2ii(i,iprot)
c Optional diagnostic dump (presumably guarded by lprint -- TODO confirm).
274 write (iout,*) "Partition of work between processors"
276 write (iout,*) "Protein",iprot
278 write (iout,*) "The i2ii array"
279 do j=1,ntot_work(iprot)
280 write (iout,*) j,i2ii(j,iprot)
284 write (iout,'(a,i5,a,i7,a,i7,a,i7)')
285 & "Processor",i," indstart",indstart(i,iprot),
286 & " indend",indend(i,iprot)," count",scount(i,iprot)
292 c------------------------------------------------------------------------------
293 subroutine jebadelko(nvarr)
c Worker-side service loop: repeatedly evaluates the target function
c (func1) until istop becomes 0, then deletes this processor's scratch
c files.  NOTE(review): partial listing -- the routine continues past the
c end of this chunk and several control statements are not visible.
296 include "DIMENSIONS.ZSCOPT"
298 include "COMMON.IOUNITS"
300 include "COMMON.VMCPAR"
301 include "COMMON.CLASSES"
302 include "COMMON.OPTIM"
303 include "COMMON.WEIGHTS"
304 include "COMMON.WEIGHTDER"
305 include "COMMON.ENERGIES"
306 include "COMMON.TIME1"
307 include "COMMON.PROTNAME"
308 include "COMMON.PROTFILES"
309 include "COMMON.COMPAR"
310 integer What, TAG, IERROR, status(MPI_STATUS_SIZE), istop, iprot,
c Parameter vector and gradient work arrays.
313 double precision x(max_paropt), g(max_paropt), viol
317 double precision rdum
c CPU- and wall-clock timers for this routine.
318 double precision tcpu,t1,t1w,t1_ini,t1w_ini
322 write(iout,*) "Processor",me,me1," called JEBADELKO"
324 if (me.eq.Master) then
c Initial function evaluation; func1 also sets istop.
326 call func1(nvarr,istop,x)
c Diagnostic dump of the lowest energies per protein/batch/temperature.
329 write (iout,*) "ELOWEST at slave starting JEBADELKO"
331 do ibatch=1,natlike(iprot)+2
332 do ib=1,nbeta(ibatch,iprot)
333 write (iout,*) "iprot",iprot," ibatch",ibatch," ib",ib,
334 & " elowest",elowest(ib,ibatch,iprot)
340 t1w_ini = MPI_WTIME()
c Service loop: keep evaluating until the Master signals termination
c (istop = 0, set inside func1).
342 do while (istop.ne.0)
344 write (iout,*) "ELOWEST at slave calling FUNC1 from JBADELKO"
346 do ibatch=1,natlike(iprot)+2
347 do ib=1,nbeta(ibatch,iprot)
348 write (iout,*) "iprot",iprot," ibatch",ibatch," ib",ib,
349 & " elowest",elowest(ib,ibatch,iprot)
354 call func1(nvarr,istop,x)
c Total wall-clock time spent in the service loop.
356 t1w = mpi_wtime() - t1w_ini
359 write (iout,*) "CPU time",t1," wall clock time",t1w
c Dump the energies of the conformations owned by this processor.
364 write (iout,*) "Energies of processor",me
367 write (iout,*) "Protein ",protname(iprot)
369 if (i.ge.indstart(me1,j).and.i.le.indend(me1,j)) then
370 write(iout,*)i,e_total(i,j),rmstb(i,j),iscore(i,0,j)
c Clean-up: delete the per-protein coordinate scratch file.  If it is no
c longer open (inquire), reopen it on icbase first, then close+delete.
376 write (iout,*) "Deleting scratchfile",bprotfiles(iprot)
377 inquire (file=bprotfiles(iprot),number=iun,opened=op)
378 write (iout,*) "unit",iun," icbase",icbase
380 open (icbase,file=bprotfiles(iprot),status="old",
381 & form="unformatted",access="direct",recl=lenrec(iprot))
382 close(icbase,status="delete")
384 close(iun,status="delete")
c Same clean-up for the per-protein energy scratch file on ientout.
387 write (iout,*) "Deleting scratchfile",benefiles(iprot)
388 inquire (file=benefiles(iprot),number=iun,opened=op)
c NOTE(review): label says "ientout" but the value printed is icbase --
c probably should print ientout; left unchanged (diagnostic only).
389 write (iout,*) "unit",iun," ientout",icbase
391 open (ientout,file=benefiles(iprot),status="old",
392 & form="unformatted",access="direct",recl=lenrec_ene(iprot))
393 close(ientout,status="delete")
395 close (iun,status="delete")
399 write (iout,*) "Processor",me,"leaves JEBADELKO"