2 c Optimize the UNRES energy function by minimization of a quartic target
3 c function or by the VMC method.
c
c NOTE(review): this is a numbered source listing with many lines elided
c (the PROGRAM statement and several IF/ENDIF constructs are not
c visible); the comments added below describe only the statements shown.
c
9 cMS$ATTRIBUTES C :: proc_proc
11 include "DIMENSIONS.ZSCOPT"
14 integer IERROR,ERRCODE,kolor,key
17 include "COMMON.IOUNITS"
18 include "COMMON.OPTIM"
c x holds the optimized parameter vector; max_paropt comes from the
c DIMENSIONS include files.
20 double precision rr,x(max_paropt)
23 c print *,"Starting..."
c Initialize MPI and determine this process' rank (me) and the total
c number of processes (nprocs).
25 c print *,"Initializing MPI..."
26 call MPI_Init( IERROR )
27 call MPI_Comm_rank( MPI_COMM_WORLD, me, IERROR )
28 call MPI_Comm_size( MPI_COMM_WORLD, nprocs, IERROR )
29 c print *,"Finished initializing MPI..."
32 c print *,"Me",me," Master",master," Ierror",ierror
c Abort path when MPI could not be initialized (the enclosing IF is
c not visible in this listing).
34 write(iout,*) "SEVERE ERROR - Can't initialize MPI."
35 call mpi_finalize(ierror)
50 c print *,"calling openunits"
52 c print *,"openunits called"
c Read control data and parameters.  The "*10" arguments are Fortran
c alternate-return specifiers: on error the callee returns to label 10
c below instead of to the normal return point.
53 call read_general_data(*10)
54 write (iout,'(80(1h-)/10x,
55 & "Maximum likelihood optimization of UNRES energy function",
56 & " v. 05/10/16"/80(1h-))')
60 write (iout,*) "Finished READ_GENERAL_DATA"
63 call parmread(iparm,*10)
65 write (iout,*) "Finished parmread"
67 call read_optim_parm(*10)
68 call print_general_data(*10)
69 call read_protein_data(*10)
70 write (iout,*) "Finished READ_PROTEIN_DATA"
72 call read_database(*10)
73 write (iout,*) "Finished READ_DATABASE"
76 c write (iout,*) Me,' calling PROC_GROUPS'
78 c write (iout,*) Me,' calling WORK_PARTITION_MAP'
79 c call work_partition_map(nvarr)
c Distribute the data among processors; sets nvarr and the initial x.
81 call proc_data(nvarr,x,*10)
c Only the master runs the optimizer; the workers' service loop is not
c visible in this listing - presumably entered elsewhere (e.g. in
c JEBADELKO below) - TODO confirm.
84 if (me.eq.Master) then
86 call maxlikopt(nvarr,x)
c Normal termination.
93 call MPI_Finalize( IERROR )
c Label 10: common error exit reached via the alternate returns above.
98 10 write (iout,*) "Error termination of the program"
99 call MPI_Finalize( IERROR )
102 c------------------------------------------------------------------------------
c Fragment of a timing-summary routine (its SUBROUTINE statement is
c elided from this listing).  It prints per-task call counts and
c accumulated times for this processor, then MPI-reduces the counters
c onto Master and prints the totals over all processors.
106 parameter (ntasks=11)
107 include "DIMENSIONS.ZSCOPT"
c Receive buffers for the MPI_SUM reduction of the per-task counters.
112 double precision ttask_all(ntasks)
113 integer nctask_all(ntasks)
115 include "COMMON.IOUNITS"
117 double precision ttask
c ttask(i)  - accumulated time spent in task i,
c nctask(i) - number of calls of task i (filled in elsewhere).
119 common /timing/ ttask(ntasks),nctask(ntasks)
c Only the first two task slots are named; the remaining 9 are blank.
120 character*16 tname(ntasks) /"function","gradient",9*''/
123 write (iout,'(80(1h-))')
125 write (iout,*) "Routine call info from the processor ",me," ..."
127 write (iout,*) "Routine call info ..."
130 write (iout,'(65(1h-))')
131 write (iout,100) "task"," # calls"," total time",
133 write (iout,'(65(1h-))')
c Per-task report line; the 1.0d-10 term guards the average against
c division by zero when a task was never called.
135 write (iout,200) tname(i),nctask(i),ttask(i),
136 & ttask(i)/(nctask(i)+1.0d-10)
c Sum the counters over all processors of WHAM_COMM onto Master.
140 call MPI_Reduce(ttask(1),ttask_all(1),ntasks,
141 & MPI_DOUBLE_PRECISION, MPI_SUM, Master, WHAM_COMM,IERROR)
142 call MPI_Reduce(nctask(1),nctask_all(1),ntasks,
143 & MPI_INTEGER, MPI_SUM, Master, WHAM_COMM,IERROR)
c Only Master holds valid reduction results, so only it prints them.
144 if (Me.eq.Master) then
145 write (iout,'(80(1h-))')
146 write (iout,*) "Routine call info from all processors ..."
148 write (iout,'(65(1h-))')
149 write (iout,100) "task"," # calls"," total time",
151 write (iout,'(65(1h-))')
153 write (iout,200) tname(i),nctask_all(i),ttask_all(i),
154 & ttask_all(i)/(nctask_all(i)+1.0d-10)
160 100 format(a,t21,a,t31,a,t46,a)
161 200 format(a,t21,i10,f15.2,f15.8)
163 c------------------------------------------------------------------------------
165 subroutine proc_groups
166 C Split the processors into the Master and Workers group, if needed.
c NOTE(review): the executable body of this routine is almost entirely
c elided from this listing; only the declarations and one comment are
c visible, so nothing further can be documented with confidence.
169 include "DIMENSIONS.ZSCOPT"
171 include "COMMON.IOUNITS"
173 include "COMMON.VMCPAR"
174 integer n,chunk,iprot,i,j,ii,remainder
c kolor/key suggest an MPI_Comm_split is performed in the elided body -
c TODO confirm.
175 integer kolor,key,ierror,errcode
178 C No secondary structure constraints.
184 c-------------------------------------------------------------------------------
185 subroutine work_partition(lprint)
186 c Split the conformations between processors
c NOTE(review): numbered listing with elided lines (several DO/ENDDO
c and IF/ENDIF statements, and the END statement, are not visible);
c comments describe only what is shown.  lprint presumably gates the
c diagnostic printout near the end - TODO confirm (its declaration is
c not visible here).
189 include "DIMENSIONS.ZSCOPT"
191 include "COMMON.CLASSES"
192 include "COMMON.IOUNITS"
194 include "COMMON.VMCPAR"
195 integer n,chunk,iprot,i,j,ii,remainder
196 integer kolor,key,ierror,errcode
199 C Divide conformations between processors; for each proteins the first and
200 C the last conformation to handle by ith processor is stored in
201 C indstart(i,iprot) and indend(i,iprot), respectively.
203 C First try to assign equal number of conformations to each processor.
207 write (iout,*) "Protein",iprot," n=",n
210 scount(0,iprot) = chunk
211 c print *,"i",0," indstart",indstart(0,iprot)," scount",
c Each subsequent processor starts "chunk" conformations after the
c previous one and initially receives the same count.
214 indstart(i,iprot)=chunk+indstart(i-1,iprot)
215 scount(i,iprot)=scount(i-1,iprot)
216 c print *,"i",i," indstart",indstart(i,iprot)," scount",
220 C Determine how many conformations remained yet unassigned.
222 remainder=N-(indstart(nprocs1-1,iprot)
223 & +scount(nprocs1-1,iprot)-1)
224 c print *,"remainder",remainder
226 C Assign the remainder conformations to consecutive processors, starting
227 C from the lowest rank; this continues until the list is exhausted.
229 if (remainder .gt. 0) then
c Give one extra conformation to each of the first "remainder"
c processors, shifting start indices by the cumulative extras.
231 scount(i-1,iprot) = scount(i-1,iprot) + 1
232 indstart(i,iprot) = indstart(i,iprot) + i
234 do i=remainder+1,nprocs1-1
235 indstart(i,iprot) = indstart(i,iprot) + remainder
c Sentinel entry past the last processor: start beyond N, zero count.
239 indstart(nprocs1,iprot)=N+1
240 scount(nprocs1,iprot)=0
c Derive the last index and the 0-based displacement (as used by MPI
c scatter/gather-type calls) from start index and count.
243 indend(i,iprot)=indstart(i,iprot)+scount(i,iprot)-1
244 idispl(i,iprot)=indstart(i,iprot)-1
c Checksum: re-accumulate the sizes of all assigned ranges and compare
c against the expected total; abort the whole job on mismatch.
249 N=N+indend(i,iprot)-indstart(i,iprot)+1
252 c print *,"N",n," NTOT",ntot_work(iprot)
253 if (N.ne.ntot_work(iprot)) then
254 write (iout,*) "!!! Checksum error on processor",me
256 call MPI_Abort( WHAM_COMM, Ierror, Errcode )
c Build i2ii: for conformations inside this processor's (me1) range,
c map the global index to a local one; handling of out-of-range
c entries is in elided lines - TODO confirm.
260 do i=1,ntot_work(iprot)
261 if (i.ge.indstart(me1,iprot) .and. i.le.indend(me1,iprot))
268 c write (iout,*) "i",i," iprot",iprot," i2ii",i2ii(i,iprot)
c Diagnostic printout of the resulting partition.
273 write (iout,*) "Partition of work between processors"
275 write (iout,*) "Protein",iprot
277 write (iout,*) "The i2ii array"
278 do j=1,ntot_work(iprot)
279 write (iout,*) j,i2ii(j,iprot)
283 write (iout,'(a,i5,a,i7,a,i7,a,i7)')
284 & "Processor",i," indstart",indstart(i,iprot),
285 & " indend",indend(i,iprot)," count",scount(i,iprot)
291 c------------------------------------------------------------------------------
292 subroutine jebadelko(nvarr)
c Worker-side driver: repeatedly evaluates the target function (FUNC1)
c until istop becomes 0, reports timing and per-conformation energies,
c then deletes the per-protein scratch files.  NOTE(review): this
c listing elides many lines (loop/IF constructs and the END statement);
c the comments below cover only the visible statements.
295 include "DIMENSIONS.ZSCOPT"
297 include "COMMON.IOUNITS"
299 include "COMMON.VMCPAR"
300 include "COMMON.CLASSES"
301 include "COMMON.OPTIM"
302 include "COMMON.WEIGHTS"
303 include "COMMON.WEIGHTDER"
304 include "COMMON.ENERGIES"
305 include "COMMON.TIME1"
306 include "COMMON.PROTNAME"
307 include "COMMON.PROTFILES"
308 include "COMMON.COMPAR"
309 integer What, TAG, IERROR, status(MPI_STATUS_SIZE), istop, iprot,
312 double precision x(max_paropt), g(max_paropt), viol
316 double precision rdum
317 double precision tcpu,t1,t1w,t1_ini,t1w_ini
321 write(iout,*) "Processor",me,me1," called JEBADELKO"
323 if (me.eq.Master) then
c Priming evaluation; FUNC1 sets istop (0 = terminate).
325 call func1(nvarr,istop,x)
328 write (iout,*) "ELOWEST at slave starting JEBADELKO"
c Dump the lowest energies per protein/batch/temperature point.
330 do ibatch=1,natlike(iprot)+2
331 do ib=1,nbeta(ibatch,iprot)
332 write (iout,*) "iprot",iprot," ibatch",ibatch," ib",ib,
333 & " elowest",elowest(ib,ibatch,iprot)
c Start the wall-clock timer for the work loop.
339 t1w_ini = MPI_WTIME()
c Keep evaluating until the master signals termination via istop=0.
341 do while (istop.ne.0)
343 write (iout,*) "ELOWEST at slave calling FUNC1 from JBADELKO"
345 do ibatch=1,natlike(iprot)+2
346 do ib=1,nbeta(ibatch,iprot)
347 write (iout,*) "iprot",iprot," ibatch",ibatch," ib",ib,
348 & " elowest",elowest(ib,ibatch,iprot)
353 call func1(nvarr,istop,x)
c Elapsed wall-clock time of the whole work loop.
355 t1w = mpi_wtime() - t1w_ini
358 write (iout,*) "CPU time",t1," wall clock time",t1w
c Report the energies of the conformations handled by this processor.
363 write (iout,*) "Energies of processor",me
366 write (iout,*) "Protein ",protname(iprot)
368 if (i.ge.indstart(me1,j).and.i.le.indend(me1,j)) then
369 write(iout,*)i,e_total(i,j),rmstb(i,j),iscore(i,0,j)
c Delete the direct-access conformation scratch file; if it is not
c currently open, reopen it on unit icbase so CLOSE can delete it,
c otherwise delete it through the unit INQUIRE reported (iun).
375 write (iout,*) "Deleting scratchfile",bprotfiles(iprot)
376 inquire (file=bprotfiles(iprot),number=iun,opened=op)
377 write (iout,*) "unit",iun," icbase",icbase
379 open (icbase,file=bprotfiles(iprot),status="old",
380 & form="unformatted",access="direct",recl=lenrec(iprot))
381 close(icbase,status="delete")
383 close(iun,status="delete")
c Same cleanup for the energy scratch file, skipped when
c mod_other_params is set.
386 if (.not.mod_other_params) then
387 write (iout,*) "Deleting scratchfile",benefiles(iprot)
388 inquire (file=benefiles(iprot),number=iun,opened=op)
c NOTE(review): the label says "ientout" but the value printed is
c icbase - looks like a copy/paste slip from the block above; verify.
389 write (iout,*) "unit",iun," ientout",icbase
391 open (ientout,file=benefiles(iprot),status="old",
392 & form="unformatted",access="direct",recl=lenrec_ene(iprot))
393 close(ientout,status="delete")
395 close (iun,status="delete")
400 write (iout,*) "Processor",me,"leaves JEBADELKO"