1 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
5 ! Program to carry out conformational search of proteins in an united-residue !
8 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! NOTE(review): this chunk is an elided extraction of the UNRES main driver.
! The integer at the start of each line is the original source line number
! fused into the text; many intermediate lines (including the PROGRAM
! statement itself) are not visible here.  Comments below describe only what
! the visible lines demonstrate.
!
! Module USE statements replacing the legacy COMMON-block includes (the
! retained "! include 'COMMON.*'" comments record the original includes).
13 use control, only:tcpu
14 use io_base, only:ilen
15 use geometry, only:chainbuild
16 use control, only:dajczas
19 use compare, only: test
25 ! implicit real*8 (a-h,o-z)
26 ! include 'DIMENSIONS'
28 use MPI_data ! include 'COMMON.SETUP'
33 use MPI_data, only: me,king
36 ! include 'COMMON.TIME1'
37 ! include 'COMMON.INTERACT'
38 ! include 'COMMON.NAMES'
39 ! include 'COMMON.GEO'
40 ! include 'COMMON.HEADER'
41 ! include 'COMMON.CONTROL'
42 ! include 'COMMON.CONTACTS'
43 ! include 'COMMON.CHAIN'
44 ! include 'COMMON.VAR'
45 ! include 'COMMON.IOUNITS'
46 ! include 'COMMON.FFIELD'
47 ! include 'COMMON.REMD'
49 ! include 'COMMON.SBRIDGE'
51 real(kind=8) :: hrtime,mintime,sectime
52 character(len=64) :: text_mode_calc(-2:14)
! Human-readable labels for each MODECALC job type (index -2..14); used for
! the banner printed after the job setup is read.
53 text_mode_calc(-2) = 'test'
54 text_mode_calc(-1) = 'cos'
55 text_mode_calc(0) = 'Energy evaluation or minimization'
56 text_mode_calc(1) = 'Regularization of PDB structure'
57 text_mode_calc(2) = 'Threading of a sequence on PDB structures'
58 text_mode_calc(3) = 'Monte Carlo (with minimization) '
59 text_mode_calc(4) = 'Energy minimization of multiple conformations'
60 text_mode_calc(5) = 'Checking energy gradient'
61 text_mode_calc(6) = 'Entropic sampling Monte Carlo (with minimization)'
62 text_mode_calc(7) = 'Energy map'
63 text_mode_calc(8) = 'CSA calculations'
64 text_mode_calc(9) = 'Not used 9'
65 text_mode_calc(10) = 'Not used 10'
66 text_mode_calc(11) = 'Soft regularization of PDB structure'
67 text_mode_calc(12) = 'Mesoscopic molecular dynamics (MD) '
68 text_mode_calc(13) = 'Not used 13'
69 text_mode_calc(14) = 'Replica exchange molecular dynamics (REMD)'
71 ! call memmon_print_usage()
75 write(iout,*)'### LAST MODIFIED 09/03/15 15:32PM by EL'
! Only the master rank (me == king) prints the code/compile information.
76 if (me.eq.king) call cinfo
77 ! Read force field parameters and job setup data
80 write (iout,*) "After readrtns"
! Print the job-type banner (master only, unless per-rank output files are
! requested via out1file).
83 if (me.eq.king .or. .not. out1file) then
84 write (iout,'(2a/)') &
85 text_mode_calc(modecalc)(:ilen(text_mode_calc(modecalc))), &
87 if (minim) write (iout,'(a)') &
88 'Conformations will be energy-minimized.'
89 write (iout,'(80(1h*)/)')
! Dispatch on modecalc: each job type delegates to its exec_* driver below.
93 if (modecalc.eq.-2) then
96 else if (modecalc.eq.-1) then
97 write(iout,*) "call check_sc_map next"
101 !elwrite(iout,*)"!!!!!!!!!!!!!!!!! in unres"
! Fine-grain (FG) slave ranks never reach the dispatch: they enter the
! ergastulum work loop and only serve energy/gradient requests.
104 if (fg_rank.gt.0) then
105 ! Fine-grain slaves just do energy and gradient components.
106 call ergastulum ! slave workhouse in Latin
109 if (modecalc.eq.0) then
110 call exec_eeval_or_minim
111 else if (modecalc.eq.1) then
113 else if (modecalc.eq.2) then
115 else if (modecalc.eq.3 .or. modecalc .eq.6) then
117 else if (modecalc.eq.4) then
118 call exec_mult_eeval_or_minim
119 else if (modecalc.eq.5) then
121 else if (ModeCalc.eq.7) then
123 else if (ModeCalc.eq.8) then
125 else if (modecalc.eq.11) then
127 else if (modecalc.eq.12) then
130 else if (modecalc.eq.14) then
133 write (iout,'(a)') 'This calculation type is not supported',&
! Orderly shutdown: coarse-grain masters (fg_rank == 0) finish their task,
! timing is reported, and MPI is finalized before the final CPU-time stamp.
140 if (fg_rank.eq.0) call finish_task
141 ! call memmon_print_usage()
143 call print_detailed_timing
145 call MPI_Finalize(ierr)
! dajczas converts the elapsed CPU time (tcpu()) into hours/minutes/seconds.
148 call dajczas(tcpu(),hrtime,mintime,sectime)
149 stop '********** Program terminated normally.'
153 !-----------------------------------------------------------------------------
155 !-----------------------------------------------------------------------------
! NOTE(review): tail of subroutine exec_MD -- the SUBROUTINE statement and
! several body lines (allocation and the MD call itself, presumably) are
! elided in this extraction.  Visible lines allocate comparison arrays and
! announce the chain rebuild on the master rank.
157 use MPI_data !include 'COMMON.SETUP'
158 use control_data !include 'COMMON.CONTROL'
159 use geometry, only:chainbuild,chainbuild_cart
161 use io_units !include 'COMMON.IOUNITS'
162 use compare, only:alloc_compare_arrays
165 ! include 'DIMENSIONS'
171 print *,'After MD alloc'
172 call alloc_compare_arrays
173 print *,'After compare alloc'
! Banner printed by the master only, unless per-rank output is enabled.
174 if (me.eq.king .or. .not. out1file) &
175 write (iout,*) "Calling chainbuild"
183 end subroutine exec_MD
184 !---------------------------------------------------------------------------
! Driver for replica-exchange molecular dynamics (MREMD, modecalc 14).
! Visible steps: allocate the MREMD and comparison arrays, then announce the
! REMD run (replica list and replica count).  The REMD call itself is in
! lines elided from this extraction.
185 subroutine exec_MREMD
186 use MPI_data !include 'COMMON.SETUP'
187 use control_data !include 'COMMON.CONTROL'
188 use io_units !include 'COMMON.IOUNITS'
190 use REMD_data !include 'COMMON.REMD'
191 use geometry, only:chainbuild
193 use compare, only:alloc_compare_arrays
196 ! include 'DIMENSIONS'
203 call alloc_MREMD_arrays
204 call alloc_compare_arrays
205 ! if (me.eq.king .or. .not. out1file) &
206 ! write (iout,*) "Calling chainbuild"
208 if (me.eq.king .or. .not. out1file) &
209 write (iout,*) "Calling REMD",remd_mlist,nrep
219 end subroutine exec_MREMD
220 !-----------------------------------------------------------------------------
! Single-conformation energy evaluation or minimization (modecalc 0).
! Visible flow: build the chain if no PDB/Cartesian input was given;
! optionally evaluate the energy split into long- and short-range parts;
! optionally relieve side-chain overlaps; then minimize either in Cartesian
! (minim_dc) or internal variables (minimize), and report energies, return
! codes and RMSD statistics.  Several control branches (e.g. the if/else
! around split_ene and dccart) are elided from this extraction.
221 subroutine exec_eeval_or_minim
222 use MPI_data !include 'COMMON.SETUP'
223 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
224 use io_units !include 'COMMON.IOUNITS'
226 ! use energy !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
227 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
228 ! use REMD !include 'COMMON.REMD'
229 ! use MD !include 'COMMON.MD'
232 use MD_data, only: iset
234 use geometry, only:chainbuild
236 use compare, only:alloc_compare_arrays,hairpin,secondary2,rms_nac_nnc
237 use minimm, only:minimize,minim_dc,sc_move
241 ! implicit real*8 (a-h,o-z)
242 ! include 'DIMENSIONS'
247 !el common /srutu/ icall
! energy_ holds the total energy decomposition (component 0 = total).
248 real(kind=8) :: energy_(0:n_ene)
249 real(kind=8) :: energy_long(0:n_ene),energy_short(0:n_ene)
250 real(kind=8) :: varia(6*nres) !(maxvar) (maxvar=6*maxres)
251 real(kind=8) :: time00, evals, etota, etot, time_ene, time1
252 integer :: nharp,nft_sc,iretcode
256 integer,dimension(4,nres) :: iharp !(4,nres/3)(4,maxres/3)
258 real(kind=8) :: rms,frac,frac_nn,co
! Ensure a valid parameter-set index even when none was read.
261 if (iset.eq.0) iset=1
262 call alloc_compare_arrays
! No PDB and no Cartesian input: geometry must be rebuilt from internal
! coordinates.
263 if ((indpdb.eq.0).and.(.not.read_cart)) then
265 write(iout,*) 'Warning: Calling chainbuild'
271 ! write(iout,*)"in exec_eeval or minimim",split_ene
273 ! write(iout,*)"cccccc",j,(c(i,j),i=1,3)
274 ! write(iout,*)"dcccccc",j,(dc(i,j),i=1,3)
277 ! write(iout,*)"in exec_eeval or minimim"
279 print *,"Processor",myrank," after chainbuild"
! Split-energy path: evaluate long- and short-range terms separately and
! print their sum term by term.
282 call etotal_long(energy_long)
283 write (iout,*) "Printing long range energy"
284 call enerprint(energy_long)
286 call etotal_short(energy_short)
287 write (iout,*) "Printing short range energy"
288 call enerprint(energy_short)
290 energy_(i)=energy_long(i)+energy_short(i)
291 write (iout,*) i,energy_long(i),energy_short(i),energy_(i)
293 write (iout,*) "Printing long+short range energy"
294 call enerprint(energy_)
! Wall-clock timing of the energy evaluation (MPI_Wtime deltas).
299 time_ene=MPI_Wtime()-time00
301 write (iout,*) "Time for energy evaluation",time_ene
302 print *,"after etotal"
305 call enerprint(energy_)
! Structural analysis of the current conformation: hairpin detection and
! secondary-structure assignment.
307 call hairpin(.true.,nharp,iharp)
308 call secondary2(.true.)
! Optional relief of side-chain overlaps before minimization.
312 print *,"overlap",searchsc,overlapsc
314 print *, 'Calling OVERLAP_SC'
315 call overlap_sc(fail)
! sc_move perturbs side chains of residues 2..nres-1 (up to 10 moves,
! energy cutoff 1d10); nft_sc returns the number of function evaluations.
319 call sc_move(2,nres-1,10,1d10,nft_sc,etot)
320 print *,'SC_move',nft_sc,etot
321 write(iout,*) 'SC_move',nft_sc,etot
! Cartesian-space minimization branch.
325 print *, 'Calling MINIM_DC'
329 ! call check_ecartint !el
330 call minim_dc(etot,iretcode,nfun)
331 ! call check_ecartint !el
! Internal-variable minimization branch: convert geometry to the variable
! vector first.
333 if (indpdb.ne.0) then
335 write(iout,*) 'Warning: Calling chainbuild'
338 call geom_to_var(nvar,varia)
339 print *,'Calling MINIMIZE.'
344 ! call exec_checkgrad !el
345 call minimize(etot,varia,iretcode,nfun)
347 ! call exec_checkgrad !el
349 ! print *,'SUMSL return code is',iretcode,' eval ',nfun
! Return-code report differs by minimizer build (LBFGS vs SUMSL) -- the
! selecting branch/preprocessor lines are elided here.
351 print *,'LBFGS return code is',status,' eval ',nfun
353 print *,'SUMSL return code is',iretcode,' eval ',nfun
! Evaluations per second of wall time.
357 evals=nfun/(MPI_WTIME()-time1)
359 print *,'# eval/s',evals
360 print *,'refstr=',refstr
361 ! call hairpin(.true.,nharp,iharp)
362 ! call secondary2(.true.)
365 call enerprint(energy_)
! Final output: brief conformation record, optional comparison with the
! reference structure (refstr), and minimizer statistics.
368 call briefout(0,etot)
369 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
370 write (iout,'(a,i3)') 'SUMSL return code:',iretcode
371 write (iout,'(a,i20)') '# of energy evaluations:',nfun+1
372 write (iout,'(a,f16.3)')'# of energy evaluations/sec:',evals
374 print *,'refstr=',refstr,frac,frac_nn,co
375 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
376 print *,"after rms_nac_ncc"
377 call briefout(0,etot)
! Optional structure dumps in PDB and/or MOL2 format.
379 if (outpdb) call pdbout(etot,titel(:32),ipdb)
380 if (outmol2) call mol2out(etot,titel(:32))
381 write(iout,*) "after exec_eeval_or_minim"
383 end subroutine exec_eeval_or_minim
384 !-----------------------------------------------------------------------------
! Regularization of a PDB structure (modecalc 1): fit the model chain
! (residues nnt..nct) to the reference coordinates cref, subtract energy
! component 14 from the total, and write the result (PDB/MOL2 output and
! RMSD statistics are optional).  The energy evaluation preceding the
! component subtraction is in lines elided from this extraction.
385 subroutine exec_regularize
386 ! use MPI_data !include 'COMMON.SETUP'
387 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
388 use io_units !include 'COMMON.IOUNITS'
390 use energy_data !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
391 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
392 ! use REMD !include 'COMMON.REMD'
393 ! use MD !include 'COMMON.MD'
398 ! implicit real*8 (a-h,o-z)
399 ! include 'DIMENSIONS'
403 real(kind=8) :: energy_(0:n_ene)
405 real(kind=8) :: rms,frac,frac_nn,co
408 call alloc_compare_arrays
! Regularize nct-nnt+1 residues against the reference chain cref starting
! at residue nnt.
412 call regularize(nct-nnt+1,etot,rms,cref(1,nnt,1),iretcode)
! Remove energy component 14 from the reported total -- the meaning of
! component 14 is defined elsewhere (energy print order); TODO confirm.
414 energy_(0)=energy_(0)-energy_(14)
416 call enerprint(energy_)
418 call briefout(0,etot)
419 if (outpdb) call pdbout(etot,titel(:32),ipdb)
420 if (outmol2) call mol2out(etot,titel(:32))
421 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
! Return-code report differs by minimizer build (LBFGS vs SUMSL); the
! selecting branch lines are elided here.
423 write (iout,'(a,a9)') 'LBFGS return code:',status
425 write (iout,'(a,i3)') 'SUMSL return code:',iretcode
428 end subroutine exec_regularize
429 !-----------------------------------------------------------------------------
! Threading of a sequence on PDB structures (modecalc 2).  Only the
! comparison-array allocation is visible; the threading call itself is in
! lines elided from this extraction.
430 subroutine exec_thread
431 ! use MPI_data !include 'COMMON.SETUP'
434 ! include 'DIMENSIONS'
438 call alloc_compare_arrays
441 end subroutine exec_thread
442 !-----------------------------------------------------------------------------
! NOTE(review): tail of subroutine exec_MC (Monte Carlo driver for
! modecalc 3/6) -- the SUBROUTINE statement and the bodies of the modecalc
! branches are elided in this extraction.  Visible lines allocate the MCM
! arrays and branch on modecalc.eq.3 (plain MC-with-minimization vs the
! entropic-sampling variant, presumably -- TODO confirm against full source).
444 ! use MPI_data !include 'COMMON.SETUP'
445 use control_data !include 'COMMON.CONTROL'
450 ! implicit real*8 (a-h,o-z)
451 ! include 'DIMENSIONS'
452 character(len=10) :: nodeinfo
453 real(kind=8) :: varia(6*nres) !(maxvar) (maxvar=6*maxres)
458 call alloc_MCM_arrays
462 if (modecalc.eq.3) then
468 if (modecalc.eq.3) then
478 end subroutine exec_MC
479 !-----------------------------------------------------------------------------
! Energy evaluation / minimization of many conformations (modecalc 4).
! Conformations are read from unit intin either as Cartesian coordinates
! (read_x + int_from_cart1) or as internal angles (read_angles +
! geom_to_var); results go to unit istat.  With multiple MPI nodes the
! routine runs a master/worker task farm: the master sends (ind, varia,
! ene0) task triples to workers with mpi_send and collects results with
! mpi_recv; a serial path handles the single-node case.  The if/else
! scaffolding separating these paths is partly elided in this extraction.
480 subroutine exec_mult_eeval_or_minim
481 use MPI_data !include 'COMMON.SETUP'
482 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
483 use io_units !include 'COMMON.IOUNITS'
485 use energy_data !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
486 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
487 ! use REMD !include 'COMMON.REMD'
488 ! use MD !include 'COMMON.MD'
490 use geometry, only:chainbuild,geom_to_var,int_from_cart1,var_to_geom
491 use energy, only:etotal,enerprint
492 use compare, only:rms_nac_nnc
493 use minimm, only:minimize!,minim_mcmf
494 ! implicit real*8 (a-h,o-z)
495 ! include 'DIMENSIONS'
497 use minimm, only:minim_mcmf
500 integer :: ierror,ierr
! MPI status array for mpi_recv; the source rank is read from it below.
502 real(kind=8),dimension(mpi_status_size) :: muster
506 real(kind=8) :: varia(6*nres) !(maxvar) (maxvar=6*maxres)
! ind carries per-task bookkeeping (conformation number etc.) between
! master and workers; exact slot meanings are defined in elided lines.
507 integer,dimension(6) :: ind
508 real(kind=8) :: energy_(0:n_ene)
510 real(kind=8) :: etot,ene0
511 integer :: mm,imm,nft,n,iconf,nmin,i,iretcode,it,&
516 real(kind=8) :: rms,frac,frac_nn,co,time,ene
! --- Serial path: open the input file and write the istat header
! (energy-component names, plus RMSD columns when a reference structure
! is available).
526 open(intin,file=intinname,status='old')
527 write (istat,'(a5,20a12)')"# ",&
528 (wname(print_order(i)),i=1,nprint_ene)
530 write (istat,'(a5,20a12)')"# ",&
531 (ename(print_order(i)),i=1,nprint_ene),&
532 "ETOT total","RMSD","nat.contact","nnt.contact"
534 write (istat,'(a5,20a12)')"# ",&
535 (ename(print_order(i)),i=1,nprint_ene),"ETOT total"
! Read one conformation: Cartesian branch (read_x, then broadcast order 6
! so FG slaves also convert to internal coordinates) ...
541 read (intin,'(e15.10,e15.5)',end=1100,err=1100) time,ene
542 call read_x(intin,*11)
544 ! Broadcast the order to compute internal coordinates to the slaves.
546 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
548 call int_from_cart1(.false.)
! ... or internal-angle branch.
550 read (intin,'(i5)',end=1100,err=1100) iconf
551 call read_angles(intin,*11)
552 call geom_to_var(nvar,varia)
553 write(iout,*) 'Warning: Calling chainbuild1'
556 write (iout,'(a,i7)') 'Conformation #',iconf
558 call briefout(iconf,energy_(0))
559 call enerprint(energy_)
562 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
563 write (istat,'(i5,20(f12.3))') iconf,&
564 (energy_(print_order(i)),i=1,nprint_ene),etot,&
568 write (istat,'(i5,16(f12.3))') iconf,&
569 (energy_(print_order(i)),i=1,nprint_ene),etot
! --- Parallel path, phase 1: seed every worker (mm counts workers with
! outstanding tasks) with one conformation each.
585 if (mm.lt.nodes) then
587 read (intin,'(e15.10,e15.5)',end=11,err=11) time,ene
588 call read_x(intin,*11)
590 ! Broadcast the order to compute internal coordinates to the slaves.
592 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
594 call int_from_cart1(.false.)
596 read (intin,'(i5)',end=11,err=11) iconf
597 call read_angles(intin,*11)
598 call geom_to_var(nvar,varia)
599 write(iout,*) 'Warning: Calling chainbuild2'
602 write (iout,'(a,i7)') 'Conformation #',iconf
! Dispatch a task: bookkeeping (idint tag), variables and starting energy
! (idreal tag) to worker mm.
612 call mpi_send(ind,6,mpi_integer,mm,idint,CG_COMM,&
614 call mpi_send(varia,nvar,mpi_double_precision,mm,&
616 call mpi_send(ene0,1,mpi_double_precision,mm,&
618 ! print *,'task ',n,' sent to worker ',mm,nvar
! Collect one finished result from any worker; the sender's rank is taken
! from the status array so the next task can be routed back to it.
620 call mpi_recv(ind,6,mpi_integer,mpi_any_source,idint,&
622 man=muster(mpi_source)
623 ! print *,'receiving result from worker ',man,' (',iii1,iii,')'
624 call mpi_recv(varia,nvar,mpi_double_precision,&
625 man,idreal,CG_COMM,muster,ierr)
626 call mpi_recv(ene,1,&
627 mpi_double_precision,man,idreal,&
629 call mpi_recv(ene0,1,&
630 mpi_double_precision,man,idreal,&
632 ! print *,'result received from worker ',man,' sending now'
! Rebuild geometry from the returned variables and report/record it.
634 call var_to_geom(nvar,varia)
635 write(iout,*) 'Warning: Calling chainbuild3'
641 write (iout,'(a,2i7)') 'Conformation #',iconf,ind(5)
644 call enerprint(energy_)
645 call briefout(it,etot)
646 ! if (minim) call briefout(it,etot)
648 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
649 write (istat,'(i5,19(f12.3))') iconf,&
650 (energy_(print_order(i)),i=1,nprint_ene),etot,&
653 write (istat,'(i5,15(f12.3))') iconf,&
654 (energy_(print_order(i)),i=1,nprint_ene),etot
! Phase 2: keep feeding the freed worker while input remains.
659 read (intin,'(e15.10,e15.5)',end=1101,err=1101) time,ene
660 call read_x(intin,*11)
662 ! Broadcast the order to compute internal coordinates to the slaves.
664 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
666 call int_from_cart1(.false.)
668 read (intin,'(i5)',end=1101,err=1101) iconf
669 call read_angles(intin,*11)
670 call geom_to_var(nvar,varia)
671 write(iout,*) 'Warning: Calling chainbuild4'
682 call mpi_send(ind,6,mpi_integer,man,idint,CG_COMM,&
684 call mpi_send(varia,nvar,mpi_double_precision,man,&
686 call mpi_send(ene0,1,mpi_double_precision,man,&
! Accumulate the total function-evaluation count reported in ind(4).
688 nf_mcmf=nf_mcmf+ind(4)
! Phase 3: input exhausted -- drain the remaining outstanding results.
694 call mpi_recv(ind,6,mpi_integer,mpi_any_source,idint,&
696 man=muster(mpi_source)
697 call mpi_recv(varia,nvar,mpi_double_precision,&
698 man,idreal,CG_COMM,muster,ierr)
699 call mpi_recv(ene,1,&
700 mpi_double_precision,man,idreal,&
702 call mpi_recv(ene0,1,&
703 mpi_double_precision,man,idreal,&
706 call var_to_geom(nvar,varia)
707 write(iout,*) 'Warning: Calling chainbuild5'
713 write (iout,'(a,2i7)') 'Conformation #',iconf,ind(5)
716 call enerprint(energy_)
717 call briefout(it,etot)
719 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
720 write (istat,'(i5,19(f12.3))') iconf,&
721 (energy_(print_order(i)),i=1,nprint_ene),etot,&
724 write (istat,'(i5,15(f12.3))') iconf,&
725 (energy_(print_order(i)),i=1,nprint_ene),etot
! Tell every worker to stop (termination message with the idint tag).
737 call mpi_send(ind,6,mpi_integer,i,idint,CG_COMM,&
! --- Single-node serial path: same header, read, minimize and record loop
! executed entirely on this rank.
742 open(intin,file=intinname,status='old')
743 write (istat,'(a5,20a12)')"# ",&
744 (wname(print_order(i)),i=1,nprint_ene)
745 write (istat,'("# ",20(1pe12.4))') &
746 (weights(print_order(i)),i=1,nprint_ene)
748 write (istat,'(a5,20a12)')"# ",&
749 (ename(print_order(i)),i=1,nprint_ene),&
750 "ETOT total","RMSD","nat.contact","nnt.contact"
752 write (istat,'(a5,14a12)')"# ",&
753 (ename(print_order(i)),i=1,nprint_ene),"ETOT total"
757 read (intin,'(e15.10,e15.5)',end=11,err=11) time,ene
758 call read_x(intin,*11)
760 ! Broadcast the order to compute internal coordinates to the slaves.
762 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
764 call int_from_cart1(.false.)
766 read (intin,'(i5)',end=11,err=11) iconf
767 call read_angles(intin,*11)
768 call geom_to_var(nvar,varia)
769 write(iout,*) 'Warning: Calling chainbuild5'
772 write (iout,'(a,i7)') 'Conformation #',iconf
773 if (minim) call minimize(etot,varia,iretcode,nfun)
777 call enerprint(energy_)
778 if (minim) call briefout(it,etot)
780 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
781 write (istat,'(i5,18(f12.3))') iconf,&
782 (energy_(print_order(i)),i=1,nprint_ene),&
783 etot,rms,frac,frac_nn,co
786 write (istat,'(i5,14(f12.3))') iconf,&
787 (energy_(print_order(i)),i=1,nprint_ene),etot
793 end subroutine exec_mult_eeval_or_minim
794 !-----------------------------------------------------------------------------
! Numerical check of the energy gradient (modecalc 5).  Builds the chain if
! needed, evaluates the total energy, then dispatches on icheckgrad to the
! appropriate checker (Cartesian-internal, Cartesian, or a third variant
! whose target label 30 is elided here).  The commented-out blocks show how
! to randomly perturb bond lengths / dc vectors before checking.
795 subroutine exec_checkgrad
796 ! use MPI_data !include 'COMMON.SETUP'
797 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
798 use io_units !include 'COMMON.IOUNITS'
799 !el use energy_data, only:icall !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
800 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
801 ! use REMD !include 'COMMON.REMD'
802 use MD_data !include 'COMMON.MD'
803 use io_base, only:intout
804 use io_config, only:read_fragments
809 ! implicit real*8 (a-h,o-z)
810 ! include 'DIMENSIONS'
815 !el common /srutu/ icall
816 real(kind=8) :: energy_(0:max_ene)
820 ! vbld(i)=vbld(i)+ran_number(-0.1d0,0.1d0)
821 ! if (itype(i).ne.10)
822 ! & vbld(i+nres)=vbld(i+nres)+ran_number(-0.001d0,0.001d0)
824 if (indpdb.eq.0) then
825 write(iout,*) 'Warning: Calling chainbuild'
830 ! dc(j,i)=dc(j,i)+ran_number(-0.2d0,0.2d0)
834 ! if (itype(i).ne.10) then
836 ! dc(j,i+nres)=dc(j,i+nres)+ran_number(-0.2d0,0.2d0)
841 ! dc(j,0)=ran_number(-0.2d0,0.2d0)
853 write (iout,*) "before etotal"
854 call etotal(energy_(0))
856 call enerprint(energy_(0))
857 write (iout,*) "Uconst",Uconst," Uconst_back",uconst_back
858 print *,'icheckgrad=',icheckgrad
! NOTE(review): computed GOTO is obsolescent in modern Fortran; a select
! case would be the idiomatic replacement (not changed here because the
! surrounding lines, including label 30, are elided from this extraction).
859 goto (10,20,30) icheckgrad
860 10 call check_ecartint
863 20 call check_cartgrad
868 end subroutine exec_checkgrad
869 !-----------------------------------------------------------------------------
! NOTE(review): tail of subroutine exec_map (energy-map job, modecalc 7) --
! the SUBROUTINE statement and the map call are elided in this extraction;
! only the array allocation is visible.
873 use io_config, only:map_read
876 call alloc_map_arrays
880 end subroutine exec_map
881 !-----------------------------------------------------------------------------
! NOTE(review): tail of subroutine exec_CSA (Conformational Space Annealing,
! modecalc 8) -- the SUBROUTINE statement and the branch selecting between
! the parallel run and the error message are elided in this extraction.
884 use io_units !include 'COMMON.IOUNITS'
890 ! include 'DIMENSIONS'
891 ! Conformational Space Annealling programmed by Jooyoung Lee.
892 ! This method works only with parallel machines!
894 call alloc_CSA_arrays
897 write (iout,*) "CSA works on parallel machines only"
900 end subroutine exec_CSA
901 !-----------------------------------------------------------------------------
! Soft regularization of a PDB structure (modecalc 11): rebuild the chain,
! evaluate/print the energy, determine secondary structure (recognized from
! geometry unless supplied in the PDB via lsecondary), and write the final
! record with optional RMSD statistics.  The energy calls between the
! visible lines are elided in this extraction.
902 subroutine exec_softreg
903 use io_units !include 'COMMON.IOUNITS'
904 use control_data !include 'COMMON.CONTROL'
906 use io_base, only:intout,briefout
907 use geometry, only:chainbuild
911 ! include 'DIMENSIONS'
912 real(kind=8) :: energy_(0:n_ene)
914 real(kind=8) :: rms,frac,frac_nn,co,etot
917 call alloc_compare_arrays
918 write(iout,*) 'Warning: Calling chainbuild'
921 call enerprint(energy_)
! Use supplied secondary structure when present; otherwise recognize it.
922 if (.not.lsecondary) then
923 write(iout,*) 'Calling secondary structure recognition'
924 call secondary2(debug)
926 write(iout,*) 'Using secondary structure supplied in pdb'
933 call enerprint(energy_)
935 call briefout(0,etot)
936 call secondary2(.true.)
937 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
939 end subroutine exec_softreg
940 !-----------------------------------------------------------------------------
942 !-----------------------------------------------------------------------------
! Work loop for fine-grain MPI slave ranks ("ergastulum" = slave workhouse
! in Latin).  Each iteration receives an integer order code broadcast by
! the master (king) over FG_COMM and performs the corresponding service;
! a negative order terminates the loop.  Visible order codes:
!   0 -> (body elided; presumably full energy -- TODO confirm)
!   1 -> (body elided; sum_gradient per the retained comment)
!   2 -> etotal_short      3 -> etotal_long
!   4 -> ginv_mult         5 -> per-slave MD setup (dimensions, Ginv scatter)
!   6 -> int_from_cart1    7 -> chainbuild_cart
!   8 -> (elided)          9 -> fricmat_mult      10 -> (elided)
944 subroutine ergastulum
946 ! implicit real*8 (a-h,o-z)
947 ! include 'DIMENSIONS'
950 use MDyn, only:setup_fricmat
952 use REMD, only:fricmat_mult,ginv_mult
957 ! include 'COMMON.SETUP'
958 ! include 'COMMON.DERIV'
959 ! include 'COMMON.VAR'
960 ! include 'COMMON.IOUNITS'
961 ! include 'COMMON.FFIELD'
962 ! include 'COMMON.INTERACT'
963 ! include 'COMMON.MD'
964 ! include 'COMMON.TIME1'
965 real(kind=8),dimension(6*nres) :: z,d_a_tmp !(maxres6) maxres6=6*maxres
966 real(kind=8) :: edum(0:n_ene),time_order(0:10)
967 !el real(kind=8),dimension(2*nres,2*nres) :: Gcopy !(maxres2,maxres2) maxres2=2*maxres
968 !el common /przechowalnia/ Gcopy
972 real(kind=8) :: time00
973 integer :: iorder,i,j,nres2,ierr,ierror
! Lazy allocation of the matrices and per-task count/offset arrays used by
! the Ginv scatter and friction-matrix services below.
976 if(.not.allocated(Gcopy)) allocate(Gcopy(nres2,nres2))
978 if(.not.allocated(Ginv)) allocate(Ginv(nres2,nres2)) !(maxres2,maxres2)
981 if(.not.allocated(ng_start)) allocate(ng_start(0:nfgtasks-1))
982 if(.not.allocated(ng_counts)) allocate(ng_counts(0:nfgtasks-1))
983 if(.not.allocated(nginv_counts)) allocate(nginv_counts(0:nfgtasks-1)) !(0:MaxProcs-1)
984 if(.not.allocated(nginv_start)) allocate(nginv_start(0:nfgtasks)) !(0:MaxProcs)
986 if(.not.allocated(fricmat)) allocate(fricmat(nres2,nres2)) !maxres2=2*maxres
988 ! Workers wait for variables and NF, and NFL from the boss
990 do while (iorder.ge.0)
991 ! write (*,*) 'Processor',fg_rank,' CG group',kolor,
992 ! & ' receives order from Master'
! Blocking broadcast of the next order; time spent waiting is accumulated
! into time_Bcast and (after warm-up, icall > 4) per-order into time_order.
994 call MPI_Bcast(iorder,1,MPI_INTEGER,king,FG_COMM,IERR)
995 time_Bcast=time_Bcast+MPI_Wtime()-time00
996 if (icall.gt.4 .and. iorder.ge.0) &
997 time_order(iorder)=time_order(iorder)+MPI_Wtime()-time00
1000 ! & 'Processor',fg_rank,' completed receive MPI_BCAST order',iorder
1001 if (iorder.eq.0) then
1004 ! write (2,*) "After etotal"
1005 ! write (2,*) "dimen",dimen," dimen3",dimen3
1007 else if (iorder.eq.2) then
1009 call etotal_short(edum)
1010 ! write (2,*) "After etotal_short"
1011 ! write (2,*) "dimen",dimen," dimen3",dimen3
1013 else if (iorder.eq.3) then
1015 call etotal_long(edum)
1016 ! write (2,*) "After etotal_long"
1017 ! write (2,*) "dimen",dimen," dimen3",dimen3
1019 else if (iorder.eq.1) then
1021 ! write (2,*) "After sum_gradient"
1022 ! write (2,*) "dimen",dimen," dimen3",dimen3
1025 else if (iorder.eq.4) then
1026 call ginv_mult(z,d_a_tmp)
1027 else if (iorder.eq.5) then
1028 ! Setup MD things for a slave
! Problem dimensions: dimen counts backbone plus side-chain sites,
! dimen1 the backbone/side-chain interaction blocks.
1029 dimen=(nct-nnt+1)+nside
1030 dimen1=(nct-nnt)+(nct-nnt+1)
1032 ! write (2,*) "dimen",dimen," dimen3",dimen3
! Partition the G-matrix rows over FG tasks; Allgather exchanges each
! task's start offset and row count (scaled by 3 for xyz components).
1034 call int_bounds(dimen,igmult_start,igmult_end)
1035 igmult_start=igmult_start-1
1036 call MPI_Allgather(3*igmult_start,1,MPI_INTEGER,&
1037 ng_start(0),1,MPI_INTEGER,FG_COMM,IERROR)
1038 my_ng_count=igmult_end-igmult_start
1039 call MPI_Allgather(3*my_ng_count,1,MPI_INTEGER,ng_counts(0),1,&
1040 MPI_INTEGER,FG_COMM,IERROR)
1041 write (2,*) "ng_start",(ng_start(i),i=0,nfgtasks-1) !sp
1042 ! write (2,*) "ng_counts",(ng_counts(i),i=0,nfgtasks-1)
1043 myginv_ng_count=nres2*my_ng_count !el maxres2
1044 ! write (2,*) "igmult_start",igmult_start," igmult_end",
1045 ! & igmult_end," my_ng_count",my_ng_count
1047 call MPI_Allgather(nres2*igmult_start,1,MPI_INTEGER,& !el maxres2
1048 nginv_start(0),1,MPI_INTEGER,FG_COMM,IERROR)
1049 call MPI_Allgather(myginv_ng_count,1,MPI_INTEGER,&
1050 nginv_counts(0),1,MPI_INTEGER,FG_COMM,IERROR)
1051 ! write (2,*) "nginv_start",(nginv_start(i),i=0,nfgtasks-1)
1052 ! write (2,*) "nginv_counts",(nginv_counts(i),i=0,nfgtasks-1)
1054 ! call MPI_Barrier(FG_COMM,IERROR)
! Scatter this task's slice of the inverse mass matrix Ginv from the
! master into Gcopy, then transpose it back into the local Ginv.
1056 call MPI_Scatterv(ginv(1,1),nginv_counts(0),&
1057 nginv_start(0),MPI_DOUBLE_PRECISION,gcopy(1,1),&
1058 myginv_ng_count,MPI_DOUBLE_PRECISION,king,FG_COMM,IERR)
1060 time_scatter_ginv=time_scatter_ginv+MPI_Wtime()-time00
1063 do j=1,2*my_ng_count
1064 ginv(j,i)=gcopy(i,j)
1067 ! write (2,*) "dimen",dimen," dimen3",dimen3
1068 ! write (2,*) "End MD setup"
1070 ! write (iout,*) "My chunk of ginv_block"
1071 ! call MATOUT2(my_ng_count,dimen3,maxres2,maxers2,ginv_block)
1073 else if (iorder.eq.6) then
1074 call int_from_cart1(.false.)
1075 else if (iorder.eq.7) then
1076 call chainbuild_cart
1077 else if (iorder.eq.8) then
1080 else if (iorder.eq.9) then
1081 call fricmat_mult(z,d_a_tmp)
1083 else if (iorder.eq.10) then
! Negative order received: report per-order wait times and leave the loop.
1087 write (*,*) 'Processor',fg_rank,' CG group',kolor,&
1088 ' absolute rank',myrank,' leves ERGASTULUM.'
1089 write(*,*)'Processor',fg_rank,' wait times for respective orders',&
1090 (' order[',i,']',time_order(i),i=0,10)
1092 end subroutine ergastulum